From 4c5501d802cee24a9eec749c3acb7a5016434a53 Mon Sep 17 00:00:00 2001 From: FlorisCalkoen Date: Thu, 8 Feb 2024 07:15:46 +0000 Subject: [PATCH] deploy: de5a66bd86c78c812826802efbf5c36b85f01cec --- .../notebooks/1_coastal_classification.ipynb | 495 ++++--- .../notebooks/5_cross_shore_transport.ipynb | 245 ++++ notebooks/1_coastal_classification.html | 1166 ++++------------- notebooks/5_cross_shore_transport.html | 1109 ++++++++++++++++ objects.inv | Bin 433 -> 480 bytes .../1_coastal_classification.err.log | 158 +-- searchindex.js | 2 +- 7 files changed, 1860 insertions(+), 1315 deletions(-) create mode 100644 _sources/notebooks/5_cross_shore_transport.ipynb create mode 100644 notebooks/5_cross_shore_transport.html diff --git a/_sources/notebooks/1_coastal_classification.ipynb b/_sources/notebooks/1_coastal_classification.ipynb index 507f0ba..eb96dda 100644 --- a/_sources/notebooks/1_coastal_classification.ipynb +++ b/_sources/notebooks/1_coastal_classification.ipynb @@ -1,15 +1,5 @@ { "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "2b073407-5430-4071-b9f3-92aeabc484e9", - "metadata": {}, - "outputs": [], - "source": [ - "# %run initialize/1_coastal_classification.ipynb" - ] - }, { "cell_type": "markdown", "id": "c5ee3b6d-9a37-4e12-be67-40d150c235c2", @@ -49,38 +39,13 @@ "import geopandas as gpd\n", "import numpy as np\n", "import panel as pn\n", + "import pandas as pd\n", "import holoviews as hv\n", "import hvplot.pandas # noqa: API import\n", "from bokeh.models import PanTool, WheelZoomTool\n", + "import pooch\n", "\n", - "print(\"Packages succesfully loaded\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d5e278d9", - "metadata": {}, - "outputs": [], - "source": [ - "# THESE PACKAGES ARE NO LONGER USED\n", - "\n", - "# import os\n", - "\n", - "# import hvplot.xarray # noqa: API import\n", - "# import ipyleaflet\n", - "# import pandas as pd\n", - "# import matplotlib.pyplot as plt\n", - "# import matplotlib.animation as animation\n", - "# from geoviews import tile_sources as gvts\n", - "# from ipyleaflet import Map, Marker, ScaleControl, basemaps\n", - "# import ipywidgets as widgets\n", - "# from ipywidgets import HTML, interact, fixed, interact_manual, interactive\n", - "# import IPython\n", - "# from IPython.display import HTML, display\n", - "# from random import shuffle, uniform\n", - "# from coastal_dynamics.geometries import geo_bbox\n", - "# from PIL import Image" + "import logging" ] }, { @@ -116,7 +81,7 @@ "QUESTION_DIR = pathlib.Path.cwd().parent / \"notebooks\" / \"questions\"\n", "question_fp = QUESTION_DIR / \"1_coastal_classification.json\"\n", "\n", - "questions = load_questions(question_fp)\n" + "questions = load_questions(question_fp)" ] }, { @@ -175,124 +140,55 @@ "# To save memory we drop most of the columns. Also we drop the polar latitudes that cannot be displayed in the web mercator projection.\n", "df = df[[\"mag\", \"depth\", \"latitude\", \"longitude\", \"place\", \"type\"]][\n", " df[\"northing\"] < WEB_MERCATOR_LIMITS[1]\n", - "]\n", - "# df.head()" + "]" ] }, { "cell_type": "markdown", - "id": "2fd26216-4946-495d-8b81-66c90dcf08f2", + "id": "50f41e8e", "metadata": {}, "source": [ - "### Visualization of the earthquake data\n", - "\n", - "To explore the data we use visualization tools from the [Holoviz project](https://holoviz.org/) that makes high-level tools to simplify visualization in Python. 
In the next cell we enable the interactive mode on the data dataframe, create widgets to explore the data and filter the dataframe accordingly. To explore the eartquake data we create an overlay of the eartquakes on a tileset of ESRI Imagery. Please note that the code in the next cell will only do the computations and store the result in an object called `panel`. To actually see the results you have to run one more cell; the one that calls this object panel. \n", - "\n" + "We also get the bathymetric contours for a water depth of -200m, which we will use as a proxy to find the boundary of the continental shelf." ] }, { "cell_type": "code", "execution_count": null, - "id": "7edf870f-1082-4b16-922f-e46fb7297d24", - "metadata": { - "tags": [] - }, + "id": "e4684097", + "metadata": {}, "outputs": [], "source": [ - "# title_bar = pn.pane.Markdown(\n", - "# \"##Exercise 1: Plate tectonics & first-order coastal features\",\n", - "# styles={\"color\": \"black\"},\n", - "# width=800,\n", - "# align='center'\n", - "# # margin=(10, 5, 10, 15),\n", - "# )\n", - "\n", - "# define widgets that can be used to index the data\n", - "magnitude_slider = pn.widgets.RangeSlider(\n", - " name=\"Earthquake magnitude [Richter]\", start=0.1, end=10\n", - ")\n", - "depth_slider = pn.widgets.RangeSlider(name=\"Earthquake depth [km]\", start=0.1, end=650)\n", - "date_slider = pn.widgets.DateRangeSlider(\n", - " name=\"Date\", start=df.index[0], end=df.index[-1]\n", + "isobath_fp = pooch.retrieve(\n", + " \"https://coclico.blob.core.windows.net/coastal-dynamics/1_coastal_classification/isobaths200.gpkg\",\n", + " known_hash=\"2b25adb7d3923e3969f6fb0c1f53e5e5850acd3bf6a3468722f0a1434a395ae5\",\n", ")\n", - "column_types = pn.widgets.Select(options=[\"mag\", \"depth\"])\n", - "\n", "\n", - "@pn.depends(\n", - " magnitude_slider.param.value_start,\n", - " magnitude_slider.param.value_end,\n", - " depth_slider.param.value_start,\n", - " depth_slider.param.value_end,\n", - " date_slider.param.value_start,\n", - " date_slider.param.value_end,\n", - " column_types.param.value,\n", - ")\n", - "def plot_earthquake_panel(\n", - " magnitude_start,\n", - " magnitude_end,\n", - " depth_start,\n", - " depth_end,\n", - " date_start,\n", - " date_end,\n", - " column_type,\n", - "):\n", - " panel = df[\n", - " (df.mag > magnitude_start)\n", - " & (df.mag < magnitude_end)\n", - " & (df.depth > depth_start)\n", - " & (df.depth < depth_end)\n", - " & (df.index >= date_start)\n", - " & (df.index <= date_end)\n", - " ]\n", - " # inverted fire colormap from colorcet\n", - " cmap = cc.CET_L4[::-1]\n", - " colorbar_labels = {\"mag\": \"Magnitude [Richter]\", \"depth\": \"Earthquake depth [km]\"}\n", - "\n", - " p = panel.hvplot.points(\n", - " x=\"longitude\",\n", - " y=\"latitude\",\n", - " geo=True,\n", - " color=column_type,\n", - " global_extent=True,\n", - " tiles=\"ESRI\",\n", - " # frame_width=900,\n", - " ylabel=\"Latitude [deg]\",\n", - " xlabel=\"Longitude [deg]\",\n", - " cmap=cmap,\n", - " tools=[\"tap\"],\n", - " hover_cols=[\"place\", \"time\"],\n", - " logz=True,\n", - " clim=(1, None),\n", - " clabel=colorbar_labels[column_type],\n", - " )\n", - "\n", - " p.opts(width=1000, height=500, tools=[\"wheel_zoom\"])\n", - "\n", - " return p\n", - "\n", - "\n", - "earthquake_panel = pn.Column(\n", - " # pn.Row(title_bar, align='center'),\n", - " pn.Row(column_types, align=\"center\"),\n", - " pn.Row(magnitude_slider, align=\"center\"),\n", - " pn.Row(depth_slider, align=\"center\"),\n", - " pn.Row(date_slider, align=\"center\"),\n", - 
" pn.Row(plot_earthquake_panel, align=\"center\"),\n", - ")" + "data200 = gpd.read_file(isobath_fp)" ] }, { "cell_type": "code", "execution_count": null, - "id": "ab73e969-86c6-4145-b408-93189c5df66f", + "id": "197d1c76", "metadata": {}, "outputs": [], "source": [ - "# change value of 'plot_where' to:\n", - "# 'inline' if you would like the plot to show in the notebook\n", - "# 'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)\n", + "# To make plotting a bit faster, we only use isobaths longer than a certain length. We therefore first\n", + "# have to project to a new coordinate system to get the lengths of the isobaths, and use that as a\n", + "# mask to select the isobaths we want to keep\n", "\n", - "plot_where = \"pop-out\"" + "data200['length'] = data200.to_crs('EPSG:3857').geometry.length\n", + "data200 = data200[data200['length']>5*10**6]" + ] + }, + { + "cell_type": "markdown", + "id": "2fd26216-4946-495d-8b81-66c90dcf08f2", + "metadata": {}, + "source": [ + "### Visualization of the earthquake data\n", + "\n", + "To explore the data we use visualization tools from the [Holoviz project](https://holoviz.org/) that makes high-level tools to simplify visualization in Python. Run the cell eblow " ] }, { @@ -300,7 +196,7 @@ "id": "6677f6fa-4fae-4b9a-8a7a-457cda4e306e", "metadata": {}, "source": [ - "**If the visualization is too slow, please follow the instructions in loading the data for taking a sample.**\n", + "**If the visualization is too slow, please adjust the sliders such that less data is shown.**\n", "\n", "After running the cell below you will have a panel with several widgets to index the eartquake data; by magnitude, depth and time, while the colors on the map show either the magintude or the depth of the earthquakes. " ] @@ -314,16 +210,136 @@ }, "outputs": [], "source": [ - "def show_earthquake(plot_where):\n", + "# The function below is used to generate the plot used in this exercise. You are not required to understand it,\n", + "# but feel free to have a look at it if you're interested in how these kind of panels can be made!\n", + "\n", + "def show_earthquakes(plot_where):\n", + " \"\"\"\n", + " change value of 'plot_where' to:\n", + " 'inline' if you would like the plot to show in the notebook\n", + " 'pop-out' if you would like the plot to show in a new tab (i.e. 
seperate window)\n", + " \"\"\"\n", + "\n", + " #Below we build the earthquake widget\n", + " title_bar = pn.pane.Markdown(\n", + " \"## Part 1: Tectonic classification\",\n", + " styles={\"color\": \"black\"},\n", + " width=400,\n", + " # margin=(10, 5, 10, 15),\n", + " )\n", + "\n", + " # define widgets that can be used to index the data\n", + " magnitude_slider = pn.widgets.RangeSlider(\n", + " name=\"Earthquake magnitude [Richter]\", start=0.1, end=10\n", + " )\n", + " depth_slider = pn.widgets.RangeSlider(name=\"Earthquake depth [km]\", start=0.1, end=650)\n", + " date_slider = pn.widgets.DateRangeSlider(\n", + " name=\"Date\", start=df.index[0], end=df.index[-1]\n", + " )\n", + " column_types = pn.widgets.Select(name='Show earthquake magnitude or depth?', options=[\"mag\", \"depth\"])\n", + "\n", + " plot_isobaths = pn.widgets.Select(name='Plot isobaths -200m?', options=['no', 'yes'])\n", + "\n", + "\n", + " @pn.depends(\n", + " magnitude_slider.param.value_start,\n", + " magnitude_slider.param.value_end,\n", + " depth_slider.param.value_start,\n", + " depth_slider.param.value_end,\n", + " date_slider.param.value_start,\n", + " date_slider.param.value_end,\n", + " column_types.param.value,\n", + " plot_isobaths.param.value,\n", + " )\n", + " def plot_earthquake_panel(\n", + " magnitude_start,\n", + " magnitude_end,\n", + " depth_start,\n", + " depth_end,\n", + " date_start,\n", + " date_end,\n", + " column_type,\n", + " plot_isobath\n", + " ):\n", + " \n", + " panel = df[\n", + " (df.mag > magnitude_start)\n", + " & (df.mag < magnitude_end)\n", + " & (df.depth > depth_start)\n", + " & (df.depth < depth_end)\n", + " & (df.index >= pd.Timestamp(date_start))\n", + " & (df.index <= pd.Timestamp(date_end))\n", + " ]\n", + " # inverted fire colormap from colorcet\n", + " cmap = cc.CET_L4[::-1]\n", + " colorbar_labels = {\"mag\": \"Magnitude [Richter]\", \"depth\": \"Earthquake depth [km]\"}\n", + "\n", + " p = panel.hvplot.points(\n", + " x=\"longitude\",\n", + " y=\"latitude\",\n", + " geo=True,\n", + " color=column_type,\n", + " global_extent=True,\n", + " tiles=\"ESRI\",\n", + " # frame_width=900,\n", + " ylabel=\"Latitude [deg]\",\n", + " xlabel=\"Longitude [deg]\",\n", + " cmap=cmap,\n", + " tools=[\"tap\"],\n", + " hover_cols=[\"place\", \"time\"],\n", + " logz=True,\n", + " clim=(1, None),\n", + " clabel=colorbar_labels[column_type],\n", + " )\n", + "\n", + " if plot_isobath=='yes':\n", + " baths = data200.hvplot(geo=True, line_width=2, line_color='white', line_dash='dashed')\n", + " p = p*baths\n", + "\n", + " p.opts(width=1000, height=500, tools=[\"wheel_zoom\"])\n", + "\n", + " return p\n", + "\n", + "\n", + " earthquake_panel = pn.Column(pn.Row(\n", + " pn.Column(\n", + " pn.Row(title_bar, align='start'),\n", + " pn.Row(plot_isobaths, align=\"start\"),\n", + " pn.Row(column_types, align=\"start\"),\n", + " ),\n", + " pn.Column(\n", + " pn.Row(magnitude_slider, align=\"start\"),\n", + " pn.Row(depth_slider, align=\"start\"),\n", + " pn.Row(date_slider, align=\"start\"),\n", + " ), pn.Column()\n", + " ),\n", + " pn.Row(plot_earthquake_panel, align=\"center\"),\n", + " )\n", + "\n", + "\n", " if plot_where == \"inline\":\n", " return earthquake_panel\n", " elif plot_where == \"pop-out\":\n", " earthquake_panel.show()\n", " else:\n", - " print(\"please use either inline or pop-out for the plot_where variable\")\n", + " print(\"please use either inline or pop-out for the plot_where variable\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"79a4214f-f3da-4eed-8f9b-0fe6021ce475", + "metadata": {}, + "outputs": [], + "source": [ + "# Run this cell to produce the plot. Note that adjusting the sliders and dropdown menus \n", + "# might produce a warning. This warning is harmless and can be ignored, which is why the logging line is used\n", "\n", + "# Plotting the isobathymetry at -200m greatly increases the time it takes to generate plots, so only use this functionality when needed\n", "\n", - "show_earthquake(plot_where);" + "logging.getLogger().setLevel(logging.ERROR)\n", + "\n", + "show_earthquakes(plot_where='pop-out')" ] }, { @@ -347,7 +363,6 @@ "q3 = cd.QuestionFactory(questions[\"Q1-3\"]).serve()\n", "q4 = cd.QuestionFactory(questions[\"Q1-4\"]).serve()\n", "\n", - "\n", "pn.Column(q1, q2, q3, q4)" ] }, @@ -461,27 +476,6 @@ "pn.Column(q17, q18, q19)" ] }, - { - "cell_type": "markdown", - "id": "a12d60a7-59dc-4a2f-b7a0-9cd1a3ed5bfa", - "metadata": {}, - "source": [ - "####################################################\n", - "\n", - "TO BE REMOVED\n", - "\n", - "1) How do the earthquake magnitude and earthquake depth relate to the coasts that we see? (Hint: See Figure 2.3 in the textbook and consider how deep under the ground the plates are moving. Extra hint: How do earthquake magnitude and depth differ for convergent and divergent plate boundaries?)\n", - "\n", - "2) Earthquake data support one of the most fundamental processes in the geology: plate tectonics. Although plate tectonics is a relatively slow process that acts on the [geological time scale](https://cdn.britannica.com/67/73167-050-B9A74092/chart.jpg), it has had an enormous impact on the formation of coastlines and determines the broadest features of the coast. What are two important inherited aspects of this process? (Hint: see Figure 2.10 and Sec. 2.3.3 in the textbook.) \n", - "\n", - "3) In 1971 Inman, D. L. & Nordstrom, C. E. used plate tectonics to classify the coast. Explain the classification that they introduced. What are the three different classes that they distinguish? How do they match with the earthquake data as you can explore in the panel? \n", - "\n", - "4) Can you identify or predict areas around the world where you will find the coasts that are distinguished by Inman, D. L. & Nordstrom, C. E.? For instance, what kind of coasts do you have in Chili? And how are they different to the east coast of the USA? And what is characteristic about the East China sea? \n", - "\n", - "5) Inman, D. L. & Nordstrom (1971) further distinguish Afro-trailing-edge coasts and Amero-trailing-edge coasts based on differences in sediment supplies. What is the main cause of these differences in sediment supply? And how do you expect the differences in sediment input to show in the coastal geomorphology?\n", - "####################################################" - ] - }, { "cell_type": "markdown", "id": "bdf2a34d-2edd-43f4-a45b-793a3779ec74", @@ -516,16 +510,33 @@ "coastal_systems = gpd.read_file(coastal_systems_fp)" ] }, + { + "cell_type": "markdown", + "id": "1dff7b26-b616-4add-91c5-1f1bfba2f6ce", + "metadata": {}, + "source": [ + "After running the cell below you will have a panel showing sattelite images of different coastal systems." 
+ ] + }, { "cell_type": "code", "execution_count": null, - "id": "e6732048-50e9-4f28-a553-f4221cccdb95", - "metadata": { - "tags": [] - }, + "id": "0f18a18f-d7dc-4824-bf92-60a827370c38", + "metadata": {}, "outputs": [], "source": [ - "title_bar = pn.Row(\n", + "# The function below is used to generate the plot used in this exercise. You are not required to understand it,\n", + "# but feel free to have a look at it if you're interested in how these kind of panels can be made!\n", + "\n", + "def show_coastal_systems(plot_where):\n", + " \"\"\"\n", + " change value of 'plot_where' to:\n", + " 'inline' if you would like the plot to show in the notebook\n", + " 'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)\n", + " \"\"\"\n", + " \n", + " # Below we build the widget\n", + " title_bar = pn.Row(\n", " pn.pane.Markdown(\n", " \"## Exercise 2: Coastal system characterization\",\n", " styles={\"color\": \"black\"},\n", @@ -534,86 +545,75 @@ " margin=(10, 5, 10, 15),\n", " ),\n", " pn.Spacer(),\n", - ")\n", + " )\n", "\n", - "options = coastal_systems.name.to_list()\n", - "coastal_systems_slider = pn.widgets.Select(\n", - " name=\"Coastal system\", options=options, value=np.random.choice(options)\n", - ")\n", + " options = coastal_systems.name.to_list()\n", + " coastal_systems_slider = pn.widgets.Select(\n", + " name=\"Coastal system\", options=options, value=np.random.choice(options)\n", + " )\n", "\n", + " plot_isobaths = pn.widgets.Select(name='Plot isobaths -200m?', options=['no', 'yes'])\n", "\n", - "@pn.depends(coastal_systems_slider.param.value)\n", - "def plot_coastal_system(name):\n", - " system = coastal_systems.loc[coastal_systems[\"name\"] == name].copy()\n", - " west, south, east, north = system[\n", - " [\"west\", \"south\", \"east\", \"north\"]\n", - " ].values.flatten()\n", - "\n", - " p = system.hvplot.points(\n", - " x=\"lon\",\n", - " y=\"lat\",\n", - " geo=True,\n", - " color=\"red\",\n", - " alpha=0,\n", - " xlim=(west, east),\n", - " ylim=(south, north),\n", - " tiles=\"ESRI\",\n", - " frame_width=1100,\n", - " ylabel=\"Latitude [deg]\",\n", - " xlabel=\"Longitude [deg]\",\n", - " )\n", + " @pn.depends(coastal_systems_slider.param.value, plot_isobaths.param.value)\n", + " def plot_coastal_system(name, plot_isobath):\n", + " system = coastal_systems.loc[coastal_systems[\"name\"] == name].copy()\n", + " west, south, east, north = system[\n", + " [\"west\", \"south\", \"east\", \"north\"]\n", + " ].values.flatten()\n", + "\n", + " p = system.hvplot.points(\n", + " x=\"lon\",\n", + " y=\"lat\",\n", + " geo=True,\n", + " color=\"red\",\n", + " alpha=0,\n", + " xlim=(west, east),\n", + " ylim=(south, north),\n", + " tiles=\"ESRI\",\n", + " frame_width=1100,\n", + " ylabel=\"Latitude [deg]\",\n", + " xlabel=\"Longitude [deg]\",\n", + " )\n", "\n", - " p.opts(frame_width=1000, frame_height=500, tools=[\"pan\", \"wheel_zoom\"])\n", + " if plot_isobath=='yes':\n", + " baths = data200.hvplot(geo=True, line_width=2, line_color='white', line_dash='dashed')\n", + " p = p*baths\n", "\n", - " return p\n", + " p.opts(frame_width=1000, frame_height=500, tools=[\"pan\", \"wheel_zoom\"])\n", "\n", + " return p\n", "\n", - "app = pn.Column(\n", + "\n", + " app = pn.Column(\n", " # title_bar,\n", + " pn.Row(plot_isobaths, align=\"center\"),\n", " pn.Row(coastal_systems_slider, align=\"center\"),\n", " pn.Row(plot_coastal_system, align=\"center\"),\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "1dff7b26-b616-4add-91c5-1f1bfba2f6ce", - "metadata": {}, - 
"source": [ - "After running the cell below you will have a panel showing sattelite images of different coastal systems." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8465c61c-5ced-4498-8e7f-7b8f28a73be8", - "metadata": {}, - "outputs": [], - "source": [ - "# change value of 'plot_where' to:\n", - "# 'inline' if you would like the plot to show in the notebook\n", - "# 'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)\n", + " )\n", "\n", - "plot_where = \"pop-out\"" + " if plot_where == \"inline\":\n", + " return app\n", + " elif plot_where == \"pop-out\":\n", + " app.show()\n", + " else:\n", + " print(\"please use either 'inline' or 'pop-out' for the 'plot_where' variable\")" ] }, { "cell_type": "code", "execution_count": null, - "id": "0f18a18f-d7dc-4824-bf92-60a827370c38", + "id": "f2496568-7300-47a4-b7a8-f74314637a0e", "metadata": {}, "outputs": [], "source": [ - "def show_coastal_system(plot_where):\n", - " if plot_where == \"inline\":\n", - " return app\n", - " elif plot_where == \"pop-out\":\n", - " app.show()\n", - " else:\n", - " print(\"please use either 'inline' or 'pop-out' for the 'plot_where' variable\")\n", + "# Run this cell to produce the plot. Note that adjusting the sliders and dropdown menus \n", + "# might produce a warning. This warning is harmless and can be ignored, which is why the logging line is used.\n", "\n", + "# Plotting the isobathymetry at -200m greatly increases the time it takes to generate plots, so only use this functionality when needed\n", "\n", - "show_coastal_system(plot_where);" + "logging.getLogger().setLevel(logging.ERROR)\n", + "\n", + "show_coastal_systems(plot_where=\"pop-out\")" ] }, { @@ -701,53 +701,6 @@ "source": [ "This is the end of the notebook for week 1. We discussed tectonic and process-based classification of coastal systems. For further reading, see chapter 2 of the book. We hope you had fun!" ] - }, - { - "cell_type": "markdown", - "id": "a8171c69-ab76-47d0-8c9e-5aea130944d5", - "metadata": {}, - "source": [ - "############################################################################\n", - "\n", - "TO BE REMOVED\n", - "\n", - "### Explore the coastal systems\n", - "\n", - "While sampling over a range of coastal systems, try to answer the following questions. \n", - "\n", - "1. Find and compare a heavily engineered river-dominated delta and a more natural river-dominated delta\n", - "2. Compare the scale of the biggest and smallest tidal basin in the dataset\n", - "3. Find the estuarine and deltaic systems with a spit\n", - "4. Compare and contrast wave-dominated deltas with high and low sediment supply. How can you tell?\n", - "5. Find a tidal estuary with large fine (muddy) sediment supply, then find one with a large coarse (sandy) sediment supply. How can you tell the difference?\n", - "6. The eastern and western tips of the Dutch and German Wadden Islands are very different beach ridge environments. How might differences in sediment supply explain this? Where is the sediment coming from?\n", - "7. The Dune du Pilat in France is one of the world's largest coastal sand dunes (it is also one of the coolest and you should definitely visit if you get the chance!). Why is it located on the east side of Arcachon Inlet and not the west?\n", - "8. Look at the northern Jiangsu coast in China. What might explain the limited sediment supply in this location?\n", - "9. Find an estuary or tidal bay with extensive intertidal flats. Do you see salt marshes or mangrove forests nearby? 
Why or why not?\n", - "10. Find an inlet with jetties. How might this affect the way it evolves?\n", - "11. Find a delta/estuary/inlet whose shape is constrained by the presence of rocky coastal features. \n", - "12. The Albufeira Lagoon in Portugal opens and closes seasonally. In the image shown, is it open or closed? When and how might it open or close? \n", - "13. Find examples of heavily urbanized estuaries. How might these human interventions influence the natural processes there?\n", - "14. Based on these satellite images, which is the most beautiful site? Taking a moment to appreciate the beauty of these natural systems is an important part of your job as coastal engineers.\n", - "\n", - "############################################################################" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "eec0ac86-cc6f-4ddb-b1c0-545ff52f08c5", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ceb791dc-c60d-40b3-bdd7-5da642fe2bed", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -766,7 +719,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/_sources/notebooks/5_cross_shore_transport.ipynb b/_sources/notebooks/5_cross_shore_transport.ipynb new file mode 100644 index 0000000..baa15bc --- /dev/null +++ b/_sources/notebooks/5_cross_shore_transport.ipynb @@ -0,0 +1,245 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "5e606d71-8522-424c-afed-03c2c0446e4c", + "metadata": {}, + "outputs": [], + "source": [ + "# %run initialize/5_cross_shore_transport.ipynb" + ] + }, + { + "cell_type": "markdown", + "id": "968bd444-a7c0-44f6-8648-60b84b483abf", + "metadata": {}, + "source": [ + "## First import some necessary packages" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7d6f5660-3939-4d95-b5bd-79b770efb391", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import pathlib\n", + "\n", + "import colorcet as cc\n", + "import dask.dataframe as dd\n", + "import geopandas as gpd\n", + "import holoviews as hv\n", + "import hvplot.pandas # noqa: API import\n", + "import hvplot.xarray # noqa: API import\n", + "import ipyleaflet\n", + "import numpy as np\n", + "import pandas as pd\n", + "import panel as pn\n", + "\n", + "from random import shuffle, uniform\n", + "\n", + "import IPython\n", + "import ipywidgets as widgets\n", + "import matplotlib.animation as animation\n", + "import matplotlib.pyplot as plt\n", + "from IPython.display import HTML, display\n", + "from ipywidgets import interact\n", + "from matplotlib.animation import FuncAnimation\n", + "from matplotlib.ticker import MultipleLocator" + ] + }, + { + "cell_type": "markdown", + "id": "0ab15b81-15ce-4f27-8f82-04338a14cfb9", + "metadata": {}, + "source": [ + "# (Cross-shore) sediment transport\n", + "Welcome to the notebook of week 5! This notebook covers chapter 6 and 7 from the book. The layout of this notebook is as follows:\n", + "* Modes of sediment transport (Chapter 6)\n", + "* Net transport for secondary flow (Chapter 6)\n", + "* Equillibrium states (Chapter 7)\n", + "* Beach states (Chapter 7)\n", + "\n", + "Each section contains questions for you to practice with (cross-shore) sediment transport. Let's get started!" 
+ ] + }, + { + "cell_type": "markdown", + "id": "af8b08d4-5d25-43a0-95ad-d7d7f3a2a43e", + "metadata": {}, + "source": [ + "## Modes of sediment transport" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "88beaa75-eb1c-4434-bb04-f11ed63c8ec0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b2dd9273-ff8e-40ef-bcc9-2f4de48f2759", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7371762f-02f9-4a04-99fd-587ba0240f17", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "1eafeac4-9de6-4f01-a6fc-d51021245129", + "metadata": {}, + "source": [ + "## Net transport for secondary flow" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0a4e5d83-e396-47df-a2ed-6a3877ae20ee", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "171ee9a0-1670-4030-8aeb-ffc7a8ed8458", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b6287bf6-fb8a-4811-add6-c8324e6904a2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "3bd0725c-6883-4bfc-aa8b-e16be9cf3a26", + "metadata": {}, + "source": [ + "## Equillirium states\n", + "This section considers equillibrium shoreline profiles (section 7.2 from the book). As you know, the shoreface is highly dynamic and can be subject to change on both short and long time scales. We often choose to model this as a dynamic equillibrium profile. Different formulations exist for the equillibrium profile, and most are derived at least somewhat empirically." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1a1d96d8-7e73-45f5-82e9-f9aa3ccd2b4c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ee9ada3-b3e6-45f9-b0a9-316d626ae603", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1927ab2b-5fe3-4a90-a2ff-9566d2d21d9c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "515286ae-54d6-4888-aed2-8fa7ff0caa88", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba2dd74d-4080-4fd9-8397-7e68153356d7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "736718bc-f613-417e-8fca-fdcb96e72160", + "metadata": {}, + "source": [ + "## Beach states\n", + "This section considers beach states (section 7.3 from the book). We have previously discussed equillibrium profiles of the upper shoreface. However, a beach is hardly in equillibrium. In this section, the focus is on the high variability of the upper shoreface, for instance following an episodic event." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f1adb27b-122a-4b20-b633-926e19cc18d6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5a84e0bf-ba26-4697-b697-a17fe8fa2cae", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa870bbd-72f6-4fe8-8e01-3547d26d0c00", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8dd00ae2-4995-4e7e-9603-f5b3f0e8674a", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/1_coastal_classification.html b/notebooks/1_coastal_classification.html index 70b839e..84ece2a 100644 --- a/notebooks/1_coastal_classification.html +++ b/notebooks/1_coastal_classification.html @@ -391,7 +391,6 @@

Contents

  • The coastal systems data
  • Questions 2a: theory
  • Questions 2b: application
  • - Explore the coastal systems

@@ -405,622 +404,30 @@

    Contents

    -
    -
    -
    # %run initialize/1_coastal_classification.ipynb
    -
    -
    -
    -
    -
    -

    Characterization of Coastal Systems#

    -

    Welcome to the first notebook exercise of Coastal Systems (TU Delft, MSc Coastal Engineering)! This is the first year that we will experiment with notebooks in this course. With these notebooks we hope to provide you with interactive course material that helps you better understand the processes and concepts that we teach in this course. Please let us know how you find the notebooks - we appreciate your feedback!

    -

    Chapter 2 of the Coastal Dynamics Open Textbook describes the large geographical variation of coasts across the world. It explains how the coasts that we have today are shaped by both present-day processes and processes that took place millions of years ago. It distinguishes between three different orders of features, which are associated with different orders of time. In this notebook we will look at coastal systems at these different orders of scale.

    -
    -

    Import libraries that we use for our analysis#

    -

    In the two cells below we import the libraries that we need for the analysis. We also configure a few paths for loading the data and source code. For example, in the cell below we add the src directory to the system path, which allows us to import generic functions from ../../src/coastpy.

    -
    -
    -
    import pathlib
    -import sys
    -
    -import colorcet as cc
    -import dask.dataframe as dd
    -import geopandas as gpd
    -import numpy as np
    -import panel as pn
    -import holoviews as hv
    -import hvplot.pandas  # noqa: API import
    -from bokeh.models import PanTool, WheelZoomTool
    -
    -print("Packages succesfully loaded")
    -
    -
    -
    -
    -
    Packages succesfully loaded
    -
    -
    -
    -
    -
    -
    -
    # THESE PACKAGES ARE NO LONGER USED
    -
    -# import os
    -
    -# import hvplot.xarray  # noqa: API import
    -# import ipyleaflet
    -# import pandas as pd
    -# import matplotlib.pyplot as plt
    -# import matplotlib.animation as animation
    -# from geoviews import tile_sources as gvts
    -# from ipyleaflet import Map, Marker, ScaleControl, basemaps
    -# import ipywidgets as widgets
    -# from ipywidgets import HTML, interact, fixed, interact_manual, interactive
    -# import IPython
    -# from IPython.display import HTML, display
    -# from random import shuffle, uniform
    -# from coastal_dynamics.geometries import geo_bbox
    -# from PIL import Image
    -
    -
    -
    -
    -
    -
    -
    # Set project directory
    -cwd = pathlib.Path().resolve()
    -proj_dir = cwd.parent  # this is the root of the CoastalCodeBook
    -sys.path.append(str(proj_dir / "src"))
    -
    -DATA_DIR = proj_dir / "data"
    -coastal_systems_fp = DATA_DIR / "01_coastal_systems.gpkg"
    -
    -
    -
    -
    +
    +

    Characterization of Coastal Systems#

    +

    Welcome to the first notebook exercise of Coastal Systems (TU Delft, MSc Coastal Engineering)! This is the first year that we will experiment with notebooks in this course. With these notebooks we hope to provide you with interactive course material that helps you better understand the processes and concepts that we teach in this course. Please let us know how you find the notebooks - we appreciate your feedback!

    +

    Chapter 2 of the Coastal Dynamics Open Textbook describes the large geographical variation of coasts across the world. It explains how the coasts that we have today are shaped by both present-day processes and processes that took place millions of years ago. It distinguishes between three different orders of features, which are associated with different orders of time. In this notebook we will look at coastal systems at these different orders of scale.

    +
    +

    Import libraries that we use for our analysis#

    +

    In the two cells below we import the libraries that we need for the analysis. We also configure a few paths for loading the data and source code. For example, in the cell below we add the src directory to the system path, which allows us to import generic functions from ../../src/coastpy.

    -
    # Set up the questions
    -import coastal_dynamics as cd
    -from coastal_dynamics.io import load_questions
    -
    -# Activate the Panel widget to make the visualizations.
    -pn.extension()
    +
    import pathlib
    +import sys
     
    -QUESTION_DIR = pathlib.Path.cwd().parent / "notebooks" / "questions"
    -question_fp = QUESTION_DIR / "1_coastal_classification.json"
    +import colorcet as cc
    +import dask.dataframe as dd
    +import geopandas as gpd
    +import numpy as np
    +import panel as pn
    +import pandas as pd
    +import holoviews as hv
    +import hvplot.pandas  # noqa: API import
    +from bokeh.models import PanTool, WheelZoomTool
    +import pooch
     
    -questions = load_questions(question_fp)
    +import logging
     
    @@ -1033,7 +440,7 @@

    Import libraries that we use for our analysis
    ---------------------------------------------------------------------------
    +ModuleNotFoundError                       Traceback (most recent call last)
    +Cell In[1], line 13
    +     11 import hvplot.pandas  # noqa: API import
    +     12 from bokeh.models import PanTool, WheelZoomTool
    +---> 13 import pooch
    +     15 import logging
    +
    +ModuleNotFoundError: No module named 'pooch'
    +
    +
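    The traceback above means that pooch is missing from the environment in which the book was built, so the cells after the imports cannot execute in the rendered page. A minimal, hypothetical workaround (assuming a standard pip or conda build environment; the repository's actual environment files are not part of this diff) is to install pooch there, or to guard the import so the rest of the notebook can still run:

    # Hypothetical guard, not part of the committed notebook: degrade gracefully when
    # pooch is unavailable (install it with `python -m pip install pooch` or
    # `conda install -c conda-forge pooch` in the build environment).
    try:
        import pooch
    except ModuleNotFoundError:
        pooch = None
        print("pooch is not installed; the isobath download cell will be skipped")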

    +
    +
    +
    +
    +
    # Set project directory
    +cwd = pathlib.Path().resolve()
    +proj_dir = cwd.parent  # this is the root of the CoastalCodeBook
    +sys.path.append(str(proj_dir / "src"))
    +
    +DATA_DIR = proj_dir / "data"
    +coastal_systems_fp = DATA_DIR / "01_coastal_systems.gpkg"
    +
    +
    +
    +
    +
    +
    +
    # Set up the questions
    +import coastal_dynamics as cd
    +from coastal_dynamics.io import load_questions
    +
    +# Activate the Panel widget to make the visualizations.
    +pn.extension()
    +
    +QUESTION_DIR = pathlib.Path.cwd().parent / "notebooks" / "questions"
    +question_fp = QUESTION_DIR / "1_coastal_classification.json"
    +
    +questions = load_questions(question_fp)
    +
    +
    +
    @@ -1585,244 +1033,170 @@

    Load the earthquake data

    df = df[["mag", "depth", "latitude", "longitude", "place", "type"]][
        df["northing"] < WEB_MERCATOR_LIMITS[1]
    ]
    -# df.head()
    -
    -
    -
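    The WEB_MERCATOR_LIMITS filter above drops the polar latitudes because the Web Mercator projection used by the basemap tiles only reaches a northing of about ±20037508 m. A small, hypothetical check (not part of the notebook) shows that this limit corresponds to roughly ±85.05 degrees latitude:

    # Hypothetical check: convert the Web Mercator northing limit back to latitude.
    # At the limit, northing = pi * R_earth, so latitude = 2*atan(exp(pi)) - pi/2.
    import math

    max_lat = math.degrees(2 * math.atan(math.exp(math.pi)) - math.pi / 2)
    print(round(max_lat, 2))  # ~85.05; earthquakes poleward of this cannot be drawn on the map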
    -
    ---------------------------------------------------------------------------
    -FileNotFoundError                         Traceback (most recent call last)
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/backends.py:141, in CreationDispatch.register_inplace.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
    -    140 try:
    ---> 141     return func(*args, **kwargs)
    -    142 except Exception as e:
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/core.py:529, in read_parquet(path, columns, filters, categories, index, storage_options, engine, use_nullable_dtypes, dtype_backend, calculate_divisions, ignore_metadata_file, metadata_task_size, split_row_groups, blocksize, aggregate_files, parquet_file_extension, filesystem, **kwargs)
    -    527     blocksize = None
    ---> 529 read_metadata_result = engine.read_metadata(
    -    530     fs,
    -    531     paths,
    -    532     categories=categories,
    -    533     index=index,
    -    534     use_nullable_dtypes=use_nullable_dtypes,
    -    535     dtype_backend=dtype_backend,
    -    536     gather_statistics=calculate_divisions,
    -    537     filters=filters,
    -    538     split_row_groups=split_row_groups,
    -    539     blocksize=blocksize,
    -    540     aggregate_files=aggregate_files,
    -    541     ignore_metadata_file=ignore_metadata_file,
    -    542     metadata_task_size=metadata_task_size,
    -    543     parquet_file_extension=parquet_file_extension,
    -    544     dataset=dataset_options,
    -    545     read=read_options,
    -    546     **other_options,
    -    547 )
    -    549 # In the future, we may want to give the engine the
    -    550 # option to return a dedicated element for `common_kwargs`.
    -    551 # However, to avoid breaking the API, we just embed this
    -    552 # data in the first element of `parts` for now.
    -    553 # The logic below is inteded to handle backward and forward
    -    554 # compatibility with a user-defined engine.
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/arrow.py:536, in ArrowDatasetEngine.read_metadata(cls, fs, paths, categories, index, use_nullable_dtypes, dtype_backend, gather_statistics, filters, split_row_groups, blocksize, aggregate_files, ignore_metadata_file, metadata_task_size, parquet_file_extension, **kwargs)
    -    535 # Stage 1: Collect general dataset information
    ---> 536 dataset_info = cls._collect_dataset_info(
    -    537     paths,
    -    538     fs,
    -    539     categories,
    -    540     index,
    -    541     gather_statistics,
    -    542     filters,
    -    543     split_row_groups,
    -    544     blocksize,
    -    545     aggregate_files,
    -    546     ignore_metadata_file,
    -    547     metadata_task_size,
    -    548     parquet_file_extension,
    -    549     kwargs,
    -    550 )
    -    552 # Stage 2: Generate output `meta`
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/arrow.py:1051, in ArrowDatasetEngine._collect_dataset_info(cls, paths, fs, categories, index, gather_statistics, filters, split_row_groups, blocksize, aggregate_files, ignore_metadata_file, metadata_task_size, parquet_file_extension, kwargs)
    -   1050 if ds is None:
    --> 1051     ds = pa_ds.dataset(
    -   1052         paths,
    -   1053         filesystem=_wrapped_fs(fs),
    -   1054         **_processed_dataset_kwargs,
    -   1055     )
    -   1057 # Get file_frag sample and extract physical_schema
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:785, in dataset(source, schema, format, filesystem, partitioning, partition_base_dir, exclude_invalid_files, ignore_prefixes)
    -    784 if all(_is_path_like(elem) for elem in source):
    ---> 785     return _filesystem_dataset(source, **kwargs)
    -    786 elif all(isinstance(elem, Dataset) for elem in source):
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:463, in _filesystem_dataset(source, schema, filesystem, partitioning, format, partition_base_dir, exclude_invalid_files, selector_ignore_prefixes)
    -    462 if isinstance(source, (list, tuple)):
    ---> 463     fs, paths_or_selector = _ensure_multiple_sources(source, filesystem)
    -    464 else:
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:382, in _ensure_multiple_sources(paths, filesystem)
    -    381 elif file_type == FileType.NotFound:
    ---> 382     raise FileNotFoundError(info.path)
    -    383 elif file_type == FileType.Directory:
    -
    -FileNotFoundError: /home/runner/work/CoastalCodebook/CoastalCodebook/book/data/01_earthquakes_sample.parquet
    -
    -The above exception was the direct cause of the following exception:
    -
    -FileNotFoundError                         Traceback (most recent call last)
    -Cell In[6], line 7
    -      1 WEB_MERCATOR_LIMITS = (
    -      2     -20037508.342789244,
    -      3     20037508.342789244,
    -      4 )  # max polar latitudes that can be handled in World Mercator
    -      6 df = (
    -----> 7     dd.read_parquet(DATA_DIR / "01_earthquakes_sample.parquet")
    -      8     .sample(
    -      9         frac=0.1
    -     10     )  # uncomment this line if loading the data takes too long on your computer
    -     11     .set_index("time")
    -     12     .compute()
    -     13     .tz_localize(None)
    -     14     .sort_index()
    -     15 )
    -     18 # To save memory we drop most of the columns. Also we drop the polar latitudes that cannot be displayed in the web mercator projection.
    -     19 df = df[["mag", "depth", "latitude", "longitude", "place", "type"]][
    -     20     df["northing"] < WEB_MERCATOR_LIMITS[1]
    -     21 ]
    -
    -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/backends.py:143, in CreationDispatch.register_inplace.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
    -    141     return func(*args, **kwargs)
    -    142 except Exception as e:
    ---> 143     raise type(e)(
    -    144         f"An error occurred while calling the {funcname(func)} "
    -    145         f"method registered to the {self.backend} backend.\n"
    -    146         f"Original Message: {e}"
    -    147     ) from e
    -
    -FileNotFoundError: An error occurred while calling the read_parquet method registered to the pandas backend.
    -Original Message: /home/runner/work/CoastalCodebook/CoastalCodebook/book/data/01_earthquakes_sample.parquet
     
    -

    -
    -

    Visualization of the earthquake data#

    -

    To explore the data we use visualization tools from the Holoviz project that makes high-level tools to simplify visualization in Python. In the next cell we enable the interactive mode on the data dataframe, create widgets to explore the data and filter the dataframe accordingly. To explore the eartquake data we create an overlay of the eartquakes on a tileset of ESRI Imagery. Please note that the code in the next cell will only do the computations and store the result in an object called panel. To actually see the results you have to run one more cell; the one that calls this object panel.

    +

    We also get the bathymetric contours for a water depth of -200m, which we will use as a proxy to find the boundary of the continental shelf.

    -
    # title_bar =  pn.pane.Markdown(
    -#         "##Exercise 1: Plate tectonics & first-order coastal features",
    -#         styles={"color": "black"},
    -#         width=800,
    -#         align='center'
    -#         # margin=(10, 5, 10, 15),
    -#     )
    -
    -# define widgets that can be used to index the data
    -magnitude_slider = pn.widgets.RangeSlider(
    -    name="Earthquake magnitude [Richter]", start=0.1, end=10
    -)
    -depth_slider = pn.widgets.RangeSlider(name="Earthquake depth [km]", start=0.1, end=650)
    -date_slider = pn.widgets.DateRangeSlider(
    -    name="Date", start=df.index[0], end=df.index[-1]
    +
    isobath_fp = pooch.retrieve(
    +    "https://coclico.blob.core.windows.net/coastal-dynamics/1_coastal_classification/isobaths200.gpkg",
    +    known_hash="2b25adb7d3923e3969f6fb0c1f53e5e5850acd3bf6a3468722f0a1434a395ae5",
     )
    -column_types = pn.widgets.Select(options=["mag", "depth"])
     
    -
    -@pn.depends(
    -    magnitude_slider.param.value_start,
    -    magnitude_slider.param.value_end,
    -    depth_slider.param.value_start,
    -    depth_slider.param.value_end,
    -    date_slider.param.value_start,
    -    date_slider.param.value_end,
    -    column_types.param.value,
    -)
    -def plot_earthquake_panel(
    -    magnitude_start,
    -    magnitude_end,
    -    depth_start,
    -    depth_end,
    -    date_start,
    -    date_end,
    -    column_type,
    -):
    -    panel = df[
    -        (df.mag > magnitude_start)
    -        & (df.mag < magnitude_end)
    -        & (df.depth > depth_start)
    -        & (df.depth < depth_end)
    -        & (df.index >= date_start)
    -        & (df.index <= date_end)
    -    ]
    -    # inverted fire colormap from colorcet
    -    cmap = cc.CET_L4[::-1]
    -    colorbar_labels = {"mag": "Magnitude [Richter]", "depth": "Earthquake depth [km]"}
    -
    -    p = panel.hvplot.points(
    -        x="longitude",
    -        y="latitude",
    -        geo=True,
    -        color=column_type,
    -        global_extent=True,
    -        tiles="ESRI",
    -        # frame_width=900,
    -        ylabel="Latitude [deg]",
    -        xlabel="Longitude [deg]",
    -        cmap=cmap,
    -        tools=["tap"],
    -        hover_cols=["place", "time"],
    -        logz=True,
    -        clim=(1, None),
    -        clabel=colorbar_labels[column_type],
    -    )
    -
    -    p.opts(width=1000, height=500, tools=["wheel_zoom"])
    -
    -    return p
    -
    -
    -earthquake_panel = pn.Column(
    -    # pn.Row(title_bar, align='center'),
    -    pn.Row(column_types, align="center"),
    -    pn.Row(magnitude_slider, align="center"),
    -    pn.Row(depth_slider, align="center"),
    -    pn.Row(date_slider, align="center"),
    -    pn.Row(plot_earthquake_panel, align="center"),
    -)
    +data200 = gpd.read_file(isobath_fp)
     
    -
    # change value of 'plot_where' to:
    -# 'inline' if you would like the plot to show in the notebook
    -# 'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)
    +
    # To make plotting a bit faster, we only use isobaths longer than a certain length. We therefore first
    +# have to project to a new coordinate system to get the lengths of the isobaths, and use that as a
    +# mask to select the isobaths we want to keep
     
    -plot_where = "pop-out"
    +data200['length'] = data200.to_crs('EPSG:3857').geometry.length
    +data200 = data200[data200['length']>5*10**6]
     
    -

    If the visualization is too slow, please follow the instructions in loading the data for taking a sample.

    +
    +
    +

    Visualization of the earthquake data#

    +

    To explore the data we use visualization tools from the Holoviz project, which provides high-level tools that simplify visualization in Python. Run the cell below.
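    As a quick, hypothetical illustration of what Holoviz provides here (this snippet is not part of the notebook and its data are made up): importing hvplot.pandas registers an .hvplot accessor on pandas and geopandas objects, and that accessor is what the panel below uses to draw the earthquake points on a tiled map.

    # Hypothetical stand-alone example of the .hvplot accessor used throughout this notebook.
    import pandas as pd
    import hvplot.pandas  # noqa: API import, registers the .hvplot accessor

    demo = pd.DataFrame(
        {"longitude": [4.3, -70.6], "latitude": [52.0, -33.4], "mag": [3.0, 6.5]}
    )
    demo.hvplot.points(x="longitude", y="latitude", color="mag")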

    +

    If the visualization is too slow, please adjust the sliders such that less data is shown.

    After running the cell below you will have a panel with several widgets to index the earthquake data by magnitude, depth, and time, while the colors on the map show either the magnitude or the depth of the earthquakes.

    -
    def show_earthquake(plot_where):
    +
    # The function below is used to generate the plot used in this exercise. You are not required to understand it,
    +# but feel free to have a look at it if you're interested in how these kind of panels can be made!
    +
    +def show_earthquakes(plot_where):
    +    """
    +    change value of 'plot_where' to:
    +    'inline' if you would like the plot to show in the notebook
    +    'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)
    +    """
    +
    +    #Below we build the earthquake widget
    +    title_bar =  pn.pane.Markdown(
    +        "## Part 1: Tectonic classification",
    +        styles={"color": "black"},
    +        width=400,
    +        # margin=(10, 5, 10, 15),
    +    )
    +
    +    # define widgets that can be used to index the data
    +    magnitude_slider = pn.widgets.RangeSlider(
    +        name="Earthquake magnitude [Richter]", start=0.1, end=10
    +    )
    +    depth_slider = pn.widgets.RangeSlider(name="Earthquake depth [km]", start=0.1, end=650)
    +    date_slider = pn.widgets.DateRangeSlider(
    +        name="Date", start=df.index[0], end=df.index[-1]
    +    )
    +    column_types = pn.widgets.Select(name='Show earthquake magnitude or depth?', options=["mag", "depth"])
    +
    +    plot_isobaths = pn.widgets.Select(name='Plot isobaths -200m?', options=['no', 'yes'])
    +
    +
    +    @pn.depends(
    +        magnitude_slider.param.value_start,
    +        magnitude_slider.param.value_end,
    +        depth_slider.param.value_start,
    +        depth_slider.param.value_end,
    +        date_slider.param.value_start,
    +        date_slider.param.value_end,
    +        column_types.param.value,
    +        plot_isobaths.param.value,
    +    )
    +    def plot_earthquake_panel(
    +        magnitude_start,
    +        magnitude_end,
    +        depth_start,
    +        depth_end,
    +        date_start,
    +        date_end,
    +        column_type,
    +        plot_isobath
    +    ):
    +        
    +        panel = df[
    +            (df.mag > magnitude_start)
    +            & (df.mag < magnitude_end)
    +            & (df.depth > depth_start)
    +            & (df.depth < depth_end)
    +            & (df.index >= pd.Timestamp(date_start))
    +            & (df.index <= pd.Timestamp(date_end))
    +        ]
    +        # inverted fire colormap from colorcet
    +        cmap = cc.CET_L4[::-1]
    +        colorbar_labels = {"mag": "Magnitude [Richter]", "depth": "Earthquake depth [km]"}
    +
    +        p = panel.hvplot.points(
    +            x="longitude",
    +            y="latitude",
    +            geo=True,
    +            color=column_type,
    +            global_extent=True,
    +            tiles="ESRI",
    +            # frame_width=900,
    +            ylabel="Latitude [deg]",
    +            xlabel="Longitude [deg]",
    +            cmap=cmap,
    +            tools=["tap"],
    +            hover_cols=["place", "time"],
    +            logz=True,
    +            clim=(1, None),
    +            clabel=colorbar_labels[column_type],
    +        )
    +
    +        if plot_isobath=='yes':
    +            baths = data200.hvplot(geo=True, line_width=2, line_color='white', line_dash='dashed')
    +            p = p*baths
    +
    +        p.opts(width=1000, height=500, tools=["wheel_zoom"])
    +
    +        return p
    +
    +
    +    earthquake_panel = pn.Column(pn.Row(
    +        pn.Column(
    +        pn.Row(title_bar, align='start'),
    +        pn.Row(plot_isobaths, align="start"),
    +        pn.Row(column_types, align="start"),
    +        ),
    +        pn.Column(
    +        pn.Row(magnitude_slider, align="start"),
    +        pn.Row(depth_slider, align="start"),
    +        pn.Row(date_slider, align="start"),
    +        ), pn.Column()
    +        ),
    +        pn.Row(plot_earthquake_panel, align="center"),
    +    )
    +
    +
         if plot_where == "inline":
             return earthquake_panel
         elif plot_where == "pop-out":
             earthquake_panel.show()
         else:
             print("please use either inline or pop-out for the plot_where variable")
    +
    +
    +
    +
    +
    +
    +
    # Run this cell to produce the plot. Note that adjusting the sliders and dropdown menus 
    +# might produce a warning. This warning is harmless and can be ignored, which is why the logging line is used
     
    +# Plotting the isobathymetry at -200m greatly increases the time it takes to generate plots, so only use this functionality when needed
     
    -show_earthquake(plot_where);
    +logging.getLogger().setLevel(logging.ERROR)
    +
    +show_earthquakes(plot_where='pop-out')
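    If you are running the notebook yourself and prefer the panel inside the page rather than a separate browser tab, the function's docstring points to the inline option:

    show_earthquakes(plot_where='inline')  # hypothetical alternative call; renders the panel in the notebook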
     
    @@ -1838,7 +1212,6 @@

    Explore the earthquake data & questions

    q3 = cd.QuestionFactory(questions["Q1-3"]).serve()
    q4 = cd.QuestionFactory(questions["Q1-4"]).serve()
    -
    pn.Column(q1, q2, q3, q4)

    @@ -1904,16 +1277,6 @@

    Explore the earthquake data & questions -
  • How do the earthquake magnitude and earthquake depth relate to the coasts that we see? (Hint: See Figure 2.3 in the textbook and consider how deep under the ground the plates are moving. Extra hint: How do earthquake magnitude and depth differ for convergent and divergent plate boundaries?)

  • -
  • Earthquake data support one of the most fundamental processes in the geology: plate tectonics. Although plate tectonics is a relatively slow process that acts on the geological time scale, it has had an enormous impact on the formation of coastlines and determines the broadest features of the coast. What are two important inherited aspects of this process? (Hint: see Figure 2.10 and Sec. 2.3.3 in the textbook.)

  • -
  • In 1971 Inman, D. L. & Nordstrom, C. E. used plate tectonics to classify the coast. Explain the classification that they introduced. What are the three different classes that they distinguish? How do they match with the earthquake data as you can explore in the panel?

  • -
  • Can you identify or predict areas around the world where you will find the coasts that are distinguished by Inman, D. L. & Nordstrom, C. E.? For instance, what kind of coasts do you have in Chili? And how are they different to the east coast of the USA? And what is characteristic about the East China sea?

  • -
  • Inman, D. L. & Nordstrom (1971) further distinguish Afro-trailing-edge coasts and Amero-trailing-edge coasts based on differences in sediment supplies. What is the main cause of these differences in sediment supply? And how do you expect the differences in sediment input to show in the coastal geomorphology? -####################################################

  • -

    @@ -1930,9 +1293,21 @@

    The coastal systems data
    -
    title_bar = pn.Row(
    +
    # The function below is used to generate the plot used in this exercise. You are not required to understand it,
    +# but feel free to have a look at it if you're interested in how these kind of panels can be made!
    +
    +def show_coastal_systems(plot_where):
    +    """
    +    change value of 'plot_where' to:
    +    'inline' if you would like the plot to show in the notebook
    +    'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)
    +    """
    +    
    +    # Below we build the widget
    +    title_bar = pn.Row(
         pn.pane.Markdown(
             "## Exercise 2: Coastal system characterization",
             styles={"color": "black"},
@@ -1941,73 +1316,72 @@ The coastal systems data

        margin=(10, 5, 10, 15),
    ),
    pn.Spacer(),
-)
+    )

-options = coastal_systems.name.to_list()
-coastal_systems_slider = pn.widgets.Select(
-    name="Coastal system", options=options, value=np.random.choice(options)
-)
+    options = coastal_systems.name.to_list()
+    coastal_systems_slider = pn.widgets.Select(
+        name="Coastal system", options=options, value=np.random.choice(options)
+    )
+    plot_isobaths = pn.widgets.Select(name='Plot isobaths -200m?', options=['no', 'yes'])

-@pn.depends(coastal_systems_slider.param.value)
-def plot_coastal_system(name):
-    system = coastal_systems.loc[coastal_systems["name"] == name].copy()
-    west, south, east, north = system[
-        ["west", "south", "east", "north"]
-    ].values.flatten()
-
-    p = system.hvplot.points(
-        x="lon",
-        y="lat",
-        geo=True,
-        color="red",
-        alpha=0,
-        xlim=(west, east),
-        ylim=(south, north),
-        tiles="ESRI",
-        frame_width=1100,
-        ylabel="Latitude [deg]",
-        xlabel="Longitude [deg]",
-    )
+    @pn.depends(coastal_systems_slider.param.value, plot_isobaths.param.value)
+    def plot_coastal_system(name, plot_isobath):
+        system = coastal_systems.loc[coastal_systems["name"] == name].copy()
+        west, south, east, north = system[
+            ["west", "south", "east", "north"]
+        ].values.flatten()
+
+        p = system.hvplot.points(
+            x="lon",
+            y="lat",
+            geo=True,
+            color="red",
+            alpha=0,
+            xlim=(west, east),
+            ylim=(south, north),
+            tiles="ESRI",
+            frame_width=1100,
+            ylabel="Latitude [deg]",
+            xlabel="Longitude [deg]",
+        )

-    p.opts(frame_width=1000, frame_height=500, tools=["pan", "wheel_zoom"])
+        if plot_isobath=='yes':
+            baths = data200.hvplot(geo=True, line_width=2, line_color='white', line_dash='dashed')
+            p = p*baths

-    return p
+        p.opts(frame_width=1000, frame_height=500, tools=["pan", "wheel_zoom"])
+        return p

-app = pn.Column(
    # title_bar,
+    pn.Row(plot_isobaths, align="center"),
    pn.Row(coastal_systems_slider, align="center"),
    pn.Row(plot_coastal_system, align="center"),
-)
-
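
The added code overlays data200 on the satellite imagery when the isobath option is enabled, but data200 is not defined in this hunk. A minimal sketch of how it could be created, assuming it holds the -200 m isobath contours from the GeoPackage retrieved with pooch earlier in the notebook and that geopandas is used (the names isobath_fp and data200 follow that assumption):

import geopandas as gpd

# Assumed: isobath_fp is the local path returned by pooch.retrieve for the
# isobaths200.gpkg file; data200 is the GeoDataFrame used by plot_coastal_system.
data200 = gpd.read_file(isobath_fp)

# hvPlot/HoloViews overlays are composed with the * operator, which is why the
# function above combines the plots as p = p*baths when plot_isobath == 'yes'.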

    -
    -
    -
    -

After running the cell below, you will have a panel showing satellite images of different coastal systems.

    -
    -
    -
    # change value of 'plot_where' to:
    -# 'inline' if you would like the plot to show in the notebook
    -# 'pop-out' if you would like the plot to show in a new tab (i.e. seperate window)
    +    )
     
    -plot_where = "pop-out"
    -
    -
    -
    -
    -
    -
    -
    def show_coastal_system(plot_where):
         if plot_where == "inline":
             return app
         elif plot_where == "pop-out":
             app.show()
         else:
             print("please use either 'inline' or 'pop-out' for the 'plot_where' variable")
    +
    +
    +
    +
    +
    +
    +
    # Run this cell to produce the plot. Note that adjusting the sliders and dropdown menus 
    +# might produce a warning. This warning is harmless and can be ignored, which is why the logging line is used.
     
+# Plotting the -200 m isobath greatly increases the time it takes to generate the plot, so only use this option when needed.
     
    -show_coastal_system(plot_where);
    +logging.getLogger().setLevel(logging.ERROR)
    +
    +show_coastal_systems(plot_where="pop-out")
     
    @@ -2068,29 +1442,6 @@

    Questions 2b: application -

    Explore the coastal systems#

    -

    While sampling over a range of coastal systems, try to answer the following questions.

1. Find and compare a heavily engineered river-dominated delta and a more natural river-dominated delta
2. Compare the scale of the biggest and smallest tidal basin in the dataset
3. Find the estuarine and deltaic systems with a spit
4. Compare and contrast wave-dominated deltas with high and low sediment supply. How can you tell?
5. Find a tidal estuary with large fine (muddy) sediment supply, then find one with a large coarse (sandy) sediment supply. How can you tell the difference?
6. The eastern and western tips of the Dutch and German Wadden Islands are very different beach ridge environments. How might differences in sediment supply explain this? Where is the sediment coming from?
7. The Dune du Pilat in France is one of the world’s largest coastal sand dunes (it is also one of the coolest and you should definitely visit if you get the chance!). Why is it located on the east side of Arcachon Inlet and not the west?
8. Look at the northern Jiangsu coast in China. What might explain the limited sediment supply in this location?
9. Find an estuary or tidal bay with extensive intertidal flats. Do you see salt marshes or mangrove forests nearby? Why or why not?
10. Find an inlet with jetties. How might this affect the way it evolves?
11. Find a delta/estuary/inlet whose shape is constrained by the presence of rocky coastal features.
12. The Albufeira Lagoon in Portugal opens and closes seasonally. In the image shown, is it open or closed? When and how might it open or close?
13. Find examples of heavily urbanized estuaries. How might these human interventions influence the natural processes there?
14. Based on these satellite images, which is the most beautiful site? Taking a moment to appreciate the beauty of these natural systems is an important part of your job as coastal engineers.

    ############################################################################

    @@ -2161,7 +1512,6 @@

Explore the coastal systems

  • The coastal systems data
  • Questions 2a: theory
  • Questions 2b: application
  • Explore the coastal systems
diff --git a/notebooks/5_cross_shore_transport.html b/notebooks/5_cross_shore_transport.html
new file mode 100644
index 0000000..8cddb47
--- /dev/null
+++ b/notebooks/5_cross_shore_transport.html
@@ -0,0 +1,1109 @@
+First import some necessary packages — CoastalCodeBook
    # %run initialize/5_cross_shore_transport.ipynb
    +
    +
    +
    +
    +
    +

    First import some necessary packages#

    +
    +
    +
    import os
    +import pathlib
    +
    +import colorcet as cc
    +import dask.dataframe as dd
    +import geopandas as gpd
    +import holoviews as hv
    +import hvplot.pandas  # noqa: API import
    +import hvplot.xarray  # noqa: API import
    +import ipyleaflet
    +import numpy as np
    +import pandas as pd
    +import panel as pn
    +
    +from random import shuffle, uniform
    +
    +import IPython
    +import ipywidgets as widgets
    +import matplotlib.animation as animation
    +import matplotlib.pyplot as plt
    +from IPython.display import HTML, display
    +from ipywidgets import interact
    +from matplotlib.animation import FuncAnimation
    +from matplotlib.ticker import MultipleLocator
    +
    +
    +
    +
    +
    +
    +
    +
    +

    (Cross-shore) sediment transport#

    +

Welcome to the notebook of week 5! This notebook covers chapters 6 and 7 from the book. The layout of this notebook is as follows:

  • Modes of sediment transport (Chapter 6)
  • Net transport for secondary flow (Chapter 6)
  • Equilibrium states (Chapter 7)
  • Beach states (Chapter 7)

Each section contains questions for you to practice with (cross-shore) sediment transport. Let’s get started!

    +
    +

    Modes of sediment transport#

    +
    +
    +

    Net transport for secondary flow#

    +
    +
    +

Equilibrium states#

    +

This section considers equilibrium shoreline profiles (section 7.2 from the book). As you know, the shoreface is highly dynamic and can be subject to change on both short and long time scales. We often choose to model this as a dynamic equilibrium profile. Different formulations exist for the equilibrium profile, and most are derived at least somewhat empirically.

    +
    +
    +
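
The hunk above does not show which formulation the notebook uses. As a minimal sketch, one widely used empirical form is the Dean (1977) profile h(x) = A x^(2/3); the shape parameter A below is an assumed example value, in practice it depends on the sediment fall velocity or grain size.

import numpy as np
import matplotlib.pyplot as plt

A = 0.1  # assumed shape parameter [m^(1/3)]
x = np.linspace(0, 1000, 200)  # cross-shore distance from the waterline [m]
h = A * x ** (2 / 3)           # water depth [m]

fig, ax = plt.subplots()
ax.plot(x, -h)                 # plot depth as negative bed elevation
ax.set_xlabel("cross-shore distance [m]")
ax.set_ylabel("bed elevation [m]")
ax.set_title("Dean equilibrium profile (sketch)")
plt.show()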

    Beach states#

    +

This section considers beach states (section 7.3 from the book). We have previously discussed equilibrium profiles of the upper shoreface. However, a beach is hardly ever in equilibrium. In this section, the focus is on the high variability of the upper shoreface, for instance following an episodic event.

    +
    +
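
Beach states are often linked to the dimensionless fall velocity Omega = H_b / (w_s T) of Wright and Short (1984). The sketch below is not taken from this notebook: the thresholds are commonly quoted ranges and the breaker height, fall velocity, and wave period are assumed example values.

def dimensionless_fall_velocity(H_b, w_s, T):
    """Return Omega = H_b / (w_s * T)."""
    return H_b / (w_s * T)

H_b = 1.5   # breaker height [m] (assumed)
w_s = 0.03  # sediment fall velocity [m/s] (assumed)
T = 8.0     # wave period [s] (assumed)

omega = dimensionless_fall_velocity(H_b, w_s, T)
# Commonly cited ranges: Omega < ~1 reflective, ~1-6 intermediate, > ~6 dissipative.
print(f"Omega = {omega:.2f}")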
    + + \ No newline at end of file diff --git a/objects.inv b/objects.inv index c790dabaf88f16deccab3fc7f203171b613af98f..5d288b7e4c1327da903bca66d8083fa0271f146b 100644 GIT binary patch delta 365 zcmV-z0h0c)1K2qiz%%baSh5HQKz5 zkYqvZ!~e#^Tz@G=@iR7_I$A=r5}a`wzI&yW1=vnIphIH#%zyfqj01WgGCsg6tU)k_ z1W7ErBn<`PDXUR0B%IsTP>2@0(36Ff<&b+Ztv9)fhPkqm$Q+H5(@EWCr{*i^rDE{r zCwk!V^4vp93^k3CA9LlAD5nZNM L8q9tHj+1vDAz-pW delta 318 zcmV-E0m1&@1F-{;e1H8bGQv=*u51+pVn9MF5@Y4q*TkyhL%uUmemzdkDQQxvixq#b zd&IiN#A|Ji0lz)i2092#6|sG^XsXqw(pMD^EqA`!&QK*{7dWC#h8>1w)RdK&mkClL zh-1E|2uuH&67v(LNWH9}s)68wg`dIcbq&7vj_6o1f2yIAX@7wM5XA?0gB?_ixq>Fv zeI*Mu;!$;IuT=Ovtwus3Uh_;IO09rAm}$M)8wuPSuZi5j89m?BZFL#G)lnOU=zb*w z59fAwCt~PGP5Qdg@Y{*ooN>`NfsUrVGzuo3o$znE0hK79YlC@_ryMr6dCL?a3+wXnd QzL`LR-Y-G*2SCP1n@_!(v;Y7A diff --git a/reports/notebooks/1_coastal_classification.err.log b/reports/notebooks/1_coastal_classification.err.log index 011c532..f34034a 100644 --- a/reports/notebooks/1_coastal_classification.err.log +++ b/reports/notebooks/1_coastal_classification.err.log @@ -18,143 +18,31 @@ Traceback (most recent call last): raise CellExecutionError.from_cell_and_msg(cell, exec_reply_content) nbclient.exceptions.CellExecutionError: An error occurred while executing the following cell: ------------------ -WEB_MERCATOR_LIMITS = ( - -20037508.342789244, - 20037508.342789244, -) # max polar latitudes that can be handled in World Mercator - -df = ( - dd.read_parquet(DATA_DIR / "01_earthquakes_sample.parquet") - .sample( - frac=0.1 - ) # uncomment this line if loading the data takes too long on your computer - .set_index("time") - .compute() - .tz_localize(None) - .sort_index() -) - - -# To save memory we drop most of the columns. Also we drop the polar latitudes that cannot be displayed in the web mercator projection. 
-df = df[["mag", "depth", "latitude", "longitude", "place", "type"]][ - df["northing"] < WEB_MERCATOR_LIMITS[1] -] -# df.head() +import pathlib +import sys + +import colorcet as cc +import dask.dataframe as dd +import geopandas as gpd +import numpy as np +import panel as pn +import pandas as pd +import holoviews as hv +import hvplot.pandas # noqa: API import +from bokeh.models import PanTool, WheelZoomTool +import pooch + +import logging ------------------ --------------------------------------------------------------------------- -FileNotFoundError Traceback (most recent call last) -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/backends.py:141, in CreationDispatch.register_inplace..decorator..wrapper(*args, **kwargs) - 140 try: ---> 141 return func(*args, **kwargs) - 142 except Exception as e: - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/core.py:529, in read_parquet(path, columns, filters, categories, index, storage_options, engine, use_nullable_dtypes, dtype_backend, calculate_divisions, ignore_metadata_file, metadata_task_size, split_row_groups, blocksize, aggregate_files, parquet_file_extension, filesystem, **kwargs) - 527 blocksize = None ---> 529 read_metadata_result = engine.read_metadata( - 530  fs, - 531  paths, - 532  categories=categories, - 533  index=index, - 534  use_nullable_dtypes=use_nullable_dtypes, - 535  dtype_backend=dtype_backend, - 536  gather_statistics=calculate_divisions, - 537  filters=filters, - 538  split_row_groups=split_row_groups, - 539  blocksize=blocksize, - 540  aggregate_files=aggregate_files, - 541  ignore_metadata_file=ignore_metadata_file, - 542  metadata_task_size=metadata_task_size, - 543  parquet_file_extension=parquet_file_extension, - 544  dataset=dataset_options, - 545  read=read_options, - 546  **other_options, - 547 ) - 549 # In the future, we may want to give the engine the - 550 # option to return a dedicated element for `common_kwargs`. - 551 # However, to avoid breaking the API, we just embed this - 552 # data in the first element of `parts` for now. - 553 # The logic below is inteded to handle backward and forward - 554 # compatibility with a user-defined engine. 
- -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/arrow.py:536, in ArrowDatasetEngine.read_metadata(cls, fs, paths, categories, index, use_nullable_dtypes, dtype_backend, gather_statistics, filters, split_row_groups, blocksize, aggregate_files, ignore_metadata_file, metadata_task_size, parquet_file_extension, **kwargs) - 535 # Stage 1: Collect general dataset information ---> 536 dataset_info = cls._collect_dataset_info( - 537  paths, - 538  fs, - 539  categories, - 540  index, - 541  gather_statistics, - 542  filters, - 543  split_row_groups, - 544  blocksize, - 545  aggregate_files, - 546  ignore_metadata_file, - 547  metadata_task_size, - 548  parquet_file_extension, - 549  kwargs, - 550 ) - 552 # Stage 2: Generate output `meta` - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/dataframe/io/parquet/arrow.py:1051, in ArrowDatasetEngine._collect_dataset_info(cls, paths, fs, categories, index, gather_statistics, filters, split_row_groups, blocksize, aggregate_files, ignore_metadata_file, metadata_task_size, parquet_file_extension, kwargs) - 1050 if ds is None: --> 1051 ds = pa_ds.dataset( - 1052  paths, - 1053  filesystem=_wrapped_fs(fs), - 1054  **_processed_dataset_kwargs, - 1055  ) - 1057 # Get file_frag sample and extract physical_schema - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:785, in dataset(source, schema, format, filesystem, partitioning, partition_base_dir, exclude_invalid_files, ignore_prefixes) - 784 if all(_is_path_like(elem) for elem in source): ---> 785 return _filesystem_dataset(source, **kwargs) - 786 elif all(isinstance(elem, Dataset) for elem in source): - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:463, in _filesystem_dataset(source, schema, filesystem, partitioning, format, partition_base_dir, exclude_invalid_files, selector_ignore_prefixes) - 462 if isinstance(source, (list, tuple)): ---> 463 fs, paths_or_selector = _ensure_multiple_sources(source, filesystem) - 464 else: - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/pyarrow/dataset.py:382, in _ensure_multiple_sources(paths, filesystem) - 381 elif file_type == FileType.NotFound: ---> 382 raise FileNotFoundError(info.path) - 383 elif file_type == FileType.Directory: - -FileNotFoundError: /home/runner/work/CoastalCodebook/CoastalCodebook/book/data/01_earthquakes_sample.parquet - -The above exception was the direct cause of the following exception: - -FileNotFoundError Traceback (most recent call last) -Cell In[6], line 7 - 1 WEB_MERCATOR_LIMITS = ( - 2 -20037508.342789244, - 3 20037508.342789244, - 4 ) # max polar latitudes that can be handled in World Mercator - 6 df = ( -----> 7 dd.read_parquet(DATA_DIR / "01_earthquakes_sample.parquet") - 8 .sample( - 9 frac=0.1 - 10 ) # uncomment this line if loading the data takes too long on your computer - 11 .set_index("time") - 12 .compute() - 13 .tz_localize(None) - 14 .sort_index() - 15 ) - 18 # To save memory we drop most of the columns. Also we drop the polar latitudes that cannot be displayed in the web mercator projection. 
- 19 df = df[["mag", "depth", "latitude", "longitude", "place", "type"]][ - 20 df["northing"] < WEB_MERCATOR_LIMITS[1] - 21 ] - -File ~/micromamba/envs/311-book/lib/python3.11/site-packages/dask/backends.py:143, in CreationDispatch.register_inplace..decorator..wrapper(*args, **kwargs) - 141 return func(*args, **kwargs) - 142 except Exception as e: ---> 143 raise type(e)( - 144 f"An error occurred while calling the {funcname(func)} " - 145 f"method registered to the {self.backend} backend.\n" - 146 f"Original Message: {e}" - 147 ) from e - -FileNotFoundError: An error occurred while calling the read_parquet method registered to the pandas backend. -Original Message: /home/runner/work/CoastalCodebook/CoastalCodebook/book/data/01_earthquakes_sample.parquet +ModuleNotFoundError Traceback (most recent call last) +Cell In[1], line 13 + 11 import hvplot.pandas # noqa: API import + 12 from bokeh.models import PanTool, WheelZoomTool +---> 13 import pooch + 15 import logging + +ModuleNotFoundError: No module named 'pooch' diff --git a/searchindex.js b/searchindex.js index 8b345f4..dc45f5d 100644 --- a/searchindex.js +++ b/searchindex.js @@ -1 +1 @@ -Search.setIndex({"docnames": ["about/acknowledgements", "about/introduction", "about/structure", "intro", "notebooks/1_coastal_classification", "usage/contributing", "usage/getting_started", "usage/installation", "usage/troubleshooting"], "filenames": ["about/acknowledgements.md", "about/introduction.md", "about/structure.md", "intro.md", "notebooks/1_coastal_classification.ipynb", "usage/contributing.md", "usage/getting_started.md", "usage/installation.md", "usage/troubleshooting.md"], "titles": ["Acknowledgements", "Introduction", "Course structure", "Welcome to Coastal Codebook", "Characterization of Coastal Systems", "Contributing (Advanced)", "Getting started", "Installation", "Troubleshooting"], "terms": {"welcom": [0, 4], "first": [4, 7, 8], "notebook": [4, 7, 8], "tu": 4, "delft": 4, "msc": [3, 4], "engin": [3, 4], "thi": [0, 1, 2, 4, 5, 6, 7, 8], "year": 4, "experi": 4, "cours": [3, 4, 8], "With": 4, "hope": 4, "provid": [3, 4, 7, 8], "you": [0, 4, 5, 6, 7, 8], "interact": [4, 6, 7], "materi": [4, 8], "help": [4, 8], "better": 4, "understand": [4, 7], "concept": 4, "teach": 4, "pleas": [4, 7, 8], "let": 4, "know": 4, "how": [1, 2, 3, 4, 7], "find": [4, 7], "appreci": 4, "your": [4, 5, 6, 7, 8], "feedback": 4, "chapter": 4, "dynam": [3, 4], "open": [0, 3, 4, 6, 7], "textbook": [3, 4], "describ": [4, 7, 8], "larg": 4, "geograph": 4, "variat": 4, "coast": 4, "across": [4, 8], "world": 4, "It": [3, 4, 7, 8], "explain": [3, 4], "have": [4, 6, 7, 8], "todai": 4, "ar": [1, 2, 4, 6, 7], "shape": 4, "both": 4, "present": 4, "dai": 4, "million": 4, "ago": 4, "distinguish": 4, "between": [3, 4], "three": [4, 7], "differ": 4, "order": 4, "featur": 4, "which": [4, 7], "associ": 4, "time": [4, 7, 8], "In": [4, 6, 7], "look": [4, 6, 7, 8], "scale": 4, "two": 4, "cell": [4, 6], "below": [4, 7], "need": [4, 7], "also": [4, 7], "set": [4, 7], "some": [4, 6, 7], "path": [4, 6, 7], "sourc": [0, 4], "code": [4, 6, 7], "For": [4, 7], "exampl": 4, "add": [4, 5, 7], "src": 4, "directori": [4, 6, 7], "allow": [4, 7], "gener": [3, 4], "function": 4, "from": [4, 7, 8], "coastpi": 4, "os": 4, "pathlib": 4, "sy": 4, "make": [4, 7], "append": 4, "root": [4, 7], "cwd": 4, "resolv": [4, 8], "proj_dir": 4, "parent": 4, "coastalcodebook": [4, 5, 7], "str": 4, "warn": [], "environ": [4, 5, 6, 8], "use_pygeo": [], "0": [4, 7], "instead": [], "pygeo": [], "silenc": [], "colorcet": 
4, "cc": 4, "dask": 4, "datafram": 4, "dd": 4, "geopanda": 4, "gpd": 4, "holoview": 4, "hv": 4, "hvplot": 4, "panda": 4, "noqa": 4, "api": 4, "xarrai": 4, "ipyleaflet": 4, "numpi": 4, "np": 4, "pd": 4, "panel": 4, "pn": 4, "geoview": 4, "tile_sourc": 4, "gvt": 4, "map": 4, "marker": 4, "scalecontrol": 4, "basemap": 4, "ipywidget": 4, "html": [4, 5], "extens": [4, 7], "geometri": 4, "geo_bbox": 4, "definitli": [], "best": [], "practic": 7, "workaround": 5, "avoid": [4, 7, 8], "mani": [1, 2], "trigger": [], "releas": 7, "filterwarn": [], "ignor": [], "categori": 4, "runtimewarn": [], "userwarn": [], "data_dir": 4, "coastal_systems_fp": 4, "01_coastal_system": 4, "gpkg": 4, "modulenotfounderror": [], "traceback": 4, "most": 4, "recent": 4, "call": 4, "last": 4, "line": [4, 7], "21": 4, "17": 4, "19": 4, "23": 4, "24": 4, "file": [4, 7], "dev": [], "__init__": [], "py": [4, 5], "3": [4, 6], "toml": [], "5": 4, "get": [3, 4], "contain": [4, 7], "current": 0, "6": 4, "base_dir": [], "dirnam": [], "abspath": [], "__file__": [], "No": [], "modul": 3, "name": [4, 5], "start": [3, 4, 7, 8], "broadest": 4, "cover": [1, 2, 4], "distanc": 4, "thousand": 4, "kilometr": 4, "link": [3, 4], "long": 4, "term": 4, "geolog": 4, "do": [1, 2, 4, 6, 7, 8], "so": [1, 2, 4, 7], "usg": 4, "dataset": 4, "sampl": 4, "10": 4, "observ": 4, "eartquak": 4, "jan": 4, "2000": 4, "dec": 4, "2018": 4, "why": 4, "reveal": 4, "geologist": 4, "mysteri": 4, "deep": 4, "research": 4, "insight": [3, 4], "s": [4, 7, 8], "run": [4, 5, 7, 8], "next": [4, 7], "tabular": 4, "includ": [4, 7, 8], "index": 4, "column": 4, "onli": 4, "keep": 4, "memori": 4, "actual": 4, "total": 4, "here": [4, 8], "approx": 4, "220k": 4, "entri": 4, "If": [4, 5, 6, 7, 8], "respond": 4, "slow": 4, "adjust": 4, "widget": 4, "consid": 4, "take": 4, "anoth": [4, 7], "can": [0, 4, 6, 7, 8], "uncom": 4, "frac": 4, "22k": 4, "uniqu": 4, "over": [4, 7], "web_mercator_limit": 4, "20037508": 4, "342789244": 4, "max": 4, "polar": 4, "latitud": 4, "handl": 4, "mercat": 4, "df": 4, "read_parquet": 4, "01_earthquakes_sampl": 4, "parquet": 4, "too": 4, "comput": [4, 7], "set_index": 4, "tz_local": 4, "none": 4, "sort_index": 4, "To": [4, 7, 8], "save": 4, "drop": 4, "cannot": 4, "displai": [4, 5], "web": 4, "project": [0, 4], "mag": 4, "depth": 4, "longitud": 4, "place": 4, "type": 4, "north": 4, "head": 4, "tool": 4, "holoviz": 4, "high": 4, "level": 4, "simplifi": 4, "python": [4, 5, 6, 7], "enabl": [4, 8], "mode": 4, "creat": [0, 4, 5, 6, 7, 8], "filter": 4, "accordingli": 4, "an": [4, 7], "overlai": 4, "tileset": 4, "esri": 4, "imageri": 4, "note": [4, 6, 8], "store": 4, "result": [3, 4], "object": [3, 4], "see": [0, 4, 6, 7], "one": [4, 6, 7], "more": 4, "title_bar": 4, "row": 4, "pane": 4, "markdown": 4, "style": 4, "color": 4, "black": 4, "width": 4, "800": 4, "sizing_mod": 4, "fix": [4, 5, 8], "margin": 4, "15": 4, "spacer": 4, "defin": 4, "magnitude_slid": 4, "rangeslid": 4, "magnitud": 4, "richter": 4, "end": 4, "depth_slid": 4, "km": 4, "650": 4, "date_slid": 4, "daterangeslid": 4, "date": 4, "column_typ": 4, "select": [4, 7], "option": [4, 7], "depend": [4, 7], "param": 4, "value_start": 4, "value_end": 4, "valu": 4, "def": 4, "plot_earthquake_panel": 4, "magnitude_start": 4, "magnitude_end": 4, "depth_start": 4, "depth_end": 4, "date_start": 4, "date_end": 4, "invert": 4, "fire": 4, "colormap": 4, "cmap": 4, "cet_l4": 4, "colorbar_label": 4, "return": 4, "point": 4, "x": 4, "y": 4, "geo": 4, "true": 4, "global_ext": 4, "tile": 4, "frame_width": 4, "900": 4, 
"ylabel": 4, "deg": 4, "xlabel": 4, "tap": 4, "hover_col": 4, "logz": 4, "clim": 4, "clabel": 4, "earthquake_panel": 4, "follow": [4, 6, 7], "instruct": [4, 7], "after": 4, "sever": [4, 7], "while": 4, "show": 4, "either": [4, 7], "magintud": 4, "relat": [4, 8], "hint": 4, "figur": 4, "under": 4, "ground": 4, "move": 4, "extra": 4, "converg": 4, "diverg": 4, "boundari": 4, "support": [4, 7, 8], "fundament": 4, "geologi": 4, "although": 4, "rel": 4, "act": 4, "ha": 4, "had": 4, "enorm": 4, "impact": 4, "format": 4, "coastlin": 4, "determin": 4, "what": [2, 4, 7, 8], "inherit": 4, "aspect": [3, 4], "sec": [4, 6], "1971": 4, "inman": 4, "d": [4, 5], "l": [4, 7], "nordstrom": 4, "c": 4, "e": 4, "classifi": 4, "thei": [4, 7], "introduc": 4, "class": 4, "match": 4, "identifi": [4, 8], "predict": 4, "area": 4, "around": 4, "where": [4, 6, 7], "instanc": [4, 6], "kind": 4, "chili": 4, "And": 4, "east": 4, "usa": 4, "characterist": 4, "about": [4, 8], "china": 4, "sea": 4, "further": 4, "afro": 4, "trail": 4, "edg": 4, "amero": 4, "sediment": [3, 4], "suppli": 4, "main": 4, "caus": 4, "expect": [4, 8], "input": 4, "geomorpholog": 4, "section": [1, 2, 4], "part": [], "second": 4, "third": 4, "accord": 4, "influenc": 4, "fluvial": 4, "wave": [3, 4], "tidal": 4, "idea": 4, "signatur": 4, "small": 4, "plot": 4, "given": 4, "zoom": 4, "sheet": 4, "geospati": 4, "afterward": 4, "sinc": 4, "random": 4, "might": [4, 5], "encount": 4, "same": 4, "multipl": 4, "just": [4, 7], "again": 4, "draw": 4, "coastal_system": 4, "read_fil": 4, "to_list": 4, "coastal_systems_slid": 4, "choic": 4, "plot_coastal_system": 4, "loc": 4, "copi": [4, 7], "west": 4, "south": 4, "flatten": 4, "lon": 4, "lat": 4, "red": 4, "alpha": 4, "xlim": 4, "ylim": 4, "1100": 4, "app": 4, "rang": 4, "try": 4, "answer": 4, "compar": 4, "heavili": 4, "river": 4, "domin": 4, "delta": 4, "natur": 4, "biggest": 4, "smallest": 4, "basin": 4, "estuarin": 4, "deltaic": 4, "spit": 4, "contrast": 4, "low": 4, "tell": 4, "estuari": 4, "fine": 4, "muddi": 4, "coars": 4, "sandi": 4, "eastern": 4, "western": 4, "tip": [1, 2, 4], "dutch": 4, "german": 4, "wadden": 4, "island": 4, "veri": 4, "beach": 4, "ridg": 4, "come": [4, 8], "dune": 4, "du": 4, "pilat": 4, "franc": 4, "largest": 4, "sand": 4, "coolest": 4, "should": [4, 5, 6, 7], "definit": 4, "visit": 4, "chanc": 4, "locat": 4, "side": 4, "arcachon": 4, "inlet": 4, "northern": 4, "jiangsu": 4, "limit": 4, "bai": 4, "intertid": 4, "flat": 4, "salt": 4, "marsh": 4, "mangrov": 4, "forest": 4, "nearbi": 4, "jetti": 4, "affect": 4, "wai": [1, 2, 4, 7], "evolv": 4, "whose": 4, "constrain": 4, "presenc": 4, "rocki": 4, "albufeira": 4, "lagoon": 4, "portug": 4, "close": [4, 7], "season": 4, "imag": 4, "shown": 4, "when": [4, 5, 7], "urban": 4, "human": 4, "intervent": 4, "satellit": 4, "beauti": 4, "site": 4, "moment": 4, "job": 4, "There": [1, 2], "write": [1, 2], "jupyt": [0, 1, 2, 5, 6, 7], "book": [0, 1, 2, 4, 5], "short": [1, 2, 7], "few": [1, 2, 7], "give": 4, "feel": 8, "content": [1, 2, 7], "structur": 3, "off": [], "major": [], "well": 3, "doe": 7, "go": 7, "ani": 8, "particular": [], "topic": [], "check": 7, "out": 4, "document": [7, 8], "inform": 4, "page": 7, "bundl": [], "myst": [], "character": 3, "outlin": [], "introduction2": [], "types3": [], "tuples4": [], "methods5": [], "dictionaries6": [], "empties7": [], "differenti": [], "standard": [], "datatyp": [], "int": [], "float": [], "dict": [], "etc": [], "perform": [], "like": [4, 5, 8], "lower": [], "split": [], "manipul": [], "us": [0, 3, 
5, 6, 7, 8], "assign": [], "subset": [], "statement": [], "elif": 4, "block": 7, "indent": [], "The": [3, 7], "websit": [], "assum": 7, "prior": [], "knowledg": 8, "program": [], "languag": [], "requir": [], "compris": [], "core": 4, "read": 4, "through": [7, 8], "leisur": [], "exercis": 4, "complement": [], "each": 4, "solut": 8, "hand": [], "solidifi": [], "A": 5, "piec": [], "work": [3, 4], "number": [], "text": [], "42": 4, "integ": [], "hello": [], "variabl": 4, "refer": [3, 7], "mathemat": [], "statist": [], "we": [0, 6, 7, 8], "usual": [], "word": [], "letter": [], "underscor": [], "howev": 4, "reserv": [], "lambda": [], "encod": [], "special": [], "don": [7, 8], "t": [7, 8], "want": 4, "overwrit": [], "think": [], "box": [], "hold": [], "singl": [], "vector": [], "modifi": [], "medium": [], "com": 7, "summari": [], "english": [], "descript": 8, "posit": [], "neg": [], "whole": [], "real": [], "decim": [], "form": 3, "14159": [], "bool": [], "fals": [], "i": 4, "cheezburg": [], "collect": 4, "ali": [], "xinyi": [], "miriam": [], "thursdai": [], "9": [4, 7], "kei": [], "pair": [], "dsci": [], "511": [], "credit": [], "nonetyp": [], "null": [], "repres": [], "distinct": [], "complex": [], "print": 4, "ipython": 4, "version": [5, 7], "automat": [], "screen": [], "explicitli": [], "anyth": 7, "pound": [], "hash": [], "symbol": [], "comment": [], "pi": [], "tabl": [], "syntax": [], "addit": [], "subtract": [], "divis": [], "exponenti": [], "floor": [], "modulo": [], "appli": 3, "multipli": [], "28318": [], "expon": [], "1024": [], "mai": 4, "produc": [], "dtype": [], "than": [], "chang": [4, 6, 7], "int_2": [], "divison": [], "But": 7, "aka": [], "retain": [], "alwai": [], "round": [], "down": [], "101": [], "50": [], "becaus": 8, "nearest": [], "remaind": [], "100": [], "mod": [], "divid": [], "its": [], "own": [], "possibl": 7, "ll": [], "later": [], "charact": [], "unicod": [], "great": [], "blog": [], "post": [], "re": [7, 8], "interest": [], "enclos": [], "quot": [], "g": [], "doubl": [], "goodby": [], "case": [], "tripl": [], "typic": 4, "my_nam": [], "toma": [], "beuzen": [], "quotat": [], "apostroph": [], "combin": [], "sentenc": [], "raini": [], "donald": [], "knuth": [], "prematur": [], "optim": [], "all": [0, 4, 8], "evil": [], "the_truth": [], "li": [], "back": [], "equal": [], "greater": [], "less": [], "solv": 8, "problem": 8, "syntaxwarn": [], "liter": [], "did": [], "mean": [], "var": [], "folder": [], "p8": [], "qxnqn2ns5kbczrp91kx4_1nm0000gn": [], "ipykernel_64343": [], "564785565": [], "evalu": [], "least": [], "bitwis": [], "bit": [], "That": [], "beyond": [], "scope": [], "ve": [], "snippet": [], "them": [], "action": [], "f": [4, 5, 7], "represent": [], "0b": [], "sometim": [], "tri": [], "convers": [], "throw": [], "error": [4, 5], "valueerror": [], "60": [], "could": [], "convert": [], "thing": [], "element": 4, "explor": 7, "littl": [], "squar": [], "bracket": [], "my_list": [], "even": [], "other": 8, "another_list": [], "five": [], "length": [], "len": [], "similar": 8, "parenthes": [], "access": [6, 8], "insid": [], "zero": [], "base": [7, 8], "indexerror": [], "74": [], "indic": [], "count": [], "backward": 4, "colon": [], "sub": [], "abov": 4, "inclus": [], "exclus": [], "fetch": [], "behav": [], "rememb": [], "alphabet": [], "abcdefghijklmnopqrstuvwxyz": [], "z": [], "abcd": [], "12": 4, "20": 4, "mnopqrst": [], "itself": [], "period": [], "item": [], "prime": [], "11": 4, "13": 4, "un": [], "Being": [], "unord": [], "record": [], "insert": [], "noth": [], 
"typeerror": [], "93": [], "subscript": [], "new": [4, 6, 7, 8], "variou": [], "names_list": [], "indiana": [], "fang": [], "linsei": [], "cool": [], "gui": [], "names_tupl": [], "Not": [], "97": [], "goe": [], "onc": 6, "modifii": [], "tom": [], "q": [], "99": [], "all_cap": [], "new_str": [], "doesn": [], "origin": 4, "rather": [], "o": 7, "One": [], "caps_list": [], "h": [], "w": [], "r": [], "u": [], "join": [], "chain": [], "togeth": [], "fill": [], "blank": [], "nice": [], "recommend": 7, "were": [], "put": [], "front": [], "curli": [], "notat": [], "newborn": [], "babi": [], "ag": [], "month": [], "2020": [], "template_new": [], "my": [], "am": [], "2f": [], "old": [], "wa": [4, 7], "born": [], "02": [], "33": 4, "06": [], "argument": [], "brace": [], "hous": [], "bedroom": [], "bathroom": [], "citi": [], "vancouv": [], "price": [], "2499999": [], "date_sold": [], "2015": [], "condo": [], "burnabi": [], "699999": [], "27": 4, "8": 4, "2011": [], "specif": [], "field": [], "edit": 7, "alreadi": [], "wood": [], "delet": [], "entir": [], "though": [], "rare": [], "del": [], "easili": [], "443345": [], "777": [], "non": [], "exist": [], "keyerror": [], "126": [], "lst": [], "appar": [], "faster": [], "tup": [], "dic": [], "st": [], "certain": [], "execut": [7, 8], "state": [], "keyword": [], "santa": [], "funni": [], "meet": [], "notic": [], "express": [], "space": [], "exit": [], "necessarili": [], "default": 7, "nest": [], "super": [], "startswith": [], "realli": [], "superpow": [], "simpl": 8, "simplic": [], "unless": [], "doc": [], "m": [5, 7], "truthi": [], "falsei": [], "known": [5, 7], "circut": [], "stop": [], "been": [], "fake_vari": [], "nameerror": [], "142": 4, "144": 4, "detail": [], "b": 3, "loops2": [], "loops3": [], "comprehensions4": [], "except5": [], "functions6": [], "type7": [], "functions8": [], "functions9": [], "generators10": [], "iter": [], "list": [0, 4], "dictionari": [], "global": [], "modular": [], "assess": [], "whether": [], "paramet": [], "behaviour": [], "usag": [], "n": 4, "outsid": [], "49": [], "25": 4, "begin": [], "henc": [], "taken": [], "tupl": 4, "string": [], "sequenc": [], "gimm": [], "spell": [], "p": 4, "common": [], "pattern": [], "up": 4, "specifi": 4, "skip": [], "31": 4, "41": 4, "51": [], "61": [], "71": [], "81": [], "91": [], "dimens": [], "list_1": [], "list_2": [], "clever": [], "tend": [], "zip": [], "enumer": [], "quit": [], "lot": [], "unpack": [], "directli": [], "j": [], "counter": [], "within": [], "521": [], "awesom": [], "551": 4, "rivet": [], "naptim": [], "course_num": [], "pack": [], "excut": [], "bewar": [], "condit": [], "got": [], "infintit": [], "blast": [], "decrement": [], "hard": [], "collatz": [], "conjectur": [], "matter": [], "eventu": [], "reach": [], "els": 4, "odd": [], "34": 4, "52": [], "26": 4, "40": 4, "16": 4, "forc": [], "criteria": [], "break": 4, "123": [], "ugh": [], "370": [], "185": [], "556": [], "278": [], "139": [], "418": [], "209": [], "628": [], "314": [], "continu": 7, "won": [], "restart": [], "top": [], "never": [], "build": [3, 5, 7, 8], "conveni": [], "compact": [], "sublimin": [], "ingest": [], "egg": [], "outrun": [], "eagl": [], "first_lett": [], "complic": [], "antidisestablishmentarian": [], "would": 4, "word_length": [], "28": 4, "NOT": [], "genexpr": [], "0x1037da400": [], "blue": [], "death": [], "nine": [], "inch": [], "nail": [], "concert": [], "cnet": [], "someth": 7, "wrong": [], "our": [7, 8], "crash": [], "fail": [], "gracefulli": [], "accomplish": [], "basic": [], 
"this_variable_does_not_exist": [], "befor": [], "29": 4, "pass": [], "bad": [], "rais": 4, "catch": [], "saw": [], "zerodivisionerror": [], "ok": [], "bunch": [], "ex": [], "caught": [], "messag": 4, "without": [], "exactli": [], "made": [], "sort": [], "final": [], "almost": [], "anywai": [], "purpos": [], "add_on": [], "isinst": 4, "sorri": [], "must": [], "numer": [], "enter": 7, "weird": [], "much": [], "clearer": [], "user": [4, 5, 7], "ideal": [], "customadditionerror": [], "reusabl": [], "accept": 7, "n_squar": [], "10000": [], "12345": [], "152399025": [], "output": [4, 7], "cat_str": [], "str1": [], "str2": [], "said": [], "silly_sum": [], "sum": [], "wait": [], "mention": [], "touch": [], "termin": [6, 7], "999": [], "repeat_str": [], "md": [], "mdsmd": [], "mdsmdsmdsmdsmd": [], "carefulli": [], "chosen": [], "repeat": 8, "me": [], "reason": 7, "appear": [], "fact": [], "3rd": [], "confus": [], "terribl": [], "technic": [], "sum_and_product": [], "30": 4, "omit": [], "often": [], "implicitli": [], "comma": [], "immedi": [], "separ": [], "As": [], "asid": [], "convent": [], "_": [], "arg": 4, "kwarg": 4, "do_noth": [], "0x1037c2520": [], "evaluate_function_on_x_plus_1": [], "fun": 4, "36": 4, "happen": 2, "becom": [], "beenus": [], "until": 5, "now": [2, 4, 6, 7], "Or": [], "approach": 8, "ident": [], "aren": [], "appropri": [], "smaller": [], "otic": [], "Then": [], "At": [], "stand": 7, "yourself": [], "relev": [], "wikipedia": [], "articl": [], "task": [], "turn": [], "palindrom": [], "milad": [], "tiffani": [], "slice": [], "step": [7, 8], "mot": [], "names_backward": [], "miladdalim": [], "tommot": [], "tiffanyynaffit": [], "gross": [], "yucki": [], "slightli": [], "life": [], "easier": [], "make_palindrom": [], "okai": [], "names1": [], "names2": [], "appl": [], "orang": [], "banana": [], "appleelppa": [], "orangeegnaro": [], "bananaananab": [], "far": [], "choos": 8, "context": [], "These": [], "decis": [], "ambigu": [], "ever": [], "twice": [], "person": [], "opinion": [], "prefer": [], "everi": 7, "built": [5, 7], "recal": [], "earlier": [], "full": [], "product": [], "fit": [], "0x1037d9a40": [], "recip": [], "ask": [], "gen": [], "exhaust": [], "longer": 4, "defeat": [], "yield": [], "motiv": [], "sai": [], "canada": [], "talk": 7, "bring": [], "right": 6, "wrap": [], "concaten": [], "revers": [], "view": 4, "cursor": [], "shortcut": [], "shift": 7, "tab": [0, 4, 8], "press": 7, "pep": [], "257": [], "exact": [], "import": [], "render": 5, "id": [6, 7], "pars": [], "rest": [], "googl": [], "function_nam": [], "param1": [], "param2": [], "param3": [], "paragraph": [], "algorithm": [], "mayb": [], "explan": 3, "scipi": [], "blah": [], "blahblahblah": [], "sound": [], "func": 4, "bug": 8, "still": [], "enough": [], "vscode": [], "screenshot": 8, "ipynb": [4, 6], "regular": [], "flavor": [], "markedli": [], "slight": [], "commonmark": [], "sphinx": [], "ecosystem": 7, "overview": [], "power": [], "written": [], "markup": [], "serv": 4, "wherea": [], "span": [], "those": [], "being": 7, "inlin": 4, "cite": [], "bibtex": [], "holdgraf_evidence_2014": [], "hdhpk14": [], "moreov": [], "bibliographi": [], "properli": [], "bib": [], "christoph": [], "ramsai": [], "holdgraf": [], "wendi": [], "de": [], "heer": [], "brian": [], "paslei": [], "robert": [], "knight": [], "evid": [], "auditori": [], "cortex": [], "intern": [], "confer": [], "cognit": [], "neurosci": [], "brisban": [], "australia": [], "2014": [], "frontier": [], "starter": [], "jupyterbook": [], "org": [], "direct": 
[4, 8], "2": [5, 6], "4": 4, "kernel": [], "jupytext": [], "treat": 3, "command": [6, 7], "init": [], "markdownfil": [], "ipykernel_53982": [], "0x107759630": [], "0x10774dc60": [], "0x107759700": [], "workbook": 3, "system": 3, "lectur": 3, "tutori": [3, 7, 8], "dut": 3, "brightspac": 3, "independ": 3, "prepar": 3, "per": 3, "week": 4, "ipykernel_55748": [], "0x105a064d0": [], "0x1059f20c0": [], "0x105a05b10": [], "tuturi": 7, "session": 7, "mostli": 7, "reli": 7, "numfocu": 7, "commun": [7, 8], "control": 7, "github": [6, 7, 8], "client": [6, 7], "subsect": 7, "configur": 7, "familiar": 7, "excel": [0, 7], "introduct": [3, 6, 7], "instal": [3, 5, 6, 8], "clone": [5, 6, 7], "repositori": [5, 6, 7, 8], "local": [4, 6, 7], "brows": [6, 7], "webpag": 7, "click": [7, 8], "green": 7, "button": [7, 8], "desktop": 7, "simpli": 7, "past": 7, "url": 7, "menu": 7, "bash": 7, "shell": 7, "avail": 7, "http": 7, "floriscalkoen": 7, "underli": 7, "machin": 7, "By": [7, 8], "host": 7, "pull": 7, "reflect": 7, "yet": 7, "bottom": 7, "good": 7, "conflict": 7, "lightweight": 7, "mambaforg": 7, "found": 7, "conda": [5, 6, 7], "forg": 7, "download": 7, "miniforg": [6, 7], "On": 7, "binari": 7, "sure": 7, "stai": 7, "prompt": [6, 7], "search": 7, "issu": [5, 7], "firewal": 7, "troubl": 7, "temporarili": 7, "disabl": 7, "iterm": 7, "hotkei": 7, "cntrl": 7, "curl": 7, "latest": 7, "unam": 7, "sh": 7, "agreement": 7, "script": 7, "profil": 7, "bashrc": 7, "zshrc": 7, "seper": [4, 7], "jupyterlab": [6, 7], "22": [4, 7], "navig": [7, 8], "cd": [4, 6, 7], "userprofil": 7, "yml": [5, 7], "lab": [6, 7], "env": [4, 5, 6, 7], "encourag": [6, 8], "repo": 6, "backslash": 6, "activ": [4, 5, 6], "coastal": [5, 6], "browser": 6, "01_coastal_classif": 6, "upper": 6, "corner": 6, "ipykernel": [5, 6], "analysi": 6, "process": 8, "free": 8, "tracker": 8, "fellow": 8, "student": [3, 8], "probabl": 8, "troubleshoot": 3, "email": 8, "develop": 5, "fulli": 5, "_build": 5, "nb_conda_kernel": 5, "expos": 5, "kernelspec": 5, "executablebook": 0, "1348": [], "manual": 5, "kernselspec": 5, "recogn": 0, "contribut": [0, 3, 8], "cookiecutt": 0, "templat": 0, "ipykernel_56317": [], "0x103cbd630": [], "0x103cb1080": [], "0x103cbd700": [], "mamba": [5, 6], "question": 8, "packag": [4, 6], "manag": 6, "window": [4, 6], "initi": 4, "1_coastal_classif": 4, "bokeh": 4, "model": 4, "pantool": 4, "wheelzoomtool": 4, "succesfulli": 4, "THESE": 4, "NO": 4, "matplotlib": 4, "pyplot": 4, "plt": 4, "anim": 4, "interact_manu": 4, "shuffl": 4, "uniform": 4, "coastal_dynam": 4, "pil": 4, "io": 4, "load_quest": 4, "question_dir": 4, "question_fp": 4, "json": 4, "filenotfounderror": 4, "href_or_fp": [], "storage_opt": 4, "7": 4, "fsspec": [], "lib": 4, "python3": 4, "openfil": [], "__enter__": [], "self": 4, "98": [], "replac": [], "fs": 4, "102": [], "fobject": [], "104": [], "compress": [], "spec": [], "1309": [], "abstractfilesystem": [], "block_siz": [], "cache_opt": [], "1307": [], "1308": [], "ac": [], "pop": 4, "autocommit": [], "_intran": [], "_open": [], "1310": [], "1311": [], "1312": [], "1313": [], "1314": [], "1315": [], "1316": [], "1317": [], "1318": [], "compr": [], "implement": [], "180": [], "localfilesystem": [], "178": [], "auto_mkdir": [], "179": [], "makedir": [], "_parent": [], "exist_ok": [], "localfileopen": [], "298": [], "296": [], "get_compress": [], "297": [], "blocksiz": 4, "default_buffer_s": [], "303": [], "301": [], "302": [], "304": [], "305": [], "errno": [], "calkoen": [], "plate": 4, "align": 4, "center": 4, "opt": 4, 
"1000": 4, "height": 4, "500": 4, "wheel_zoom": 4, "plot_wher": 4, "show_earthquak": 4, "q1": 4, "questionfactori": 4, "q2": 4, "q3": 4, "q4": 4, "q5": 4, "upcom": 4, "australian": 4, "gold": 4, "q6": 4, "q7": 4, "q8": 4, "q9": 4, "lead": [4, 8], "correct": 4, "q10": 4, "q11": 4, "q12": 4, "q13": 4, "q14": 4, "14": 4, "q15": 4, "q16": 4, "q17": 4, "q18": 4, "18": 4, "q19": 4, "TO": 4, "BE": 4, "remov": 4, "frame_height": 4, "pan": 4, "sattelit": 4, "show_coastal_system": 4, "q20": 4, "q21": 4, "q22": 4, "q23": 4, "q24": 4, "q25": 4, "q26": 4, "q27": 4, "q28": 4, "q29": 4, "q30": 4, "q31": 4, "q32": 4, "32": 4, "q33": 4, "q34": 4, "q35": 4, "35": 4, "q36": 4, "q37": 4, "37": 4, "q38": 4, "38": 4, "q39": 4, "39": 4, "q40": 4, "q41": 4, "q42": 4, "q43": 4, "43": 4, "q44": 4, "44": 4, "q45": 4, "45": 4, "q46": 4, "46": 4, "q47": 4, "47": 4, "discuss": [3, 4, 8], "ipykernel_56478": [], "0x1042c2400": [], "0x1042b28e0": [], "0x1042c1a40": [], "interrel": 3, "physic": 3, "flow": 3, "transport": 3, "phenomena": 3, "morphodynam": 3, "wide": 3, "varieti": 3, "phenomenolog": 3, "theoret": 3, "civil": 3, "upon": 3, "bsc": 3, "basi": 3, "b1": 3, "ciem3210": 3, "acknowledg": 3, "ipykernel_57261": [], "0x1100bd630": [], "0x1100b28e0": [], "0x1100bd700": [], "ipykernel_57976": [], "0x106fc2400": [], "0x106fb2200": [], "0x106fc1a40": [], "311": 4, "103": [], "105": [], "107": [], "1293": [], "1291": [], "1292": [], "1294": [], "1295": [], "1296": [], "1297": [], "1298": [], "1299": [], "1300": [], "1301": [], "1302": [], "184": [], "182": [], "183": [], "306": [], "309": [], "310": [], "312": [], "313": [], "worri": 8, "hurdl": 8, "collabor": 8, "learn": 8, "comprehens": 8, "strongli": 8, "share": 8, "seen": 8, "duplic": 8, "prevent": 8, "queri": 8, "benefit": 8, "resolut": 8, "improv": 8, "resourc": 8, "everyon": 8, "unfamiliar": 8, "guid": 8, "walk": 8, "involv": 8, "clear": 8, "concis": 8, "reproduc": 8, "outcom": 8, "applic": 8, "priorit": 8, "request": 8, "address": 8, "ensur": 8, "transpar": 8, "effici": 8, "advanc": 3, "backend": 4, "141": 4, "creationdispatch": 4, "register_inplac": 4, "decor": 4, "wrapper": 4, "140": 4, "except": 4, "529": 4, "use_nullable_dtyp": 4, "dtype_backend": 4, "calculate_divis": 4, "ignore_metadata_fil": 4, "metadata_task_s": 4, "split_row_group": 4, "aggregate_fil": 4, "parquet_file_extens": 4, "filesystem": 4, "527": 4, "read_metadata_result": 4, "read_metadata": 4, "530": 4, "531": 4, "532": 4, "533": 4, "534": 4, "535": 4, "536": 4, "gather_statist": 4, "537": 4, "538": 4, "539": 4, "540": 4, "541": 4, "542": 4, "543": 4, "544": 4, "dataset_opt": 4, "545": 4, "read_opt": 4, "546": 4, "other_opt": 4, "547": 4, "549": 4, "futur": 4, "550": 4, "dedic": 4, "common_kwarg": 4, "emb": 4, "552": 4, "553": 4, "logic": 4, "inted": 4, "forward": 4, "554": 4, "compat": 4, "arrow": 4, "arrowdatasetengin": 4, "cl": 4, "stage": 4, "dataset_info": 4, "_collect_dataset_info": 4, "548": 4, "meta": 4, "1051": 4, "1050": 4, "ds": 4, "pa_d": 4, "1052": 4, "1053": 4, "_wrapped_f": 4, "1054": 4, "_processed_dataset_kwarg": 4, "1055": 4, "1057": 4, "file_frag": 4, "extract": 4, "physical_schema": 4, "pyarrow": 4, "785": 4, "schema": 4, "partit": 4, "partition_base_dir": 4, "exclude_invalid_fil": 4, "ignore_prefix": 4, "784": 4, "_is_path_lik": 4, "elem": 4, "_filesystem_dataset": 4, "786": 4, "463": 4, "selector_ignore_prefix": 4, "462": 4, "paths_or_selector": 4, "_ensure_multiple_sourc": 4, "464": 4, "382": 4, "381": 4, "file_typ": 4, "filetyp": 4, "notfound": 4, "info": 4, "383": 4, 
"143": 4, "occur": 4, "funcnam": 4, "145": 4, "method": 4, "regist": 4, "146": 4, "147": 4, "funcanim": [], "ticker": [], "multipleloc": [], "hidden": [], "mario": [], "escofi": [], "curv": [], "outdat": [], "pip": [], "load": [], "vs": [], "visual": [], "atm": [], "watch": [], "utilti": [], "four": [], "ipw": [], "data": [], "correctli": [], "fig_dir": [], "1": 5, "micromamba": 4, "home": 4, "runner": 4}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"character": 4, "coastal": [3, 4], "system": 4, "import": 4, "librari": 4, "we": 4, "us": 4, "our": 4, "analysi": 4, "exercis": [], "1": [3, 4, 7], "plate": [], "tecton": 4, "load": 4, "earthquak": 4, "data": 4, "visual": 4, "explor": 4, "question": 4, "2": [4, 7], "process": 4, "base": 4, "classif": 4, "The": 4, "content": [], "codebook": 3, "welcom": 3, "about": 3, "thi": 3, "book": 3, "assign": [], "chapter": [], "python": [], "basic": [], "learn": [], "object": [], "introduct": 1, "type": [], "common": [], "built": [], "numer": [], "arithmet": [], "oper": [], "none": [], "string": [], "boolean": [], "comparison": [], "cast": [], "3": 7, "list": [], "tupl": [], "index": [], "slice": [], "sequenc": [], "method": [], "set": [], "mutabl": [], "vs": [], "immut": [], "4": [], "format": [], "5": [], "dictionari": [], "6": [], "empti": [], "7": [], "condit": [], "inlin": [], "els": [], "truth": [], "valu": [], "test": [], "short": [], "circuit": [], "loop": [], "function": [], "while": [], "comprehens": [], "try": [], "except": [], "side": [], "effect": [], "local": [], "variabl": [], "null": [], "return": [], "option": [], "requir": [], "argument": [], "multipl": [], "arbitrari": [], "number": [], "anonym": [], "8": [], "dry": [], "principl": [], "design": [], "good": [], "9": [], "gener": [], "10": [], "docstr": [], "structur": 2, "hint": [], "markdown": [], "file": [], "what": [], "myst": [], "sampl": [], "role": [], "direct": [], "citat": [], "more": [], "notebook": 6, "an": 8, "exampl": [], "cell": [], "creat": [], "quickli": [], "add": [], "yaml": [], "metadata": [], "cours": 2, "usag": 3, "git": 7, "mamba": 7, "packag": 7, "manag": 7, "window": 7, "unix": 7, "like": 7, "mac": 7, "linux": 7, "softwar": 7, "environ": 7, "run": 6, "tutori": 6, "build": [], "contributor": 0, "credit": 0, "week": 3, "contribut": 5, "advanc": 5, "instal": 7, "issu": 8, "part": 4, "2a": 4, "theori": 4, "2b": 4, "applic": 4, "acknowledg": 0, "get": 6, "start": 6, "troubleshoot": 8, "why": 8, "open": 8, "how": 8, "comment": [], "flori": [], "directori": [], "initi": [], "section": []}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinxcontrib.bibtex": 9, "sphinx": 56}}) \ No newline at end of file +Search.setIndex({"docnames": ["about/acknowledgements", "about/introduction", "about/structure", "intro", "notebooks/1_coastal_classification", "notebooks/5_cross_shore_transport", "usage/contributing", "usage/getting_started", "usage/installation", "usage/troubleshooting"], "filenames": ["about/acknowledgements.md", "about/introduction.md", "about/structure.md", "intro.md", "notebooks/1_coastal_classification.ipynb", "notebooks/5_cross_shore_transport.ipynb", "usage/contributing.md", "usage/getting_started.md", "usage/installation.md", "usage/troubleshooting.md"], "titles": 
["Acknowledgements", "Introduction", "Course structure", "Welcome to Coastal Codebook", "Characterization of Coastal Systems", "First import some necessary packages", "Contributing (Advanced)", "Getting started", "Installation", "Troubleshooting"], "terms": {"welcom": [0, 4, 5], "first": [4, 8, 9], "notebook": [4, 5, 8, 9], "tu": 4, "delft": 4, "msc": [3, 4], "engin": [3, 4], "thi": [0, 1, 2, 4, 5, 6, 7, 8, 9], "year": 4, "experi": 4, "cours": [3, 4, 9], "With": 4, "hope": 4, "provid": [3, 4, 8, 9], "you": [0, 4, 5, 6, 7, 8, 9], "interact": [4, 5, 7, 8], "materi": [4, 9], "help": [4, 9], "better": 4, "understand": [4, 8], "concept": 4, "teach": 4, "pleas": [4, 8, 9], "let": [4, 5], "know": [4, 5], "how": [1, 2, 3, 4, 8], "find": [4, 8], "appreci": 4, "your": [4, 6, 7, 8, 9], "feedback": 4, "chapter": [4, 5], "dynam": [3, 4, 5], "open": [0, 3, 4, 7, 8], "textbook": [3, 4], "describ": [4, 8, 9], "larg": 4, "geograph": 4, "variat": 4, "coast": 4, "across": [4, 9], "world": 4, "It": [3, 4, 8, 9], "explain": [3, 4], "have": [4, 5, 7, 8, 9], "todai": 4, "ar": [1, 2, 4, 5, 7, 8], "shape": 4, "both": [4, 5], "present": 4, "dai": 4, "million": 4, "ago": 4, "distinguish": 4, "between": [3, 4], "three": [4, 8], "differ": [4, 5], "order": 4, "featur": 4, "which": [4, 8], "associ": 4, "time": [4, 5, 8, 9], "In": [4, 5, 7, 8], "look": [4, 7, 8, 9], "scale": [4, 5], "two": 4, "cell": [4, 7], "below": [4, 8], "need": [4, 8], "also": [4, 8], "set": [4, 8], "some": [4, 7, 8], "path": [4, 7, 8], "sourc": [0, 4], "code": [4, 7, 8], "For": [4, 8], "exampl": 4, "add": [4, 6, 8], "src": 4, "directori": [4, 7, 8], "allow": [4, 8], "gener": [3, 4], "function": 4, "from": [4, 5, 8, 9], "coastpi": 4, "os": 5, "pathlib": [4, 5], "sy": 4, "make": [4, 8], "append": 4, "root": [4, 8], "cwd": 4, "resolv": [4, 9], "proj_dir": 4, "parent": 4, "coastalcodebook": [4, 6, 8], "str": 4, "warn": 4, "environ": [6, 7, 9], "use_pygeo": [], "0": [4, 8], "instead": [], "pygeo": [], "silenc": [], "colorcet": [4, 5], "cc": [4, 5], "dask": [4, 5], "datafram": [4, 5], "dd": [4, 5], "geopanda": [4, 5], "gpd": [4, 5], "holoview": [4, 5], "hv": [4, 5], "hvplot": [4, 5], "panda": [4, 5], "noqa": [4, 5], "api": [4, 5], "xarrai": 5, "ipyleaflet": 5, "numpi": [4, 5], "np": [4, 5], "pd": [4, 5], "panel": [4, 5], "pn": [4, 5], "geoview": [], "tile_sourc": [], "gvt": [], "map": 4, "marker": [], "scalecontrol": [], "basemap": 4, "ipywidget": 5, "html": [5, 6], "extens": [4, 8], "geometri": 4, "geo_bbox": [], "definitli": [], "best": [], "practic": [5, 8], "workaround": 6, "avoid": [8, 9], "mani": [1, 2], "trigger": [], "releas": 8, "filterwarn": [], "ignor": 4, "categori": [], "runtimewarn": [], "userwarn": [], "data_dir": 4, "coastal_systems_fp": 4, "01_coastal_system": 4, "gpkg": 4, "modulenotfounderror": 4, "traceback": 4, "most": [4, 5], "recent": 4, "call": 4, "last": 4, "line": [4, 8], "21": 4, "17": 4, "19": 4, "23": 4, "24": 4, "file": 8, "dev": [], "__init__": [], "py": 6, "3": [4, 5, 7], "toml": [], "5": [4, 5], "get": [3, 4, 5], "contain": [4, 5, 8], "current": 0, "6": [4, 5], "base_dir": [], "dirnam": [], "abspath": [], "__file__": [], "No": 4, "modul": [3, 4], "name": [4, 6], "start": [3, 4, 5, 8, 9], "broadest": 4, "cover": [1, 2, 4, 5], "distanc": 4, "thousand": 4, "kilometr": 4, "link": [3, 4], "long": [4, 5], "term": 4, "geolog": 4, "do": [1, 2, 4, 7, 8, 9], "so": [1, 2, 4, 8], "usg": 4, "dataset": 4, "sampl": 4, "10": 4, "observ": 4, "eartquak": 4, "jan": 4, "2000": 4, "dec": 4, "2018": 4, "why": 4, "reveal": 4, "geologist": 4, 
"mysteri": 4, "deep": 4, "research": 4, "insight": [3, 4], "s": [4, 5, 8, 9], "run": [4, 5, 6, 8, 9], "next": [4, 8], "tabular": 4, "includ": [4, 8, 9], "index": 4, "column": 4, "onli": 4, "keep": 4, "memori": 4, "actual": 4, "total": 4, "here": [4, 9], "approx": 4, "220k": 4, "entri": 4, "If": [4, 6, 7, 8, 9], "respond": 4, "slow": 4, "adjust": 4, "widget": [4, 5], "consid": [4, 5], "take": 4, "anoth": [4, 8], "can": [0, 4, 5, 7, 8, 9], "uncom": 4, "frac": 4, "22k": 4, "uniqu": 4, "over": [4, 8], "web_mercator_limit": 4, "20037508": 4, "342789244": 4, "max": 4, "polar": 4, "latitud": 4, "handl": 4, "mercat": 4, "df": 4, "read_parquet": 4, "01_earthquakes_sampl": 4, "parquet": 4, "too": 4, "comput": [4, 8], "set_index": 4, "tz_local": 4, "none": 4, "sort_index": 4, "To": [4, 8, 9], "save": 4, "drop": 4, "cannot": 4, "displai": [4, 5, 6], "web": 4, "project": [0, 4], "mag": 4, "depth": 4, "longitud": 4, "place": 4, "type": 4, "north": 4, "head": [], "tool": 4, "holoviz": 4, "high": [4, 5], "level": 4, "simplifi": 4, "python": [4, 6, 7, 8], "enabl": 9, "mode": [], "creat": [0, 6, 7, 8, 9], "filter": [], "accordingli": [], "an": [4, 5, 8], "overlai": [], "tileset": [], "esri": 4, "imageri": 4, "note": [4, 7, 9], "store": [], "result": 3, "object": 3, "see": [0, 4, 7, 8], "one": [4, 7, 8], "more": [], "title_bar": 4, "row": 4, "pane": 4, "markdown": 4, "style": 4, "color": 4, "black": 4, "width": 4, "800": 4, "sizing_mod": 4, "fix": [4, 6, 9], "margin": 4, "15": 4, "spacer": 4, "defin": 4, "magnitude_slid": 4, "rangeslid": 4, "magnitud": 4, "richter": 4, "end": 4, "depth_slid": 4, "km": 4, "650": 4, "date_slid": 4, "daterangeslid": 4, "date": 4, "column_typ": 4, "select": [4, 8], "option": [4, 8], "depend": [4, 8], "param": 4, "value_start": 4, "value_end": 4, "valu": 4, "def": 4, "plot_earthquake_panel": 4, "magnitude_start": 4, "magnitude_end": 4, "depth_start": 4, "depth_end": 4, "date_start": 4, "date_end": 4, "invert": 4, "fire": 4, "colormap": 4, "cmap": 4, "cet_l4": 4, "colorbar_label": 4, "return": 4, "point": 4, "x": 4, "y": 4, "geo": 4, "true": 4, "global_ext": 4, "tile": 4, "frame_width": 4, "900": 4, "ylabel": 4, "deg": 4, "xlabel": 4, "tap": 4, "hover_col": 4, "logz": 4, "clim": 4, "clabel": 4, "earthquake_panel": 4, "follow": [5, 7, 8], "instruct": 8, "after": 4, "sever": [4, 8], "while": 4, "show": 4, "either": [4, 8], "magintud": 4, "relat": 9, "hint": [], "figur": 4, "under": [], "ground": [], "move": [], "extra": [], "converg": [], "diverg": [], "boundari": 4, "support": [4, 8, 9], "fundament": 4, "geologi": 4, "although": 4, "rel": 4, "act": 4, "ha": 4, "had": 4, "enorm": 4, "impact": 4, "format": 4, "coastlin": 4, "determin": 4, "what": [2, 8, 9], "inherit": [], "aspect": 3, "sec": 7, "1971": 4, "inman": 4, "d": [4, 6], "l": [4, 8], "nordstrom": 4, "c": 4, "e": 4, "classifi": 4, "thei": [4, 8], "introduc": 4, "class": 4, "match": 4, "identifi": [4, 9], "predict": [], "area": [], "around": 4, "where": [7, 8], "instanc": [5, 7], "kind": 4, "chili": [], "And": [], "east": 4, "usa": [], "characterist": 4, "about": 9, "china": [], "sea": 4, "further": 4, "afro": 4, "trail": 4, "edg": 4, "amero": 4, "sediment": [3, 4], "suppli": 4, "main": 4, "caus": [], "expect": 9, "input": [], "geomorpholog": [], "section": [1, 2, 4, 5], "part": [], "second": 4, "third": 4, "accord": 4, "influenc": 4, "fluvial": 4, "wave": [3, 4], "tidal": 4, "idea": 4, "signatur": 4, "small": 4, "plot": 4, "given": 4, "zoom": 4, "sheet": 4, "geospati": 4, "afterward": 4, "sinc": 4, "random": [4, 5], "might": 
[4, 6], "encount": 4, "same": 4, "multipl": 4, "just": [4, 8], "again": 4, "draw": 4, "coastal_system": 4, "read_fil": 4, "to_list": 4, "coastal_systems_slid": 4, "choic": 4, "plot_coastal_system": 4, "loc": 4, "copi": [4, 8], "west": 4, "south": 4, "flatten": 4, "lon": 4, "lat": 4, "red": 4, "alpha": 4, "xlim": 4, "ylim": 4, "1100": 4, "app": 4, "rang": [], "try": 4, "answer": 4, "compar": [], "heavili": [], "river": [], "domin": [], "delta": [], "natur": [], "biggest": [], "smallest": [], "basin": [], "estuarin": [], "deltaic": [], "spit": [], "contrast": [], "low": [], "tell": [], "estuari": [], "fine": [], "muddi": [], "coars": [], "sandi": [], "eastern": [], "western": [], "tip": [1, 2], "dutch": [], "german": [], "wadden": [], "island": [], "veri": [], "beach": [], "ridg": [], "come": 9, "dune": [], "du": [], "pilat": [], "franc": [], "largest": [], "sand": [], "coolest": [], "should": [6, 7, 8], "definit": [], "visit": [], "chanc": [], "locat": 4, "side": [], "arcachon": [], "inlet": [], "northern": [], "jiangsu": [], "limit": [], "bai": [], "intertid": [], "flat": [], "salt": [], "marsh": [], "mangrov": [], "forest": [], "nearbi": [], "jetti": [], "affect": [], "wai": [1, 2, 8], "evolv": [], "whose": [], "constrain": [], "presenc": [], "rocki": [], "albufeira": [], "lagoon": [], "portug": [], "close": 8, "season": [], "imag": 4, "shown": 4, "when": [4, 6, 8], "urban": [], "human": [], "intervent": [], "satellit": [], "beauti": [], "site": [], "moment": [], "job": [], "There": [1, 2], "write": [1, 2], "jupyt": [0, 1, 2, 6, 7, 8], "book": [0, 1, 2, 4, 5, 6], "short": [1, 2, 5, 8], "few": [1, 2, 8], "give": [], "feel": [4, 9], "content": [1, 2, 8], "structur": 3, "off": [], "major": [], "well": 3, "doe": 8, "go": 8, "ani": 9, "particular": [], "topic": [], "check": 8, "out": 4, "document": [8, 9], "inform": [], "page": 8, "bundl": [], "myst": [], "character": 3, "outlin": [], "introduction2": [], "types3": [], "tuples4": [], "methods5": [], "dictionaries6": [], "empties7": [], "differenti": [], "standard": [], "datatyp": [], "int": [], "float": [], "dict": [], "etc": [], "perform": [], "like": [4, 6, 9], "lower": [], "split": [], "manipul": [], "us": [0, 3, 6, 7, 8, 9], "assign": [], "subset": [], "statement": [], "elif": 4, "block": 8, "indent": [], "The": [3, 5, 8], "websit": [], "assum": 8, "prior": [], "knowledg": 9, "program": [], "languag": [], "requir": 4, "compris": [], "core": 4, "read": 4, "through": [8, 9], "leisur": [], "exercis": 4, "complement": [], "each": [4, 5], "solut": 9, "hand": [], "solidifi": [], "A": 6, "piec": [], "work": 3, "number": [], "text": [], "42": 4, "integ": [], "hello": [], "variabl": [4, 5], "refer": [3, 8], "mathemat": [], "statist": [], "we": [0, 5, 7, 8, 9], "usual": [], "word": [], "letter": [], "underscor": [], "howev": 5, "reserv": [], "lambda": [], "encod": [], "special": [], "don": [8, 9], "t": [8, 9], "want": 4, "overwrit": [], "think": [], "box": [], "hold": [], "singl": [], "vector": [], "modifi": [], "medium": [], "com": 8, "summari": [], "english": [], "descript": 9, "posit": [], "neg": [], "whole": [], "real": [], "decim": [], "form": 3, "14159": [], "bool": [], "fals": [], "i": 4, "cheezburg": [], "collect": [], "ali": [], "xinyi": [], "miriam": [], "thursdai": [], "9": [4, 8], "kei": [], "pair": [], "dsci": [], "511": [], "credit": [], "nonetyp": [], "null": [], "repres": [], "distinct": [], "complex": [], "print": 4, "ipython": 5, "version": [6, 8], "automat": [], "screen": [], "explicitli": [], "anyth": 8, "pound": [], "hash": [], 
"symbol": [], "comment": [], "pi": [], "tabl": [], "syntax": [], "addit": [], "subtract": [], "divis": [], "exponenti": [], "floor": [], "modulo": [], "appli": 3, "multipli": [], "28318": [], "expon": [], "1024": [], "mai": [], "produc": 4, "dtype": [], "than": 4, "chang": [4, 5, 7, 8], "int_2": [], "divison": [], "But": 8, "aka": [], "retain": [], "alwai": [], "round": [], "down": [], "101": [], "50": [], "becaus": 9, "nearest": [], "remaind": [], "100": [], "mod": [], "divid": [], "its": [], "own": [], "possibl": 8, "ll": [], "later": [], "charact": [], "unicod": [], "great": [], "blog": [], "post": [], "re": [4, 8, 9], "interest": 4, "enclos": [], "quot": [], "g": [], "doubl": [], "goodby": [], "case": [], "tripl": [], "typic": 4, "my_nam": [], "toma": [], "beuzen": [], "quotat": [], "apostroph": [], "combin": [], "sentenc": [], "raini": [], "donald": [], "knuth": [], "prematur": [], "optim": [], "all": [0, 9], "evil": [], "the_truth": [], "li": [], "back": [], "equal": [], "greater": [], "less": 4, "solv": 9, "problem": 9, "syntaxwarn": [], "liter": [], "did": [], "mean": [], "var": [], "folder": [], "p8": [], "qxnqn2ns5kbczrp91kx4_1nm0000gn": [], "ipykernel_64343": [], "564785565": [], "evalu": [], "least": 5, "bitwis": [], "bit": 4, "That": [], "beyond": [], "scope": [], "ve": [], "snippet": [], "them": [], "action": [], "f": [6, 8], "represent": [], "0b": [], "sometim": [], "tri": [], "convers": [], "throw": [], "error": [4, 6], "valueerror": [], "60": [], "could": [], "convert": [], "thing": [], "element": [], "explor": 8, "littl": [], "squar": [], "bracket": [], "my_list": [], "even": [], "other": 9, "another_list": [], "five": [], "length": 4, "len": [], "similar": 9, "parenthes": [], "access": [7, 9], "insid": [], "zero": [], "base": [8, 9], "indexerror": [], "74": [], "indic": [], "count": [], "backward": [], "colon": [], "sub": [], "abov": [], "inclus": [], "exclus": [], "fetch": [], "behav": [], "rememb": [], "alphabet": [], "abcdefghijklmnopqrstuvwxyz": [], "z": [], "abcd": [], "12": 4, "20": 4, "mnopqrst": [], "itself": [], "period": [], "item": [], "prime": [], "11": 4, "13": 4, "un": [], "Being": [], "unord": [], "record": [], "insert": [], "noth": [], "typeerror": [], "93": [], "subscript": [], "new": [4, 7, 8, 9], "variou": [], "names_list": [], "indiana": [], "fang": [], "linsei": [], "cool": [], "gui": [], "names_tupl": [], "Not": [], "97": [], "goe": [], "onc": 7, "modifii": [], "tom": [], "q": [], "99": [], "all_cap": [], "new_str": [], "doesn": [], "origin": [], "rather": [], "o": 8, "One": [], "caps_list": [], "h": [], "w": [], "r": [], "u": [], "join": [], "chain": [], "togeth": [], "fill": [], "blank": [], "nice": [], "recommend": 8, "were": [], "put": [], "front": [], "curli": [], "notat": [], "newborn": [], "babi": [], "ag": [], "month": [], "2020": [], "template_new": [], "my": [], "am": [], "2f": [], "old": [], "wa": 8, "born": [], "02": [], "33": 4, "06": [], "argument": [], "brace": [], "hous": [], "bedroom": [], "bathroom": [], "citi": [], "vancouv": [], "price": [], "2499999": [], "date_sold": [], "2015": [], "condo": [], "burnabi": [], "699999": [], "27": 4, "8": 4, "2011": [], "specif": [], "field": [], "edit": 8, "alreadi": [], "wood": [], "delet": [], "entir": [], "though": [], "rare": [], "del": [], "easili": [], "443345": [], "777": [], "non": [], "exist": 5, "keyerror": [], "126": [], "lst": [], "appar": [], "faster": 4, "tup": [], "dic": [], "st": [], "certain": 4, "execut": [8, 9], "state": [], "keyword": [], "santa": [], "funni": [], "meet": 
[], "notic": [], "express": [], "space": [], "exit": [], "necessarili": [], "default": 8, "nest": [], "super": [], "startswith": [], "realli": [], "superpow": [], "simpl": 9, "simplic": [], "unless": [], "doc": [], "m": [6, 8], "truthi": [], "falsei": [], "known": [6, 8], "circut": [], "stop": [], "been": [], "fake_vari": [], "nameerror": [], "142": [], "144": [], "detail": [], "b": 3, "loops2": [], "loops3": [], "comprehensions4": [], "except5": [], "functions6": [], "type7": [], "functions8": [], "functions9": [], "generators10": [], "iter": [], "list": 0, "dictionari": [], "global": [], "modular": [], "assess": [], "whether": [], "paramet": [], "behaviour": [], "usag": [], "n": [], "outsid": [], "49": [], "25": 4, "begin": [], "henc": [], "taken": [], "tupl": [], "string": [], "sequenc": [], "gimm": [], "spell": [], "p": 4, "common": [], "pattern": [], "up": 4, "specifi": 4, "skip": [], "31": 4, "41": 4, "51": [], "61": [], "71": [], "81": [], "91": [], "dimens": [], "list_1": [], "list_2": [], "clever": [], "tend": [], "zip": [], "enumer": [], "quit": [], "lot": [], "unpack": [], "directli": [], "j": [], "counter": [], "within": [], "521": [], "awesom": [], "551": [], "rivet": [], "naptim": [], "course_num": [], "pack": [], "excut": [], "bewar": [], "condit": [], "got": [], "infintit": [], "blast": [], "decrement": [], "hard": [], "collatz": [], "conjectur": [], "matter": [], "eventu": [], "reach": [], "els": 4, "odd": [], "34": 4, "52": [], "26": 4, "40": 4, "16": 4, "forc": [], "criteria": [], "break": [], "123": [], "ugh": [], "370": [], "185": [], "556": [], "278": [], "139": [], "418": [], "209": [], "628": [], "314": [], "continu": 8, "won": [], "restart": [], "top": [], "never": [], "build": [3, 4, 6, 8, 9], "conveni": [], "compact": [], "sublimin": [], "ingest": [], "egg": [], "outrun": [], "eagl": [], "first_lett": [], "complic": [], "antidisestablishmentarian": [], "would": 4, "word_length": [], "28": 4, "NOT": [], "genexpr": [], "0x1037da400": [], "blue": [], "death": [], "nine": [], "inch": [], "nail": [], "concert": [], "cnet": [], "someth": 8, "wrong": [], "our": [8, 9], "crash": [], "fail": [], "gracefulli": [], "accomplish": [], "basic": [], "this_variable_does_not_exist": [], "befor": [], "29": 4, "pass": [], "bad": [], "rais": [], "catch": [], "saw": [], "zerodivisionerror": [], "ok": [], "bunch": [], "ex": [], "caught": [], "messag": [], "without": [], "exactli": [], "made": 4, "sort": [], "final": [], "almost": [], "anywai": [], "purpos": [], "add_on": [], "isinst": [], "sorri": [], "must": [], "numer": [], "enter": 8, "weird": [], "much": [], "clearer": [], "user": [6, 8], "ideal": [], "customadditionerror": [], "reusabl": [], "accept": 8, "n_squar": [], "10000": [], "12345": [], "152399025": [], "output": 8, "cat_str": [], "str1": [], "str2": [], "said": [], "silly_sum": [], "sum": [], "wait": [], "mention": [], "touch": [], "termin": [7, 8], "999": [], "repeat_str": [], "md": [], "mdsmd": [], "mdsmdsmdsmdsmd": [], "carefulli": [], "chosen": [], "repeat": 9, "me": [], "reason": 8, "appear": [], "fact": [], "3rd": [], "confus": [], "terribl": [], "technic": [], "sum_and_product": [], "30": 4, "omit": [], "often": 5, "implicitli": [], "comma": [], "immedi": [], "separ": [], "As": 5, "asid": [], "convent": [], "_": [], "arg": [], "kwarg": [], "do_noth": [], "0x1037c2520": [], "evaluate_function_on_x_plus_1": [], "fun": 4, "36": 4, "happen": 2, "becom": [], "beenus": [], "until": 6, "now": [2, 4, 7, 8], "Or": [], "approach": 9, "ident": [], "aren": [], "appropri": [], 
"smaller": [], "otic": [], "Then": [], "At": [], "stand": 8, "yourself": [], "relev": [], "wikipedia": [], "articl": [], "task": [], "turn": [], "palindrom": [], "milad": [], "tiffani": [], "slice": [], "step": [8, 9], "mot": [], "names_backward": [], "miladdalim": [], "tommot": [], "tiffanyynaffit": [], "gross": [], "yucki": [], "slightli": [], "life": [], "easier": [], "make_palindrom": [], "okai": [], "names1": [], "names2": [], "appl": [], "orang": [], "banana": [], "appleelppa": [], "orangeegnaro": [], "bananaananab": [], "far": [], "choos": [5, 9], "context": [], "These": [], "decis": [], "ambigu": [], "ever": [], "twice": [], "person": [], "opinion": [], "prefer": [], "everi": 8, "built": [6, 8], "recal": [], "earlier": [], "full": [], "product": [], "fit": [], "0x1037d9a40": [], "recip": [], "ask": [], "gen": [], "exhaust": [], "longer": 4, "defeat": [], "yield": [], "motiv": [], "sai": [], "canada": [], "talk": 8, "bring": [], "right": 7, "wrap": [], "concaten": [], "revers": [], "view": 4, "cursor": [], "shortcut": [], "shift": 8, "tab": [0, 4, 9], "press": 8, "pep": [], "257": [], "exact": [], "import": [], "render": 6, "id": [7, 8], "pars": [], "rest": [], "googl": [], "function_nam": [], "param1": [], "param2": [], "param3": [], "paragraph": [], "algorithm": [], "mayb": [], "explan": 3, "scipi": [], "blah": [], "blahblahblah": [], "sound": [], "func": [], "bug": 9, "still": [], "enough": [], "vscode": [], "screenshot": 9, "ipynb": [5, 7], "regular": [], "flavor": [], "markedli": [], "slight": [], "commonmark": [], "sphinx": [], "ecosystem": 8, "overview": [], "power": [], "written": [], "markup": [], "serv": 4, "wherea": [], "span": [], "those": [], "being": 8, "inlin": 4, "cite": [], "bibtex": [], "holdgraf_evidence_2014": [], "hdhpk14": [], "moreov": [], "bibliographi": [], "properli": [], "bib": [], "christoph": [], "ramsai": [], "holdgraf": [], "wendi": [], "de": [], "heer": [], "brian": [], "paslei": [], "robert": [], "knight": [], "evid": [], "auditori": [], "cortex": [], "intern": [], "confer": [], "cognit": [], "neurosci": [], "brisban": [], "australia": [], "2014": [], "frontier": [], "starter": [], "jupyterbook": [], "org": [], "direct": 9, "2": [5, 6, 7], "4": 4, "kernel": [], "jupytext": [], "treat": 3, "command": [7, 8], "init": [], "markdownfil": [], "ipykernel_53982": [], "0x107759630": [], "0x10774dc60": [], "0x107759700": [], "workbook": 3, "system": 3, "lectur": 3, "tutori": [3, 8, 9], "dut": 3, "brightspac": 3, "independ": 3, "prepar": 3, "per": 3, "week": [4, 5], "ipykernel_55748": [], "0x105a064d0": [], "0x1059f20c0": [], "0x105a05b10": [], "tuturi": 8, "session": 8, "mostli": 8, "reli": 8, "numfocu": 8, "commun": [8, 9], "control": 8, "github": [7, 8, 9], "client": [7, 8], "subsect": 8, "configur": 8, "familiar": 8, "excel": [0, 8], "introduct": [3, 7, 8], "instal": [3, 6, 7, 9], "clone": [6, 7, 8], "repositori": [6, 7, 8, 9], "local": [7, 8], "brows": [7, 8], "webpag": 8, "click": [8, 9], "green": 8, "button": [8, 9], "desktop": 8, "simpli": 8, "past": 8, "url": 8, "menu": [4, 8], "bash": 8, "shell": 8, "avail": 8, "http": [4, 8], "floriscalkoen": 8, "underli": 8, "machin": 8, "By": [8, 9], "host": 8, "pull": 8, "reflect": 8, "yet": 8, "bottom": 8, "good": 8, "conflict": 8, "lightweight": 8, "mambaforg": 8, "found": 8, "conda": [6, 7, 8], "forg": 8, "download": 8, "miniforg": [7, 8], "On": 8, "binari": 8, "sure": 8, "stai": 8, "prompt": [7, 8], "search": 8, "issu": [6, 8], "firewal": 8, "troubl": 8, "temporarili": 8, "disabl": 8, "iterm": 8, "hotkei": 8, 
"cntrl": 8, "curl": 8, "latest": 8, "unam": 8, "sh": 8, "agreement": 8, "script": 8, "profil": [5, 8], "bashrc": 8, "zshrc": 8, "seper": [4, 8], "jupyterlab": [7, 8], "22": [4, 8], "navig": [8, 9], "cd": [4, 7, 8], "userprofil": 8, "yml": [6, 8], "lab": [7, 8], "env": [6, 7, 8], "encourag": [7, 9], "repo": 7, "backslash": 7, "activ": [4, 6, 7], "coastal": [6, 7], "browser": 7, "01_coastal_classif": 7, "upper": [5, 7], "corner": 7, "ipykernel": [6, 7], "analysi": 7, "process": 9, "free": [4, 9], "tracker": 9, "fellow": 9, "student": [3, 9], "probabl": 9, "troubleshoot": 3, "email": 9, "develop": 6, "fulli": 6, "_build": 6, "nb_conda_kernel": 6, "expos": 6, "kernelspec": 6, "executablebook": 0, "1348": [], "manual": 6, "kernselspec": 6, "recogn": 0, "contribut": [0, 3, 9], "cookiecutt": 0, "templat": 0, "ipykernel_56317": [], "0x103cbd630": [], "0x103cb1080": [], "0x103cbd700": [], "mamba": [6, 7], "question": [5, 9], "packag": 7, "manag": 7, "window": [4, 7], "initi": 5, "1_coastal_classif": 4, "bokeh": 4, "model": [4, 5], "pantool": 4, "wheelzoomtool": 4, "succesfulli": [], "THESE": [], "NO": [], "matplotlib": 5, "pyplot": 5, "plt": 5, "anim": 5, "interact_manu": [], "shuffl": 5, "uniform": 5, "coastal_dynam": 4, "pil": [], "io": 4, "load_quest": 4, "question_dir": 4, "question_fp": 4, "json": 4, "filenotfounderror": [], "href_or_fp": [], "storage_opt": [], "7": [4, 5], "fsspec": [], "lib": [], "python3": [], "openfil": [], "__enter__": [], "self": [], "98": [], "replac": [], "fs": [], "102": [], "fobject": [], "104": [], "compress": [], "spec": [], "1309": [], "abstractfilesystem": [], "block_siz": [], "cache_opt": [], "1307": [], "1308": [], "ac": [], "pop": 4, "autocommit": [], "_intran": [], "_open": [], "1310": [], "1311": [], "1312": [], "1313": [], "1314": [], "1315": [], "1316": [], "1317": [], "1318": [], "compr": [], "implement": [], "180": [], "localfilesystem": [], "178": [], "auto_mkdir": [], "179": [], "makedir": [], "_parent": [], "exist_ok": [], "localfileopen": [], "298": [], "296": [], "get_compress": [], "297": [], "blocksiz": [], "default_buffer_s": [], "303": [], "301": [], "302": [], "304": [], "305": [], "errno": [], "calkoen": [], "plate": 4, "align": 4, "center": 4, "opt": 4, "1000": 4, "height": 4, "500": 4, "wheel_zoom": 4, "plot_wher": 4, "show_earthquak": 4, "q1": 4, "questionfactori": 4, "q2": 4, "q3": 4, "q4": 4, "q5": 4, "upcom": 4, "australian": 4, "gold": 4, "q6": 4, "q7": 4, "q8": 4, "q9": 4, "lead": [4, 9], "correct": 4, "q10": 4, "q11": 4, "q12": 4, "q13": 4, "q14": 4, "14": 4, "q15": 4, "q16": 4, "q17": 4, "q18": 4, "18": 4, "q19": 4, "TO": [], "BE": [], "remov": [], "frame_height": 4, "pan": 4, "sattelit": 4, "show_coastal_system": 4, "q20": 4, "q21": 4, "q22": 4, "q23": 4, "q24": 4, "q25": 4, "q26": 4, "q27": 4, "q28": 4, "q29": 4, "q30": 4, "q31": 4, "q32": 4, "32": 4, "q33": 4, "q34": 4, "q35": 4, "35": 4, "q36": 4, "q37": 4, "37": 4, "q38": 4, "38": 4, "q39": 4, "39": 4, "q40": 4, "q41": 4, "q42": 4, "q43": 4, "43": 4, "q44": 4, "44": 4, "q45": 4, "45": 4, "q46": 4, "46": 4, "q47": 4, "47": 4, "discuss": [3, 4, 5, 9], "ipykernel_56478": [], "0x1042c2400": [], "0x1042b28e0": [], "0x1042c1a40": [], "interrel": 3, "physic": 3, "flow": 3, "transport": 3, "phenomena": 3, "morphodynam": 3, "wide": 3, "varieti": 3, "phenomenolog": 3, "theoret": 3, "civil": 3, "upon": 3, "bsc": 3, "basi": 3, "b1": 3, "ciem3210": 3, "acknowledg": 3, "ipykernel_57261": [], "0x1100bd630": [], "0x1100b28e0": [], "0x1100bd700": [], "ipykernel_57976": [], "0x106fc2400": [], 
"0x106fb2200": [], "0x106fc1a40": [], "311": [], "103": [], "105": [], "107": [], "1293": [], "1291": [], "1292": [], "1294": [], "1295": [], "1296": [], "1297": [], "1298": [], "1299": [], "1300": [], "1301": [], "1302": [], "184": [], "182": [], "183": [], "306": [], "309": [], "310": [], "312": [], "313": [], "worri": 9, "hurdl": 9, "collabor": 9, "learn": 9, "comprehens": 9, "strongli": 9, "share": 9, "seen": 9, "duplic": 9, "prevent": 9, "queri": 9, "benefit": 9, "resolut": 9, "improv": 9, "resourc": 9, "everyon": 9, "unfamiliar": 9, "guid": 9, "walk": 9, "involv": 9, "clear": 9, "concis": 9, "reproduc": 9, "outcom": 9, "applic": 9, "priorit": 9, "request": 9, "address": 9, "ensur": 9, "transpar": 9, "effici": 9, "advanc": 3, "backend": [], "141": [], "creationdispatch": [], "register_inplac": [], "decor": [], "wrapper": [], "140": [], "except": [], "529": [], "use_nullable_dtyp": [], "dtype_backend": [], "calculate_divis": [], "ignore_metadata_fil": [], "metadata_task_s": [], "split_row_group": [], "aggregate_fil": [], "parquet_file_extens": [], "filesystem": [], "527": [], "read_metadata_result": [], "read_metadata": [], "530": [], "531": [], "532": [], "533": [], "534": [], "535": [], "536": [], "gather_statist": [], "537": [], "538": [], "539": [], "540": [], "541": [], "542": [], "543": [], "544": [], "dataset_opt": [], "545": [], "read_opt": [], "546": [], "other_opt": [], "547": [], "549": [], "futur": [], "550": [], "dedic": [], "common_kwarg": [], "emb": [], "552": [], "553": [], "logic": [], "inted": [], "forward": [], "554": [], "compat": [], "arrow": [], "arrowdatasetengin": [], "cl": [], "stage": [], "dataset_info": [], "_collect_dataset_info": [], "548": [], "meta": [], "1051": [], "1050": [], "ds": [], "pa_d": [], "1052": [], "1053": [], "_wrapped_f": [], "1054": [], "_processed_dataset_kwarg": [], "1055": [], "1057": [], "file_frag": [], "extract": [], "physical_schema": [], "pyarrow": [], "785": [], "schema": [], "partit": [], "partition_base_dir": [], "exclude_invalid_fil": [], "ignore_prefix": [], "784": [], "_is_path_lik": [], "elem": [], "_filesystem_dataset": [], "786": [], "463": [], "selector_ignore_prefix": [], "462": [], "paths_or_selector": [], "_ensure_multiple_sourc": [], "464": [], "382": [], "381": [], "file_typ": [], "filetyp": [], "notfound": [], "info": [], "383": [], "143": [], "occur": [], "funcnam": [], "145": [], "method": [], "regist": [], "146": [], "147": [], "funcanim": 5, "ticker": 5, "multipleloc": 5, "hidden": [], "mario": [], "escofi": [], "curv": [], "outdat": [], "pip": [], "load": [], "vs": [], "visual": [], "atm": [], "watch": [], "utilti": [], "four": [], "ipw": [], "data": [], "correctli": [], "fig_dir": [], "1": 6, "pooch": 4, "log": 4, "bathymetr": 4, "contour": 4, "water": 4, "200m": 4, "proxi": 4, "continent": 4, "shelf": 4, "isobath_fp": 4, "retriev": 4, "coclico": 4, "blob": 4, "net": 4, "isobaths200": 4, "known_hash": 4, "2b25adb7d3923e3969f6fb0c1f53e5e5850acd3bf6a3468722f0a1434a395ae5": 4, "data200": 4, "isobath": 4, "therefor": 4, "coordin": 4, "mask": 4, "to_cr": 4, "epsg": 4, "3857": 4, "eblow": 4, "slider": 4, "400": 4, "plot_isobath": 4, "ye": 4, "timestamp": 4, "bath": 4, "line_width": 4, "line_color": 4, "white": 4, "line_dash": 4, "dash": 4, "dropdown": 4, "harmless": 4, "isobathymetri": 4, "greatli": 4, "increas": 4, "getlogg": 4, "setlevel": 4, "5_cross_shore_transport": 5, "layout": 5, "equillibrium": 5, "shorelin": 5, "shorefac": 5, "highli": 5, "subject": 5, "formul": 5, "deriv": 5, "somewhat": 5, "empir": 5, 
"previous": 5, "hardli": 5, "focu": 5, "episod": 5, "event": 5}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"character": 4, "coastal": [3, 4], "system": 4, "import": [4, 5], "librari": 4, "we": 4, "us": 4, "our": 4, "analysi": 4, "exercis": [], "1": [3, 4, 8], "plate": [], "tecton": 4, "load": 4, "earthquak": 4, "data": 4, "visual": 4, "explor": 4, "question": 4, "2": [4, 8], "process": 4, "base": 4, "classif": 4, "The": 4, "content": [], "codebook": 3, "welcom": 3, "about": 3, "thi": 3, "book": 3, "assign": [], "chapter": [], "python": [], "basic": [], "learn": [], "object": [], "introduct": 1, "type": [], "common": [], "built": [], "numer": [], "arithmet": [], "oper": [], "none": [], "string": [], "boolean": [], "comparison": [], "cast": [], "3": 8, "list": [], "tupl": [], "index": [], "slice": [], "sequenc": [], "method": [], "set": [], "mutabl": [], "vs": [], "immut": [], "4": [], "format": [], "5": [], "dictionari": [], "6": [], "empti": [], "7": [], "condit": [], "inlin": [], "els": [], "truth": [], "valu": [], "test": [], "short": [], "circuit": [], "loop": [], "function": [], "while": [], "comprehens": [], "try": [], "except": [], "side": [], "effect": [], "local": [], "variabl": [], "null": [], "return": [], "option": [], "requir": [], "argument": [], "multipl": [], "arbitrari": [], "number": [], "anonym": [], "8": [], "dry": [], "principl": [], "design": [], "good": [], "9": [], "gener": [], "10": [], "docstr": [], "structur": 2, "hint": [], "markdown": [], "file": [], "what": [], "myst": [], "sampl": [], "role": [], "direct": [], "citat": [], "more": [], "notebook": 7, "an": 9, "exampl": [], "cell": [], "creat": [], "quickli": [], "add": [], "yaml": [], "metadata": [], "cours": 2, "usag": 3, "git": 8, "mamba": 8, "packag": [5, 8], "manag": 8, "window": 8, "unix": 8, "like": 8, "mac": 8, "linux": 8, "softwar": 8, "environ": 8, "run": 7, "tutori": 7, "build": [], "contributor": 0, "credit": 0, "week": 3, "contribut": 6, "advanc": 6, "instal": 8, "issu": 9, "part": 4, "2a": 4, "theori": 4, "2b": 4, "applic": 4, "acknowledg": 0, "get": 7, "start": 7, "troubleshoot": 9, "why": 9, "open": 9, "how": 9, "comment": [], "flori": [], "directori": [], "initi": [], "section": [], "first": 5, "some": 5, "necessari": 5, "cross": 5, "shore": 5, "sediment": 5, "transport": 5, "mode": 5, "net": 5, "secondari": 5, "flow": 5, "equillirium": 5, "state": 5, "beach": 5}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinxcontrib.bibtex": 9, "sphinx": 56}}) \ No newline at end of file