diff --git a/.eggs/README.txt b/.eggs/README.txt
new file mode 100644
index 0000000..5d01668
--- /dev/null
+++ b/.eggs/README.txt
@@ -0,0 +1,6 @@
+This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins.
+
+This directory caches those eggs to prevent repeated downloads.
+
+However, it is safe to delete this directory.
+
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 0b8b031..1862435 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.11"]
     steps:
     - name: Checkout repository and submodules
       uses: actions/checkout@v2
@@ -17,7 +17,7 @@
         submodules: recursive
     - name: Install packages
       run: |
-        sudo apt-get -y install pandoc
+        sudo apt-get -y install pandoc metview
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v2
       with:
@@ -29,11 +29,13 @@
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
         if [ -f requirements_dev.txt ]; then pip install -r requirements_dev.txt; fi
     - name: Test with pytest ⚙️
-      run: make test
+      run: |
+        export METVIEW_PYTHON_START_TIMEOUT=20
+        make test
     - name: Lint with flake8 ⚙️
       run: make lint
-      if: matrix.python-version == 3.9
+      if: matrix.python-version == 3.11
    - name: Build docs 🏗️
      run: make docs
-      if: matrix.python-version == 3.9
+      if: matrix.python-version == 3.11
diff --git a/data/Unet_sevenAreas_fullStd_10lag_model.keras b/data/Unet_sevenAreas_fullStd_10lag_model.keras
new file mode 100644
index 0000000..494a390
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_10lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_11lag_model.keras b/data/Unet_sevenAreas_fullStd_11lag_model.keras
new file mode 100644
index 0000000..8275dd2
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_11lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_12lag_model.keras b/data/Unet_sevenAreas_fullStd_12lag_model.keras
new file mode 100644
index 0000000..b00f94e
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_12lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_13lag_model.keras b/data/Unet_sevenAreas_fullStd_13lag_model.keras
new file mode 100644
index 0000000..347c3b1
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_13lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_1lag_model.keras b/data/Unet_sevenAreas_fullStd_1lag_model.keras
new file mode 100644
index 0000000..1b0ea37
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_1lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_2lag_model.keras b/data/Unet_sevenAreas_fullStd_2lag_model.keras
new file mode 100644
index 0000000..44ec8b5
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_2lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_3lag_model.keras b/data/Unet_sevenAreas_fullStd_3lag_model.keras
new file mode 100644
index 0000000..88d4d72
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_3lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_4lag_model.keras b/data/Unet_sevenAreas_fullStd_4lag_model.keras
new file mode 100644
index 0000000..3dbc5ea
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_4lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_5lag_model.keras b/data/Unet_sevenAreas_fullStd_5lag_model.keras
new file mode 100644
index 0000000..795686f
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_5lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_6lag_model.keras b/data/Unet_sevenAreas_fullStd_6lag_model.keras
new file mode 100644
index 0000000..f912067
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_6lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_7lag_model.keras b/data/Unet_sevenAreas_fullStd_7lag_model.keras
new file mode 100644
index 0000000..bcd8c21
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_7lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_8lag_model.keras b/data/Unet_sevenAreas_fullStd_8lag_model.keras
new file mode 100644
index 0000000..6e412f0
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_8lag_model.keras differ
diff --git a/data/Unet_sevenAreas_fullStd_9lag_model.keras b/data/Unet_sevenAreas_fullStd_9lag_model.keras
new file mode 100644
index 0000000..46dbd74
Binary files /dev/null and b/data/Unet_sevenAreas_fullStd_9lag_model.keras differ
diff --git a/docs/source/processes.rst b/docs/source/processes.rst
index f0b1600..86c56bd 100644
--- a/docs/source/processes.rst
+++ b/docs/source/processes.rst
@@ -9,7 +9,3 @@ Processes

 Say Hello
 ---------
-
-.. autoprocess:: shearwater.processes.wps_say_hello.SayHello
-    :docstring:
-    :skiplines: 1
diff --git a/environment.yml b/environment.yml
index d236cd8..fd706d3 100644
--- a/environment.yml
+++ b/environment.yml
@@ -22,3 +22,5 @@ dependencies:
 # tests
 - pytest
 - pandoc
+- metview
+- metview-python
\ No newline at end of file
diff --git a/notebooks/first_test.ipynb b/notebooks/first_test.ipynb
index 38ee2b0..43e4f60 100644
--- a/notebooks/first_test.ipynb
+++ b/notebooks/first_test.ipynb
@@ -10,7 +10,8 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "pyproj not installed\n"
+      "2024-10-09 17:25:33.409107: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
+      "To enable the following instructions: SSE4.1 SSE4.2 AVX AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
      ]
     }
    ],
@@ -41,83 +42,99 @@
      "    Processes\n",
      "    ---------\n",
      "    \n",
-     "    hello\n",
-     "        Just says a friendly Hello.Returns a literal string output with Hello plus the inputed name.\n",
-     "    \n",
      "    cyclone\n",
-     "        A process to forecast tropical cyclone activities.\n",
+     "        A process to forecast tropical cyclone activity.\n",
      "\n"
     ]
    }
   ],
   "source": [
    "url = \"http://localhost:5000/wps\"\n",
+   "# url = \"https://shearwater.copernicus-climate.eu/wps\"\n",
+   "\n",
    "wps = WPSClient(url, verify=False)\n",
    "help(wps)"
   ]
  },
  {
   "cell_type": "code",
-  "execution_count": 3,
-  "id": "cafc00e2-76c9-403a-8a4f-8c930c5f955a",
-  "metadata": {
-   "tags": []
-  },
+  "execution_count": 4,
+  "id": "c5419eca-ac01-43ed-b54b-861f4232b69d",
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "# for day in range(1,30):\n",
+   "#     print(day)\n",
+   "#     if day<10:\n",
+   "#         resp = wps.cyclone(init_date=f\"2022-01-0{day}\", leadtime=\"0-48 h\", region=\"Southern Indian\")\n",
+   "#     else:\n",
+   "#         resp = wps.cyclone(init_date=f\"2022-01-{day}\", leadtime=\"0-48 h\", region=\"Southern Indian\")\n",
+   "#     print(resp)\n",
+   "#     print(resp.get())\n",
+   "# "
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 4,
+  "id": "fcc0024d-1374-4084-a24d-8d094508fa9a",
+  "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-     "\n"
+     "\n"
    ]
   },
   {
    "data": {
     "text/plain": [
-     "helloResponse(\n",
-     "    output='Hello Pluto'\n",
"cycloneResponse(\n", + " output_csv='http://localhost:5000/outputs/bac8a806-8624-11ef-9aeb-080038c050cd/tcactivity_48_17_20240701_lag0_Sindian.csv',\n", + " output_png='http://localhost:5000/outputs/bac8a806-8624-11ef-9aeb-080038c050cd/tcactivity_48_17_20240701_lag0_Sindian.1.png'\n", ")" ] }, - "execution_count": 3, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "resp = wps.hello(name=\"Pluto\")\n", + "resp = wps.cyclone(init_date=\"2024-07-01\", leadtime=\"0-48 h\", region=\"North Atlantic\")\n", "print(resp)\n", "resp.get()" ] }, { "cell_type": "code", - "execution_count": 5, - "id": "c5419eca-ac01-43ed-b54b-861f4232b69d", + "execution_count": 3, + "id": "dc2b0678-6b9a-4c45-8152-30b293ae434d", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\n" + "\n" ] }, { - "data": { - "text/plain": [ - "cycloneResponse(\n", - " output_csv='../shearwater/data/prediction_Sindian.csv'\n", - ")" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" + "ename": "ProcessFailed", + "evalue": "Sorry, process failed.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mProcessFailed\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[3], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m resp \u001b[38;5;241m=\u001b[39m wps\u001b[38;5;241m.\u001b[39mcyclone(init_date\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m2024-06-29\u001b[39m\u001b[38;5;124m\"\u001b[39m, leadtime\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m48-96 h\u001b[39m\u001b[38;5;124m\"\u001b[39m, region\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNorth Atlantic\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(resp)\n\u001b[0;32m----> 3\u001b[0m \u001b[43mresp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/work/bk1318/b382633/conda/envs/shearwater/lib/python3.11/site-packages/birdy/client/outputs.py:40\u001b[0m, in \u001b[0;36mWPSResult.get\u001b[0;34m(self, asobj)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m ProcessIsNotComplete(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPlease wait ...\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 38\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39misSucceded():\n\u001b[1;32m 39\u001b[0m \u001b[38;5;66;03m# TODO: add reason for failure\u001b[39;00m\n\u001b[0;32m---> 40\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m ProcessFailed(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSorry, process failed.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 41\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_make_output(asobj)\n", + "\u001b[0;31mProcessFailed\u001b[0m: Sorry, process failed." 
+   ]
   }
  ],
  "source": [
- "resp = wps.cyclone(start_day=\"2019-01-04\", end_day=\"2019-01-05\", area=\"Sindian\")\n",
+ "resp = wps.cyclone(init_date=\"2024-06-29\", leadtime=\"48-96 h\", region=\"North Atlantic\")\n",
  "print(resp)\n",
  "resp.get()"
 ]
@@ -125,7 +142,7 @@
 {
  "cell_type": "code",
  "execution_count": null,
- "id": "c69328d3-9f92-4f42-a4e9-c07efa00d39c",
+ "id": "85251270-6efb-413b-892d-63372ad41e3a",
  "metadata": {},
  "outputs": [],
  "source": []
@@ -133,9 +150,9 @@
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "Python 3.9 (tensorflow)",
+  "display_name": "shearwater",
   "language": "python",
-  "name": "tensorflow"
+  "name": "shearwater"
  },
  "language_info": {
   "codemirror_mode": {
@@ -147,7 +164,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-  "version": "3.9.13"
+  "version": "3.11.10"
  }
 },
 "nbformat": 4,
diff --git a/requirements.txt b/requirements.txt
index 71da141..28f65bd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,8 +7,9 @@ intake-esm
 pydantic<2
 requests
 aiohttp
-birdy
 tensorflow
 scipy
 ipykernel
-numpy
\ No newline at end of file
+numpy
+eccodes
+metview
\ No newline at end of file
diff --git a/requirements_dev.txt b/requirements_dev.txt
index 765d88b..59e3295 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -14,7 +14,6 @@ bump2version
 Click
 cruft
 # Changing dependencies above this comment will create merge conflicts when updating the cookiecutter template with cruft. Add extra requirements below this line.
-birdy
 tensorflow
 scipy
 ipykernel
diff --git a/shearwater/processes/wps_cyclone.py b/shearwater/processes/wps_cyclone.py
index 808c114..1cbf8f1 100644
--- a/shearwater/processes/wps_cyclone.py
+++ b/shearwater/processes/wps_cyclone.py
@@ -1,69 +1,90 @@
 from pywps import Process, LiteralInput, ComplexOutput
 from pywps.app.Common import Metadata
-# from tensorflow.keras import models
-# import pickle
 import numpy as np
 import numpy
 import pandas as pd
-# from datetime import datetime
 import os
-from pywps import FORMATS
+from pywps import FORMATS, Format
 from pathlib import Path
 import urllib.request
-
-# import intake
+import metview as mv
 import logging

 LOGGER = logging.getLogger("PYWPS")

+FORMAT_PNG = Format("image/png", extension=".png", encoding="base64")
+

 class Cyclone(Process):
     """A process to forecast tropical cyclone activities."""
     def __init__(self):
         inputs = [
             LiteralInput(
-                "start_day",
-                "Start Day",
+                "init_date",
+                "Initialisation date",
                 data_type="string",
-                abstract="Enter the start date, like 2021-01-01",
-                default="2022-01-01"
+                abstract="Enter the initialisation date, like 2024-02-17",
+                default="2024-02-17"
             ),
             LiteralInput(
-                "end_day",
-                "End Day",
+                "leadtime",
+                "Lead time",
                 data_type="string",
-                abstract="Enter the end date, like 2023-10-12",
-                default="2022-01-31"
+                abstract="Enter the lead time, like 0-48 h",
+                allowed_values=[
+                    "0-48 h",
+                    "24-72 h",
+                    "48-96 h",
+                    "72-120 h",
+                    "96-144 h",
+                    "120-168 h",
+                    "144-192 h",
+                    "168-216 h",
+                    "192-240 h",
+                    "216-264 h",
+                    "240-288 h",
+                    "264-312 h",
+                    "288-336 h",
+                    "312-360 h",
+                ],
+                default="0-48 h"
             ),
             LiteralInput(
-                "area",
-                "Area",
+                "region",
+                "Region",
                 data_type="string",
                 abstract="Choose the region of your interest",
                 allowed_values=[
-                    "Sindian",
-                    "TBD"
+                    "Southern Indian",
+                    "North Atlantic",
+                    "Northwest Pacific",
+                    "Australia",
+                    "Northern Indian",
+                    "East Pacific",
+                    "South Pacific",
                 ],
-                default="Sindian",
+                default="Southern Indian",
             )
         ]
         outputs = [
-            # LiteralOutput('output', 'Cyclone activity forecast',
-            #               abstract='netcdf',
-            #               # keywords=['output', 'result', 'response'],
-            #               data_type='string'),
-            ComplexOutput('output_csv', 'Cyclone activity forecast',
+            ComplexOutput('output_csv',
+                          'Cyclone activity forecast',
                           abstract='csv file',
                           as_reference=True,
-                          # keywords=['output', 'result', 'response'],
-                          supported_formats=[FORMATS.CSV],)
+                          keywords=['output', 'result', 'response'],
+                          supported_formats=[FORMATS.CSV],),
+            ComplexOutput("output_png",
+                          "Cyclone activity forecast",
+                          abstract="png file",
+                          as_reference=True,
+                          supported_formats=[FORMAT_PNG],),
         ]

         super(Cyclone, self).__init__(
             self._handler,
             identifier='cyclone',
             title='Cyclone',
-            abstract='A process to forecast tropical cyclone activities.',
+            abstract='A process to forecast tropical cyclone activity.',
             # keywords=['hello', 'demo'],
             metadata=[
                 Metadata('PyWPS', 'https://pywps.org/'),
@@ -80,45 +101,101 @@ def _handler(self, request, response):
         # TODO: lazy load tensorflow ... issues with sphinx doc build
         from tensorflow.keras import models

-# master_catalog = intake.open_catalog(["https://gitlab.dkrz.de/data-infrastructure-services/intake-esm/-/raw/master/esm-collections/cloud-access/dkrz_catalog.yaml"])  # noqa
-# # master_catalog = intake.open_catalog('/pool/data/Catalogs/dkrz_catalog.yaml')
-# era5_catalog = master_catalog['dkrz_era5_disk']
-
-# query = {
-#     'era_id': 'ET',
-#     'level_type': 'surface',
-#     'table_id': 128,
-#     # 'frequency':'hourly',
-#     'code': 34,
-#     'dataType': 'an',
-#     'validation_date': '2023-06-27',
-# }
-
-# my_catalog = era5_catalog.search(**query)
-# # my_catalog.df
-
-# era_path = my_catalog.df['uri'].iloc[0]
-# response.outputs['output'].data = f'netcdf {era_path}'
-# response.outputs['output_csv'].data = 'csv ' + request.inputs['model'][0].data
-# return response
-
-        start_date = request.inputs['start_day'][0].data
-        end_date = request.inputs['end_day'][0].data
-        # area = request.inputs['area'][0].data
-
-        # to be updated with data repository
-        data1 = pd.read_csv(
-            "https://github.com/climateintelligence/shearwater/raw/main/data/test_dailymeans_Sindian_1.zip")
-        # ("../shearwater/data/test_dailymeans_Sindian_1.zip")
-        data2 = pd.read_csv(
-            "https://github.com/climateintelligence/shearwater/raw/main/data/test_dailymeans_Sindian_2.zip")
-        # ("../shearwater/data/test_dailymeans_Sindian_2.zip")
-        data = pd.concat((data1, data2), ignore_index=True)
-        data = data.loc[(data.time >= start_date) & (data.time <= end_date)]
-
-        variables = ['vo', 'r', 'u_200', 'u_850', 'v_200', 'v_850', 'tcwv', 'sst', 'shear']
-        # with open("https://github.com/climateintelligence/shearwater/raw/main/data/full_statistics.pkl", 'rb') as f:
-        #     means, stds = pickle.load(f)
+        init_date = request.inputs['init_date'][0].data
+
+        leadtime = request.inputs['leadtime'][0].data
+        region = request.inputs['region'][0].data
+
+        parameters = [
+            [138, "vo", [850]],
+            [157, "r", [700]],
+            [131, "u", [200, 850]],
+            [132, "v", [200, 850]],
+            [34, "sst", [0]],
+            [137, "tcwv", [0]],
+        ]
+        reso = 2.5
+
+        regions_dict = {
+            "Southern Indian": [0, 20, -30, 90],  # Southern Indian
+            "North Atlantic": [40, -90, 10, -20],  # North Atlantic
+            "Northwest Pacific": [35, 100, 5, 170],  # Northwest Pacific
+            "Australia": [0, 90, -30, 160],  # Australia
+            "Northern Indian": [30, 30, 0, 100],  # Northern Indian
+            "East Pacific": [30, -170, 0, -100],  # East Pacific
+            "South Pacific": [0, 160, -30, 230],  # South Pacific
+        }
+
+        lags_dict = {
+            "0-48 h": 0,
+            "24-72 h": 1,
+            "48-96 h": 2,
+            "72-120 h": 3,
+            "96-144 h": 4,
+            "120-168 h": 5,
+            "144-192 h": 6,
"168-216 h": 7, + "192-240 h": 8, + "216-264 h": 9, + "240-288 h": 10, + "264-312 h": 11, + "288-336 h": 12, + "312-360 h": 13, + } + + region_bbox = regions_dict[region] + lag = lags_dict[leadtime] + + data = pd.DataFrame() + for param1 in parameters: + path = f'/pool/data/ERA5/E5/{"sf" if param1[2]==[0] else "pl"}/an/1D/{str(param1[0]).zfill(3)}' + fs1_param = mv.read( + f"{path}/E5{'sf' if param1[2]==[0] else 'pl'}00_1D_{init_date[:7]}_{str(param1[0]).zfill(3)}.grb" + ) + fs1_param = fs1_param.select( + date=init_date.replace("-", ""), level=param1[2] + ) + fs1_param_interp = mv.read( + data=fs1_param, + grid=[reso, reso], + area=region_bbox, + interpolation='"--interpolation=grid-box-average"', + ) + for level in param1[2]: + data.loc[ + :, + f"{param1[1]}{'_'+str(level) if (param1[1]=='u' or param1[1]=='v') else ''}", + ] = ( + fs1_param_interp.select(level=level) + .to_dataset() + .to_dataframe() + .reset_index(drop=True)[param1[1]] + ) + + data.loc[:, ["latitude", "longitude"]] = ( + fs1_param_interp.select(level=level) + .to_dataset() + .to_dataframe() + .reset_index()[["latitude", "longitude"]] + ) + data.loc[:, "time"] = init_date + data.loc[:, "shear"] = ( + (data.u_200 - data.u_850) ** 2 + (data.v_200 - data.v_850) ** 2 + ) ** 0.5 + data.loc[:, "sst"] = data.sst.fillna(0) + + variables = [ + "vo", + "r", + "u_200", + "u_850", + "v_200", + "v_850", + "tcwv", + "sst", + "shear", + ] + means, stds = pd.read_pickle( "https://github.com/climateintelligence/shearwater/raw/main/data/full_statistics.zip") @@ -145,25 +222,76 @@ def _handler(self, request, response): test_img_std = np.pad(test_img_std, ((0, 0), (1, 2), (1, 2), (0, 0)), 'constant') workdir = Path(self.workdir) - model_path = os.path.join(workdir, "Unet_sevenAreas_fullStd_0lag_model.keras") + model_path = os.path.join(workdir, f"Unet_sevenAreas_fullStd_{lag}lag_model.keras") + git_path = "https://github.com/climateintelligence/shearwater/raw/main" urllib.request.urlretrieve( - "https://github.com/climateintelligence/shearwater/raw/main/data/Unet_sevenAreas_fullStd_0lag_model.keras", - model_path # "Unet_sevenAreas_fullStd_0lag_model.keras" + f"{git_path}/data/Unet_sevenAreas_fullStd_{lag}lag_model.keras", + model_path ) - # model_trained = models.load_model( - # "https://github.com/climateintelligence/shearwater/raw/main/data/Unet_sevenAreas_fullStd_0lag_model.keras") - # ('../shearwater/data/Unet_sevenAreas_fullStd_0lag_model.keras') - model_trained = models.load_model(model_path) prediction = model_trained.predict(test_img_std) data = data[["latitude", "longitude", "time"]] - data['predictions_lag0'] = prediction.reshape(-1, 1) + data[f'predictions_lag{lag}'] = prediction.reshape(-1, 1) + + workdir = Path(self.workdir) + outfilename = os.path.join( + workdir, f'tcactivity_48_17_{init_date.replace("-","")}_lag{lag}_Sindian' + ) + + if True: + predscol = f"predictions_lag{lag}" + gpt = mv.create_geo( + latitudes=data["latitude"].values, + longitudes=data["longitude"].values, + values=data[predscol].values, + ).set_dates([pd.Timestamp(init_date)] * data.shape[0]) + fs = mv.geo_to_grib(geopoints=gpt, grid=[2.5, 2.5], tolerance=1.5) * 1e2 + + # cont_gen = mv.mcont( + # legend="on", + # contour_line_colour="avocado", + # contour_shade="on", + # contour_shade_technique="grid_shading", + # contour_shade_max_level_colour="red", + # contour_shade_min_level_colour="blue", + # contour_shade_colour_direction="clockwise", + # ) + # cont_tc = mv.mcont( + # legend="on", + # contour_line_colour="avocado", + # contour_shade="on", + # 
+            #     contour_max_level=105,
+            #     contour_min_level=0,
+            #     contour_shade_technique="grid_shading",
+            #     contour_shade_max_level_colour="red",
+            #     contour_shade_min_level_colour="blue",
+            #     contour_shade_colour_direction="clockwise",
+            # )
+
+            cont_oper = mv.mcont(
+                contour_automatic_setting="style_name",
+                contour_style_name="prob_green2yellow",
+                legend="on",
+            )
+            coastlines = mv.mcoast(
+                map_coastline_land_shade="on", map_coastline_land_shade_colour="grey"
+            )
+
+            gview = mv.geoview(
+                map_area_definition="corners", area=region_bbox, coastlines=coastlines
+            )
+            legend = mv.mlegend(
+                legend_text_font_size=0.5,
+            )
+
+            mv.setoutput(mv.png_output(output_name=outfilename))
+            mv.plot(gview, fs, cont_oper, legend)
+            response.outputs["output_png"].file = outfilename + ".1.png"

-        prediction_path = os.path.join(workdir, "prediction_Sindian.csv")
-        data.to_csv(prediction_path)
+        data.to_csv(outfilename + ".csv")
+        response.outputs["output_csv"].file = outfilename + ".csv"

-        response.outputs['output_csv'].file = prediction_path
         return response
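Usage: a minimal client-side sketch (Python) of driving the reworked cyclone process with birdy, mirroring the calls in notebooks/first_test.ipynb above. The localhost URL, the input names and the output fields (output_csv, output_png) are taken from the notebook; everything else is illustrative and assumes a shearwater WPS is running locally.

from birdy import WPSClient

# Connect to a locally running shearwater WPS, as in the notebook.
wps = WPSClient("http://localhost:5000/wps", verify=False)

# Request a 0-48 h tropical cyclone activity forecast for the North Atlantic.
resp = wps.cyclone(init_date="2024-07-01", leadtime="0-48 h", region="North Atlantic")

# get() returns the output references once the process has succeeded
# (it raises ProcessFailed otherwise, as in the failing notebook cell above).
out = resp.get()
print(out.output_csv)
print(out.output_png)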