From a4cd019f93b26db73a5342b7202d591aa5bea9b1 Mon Sep 17 00:00:00 2001
From: Christopher Polster
Date: Fri, 14 Oct 2022 16:38:05 +0200
Subject: [PATCH] Add code changes from revision

---
 Makefile                     |  74 +++++--
 README.md                    |  17 +-
 data/README.md               |   1 +
 .../forecast-combination.tex |  10 +-
 scripts/common/plotting.py   |   7 +-
 scripts/plot-evaluation.py   |   4 +
 scripts/plot-idealized.py    | 174 +++++++++++++++++
 scripts/plot.py              |  59 +++---
 scripts/run-idealized.py     | 184 ++++++++++++++++++
 9 files changed, 476 insertions(+), 54 deletions(-)
 create mode 100644 scripts/plot-idealized.py
 create mode 100644 scripts/run-idealized.py

diff --git a/Makefile b/Makefile
index 2072061..01f539b 100644
--- a/Makefile
+++ b/Makefile
@@ -2,11 +2,14 @@
 NSAMPLES := 1000
 
 # How many ensemble members in the sampled ensemble?
-SAMPLE_SIZE := 75
+SAMPLE_SIZE := 100
 
 # Threshold for the correlation distribution width below which datapoints are
 # considered to be statistically significant
-SIGNIFICANCE_LEVEL := 0.1
+SIGNIFICANCE_LEVEL := 0.09
+
+# How many forecast members per cluster?
+NCLUSTER := 15
 
 # Determine filename suffix for C-extensions to installed Python
 PYEXT := $(shell python3-config --extension-suffix)
@@ -18,6 +21,7 @@ FIGURES := \
 	figures/event24-plume.pdf \
 	figures/event24-budget.pdf \
 	figures/event24-evaluation.pdf \
+	figures/idealized.pdf \
 	figures/event24-maps+hovmoeller.pdf \
 	figures/event24-cluster+scatter.pdf \
 	figures/event24-maps-separate.pdf \
@@ -25,7 +29,7 @@ FIGURES := \
 	figures/event24-nh18fig4.pdf
 
 
-.PHONY: all clean local
+.PHONY: all clean
 
 all: $(FIGURES)
 
@@ -38,6 +42,28 @@ clean:
 	rm -f scripts/hn2016_falwa/*.so
 
 
+# Simulations with the traffic jam model
+
+SIMULATIONS := \
+	data/idealized-0.40-60-2.0.nc \
+	data/idealized-0.40-55-2.0.nc \
+	data/idealized-0.40-65-2.0.nc \
+	data/idealized-0.40-60-1.8.nc \
+	data/idealized-0.40-60-2.2.nc
+
+figures/idealized.pdf: $(SIMULATIONS) scripts/plot-idealized.py
+	python3 -m scripts.plot-idealized $(SIMULATIONS) --output $@ --data-output "data/idealized.json"
+
+data/idealized-%-60-2.0.nc: scripts/run-idealized.py
+	python3 -m scripts.run-idealized $@ --alpha="$*" --urefcg="60" --forcing-peak="-2.0"
+
+data/idealized-0.40-%-2.0.nc: scripts/run-idealized.py
+	python3 -m scripts.run-idealized $@ --alpha="0.40" --urefcg="$*" --forcing-peak="-2.0"
+
+data/idealized-0.40-60-%.nc: scripts/run-idealized.py
+	python3 -m scripts.run-idealized $@ --alpha="0.40" --urefcg="60" --forcing-peak="-$*"
+
+
 # Python scipts for data processing and plotting, C-extensions
 
 scripts/calculate-lwa.py: scripts/common/regrid.py scripts/hn2016_falwa/oopinterface.py
@@ -86,7 +112,7 @@ scripts/hn2016_falwa/%$(PYEXT): scripts/hn2016_falwa/%.f90
 
 # Write processed data to netcdf files:
 event24_ana := data/ERA-2016-12-10-to-2016-12-24-lwa-qg.nc
-event24_ens := data/ENS-2016-12-10T00Z-lwa-qg.nc data/ENS-2016-12-10T12Z-lwa-qg.nc data/ENS-2016-12-11T00Z-lwa-qg.nc
+event24_ens := data/ENS-2016-12-09T12Z-lwa-qg.nc data/ENS-2016-12-10T00Z-lwa-qg.nc data/ENS-2016-12-10T12Z-lwa-qg.nc data/ENS-2016-12-11T00Z-lwa-qg.nc
 event24_eval := data/EVAL-2016-12-18T00Z-lwa-qg.nc
 
 # Don't remove these intermediate files
@@ -102,7 +128,7 @@ data/ENS-%-lwa-qg.nc: scripts/calculate-lwa.py data/IFS-ENS/ENS-%/ENS-*-[uvt].nc
 	python3 -m scripts.calculate-lwa --half-resolution $(filter-out $<,$^)
 
 # Forecast evaluation data for 18 Dec 00 UTC
-data/EVAL-2016-12-18T00Z-lwa-qg.nc: scripts/calculate-lwa.py data/IFS-ENS/EVAL/ENS-DEC18-EVAL-*.nc | data
+data/EVAL-2016-12-18T00Z-lwa-qg.nc: scripts/calculate-lwa.py data/IFS-ENS/EVAL/ENS-DEC18-EVAL-*.nc
 	python3 -m scripts.calculate-lwa --half-resolution --eval data/IFS-ENS/EVAL/ENS-DEC18-EVAL-*.nc
 
 
@@ -114,40 +140,51 @@ figures/forecast-combination.pdf: figures/forecast-combination/forecast-combinat
 	cd $(dir $<) && lualatex $(notdir $<)
 	mv $(<:.tex=.pdf) $@
 
+
 # Figure 2: Reanalysis overview
+figures/event24-reanalysis+nh18fig5.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
+	python3 -m scripts.plot reanalysis+nh18fig5 2016-12-18T00,65,10,35,-30 $(event24_ens) \
+		--reanalysis $(event24_ana) \
+		--output $@ \
+		--hovmoeller-extent "target" \
+		--end 2016-12-24T00
+
+
 # Figure 3: Ensemble target metric overview
-# Figure 6: ESA maps and Hovmöller with full flux
-# Figure 7: Cluster and scatter analysis of upstream region
-figures/event24-%.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
+# Figure 7: ESA maps and Hovmöller with full flux
+# Figure 8: Cluster and scatter analysis of upstream region (needs data from idealized experiment)
+figures/event24-%.pdf: scripts/plot.py figures/idealized.pdf $(event24_ana) $(event24_ens)
 	python3 -m scripts.plot $* 2016-12-18T00,65,10,35,-30 $(event24_ens) \
 		--reanalysis $(event24_ana) \
 		--output $@ \
 		--nsamples $(NSAMPLES) \
 		--sample-size $(SAMPLE_SIZE) \
 		--significance-level $(SIGNIFICANCE_LEVEL) \
+		--ncluster $(NCLUSTER) \
 		--delta-hours "0,-24,-48,-72,-96" \
 		--source "f1+f2+f3" \
 		--scatter 2016-12-16T00,65,-90,35,-15 \
+		--scatter-idealized "data/idealized.json" \
 		--hovmoeller-extent "target" \
-		--end 2016-12-24T00
+		--end 2016-12-21T00
 
 # Figure 4: Integrated budget in 2.5 days prior to onset
-figures/event24-budget.pdf: scripts/plot-budget.py $(event24_ana) | figures
+figures/event24-budget.pdf: scripts/plot-budget.py $(event24_ana)
 	python3 -m scripts.plot-budget $(event24_ana) 2016-12-15T12 2016-12-18T00,65,10,35,-30 \
 		--statistics-output "data/budget-statistics.json" \
 		--output $@
 
 # Figure 5: Target forecast evaluation
-figures/event24-evaluation.pdf: scripts/plot-evaluation.py $(event24_eval) $(event24_ana) | figures
+figures/event24-evaluation.pdf: scripts/plot-evaluation.py $(event24_eval) $(event24_ana)
 	python3 -m scripts.plot-evaluation 65,10,35,-30 $(event24_eval) \
 		--reanalysis $(event24_ana) \
-		--highlight 2016-12-10T00 2016-12-10T12 2016-12-11T00 \
+		--highlight 2016-12-09T12 2016-12-10T00 2016-12-10T12 2016-12-11T00 \
 		--output $@
 
-# Figure 8: ESA maps with flux terms
+# Figure 9: ESA maps with flux terms
 figures/event24-maps-separate.pdf: figures/event24-maps-separate/event24-maps-separate.tex \
 		figures/event24-maps-separate/f1f3.pdf figures/event24-maps-separate/f2.pdf
 	cd $(dir $<) && lualatex $(notdir $<)
@@ -160,6 +197,7 @@ figures/event24-maps-separate/f1f3.pdf: scripts/plot.py $(event24_ana) $(event24
 		--nsamples $(NSAMPLES) \
 		--sample-size $(SAMPLE_SIZE) \
 		--significance-level $(SIGNIFICANCE_LEVEL) \
+		--ncluster $(NCLUSTER) \
 		--delta-hours "24, 0,-24, -48" \
 		--source "f1+f3" \
 		--panel-letters "abcd"
@@ -171,11 +209,13 @@ figures/event24-maps-separate/f2.pdf: scripts/plot.py $(event24_ana) $(event24_e
 		--nsamples $(NSAMPLES) \
 		--sample-size $(SAMPLE_SIZE) \
 		--significance-level $(SIGNIFICANCE_LEVEL) \
+		--ncluster $(NCLUSTER) \
 		--delta-hours "24, 0,-24, -48" \
 		--source "f2" \
 		--panel-letters "efgh"
 
-# Figure 9: Cluster analysis of F2 in block
+
+# Figure 10: Cluster analysis of F2 in block
 figures/event24-cluster-f2.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
 	python3 -m scripts.plot cluster 2016-12-18T00,65,10,35,-30 $(event24_ens) \
 		--reanalysis $(event24_ana) \
@@ -183,9 +223,11 @@ figures/event24-cluster-f2.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
 		--nsamples $(NSAMPLES) \
 		--sample-size $(SAMPLE_SIZE) \
 		--significance-level $(SIGNIFICANCE_LEVEL) \
+		--ncluster $(NCLUSTER) \
 		--source "f2"
 
-# Figure 10: Flux-LWA relationship in block (similar to NH18's Fig. 4)
+
+# Figure 11: Flux-LWA relationship in block (similar to NH18's Fig. 4)
 figures/event24-nh18fig4.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
 	python3 -m scripts.plot nh18fig4 2016-12-18T00,65,10,35,-30 $(event24_ens) \
 		--reanalysis $(event24_ana) \
@@ -193,5 +235,5 @@ figures/event24-nh18fig4.pdf: scripts/plot.py $(event24_ana) $(event24_ens)
 		--nsamples $(NSAMPLES) \
 		--sample-size $(SAMPLE_SIZE) \
 		--significance-level $(SIGNIFICANCE_LEVEL) \
+		--ncluster $(NCLUSTER) \
 		--scatter 2016-12-18T00,50,-5,40,-15
-
diff --git a/README.md b/README.md
index 3a0d39b..fb0de09 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Block-Dec2016-LWA-ESA
 
-Code to produce all figures of the article **The Onset of a Blocked Flow Event as a "Traffic Jam": Characterization with Ensemble Sensitivity Analysis** (in revision at JAS).
+Code to produce all figures of the article **The Onset of a Blocking Event as a "Traffic Jam": Characterization with Ensemble Sensitivity Analysis** (in revision at JAS).
 
 - Research by [Christopher Polster](https://dynmet.ipa.uni-mainz.de/christopher-polster/) and [Volkmar Wirth](https://dynmet.ipa.uni-mainz.de/volkmar-wirth/).
 - Software by Christopher Polster.
@@ -23,6 +23,10 @@ Use the scripts and instructions in the [`data`](data) directory to obtain the r
     data
      + IFS-ENS
+     |  + ENS-2016-12-09T12Z
+     |  |  + ENS-2016-12-09T12Z-t.nc
+     |  |  + ENS-2016-12-09T12Z-u.nc
+     |  |  + ENS-2016-12-09T12Z-v.nc
      |  + ENS-2016-12-10T00Z
      |  |  + ENS-2016-12-10T00Z-t.nc
      |  |  + ENS-2016-12-10T00Z-u.nc
@@ -78,11 +82,12 @@ Figures will appear in the `figures` directory:
 - Fig. 3: `figures/event24-plume.pdf`
 - Fig. 4: `figures/event24-budget.pdf`
 - Fig. 5: `figures/event24-evaluation.pdf`
-- Fig. 6: `figures/event24-maps+hovmoeller.pdf`
-- Fig. 7: `figures/event24-cluster+scatter.pdf`
-- Fig. 8: `figures/event24-maps-separate.pdf`
-- Fig. 9: `figures/event24-cluster-f2.pdf`
-- Fig. 10: `figures/event24-nh18fig4.pdf`, does not contain the fits of [Nakamura and Huang (2018)](https://doi.org/10.1126/science.aat0721) due to licensing restrictions
+- Fig. 6: `figures/idealized.pdf`
+- Fig. 7: `figures/event24-maps+hovmoeller.pdf`
+- Fig. 8: `figures/event24-cluster+scatter.pdf`
+- Fig. 9: `figures/event24-maps-separate.pdf`
+- Fig. 10: `figures/event24-cluster-f2.pdf`
+- Fig. 11: `figures/event24-nh18fig4.pdf`, does not contain the fits of [Nakamura and Huang (2018)](https://doi.org/10.1126/science.aat0721) due to licensing restrictions
 
 Because the statistical significance test is based on a randomized procedure, results may vary slightly each time the plots are created.
 
diff --git a/data/README.md b/data/README.md
index 22d8046..7ef289a 100644
--- a/data/README.md
+++ b/data/README.md
@@ -4,6 +4,7 @@ IFS data can be downloaded from the ECMWF MARS archive.
 If you have the required credentials, log in to ecgate and use the `job.sh` script provided in the `IFS-ENS` folder to obtain the files
 
+    $ sbatch job.sh 2016-12-09 12
     $ sbatch job.sh 2016-12-10 00
     $ sbatch job.sh 2016-12-10 12
     $ sbatch job.sh 2016-12-11 00
 
diff --git a/figures/forecast-combination/forecast-combination.tex b/figures/forecast-combination/forecast-combination.tex
index 5a6037c..60a750d 100644
--- a/figures/forecast-combination/forecast-combination.tex
+++ b/figures/forecast-combination/forecast-combination.tex
@@ -6,12 +6,12 @@
 \begin{document}
 \begin{tikzpicture}[xscale=0.45,yscale=0.5]
 
-    % No-data zones
-    \fill[black!15!white] (4,0) rectangle (22,4);
+    % Data zone
+    \fill[black!15!white] (4,0) rectangle (20,4);
 
     % Forecasts
-    \foreach \x in {1,2,3} {
-        \draw[thick,-latex] (\x-3,\x) -- (\x+22,\x);
-        \fill[black] (\x-3,\x) circle (0.15) node[anchor=east] {ENS\x\;};
+    \foreach \x in {1,2,3,4} {
+        \draw[thick,-latex] (\x-4,\x) -- (\x+21,\x);
+        \fill[black] (\x-4,\x) circle (0.15) node[anchor=east] {ENS\x\;};
     }
 
     % Last initial and target evaluation time
     \draw[thick,dashed] (0,0) -- (0,4);
diff --git a/scripts/common/plotting.py b/scripts/common/plotting.py
index 88deac3..97fb41d 100644
--- a/scripts/common/plotting.py
+++ b/scripts/common/plotting.py
@@ -64,6 +64,7 @@ def format_date_full(x):
 
 # Axes limits as a multiple of some value
 def mult_limit(values, mult=10, tol=0, minval=None, maxval=None):
+    values = np.asarray(values).flatten()
     # Find next multiple below minium
     ymin = int(min(values) / mult) * mult
     while min(values) < ymin:
@@ -151,7 +152,7 @@ def plot_box(ax, box, **kwargs):
 
 # Plot presets
 
-def plume(ax, x, ys, clusters, reanalysis=None, percentile=None, linewidth=1.0, **kwargs):
+def plume(ax, x, ys, clusters, reanalysis=None, ncluster=None, linewidth=1.0, **kwargs):
     # Unpack clusters
     bot_cluster, not_cluster, top_cluster = clusters
     # Number of members in ensemble to determine line alpha
@@ -164,8 +165,8 @@ def plume(ax, x, ys, clusters, reanalysis=None, percentile=None, linewidth=1.0,
     legend_handles = [line_not[0], line_top[0], line_bot[0]]
     legend_labels = [
         "ENS member",
-        "Top" if percentile is None else "{}% top".format(percentile),
-        "Bottom" if percentile is None else "{}% bottom".format(percentile),
+        "Top" + ("" if ncluster is None else " {}".format(ncluster)),
+        "Bottom" + ("" if ncluster is None else " {}".format(ncluster)),
     ]
     # Add the reanalysis timeseries to the plot and legend
     if reanalysis is not None:
diff --git a/scripts/plot-evaluation.py b/scripts/plot-evaluation.py
index a51c42a..7308254 100644
--- a/scripts/plot-evaluation.py
+++ b/scripts/plot-evaluation.py
@@ -90,6 +90,8 @@ def parse_datehour(arg):
     half = len(fx) // 2
     ax.plot(fx[:half], fy[:half], linewidth=0, color=plotting.esa_blue, marker="x", markersize=4, zorder=10)
     ax.plot(fx[half:], fy[half:], linewidth=0, color=plotting.esa_red, marker="x", markersize=4, zorder=10)
+    # Print some statistics about the individual forecasts
+    print("INIT {:%Y-%m-%d %H}Z: {:6.2f} max {:6.2f} min".format(init, max(values), min(values)))
 
 # Add marker and horizontal line for reanalysis value if available
 if args.reanalysis is not None:
@@ -97,6 +99,8 @@ def parse_datehour(arg):
     ax.plot([i], [target_ana], marker="*", markersize=10, color="#000", linewidth=1, zorder=99, label="Reanalysis")
     ax.axhline(target_ana, linewidth=1, color="#000", zorder=0)
     ax.legend(loc="center right", fontsize="small")
+    # Print the reanalysis target value too (as a Python float, since a 0-d
+    # numpy array does not accept a float format spec)
+    print("REANALYSIS: {:6.2f}".format(float(target_ana.values)))
 
 # Translate x-ticks to dates and add nice labels
 ticks = np.arange(0, len(target_ens)+1, 4)
diff --git a/scripts/plot-idealized.py b/scripts/plot-idealized.py
new file mode 100644
index 0000000..e8e2d9a
--- /dev/null
+++ b/scripts/plot-idealized.py
@@ -0,0 +1,174 @@
+import argparse
+import json
+
+parser = argparse.ArgumentParser()
+parser.add_argument("infiles", type=str, nargs="+")
+parser.add_argument("--output", type=str, default=None)
+parser.add_argument("--data-output", type=str, default=None)
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    import numpy as np
+    import xarray as xr
+    import matplotlib.pyplot as plt
+    import matplotlib.lines as mlines
+    from .common import plotting, esa
+
+    # Upstream region
+    up_d, up_s, up_e = -2, -90, -15
+    # Blocking region
+    bl_d, bl_s, bl_e = 0, -30, 10
+
+    def prep_data(ds):
+        # 2° resolution is good enough for these plots
+        ds = ds.interp({ "longitude": np.arange(-104, 77, 2) })
+        # The prescribed stationary wave activity leads to a stationary component
+        # of the transient LWA flux. Remove this stationary component by
+        # subtracting the stationary state after spin-up at the beginning of the
+        # simulation from the flux field, then visualize flux anomalies.
+        ds["flux*"] = ds["flux"] - ds["flux"].isel(time=0)
+        # Determine if member is jammed at block time (require more than 5x2°=10° above threshold)
+        ds["jammed"] = (ds["lwa"].sel({ "time": bl_d }) > ds["lwa_thresh"]).sum(dim="longitude") > 5
+        return ds
+
+    # Mean of the reference ensemble
+    mean = xr.load_dataset(args.infiles[0])
+    mean = prep_data(mean)
+    nens = mean["number"].size
+    mean = mean.mean(dim="number")
+    # Full ensemble including perturbed members
+    data = []
+    labels = []
+    for infile in args.infiles:
+        _ds = xr.load_dataset(infile)
+        if not labels:
+            labels.append("Reference")
+            ref_alpha = _ds.attrs["alpha"]
+            ref_urefcg = _ds.attrs["urefcg"]
+            ref_peak = _ds.attrs["forcing_peak"]
+        elif _ds.attrs["alpha"] != ref_alpha:
+            labels.append(r"$\alpha = {:.2f}$".format(_ds.attrs["alpha"]))
+        elif _ds.attrs["urefcg"] != ref_urefcg:
+            labels.append(r"$u_\mathrm{{ref}} + c_\mathrm{{g}} = {:.0f} \;\mathrm{{m}}\,\mathrm{{s}}^{{-1}}$".format(_ds.attrs["urefcg"]))
+        elif _ds.attrs["forcing_peak"] != ref_peak:
+            labels.append(r"$t_\mathrm{{up}} = {:.1f} \;\mathrm{{d}}$".format(_ds.attrs["forcing_peak"]))
+        else:
+            labels.append("???")
+        data.append(prep_data(_ds))
+    data = xr.concat(data, dim="number")
+    # Aggregated data for the sensitivity analysis
+    upstream = data.sel({ "time": up_d, "longitude": slice(up_s, up_e) }).mean(dim="longitude")
+    blocked = data.sel({ "time": bl_d, "longitude": slice(bl_s, bl_e) }).mean(dim="longitude")
+
+    fig = plt.figure(figsize=(9, 4.1))
+
+    lon = data["longitude"].values
+    time = data["time"].values
+
+    _, ax_lwa = plotting.add_hov_axes(fig, (0.02, 0.225, 0.16, 0.71), lonlim=(-105, 75), latlim=(35, 65))
+    _, ax_flx = plotting.add_hov_axes(fig, (0.20, 0.225, 0.16, 0.71), lonlim=(-105, 75), latlim=(35, 65))
+    _, ax_esa = plotting.add_hov_axes(fig, (0.38, 0.225, 0.16, 0.71), lonlim=(-105, 75), latlim=(35, 65))
+    ax_sct = fig.add_axes((0.67, 0.115, 0.31, 0.82))
+    cx_lwa = fig.add_axes((0.02, 0.12, 0.16, 0.025))
+    cx_flx = fig.add_axes((0.20, 0.12, 0.16, 0.025))
+    cx_esa = fig.add_axes((0.38, 0.12, 0.16, 0.025))
+
+    ct_lwa = ax_lwa.contour(lon, time, mean["lwa"].values, levels=[50], colors="#000", linewidths=0.8)
+    plt.clabel(ct_lwa, fontsize="small")
+    cf_lwa = ax_lwa.contourf(
+        lon, time, mean["lwa"].values,
+        levels=[0, 10, 20, 30, 40, 50, 60, 70],
+        colors=plotting.lwa_colors,
+        extend="max"
+    )
+    ax_lwa.plot([bl_s, bl_e], [bl_d, bl_d], color="#000", linewidth=2)
+    cb_lwa = fig.colorbar(cf_lwa, cax=cx_lwa, orientation="horizontal", spacing="proportional", extendfrac=0.1)
+    cb_lwa.set_ticks([0, 20, 40, 60])
+    cb_lwa.set_label(r"$A_0 + \hat A$ [$\mathrm{m} \; \mathrm{s}^{-1}$]")
+    ax_lwa.set_yticklabels([])
+
+    ct_flx = ax_flx.contour(lon, time, mean["flux*"].values, levels=[360], colors="#000", linewidths=0.8)
+    plt.clabel(ct_flx, fontsize="small")
+    cf_flx = ax_flx.contourf(
+        lon, time, mean["flux*"].values,
+        levels=[120, 240, 360, 480],
+        colors=plotting.flux_colors[4:],
+        extend="both"
+    )
+    ax_flx.plot([up_s, up_e], [up_d, up_d], color="#000", linewidth=2)
+    cb_flx = fig.colorbar(cf_flx, cax=cx_flx, orientation="horizontal", spacing="proportional", extendfrac=0.1)
+    cb_flx.set_ticks([120, 240, 360, 480])
+    cb_flx.set_label(r"$\hat F$ anomaly [$\mathrm{m}^2 \; \mathrm{s}^{-2}$]")
+    ax_flx.set_yticklabels([])
+
+    ct_esa = ax_esa.contour(lon, time, mean["flux*"].values, levels=[360], colors="#000", linewidths=0.8)
+    plt.clabel(ct_esa, fontsize="small")
+    target = blocked["lwa"].values
+    source = data["flux"].values
+    cf_esa = ax_esa.contourf(
+        lon, time, esa.esa_corr(source, target),
+        levels=plotting.esa_levels,
+        colors=plotting.esa_colors,
+        extend="both"
+    )
+    cb_esa = fig.colorbar(cf_esa, cax=cx_esa, orientation="horizontal", spacing="proportional", extendfrac=0.1)
+    cb_esa.set_ticks([-0.8, -0.4, 0.4, 0.8])
+    cb_esa.set_label(r"Sensitivity to $\hat F$")
+    ax_esa.yaxis.set_major_formatter(r"${x:+.0f} \;\mathrm{{d}}$")
+
+    _colors = ["#999", "#999", "#CCC", "#CCC"]
+    for i in range(data["number"].size // nens):
+        slc = slice(i*nens, (i+1)*nens)
+        src = upstream["flux*"].values[slc]
+        jam = data["jammed"].values[slc]
+        tgt = target[slc]
+        # Reference simulation
+        if i == 0:
+            kw = { "color": "#000", "zorder": 10 }
+            fc = np.where(jam, "#000", "#FFF")
+            ax_sct.scatter(src, tgt, s=25, marker="o", color="#000", facecolor=fc, zorder=11)
+            # Save values of src-tgt curve to file for use in other plots
+            curve = { "x": src.tolist(), "y": tgt.tolist(), "z": jam.tolist() }
+            if args.data_output is None:
+                print(json.dumps(curve, indent=4))
+            else:
+                with open(args.data_output, "w") as f:
+                    json.dump(curve, f, indent=4)
+        # Perturbation experiments
+        else:
+            kw = {
+                "color": _colors.pop(),
+                "zorder": 0,
+                "label": labels[i],
+                "linestyle": ("solid" if i % 2 == 0 else "dashed")
+            }
+        kw["zorder"] -= 1
+        ax_sct.plot(src, tgt, linewidth=1.2, **kw)
+
+    handles = [
+        mlines.Line2D([], [], color="#000", label="Reference", markerfacecolor="#FFF"),
+        mlines.Line2D([], [], marker="o", color="#000", linewidth=0, label="< threshold", markerfacecolor="#FFF"),
+        mlines.Line2D([], [], marker="o", color="#000", linewidth=0, label="> threshold")
+    ]
+    leg1 = ax_sct.legend(handles=handles, loc="upper left", handlelength=1.2, fontsize="small")
+    leg2 = ax_sct.legend(loc="lower center", fontsize="small", handlelength=1.2, ncol=2)
+    ax_sct.add_artist(leg1)
+
+    ax_sct.set_xlim((-50, 950))
+    ax_sct.set_xlabel(r"agg. LWA Flux (${:.0f} \;\mathrm{{d}}$) [$\mathrm{{m}}^2 \; \mathrm{{s}}^{{-2}}$]".format(up_d))
+    ax_sct.set_ylim((22, 85))
+    ax_sct.set_ylabel(r"agg. LWA (${:.0f} \;\mathrm{{d}}$) [$\mathrm{{m}} \; \mathrm{{s}}^{{-1}}$]".format(bl_d))
+    plotting.set_grid(ax_sct)
+
+    ax_lwa.set_title("(a) LWA", loc="left")
+    ax_flx.set_title("(b) LWA Flux", loc="left")
+    ax_esa.set_title("(c) ESA (corr.)", loc="left")
+    ax_sct.set_title("(d) Sensitivity Scatter", loc="left")
+
+    if args.output is None:
+        plt.show()
+    else:
+        fig.savefig(args.output, dpi=200)
+
+
diff --git a/scripts/plot.py b/scripts/plot.py
index c06c080..f61f952 100644
--- a/scripts/plot.py
+++ b/scripts/plot.py
@@ -3,6 +3,7 @@
 import datetime as dt
 import operator
 import os
+import json
 
 from functools import partial, reduce
 
@@ -39,8 +40,8 @@ def parse_delta_hours(s):
     Point this at a reanalysis dataset to produce an additional panel with
     reanalysis LWA and PV as well as a reanalysis timeseries in the plume panel
 """)
-parser.add_argument("--percentile", type=float, default=10, help="""
-    Which percentiles to use for the cluster analysis
+parser.add_argument("--ncluster", type=int, default=5, help="""
+    How many members are selected for the cluster analysis
 """)
 parser.add_argument("--nsamples", type=int, default=1000, help="""
     Number of samples drawn for the bootstrap significance test
@@ -64,6 +65,9 @@ def parse_delta_hours(s):
 parser.add_argument("--scatter", type=parse_target, default=None, help="""
     valid,N,E,S,W for scatter plot source definition.
 """)
+parser.add_argument("--scatter-idealized", type=verify_path, default=None, help="""
+    Reference curve added to scatter plot (json format).
+""")
 parser.add_argument("--hovmoeller-extent", type=str, default="target", help="""
     The meridional extend used in the spatial reduction of data for the
     Hovmöller diagram. Specify as N,S or use the special keywords 'target'
@@ -155,7 +159,7 @@ def set_panel_title(ax, title, **kwargs):
 # 3-panel plot with target plume and LWA/PV rank clusters
 if args.layout == "plume":
     fig = plt.figure(figsize=(9, 3.5))
-    ax_plu = fig.add_axes( (0.06, 0.08, 0.46, 0.83))
+    ax_plu = fig.add_axes( (0.06, 0.08, 0.45, 0.83))
     ax_top = add_map_axes(fig, (0.58, 0.58, 0.35, 0.33), lonlim=(-105, 75))
     ax_bot = add_map_axes(fig, (0.58, 0.08, 0.35, 0.33), lonlim=(-105, 75))
     cx_lwa = fig.add_axes((0.925, 0.08, 0.01, 0.83)) # for the LWA colorbar
@@ -275,7 +279,7 @@ def set_panel_title(ax, title, **kwargs):
     ax_sct = fig.add_axes((0.68, 0.10, 0.29, 0.46))
     ax_top = add_map_axes(fig, (0.06, 0.58, 0.52, 0.30), noxlabels=True)
     ax_bot = add_map_axes(fig, (0.06, 0.21, 0.52, 0.30))
-    cx_src = fig.add_axes((0.06, 0.10, 0.45, 0.023))
+    cx_src = fig.add_axes((0.06, 0.10, 0.52, 0.023))
     tx_src = fig.text(0.06, 0.98, "", ha="left", va="top", fontsize="x-large")
 # Scatter plot with corresponding map on top
 elif args.layout == "map+scatter":
@@ -382,12 +386,10 @@ def _get_source(ds, name):
 
 # Compute rank-sorted indices of members for cluster selection
 target_ens_ranks = np.argsort(target_ens_values)
-# How many members per cluster?
-ncluster = int(target_ens_ranks.size * args.percentile / 100.)
 # Member indices for clusters
-bot_cluster = target_ens_ranks[ : ncluster]
-not_cluster = target_ens_ranks[ ncluster:-ncluster]
-top_cluster = target_ens_ranks[-ncluster: ]
+bot_cluster = target_ens_ranks[ : args.ncluster]
+not_cluster = target_ens_ranks[ args.ncluster:-args.ncluster]
+top_cluster = target_ens_ranks[-args.ncluster: ]
 
 # Same for reanalysis data if given
 if args.reanalysis is not None:
@@ -509,7 +511,7 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
         target_ens.values,
         clusters=(bot_cluster, not_cluster, top_cluster),
         reanalysis=(None if args.reanalysis is None else target_ana.values),
-        percentile=args.percentile
+        ncluster=args.ncluster
     )
     ax_plu.legend(*_legend, loc="upper left", fontsize="small")
     # Only use 00 times as time axis labels
@@ -517,8 +519,7 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
     ax_plu.set_xticks(time_ticks[::2])
     ax_plu.set_xticks(time_ticks, minor=True)
     ax_plu.xaxis.set_major_formatter(date_formatter)
-    ax_plu.set_ylim(mult_limit(target_ens_values, mult=10, tol=10, minval=0))
-    # Configure plume plot
+    ax_plu.set_ylim(mult_limit(target_ens.values, mult=5, tol=5, minval=0))
     ax_plu.set_xlim((min(target_ens.time.values), max(target_ens.time.values)))
     set_grid(ax_plu)
     ax_plu.set_ylabel(target_metric.label(lwa_attrs))
@@ -691,7 +692,7 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
         clusters=(bot_cluster, not_cluster, top_cluster),
         reanalysis=(None if args.reanalysis is None else _scatter_metric(source_ana.sel(_period)).values),
         linewidth=0.8,
-        percentile=args.percentile
+        ncluster=args.ncluster
     )
     ax_src.legend(*_legend, loc="upper left", fontsize="x-small", handlelength=1.0)
     # Only use 00 times as time axis labels
@@ -699,7 +700,7 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
     ax_src.set_xticks([t.values for t in source_ens_plume.time if t.dt.hour == 12], minor=True)
     ax_src.xaxis.set_major_formatter(date_formatter)
     ax_src.set_xlim((min(source_ens_plume.time.values), max(source_ens_plume.time.values)))
-    ax_src.set_ylim(mult_limit(source_ens_plume.values.flatten(), 50))
+    ax_src.set_ylim(mult_limit(source_ens_plume.values[:-2,:], 50))
     set_panel_title(ax_src, "Evolution of Agg. " + texify(source_ens.attrs["name"]))
     ax_src.set_ylabel("Agg. {source} [{unit}]".format(
         source=texify(source_ens.attrs["name"]),
@@ -730,18 +731,26 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
     )
     # Linear fit to ensemble
     reg = linregress(source_ens_values, target_ens_values)
-    xmin = min(source_ens_values) - 20
-    xmax = max(source_ens_values) + 20
-    ax_sct.plot(
-        [xmin, xmax],
-        [reg.intercept + reg.slope * xmin, reg.intercept + reg.slope * xmax],
-        color="#000000",
-        linewidth=1.
-    )
+    # Idealized reference curve if data given
+    if args.scatter_idealized is not None:
+        with open(args.scatter_idealized, "r") as f:
+            idealized = { k: np.asarray(v) for k, v in json.load(f).items() }
+        _a = idealized["z"] # jammed
+        _b = ~idealized["z"] | np.roll(~idealized["z"], 1) # non-jammed
+        ax_sct.plot(idealized["x"][_a], idealized["y"][_a], color="#000000", linewidth=1.8, linestyle="solid", label="Idealized")
+        ax_sct.plot(idealized["x"][_b], idealized["y"][_b], color="#000000", linewidth=1.8, linestyle=(0, (2, 1)))
+    else:
+        xmin = min(source_ens_values) - 20
+        xmax = max(source_ens_values) + 20
+        ax_sct.plot(
+            [xmin, xmax],
+            [reg.intercept + reg.slope * xmin, reg.intercept + reg.slope * xmax],
+            color="#000000",
+            linewidth=1.
+        )
     # Configure scatter panel
     ax_sct.legend(loc="upper left", fontsize="x-small", markerscale=0.7, handlelength=1.0)
-    _ymin, _ymax = mult_limit(target_ens_values, mult=5, minval=0)
-    ax_sct.set_ylim((_ymin, 1.10*_ymax - 0.10*_ymin))
+    ax_sct.set_ylim(mult_limit(target_ens_values, mult=5, minval=0))
     ax_sct.set_ylabel("Target ({}) [{}]".format(
         format_date_full(args.target.valid),
         texify(lwa_attrs["unit"])
@@ -806,6 +815,8 @@ def contours_esa(ax, y, x, src, tgt, cluster=None):
     ax_map.yaxis.tick_right()
     ax_map.set_yticks([])
     ax_map.set_xticks([])
+    # Print the correlation value for F1+F3 vs. cos(ϕ)
+    print("corr[cos(ϕ), F1+F3] = {:.2f}".format(linregress(_lwa_ens, _f1f3_ens).rvalue))
 
 # LWA and flux Hovmöller panels of reanalysis
 #
diff --git a/scripts/run-idealized.py b/scripts/run-idealized.py
new file mode 100644
index 0000000..1a70ec4
--- /dev/null
+++ b/scripts/run-idealized.py
@@ -0,0 +1,184 @@
+import argparse
+import datetime as dt
+import numpy as np
+
+DAYS = 24 * 60 * 60
+
+def roots_of_unity_upper(n):
+    """(2n)-th roots of unity (positive imaginary only)"""
+    return np.exp(1j * np.pi * (np.arange(n) + 0.5) / n)
+
+
+class ETDRK4Diagonal:
+    """Exponential time differencing 4th-order Runge-Kutta scheme
+
+    Supports diagonal linear operators only.
+
+    Based on the implementation of the Cox and Matthews (2002) scheme by Kassam
+    and Trefethen (2005) using contour integrals to evaluate the phi-functions.
+    """
+
+    def __init__(self, step, linear_diag, nonlinear, n_contour=16):
+        self.linear_diag = np.asarray(linear_diag)
+        self.nonlinear = nonlinear
+        # Number of quadrature nodes for contour integral
+        self._n_contour = n_contour
+        self.step = step
+        self._prepare()
+
+    def _prepare(self):
+        h = self.step
+        L = self.linear_diag
+        A = h*L
+        self._E = np.exp(A)
+        self._E2 = np.exp(0.5*A)
+        # Contour integral quadrature nodes (along complex circle)
+        r = roots_of_unity_upper(self._n_contour)
+        # Translate nodes so circles are centered around the points of evaluation
+        LR = A[:,None] + r[None,:]
+        # Evaluate contour integrals
+        # For a, b, c coefficients: Q = (e^Lh/2 - I) / L
+        self._Q = h * np.real(np.mean((np.exp(LR/2) - 1) / LR, axis=1))
+        # Alpha, beta, gamma coefficients
+        self._α = h * np.real(np.mean((-4 - LR + np.exp(LR)*( 4 - 3*LR + (LR**2))) / (LR**3), axis=1))
+        self._β = h * np.real(np.mean(( 2 + LR + np.exp(LR)*(-2 + LR)) / (LR**3), axis=1))
+        self._γ = h * np.real(np.mean((-4 - 3*LR - LR**2 + np.exp(LR)*( 4 - LR )) / (LR**3), axis=1))
+
+    def advance(self, u, t):
+        N = self.nonlinear
+        Q = self._Q
+        E = self._E
+        E2 = self._E2
+        # Evaluate the nonlinear term
+        Nu = N(u, t)
+        # Coefficient a and associated nonlinear term
+        a = E2*u + Q*Nu
+        Na = N(a, t + self.step*0.5)
+        # Coefficient b and associated nonlinear term
+        b = E2*u + Q*Na
+        Nb = N(b, t + self.step*0.5)
+        # Coefficient c and associated nonlinear term
+        c = E2*a + Q*(2*Nb - Nu)
+        Nc = N(c, t + self.step)
+        # Combine
+        return E*u + Nu*self._α + 2*(Na+Nb)*self._β + Nc*self._γ
+
+    def run(self, u, nsteps, t, keep_every=0):
+        n = 0 # step counter
+        us, ns = [u], [n]
+        while n < nsteps:
+            u = self.advance(u, t)
+            t += self.step
+            n += 1
+            if keep_every > 0 and n % keep_every == 0:
+                us.append(u)
+                ns.append(n)
+        # Zero-valued keep_every means no intermediate steps are kept, only
+        # initial conditions and end state
+        if keep_every <= 0:
+            us.append(u)
+            ns.append(n)
+        # Create time array from counted steps and return data numpyified
+        ts = np.asarray(ns) * self.step
+        ts += t - n*self.step
+        us = np.asarray(us)
+        return ts, us
+
+
+def hann(x, x0, width):
+    x = np.asarray(x)
+    y = (x - x0) / width * np.pi
+    out = np.zeros(y.shape)
+    sup = (-0.5*np.pi <= y) & (y <= 0.5*np.pi)
+    out[sup] = np.cos(y[sup])**2
+    return out
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("outfile", type=str)
+parser.add_argument("--nens", type=int, default=25)
+parser.add_argument("--alpha", type=float, default=0.4, help="wave-mean flow interaction parameter")
+parser.add_argument("--urefcg", type=float, default=60., help="background state group velocity")
+parser.add_argument("--forcing-peak", type=float, default=-2.)
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    import xarray as xr
+
+    N = 1024   # number of grid points
+    step = 300 # time step size [s]
+
+    xmin = 0.
+    xmax = 28e6
+    xdel = xmax - xmin
+    # Grid space coordinates
+    x = np.linspace(xmin, xmax, N, endpoint=False)
+    lon = x / xdel * 360 - 180
+    # Spectral space wavenumbers
+    k = 2 * np.pi * np.arange(0, N//2+1, dtype=complex) / xdel
+
+    tau = 10. * DAYS # relaxation time scale [s]
+    kappa = 3.26e5   # diffusion coefficient [m²/s]
+    linear = - 1. / tau - kappa * k**2
+
+    # Stationary LWA
+    A0 = 18 * hann(lon, 10., 150)
+    # ...
+    base_forcing = 1.825e-5 * np.ones_like(x)
+
+    def flux(lwa):
+        return (args.urefcg - 2. * args.alpha * A0 - args.alpha * lwa) * lwa
+
+    # Model with base forcing only for spin-up
+    def nonlinear(lwa_spectral, t):
+        lwa = np.fft.irfft(lwa_spectral)
+        fluxdiv = 1j * k * np.fft.rfft(flux(lwa))
+        forcing = base_forcing
+        return np.fft.rfft(forcing) - fluxdiv
+    model = ETDRK4Diagonal(step, linear, nonlinear)
+    # Start with no transient LWA
+    init = np.fft.rfft(np.zeros_like(A0))
+    # Run 200 days of spin-up to reach steady state
+    _, [_, init] = model.run(init, 200*DAYS//step, t=0, keep_every=0)
+
+    lwa, flx = [], []
+    # Run each ensemble member with a different forcing strength
+    strengths = np.linspace(0., 0.0009, args.nens)
+    for strength in strengths:
+        # Model with upstream forcing
+        def nonlinear(lwa_spectral, t):
+            lwa = np.fft.irfft(lwa_spectral)
+            fluxdiv = 1j * k * np.fft.rfft(flux(lwa))
+            forcing = base_forcing + (strength * hann(t, args.forcing_peak*DAYS, 4.0*DAYS) * hann(lon, -75., 50.))
+            return np.fft.rfft(forcing) - fluxdiv
+        model = ETDRK4Diagonal(step, linear, nonlinear)
+        # Run 8 days of actual simulation
+        t, A = model.run(init, 8*DAYS//step, t=-5*DAYS, keep_every=72)
+        # Add simulation data to ensemble
+        A = np.fft.irfft(A)
+        lwa.append(A + A0)
+        flx.append(flux(A))
+
+    data_vars = {
+        "lwa": (("number", "time", "longitude"), np.asarray(lwa, dtype=np.float32)),
+        "flux": (("number", "time", "longitude"), np.asarray(flx, dtype=np.float32)),
+        "lwa_thresh": (("longitude",), 0.5 * (args.urefcg - 2. * args.alpha * A0) / args.alpha)
+    }
+    coords = {
+        "number": np.arange(len(lwa)),
+        "time": t / DAYS,
+        "longitude": lon,
+    }
+    attrs = {
+        "nens": args.nens,
+        "urefcg": args.urefcg,
+        "alpha": args.alpha,
+        "forcing_peak": args.forcing_peak,
+        "step": step,
+        "tau": tau,
+        "kappa": kappa,
+        "created": dt.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
+    }
+    xr.Dataset(data_vars, coords, attrs).to_netcdf(args.outfile)
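
Note on the model integrated by scripts/run-idealized.py, as read from the
linear and nonlinear callbacks above (S denotes the prescribed forcing, A0 the
stationary LWA): the transient LWA obeys

    \partial_t \hat{A}
        = S(x, t)
        - \partial_x F(\hat{A})
        - \frac{\hat{A}}{\tau}
        + \kappa \, \partial_x^2 \hat{A} ,
    \qquad
    F(\hat{A}) = \left( u_\mathrm{ref} + c_\mathrm{g}
                 - \alpha \, ( 2 A_0 + \hat{A} ) \right) \hat{A} .

The flux F is quadratic in the LWA with a maximum (the "capacity" of the
traffic-jam analogy) where dF/dA vanishes, at
\hat{A}_c = (u_\mathrm{ref} + c_\mathrm{g} - 2 \alpha A_0) / (2 \alpha).
This is exactly the "lwa_thresh" variable written to the output files; members
whose LWA exceeds it over a wide enough longitude band are flagged as jammed
in scripts/plot-idealized.py.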
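
The coefficients set up in ETDRK4Diagonal._prepare are the Cox and Matthews
ETDRK4 weights. Each has a removable singularity at hL = 0, so the code
evaluates them, following Kassam and Trefethen (2005), as averages over contour
points z = hL + r_j, with r_j the (2n)-th roots of unity in the upper half
plane and angle brackets denoting the average over j:

    Q      = h \, \mathrm{Re} \left\langle (e^{z/2} - 1) / z \right\rangle ,
    \alpha = h \, \mathrm{Re} \left\langle (-4 - z + e^{z} (4 - 3z + z^2)) / z^3 \right\rangle ,
    \beta  = h \, \mathrm{Re} \left\langle ( 2 + z + e^{z} (-2 + z)) / z^3 \right\rangle ,
    \gamma = h \, \mathrm{Re} \left\langle (-4 - 3z - z^2 + e^{z} (4 - z)) / z^3 \right\rangle .

Taking the real part of the half-circle average is equivalent to averaging over
the full circle here because the linear operator (-1/tau - kappa k^2) is real,
so the lower-half nodes would contribute the complex conjugates.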
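
A minimal smoke test for the integrator (a sketch assuming the ETDRK4Diagonal
class from scripts/run-idealized.py is in scope, e.g. pasted into an
interactive session; the operator values below are arbitrary): with a zero
nonlinear term the update collapses to u <- exp(hL) u, so the scheme must
reproduce the exact solution of du/dt = L u to machine precision.

    import numpy as np

    L = np.array([-1.0, -0.1, -0.01])     # diagonal linear operator
    zero = lambda u, t: np.zeros_like(u)  # no nonlinear term

    model = ETDRK4Diagonal(step=0.5, linear_diag=L, nonlinear=zero)
    ts, us = model.run(np.ones(3), nsteps=20, t=0.0, keep_every=0)

    # Final state at ts[-1] = 10 time units should equal exp(L t) exactly
    print(np.allclose(us[-1], np.exp(L * ts[-1])))  # expect True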