diff --git a/README.md b/README.md index 399b890..b5aae73 100644 --- a/README.md +++ b/README.md @@ -16,14 +16,13 @@ cftime * xESMF ## Getting the code -``` -git clone https://github.com/jkrasting/mdtf_test_data.git -``` + +The mdtf_test_data package is available on [PyPI](https://pypi.org/project/mdtf-test-data/). ## Installation ``` -cd mdtf_test_data -pip install . +mkdir mdtf_test_data && cd mdtf_test_data +pip install mdtf-test-data ``` ## Usage @@ -31,7 +30,7 @@ This package can be used to generate fully-synthetic datasets based on NCAR's CESM2 and GFDL's CM4 model output that can be used to test the MDTF-Diagnostics package. ``` -usage: ./scripts/mdtf_synthetic.py [-h] [-c CONVENTION] [--startyear year] [--nyears years] +usage: mdtf_synthetic.py [-h] [-c CONVENTION] [--startyear year] [--nyears years] [--dlat latitude resolution in degrees] [--dlon longitude resolution in degrees] [--unittest] Required arguments: @@ -48,23 +47,24 @@ Optional arguments: To generate NCAR CESM output in a directory called `NCAR.Synthetic`: ``` -./scripts/mdtf_synthetic.py -c [ NCAR | CESM ] --nyears 7 +mdtf_synthetic.py -c [ NCAR | CESM ] --nyears 7 ``` Note that "NCAR" and "CESM" are both valid input values that generate output files appended with "NCAR_Synthetic". To generate GFDL CM4 output in a directory called `GFDL.Synthetic`: ``` -./scripts/mdtf_synthetic.py -c GFDL --nyears 10 +mdtf_synthetic.py -c GFDL --nyears 10 ``` To generate CMIP output in a directory called `CMIP.Synthetic`: ``` -./scripts/mdtf_synthetic.py -c CMIP --nyears 10 +mdtf_synthetic.py -c CMIP --nyears 10 ``` To coarsen an existing NetCDF file: ``` -mdtf_synthetic/util/mdtf-coarsen.py +git clone https://github.com/jkrasting/mdtf_test_data.git +cd mdtf_test_data usage: mdtf_synthetic/util/mdtf-coarsen.py [-h] [-r REGRID_METHOD] [-o OUTFILE] [-O] infile Coarsen a NetCDF file. 
diff --git a/mdtf_test_data/config/cmip_mon.yml b/mdtf_test_data/config/cmip_mon.yml new file mode 100755 index 0000000..aa3f278 --- /dev/null +++ b/mdtf_test_data/config/cmip_mon.yml @@ -0,0 +1,230 @@ +variables : + name : + - "areacello" + - "zos" + - "tauuo" + - "tauvo" + - "so" + - "thetao" + +areacello : + atts : + cell_methods : "area: sum" + comment : "TAREA" + coordinates : "lat lon" + description : "Cell areas for any grid used to report ocean variables and variables which are requested as used on the model ocean grid (e.g. hfsso, which is a downward heat flux from the atmosphere interpolated onto the ocean grid). These cell areas should be defined to enable exact calculation of global integrals (e.g., of vertical fluxes of energy at the surface and top of the atmosphere)." + frequency : "fx" + id : "areacello" + long_name : "Grid-Cell Area for Ocean Variables" + mipTable : "Ofx" + missing_value : 1.e+20 + out_name : "areacello" + prov : "Ofx ((isd.003))" + realm : "ocean" + standard_name : "cell_area" + time_label : "None" + time_title : "No temporal dimensions ... fixed field" + title : "Grid-Cell Area for Ocean Variables" + type : "real" + units : "m2" + variable_id : "areacello" + grid : "tripolar" + static : true + +zos : + atts : + cell_measures : 'area: areacello' + cell_methods : 'area: mean where sea time: mean' + comment : 'SSH' + coordinates : "lat lon" + description : 'This is the dynamic sea level, so should have zero global area mean. It should not include inverse barometer depressions from sea ice.' 
+ frequency : 'mon' + id : 'zos' + long_name : 'Sea Surface Height Above Geoid' + mipTable : 'Omon' + out_name : 'zos' + prov : 'Omon ((isd.003))' + realm : 'ocean' + standard_name : 'sea_surface_height_above_geoid' + time : 'time' + time_label : 'time-mean' + time_title : 'Temporal mean' + title : 'Sea Surface Height Above Geoid' + type : 'real' + units : 'm' + variable_id : 'zos' + grid : "tripolar" + stats : + - [-0.09352626651525497, 0.7232627272605896] + +tauuo : + atts : + cell_measures : '--OPT' + cell_methods : 'time: mean' + comment : 'TAUX' + coordinates : "lat lon" + description : 'This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.' + frequency : 'mon' + id : 'tauuo' + long_name : 'Surface Downward X Stress' + mipTable : 'Omon' + out_name : 'tauuo' + positive : 'down' + prov : 'Omon ((isd.003))' + realm : 'ocean' + standard_name : 'surface_downward_x_stress' + time : 'time' + time_label : 'time-mean' + time_title : 'Temporal mean' + title : 'Surface Downward X Stress' + type : 'real' + units : 'N m-2' + variable_id : 'tauuo' + grid : "tripolar" + stats : + - [0.001569337327964604, 0.07642165571451187] + +tauvo : + atts : + cell_measures : '--OPT' + cell_methods : 'time: mean' + comment : 'TAUY' + coordinates : "lat lon" + description : 'This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.' 
+ frequency : 'mon' + id : 'tauvo' + long_name : 'Surface Downward Y Stress' + mipTable : 'Omon' + out_name : 'tauvo' + positive : 'down' + prov : 'Omon ((isd.003))' + realm : 'ocean' + standard_name : 'surface_downward_y_stress' + time : 'time' + time_label : 'time-mean' + time_title : 'Temporal mean' + title : 'Surface Downward Y Stress' + type : 'real' + units : 'N m-2' + variable_id : 'tauvo' + grid : "tripolar" + stats : + - [0.0020488635636866093, 0.04154461994767189] + +so : + atts : + cell_measures : '--OPT' + cell_methods : 'time: mean' + coordinates : "lat lon" + description : 'sea water salinity' + frequency : 'mon' + id : 'so' + long_name : 'sea water salinity' + out_name : 'so' + positive : 'down' + realm : 'ocean' + standard_name : 'sea_water_salinity' + time : 'time' + time_label : 'time-mean' + time_title : 'Temporal mean' + title : 'Sea Water Salinity' + type : 'real' + units : 'psu' + variable_id : 'so' + grid : "tripolar" + stats : + - [ 37.091705, 37.169098 ] + - [ 37.090427, 37.16815 ] + - [ 37.086548, 37.163586 ] + - [ 37.054672, 37.13838 ] + - [ 36.948807, 37.03889 ] + - [ 36.872665, 36.96365 ] + - [ 36.825863, 36.92407 ] + - [ 36.793346, 36.899162 ] + - [ 36.767387, 36.878147 ] + - [ 36.744244, 36.85687 ] + - [ 36.721256, 36.8339 ] + - [ 36.698128, 36.80898 ] + - [ 36.673008, 36.782074 ] + - [ 36.644700, 36.753128 ] + - [ 36.61258, 36.72212 ] + - [ 36.576668, 36.689003 ] + - [ 36.537296, 36.65366 ] + - [ 36.495052, 36.615482 ] + - [ 36.45111, 36.574173 ] + - [ 36.406807, 36.529804 ] + - [ 36.36356, 36.483067 ] + - [ 36.322468, 36.43506 ] + - [ 36.28378, 36.387257 ] + - [ 36.247128, 36.340984 ] + - [ 36.21161, 36.297478 ] + - [ 36.176785, 36.257397 ] + - [ 36.14187, 36.22075 ] + - [ 36.106518, 36.186737 ] + - [ 36.07005, 36.154045 ] + - [ 36.03362, 36.121113 ] + - [ 36.000076, 36.087433 ] + - [ 35.974377, 36.05332 ] + - [ 35.96569, 36.020916 ] + - [ 35.982994, 35.994526 ] + - [ 36.03397, 35.97875 ] + +thetao: + atts: + cell_measures: '--OPT' 
+ cell_methods: 'time: mean' + coordinates: "lat lon" + description: 'sea water potential temperature' + frequency: 'mon' + id: 'thetao' + long_name: 'sea water potential temperature' + out_name: 'thetao' + positive: 'down' + realm: 'ocean' + standard_name: 'sea_water_potential_temperature' + time: 'time' + time_label: 'time-mean' + time_title: 'Temporal mean' + title: 'Sea Water Potential Temperature' + type: 'real' + units: 'degC' + variable_id: 'thetao' + grid: "tripolar" + stats: + - [ 23.438206, 23.501827 ] + - [ 23.393858, 23.457973 ] + - [ 23.332743, 23.399124 ] + - [ 23.145956, 23.248714 ] + - [ 22.192282, 22.395288 ] + - [ 21.162468, 21.3618 ] + - [ 20.480297, 20.662258 ] + - [ 20.052425, 20.224604 ] + - [ 19.769545, 19.938387 ] + - [ 19.559475, 19.728683 ] + - [ 19.38311, 19.554249 ] + - [ 19.219982, 19.393152 ] + - [ 19.060669, 19.235872 ] + - [ 18.89744, 19.073635 ] + - [ 18.726213, 18.900887 ] + - [ 18.545187, 18.714998 ] + - [ 18.352053, 18.513506 ] + - [ 18.143354, 18.293566 ] + - [ 17.917408, 18.054794 ] + - [ 17.674921, 17.799715 ] + - [ 17.41897, 17.532667 ] + - [ 17.154121, 17.25874 ] + - [ 16.885664, 16.982744 ] + - [ 16.61834, 16.708748 ] + - [ 16.356245, 16.439966 ] + - [ 16.10178, 16.178211 ] + - [ 15.854486, 15.922733 ] + - [ 15.610342, 15.66957 ] + - [ 15.362126, 15.412205 ] + - [ 15.101099, 15.14296 ] + - [ 14.81898, 14.854497 ] + - [ 14.509788, 14.541605 ] + - [ 14.171214, 14.202383 ] + - [ 13.806399, 13.840008 ] + - [ 13.422476, 13.461085 ] + + diff --git a/mdtf_test_data/resources/ocean_static_5deg.nc b/mdtf_test_data/resources/ocean_static_5deg.nc new file mode 100644 index 0000000..3d05dc8 Binary files /dev/null and b/mdtf_test_data/resources/ocean_static_5deg.nc differ diff --git a/mdtf_test_data/synthetic/__init__.py b/mdtf_test_data/synthetic/__init__.py index 37f6993..2e732ed 100755 --- a/mdtf_test_data/synthetic/__init__.py +++ b/mdtf_test_data/synthetic/__init__.py @@ -8,3 +8,4 @@ from . import time from . 
import vertical +from . import horizontal diff --git a/mdtf_test_data/synthetic/horizontal/__init__.py b/mdtf_test_data/synthetic/horizontal/__init__.py new file mode 100644 index 0000000..04662f9 --- /dev/null +++ b/mdtf_test_data/synthetic/horizontal/__init__.py @@ -0,0 +1,2 @@ +from .construct_rect_grid import construct_rect_grid +from .construct_tripolar_grid import construct_tripolar_grid diff --git a/mdtf_test_data/synthetic/horizontal/construct_rect_grid.py b/mdtf_test_data/synthetic/horizontal/construct_rect_grid.py new file mode 100755 index 0000000..29649b8 --- /dev/null +++ b/mdtf_test_data/synthetic/horizontal/construct_rect_grid.py @@ -0,0 +1,145 @@ +""" Collection of tools to coarsen model data """ + +__all__ = ["construct_rect_grid"] + +import warnings + +import numpy as np +import xarray as xr + +try: + import xesmf as xe +except: + warnings.warn("Unable to load `xesmf`. Regridding functionality will not work.") + + +def construct_rect_grid(dlon, dlat, add_attrs=False, attr_fmt="ncar", bounds=False): + """Generate a rectilinear grid based on values of dx and dy + + Parameters + ---------- + dlon : float + Grid spacing in the x-dimension (longitude) + dlat : float + Grid spacing in the y-dimension (latitude) + add_attrs : bool, optional + Include lat and lon variable attributes, by default False + + Returns + ------- + xarray.Dataset + Empty shell dataset with lat and lon dimensions + """ + + if 180.0 % dlat != 0: + dlat = 180.0 / np.floor(180.0 / dlat) + warnings.warn( + f"180 degrees does not divide evenly by dlat. Adjusting dlat to {dlat}" + ) + + if 360.0 % dlon != 0: + dlon = 360.0 / np.floor(360.0 / dlon) + warnings.warn( + f"360 degrees does not divide evenly by dlon. 
Adjusting dlon to {dlon}" + ) + + lat = np.arange(-90.0 + (dlat / 2.0), 90.0, dlat) + lon = np.arange(0.0 + (dlon / 2.0), 360.0, dlon) + + dset = xr.Dataset({"lat": (["lat"], lat), "lon": (["lon"], lon)}) + + if bounds: + lat_bnds = np.arange(-90.0, 90.0 + (dlat / 2.0), dlat) + lon_bnds = np.arange(0.0, 360.0 + (dlon / 2.0), dlon) + + lat_bnds = np.array(list(zip(lat_bnds[0:-1], lat_bnds[1::]))) + lon_bnds = np.array(list(zip(lon_bnds[0:-1], lon_bnds[1::]))) + + bnds = np.array([0.0, 1.0]) + + dset["lat_bnds"] = xr.DataArray(lat_bnds, coords=(dset.lat, ("bnds", bnds))) + dset["lon_bnds"] = xr.DataArray(lon_bnds, coords=(dset.lon, ("bnds", bnds))) + + dset = dset.drop("bnds") + + if attr_fmt == "ncar": + dset["lat"].attrs = ( + {"long_name": "latitude", "units": "degrees_north"} if add_attrs else {} + ) + dset["lon"].attrs = ( + {"long_name": "longitude", "units": "degrees_east"} if add_attrs else {} + ) + + elif attr_fmt == "gfdl": + dset["lat"].attrs = ( + {"long_name": "latitude", "units": "degrees_N", "cartesian_axis": "Y"} + if add_attrs + else {} + ) + dset["lon"].attrs = ( + {"long_name": "longitude", "units": "degrees_E", "cartesian_axis": "X"} + if add_attrs + else {} + ) + + if bounds: + dset["lat"].attrs["bounds"] = "lat_bnds" + dset["lon"].attrs["bounds"] = "lon_bnds" + + dset["lat_bnds"].attrs = ( + {"long_name": "latitude bounds", "cartesian_axis": "Y"} + if add_attrs + else {} + ) + dset["lon_bnds"].attrs = ( + {"long_name": "longitude bounds", "cartesian_axis": "X"} + if add_attrs + else {} + ) + + elif attr_fmt == "cmip": + dset["lat"].attrs = ( + { + "long_name": "latitude", + "units": "degrees_north", + "axis": "Y", + "standard_name": "latitude", + "cell_methods": "time: point", + } + if add_attrs + else {} + ) + dset["lon"].attrs = ( + { + "long_name": "longitude", + "units": "degrees_east", + "axis": "X", + "standard_name": "longitude", + "cell_methods": "time: point", + } + if add_attrs + else {} + ) + + if bounds: + dset["bnds"] = 
xr.DataArray(bnds, dims={"bnds": bnds}) + dset["bnds"].attrs["long_name"] = "vertex number" + + dset["lat"].attrs["bounds"] = "lat_bnds" + dset["lon"].attrs["bounds"] = "lon_bnds" + + dset["lat_bnds"].attrs = ( + {"long_name": "latitude bounds", "axis": "Y", "units": "degrees_north"} + if add_attrs + else {} + ) + dset["lon_bnds"].attrs = ( + {"long_name": "longitude bounds", "axis": "X", "units": "degrees_east"} + if add_attrs + else {} + ) + + else: + raise ValueError("Unknown model attribute format") + + return dset diff --git a/mdtf_test_data/synthetic/horizontal/construct_tripolar_grid.py b/mdtf_test_data/synthetic/horizontal/construct_tripolar_grid.py new file mode 100755 index 0000000..ea5a956 --- /dev/null +++ b/mdtf_test_data/synthetic/horizontal/construct_tripolar_grid.py @@ -0,0 +1,122 @@ +""" Collection of tools to coarsen model data """ + +__all__ = ["construct_tripolar_grid"] + +import warnings + +import numpy as np +import xarray as xr +import pkg_resources as pkgr + +try: + import xesmf as xe +except: + warnings.warn("Unable to load `xesmf`. Regridding functionality will not work.") + + +def construct_tripolar_grid( + point_type="t", add_attrs=False, attr_fmt="gfdl", retain_coords=False +): + """Generate a tripolar grid based on a real 5-degree MOM6 configuration + + Parameters + ---------- + point_type : str, optional + Grid type. Options are t, u, v, c. 
By default "t" (tracer) + add_attrs : bool, optional + Include lat and lon variable attributes, by default False + attr_fmt : str, optional + Modeling center attribute format, by default "gfdl" + retain_coords : bool, optional + Keep geolon, geolat, and wet in the dataset, by default False + + Returns + ------- + xarray.Dataset + Shell dataset with masked variable and ocean depth field + """ + + ds_in = pkgr.resource_filename("mdtf_test_data", "resources/ocean_static_5deg.nc") + ds_in = xr.open_dataset(ds_in) + + if point_type == "t": + lat = ds_in["geolat"] + lon = ds_in["geolon"] + wet = ds_in["wet"] + elif point_type == "u": + lat = ds_in["geolat_u"] + lon = ds_in["geolon_u"] + wet = ds_in["wet_u"] + elif point_type == "v": + lat = ds_in["geolat_v"] + lon = ds_in["geolon_v"] + wet = ds_in["wet_v"] + elif point_type == "c": + lat = ds_in["geolat_c"] + lon = ds_in["geolon_c"] + wet = ds_in["wet_c"] + + dset = xr.Dataset() + dset["mask"] = xr.where(wet == 0.0, np.nan, 1.0) + + if point_type == "t": + dset["depth"] = xr.DataArray(ds_in["depth_ocean"]) + + if retain_coords is True: + dset[lat.name] = lat + dset[lon.name] = lon + dset[wet.name] = wet + + if add_attrs is True: + if attr_fmt == "gfdl": + dset[lat.name].attrs = {} + dset[lon.name].attrs = {} + dset[wet.name].attrs = {} + elif attr_fmt == "ncar": + dset[lat.name].attrs = { + "axis": "Y", + "standard_name": "latitude", + "title": "Latitude", + "type": "double", + "units": "degrees_north", + "valid_max": 90.0, + "valid_min": -90.0, + } + dset[lon.name].attrs = { + "axis": "X", + "standard_name": "longitude", + "title": "Longitude", + "type": "double", + "units": "degrees_east", + "valid_max": 360.0, + "valid_min": 0.0, + } + dset[wet.name].attrs = {} + else: + raise ValueError("Unknown attribute format") + + else: + dset[lat.name].attrs = {} + dset[lon.name].attrs = {} + dset[wet.name].attrs = {} + + if attr_fmt == "ncar": + dset = dset.rename({"xh": "nlon", "yh": "nlat"}) + + lat_range = 
np.array(np.arange(1, len(dset["nlat"]) + 1, dtype=np.intc)) + dset["nlat"] = xr.DataArray(lat_range, dims=("nlat")) + dset["nlat"].attrs = { + "long_name": "cell index along second dimension", + "units": "1", + } + + lon_range = np.array(np.arange(1, len(dset["nlon"]) + 1, dtype=np.intc)) + dset["nlon"] = xr.DataArray(lon_range, dims=("nlon")) + dset["nlon"].attrs = { + "long_name": "cell index along first dimension", + "units": "1", + } + + dset = dset.rename({lat.name: "lat", lon.name: "lon"}) + + return dset diff --git a/mdtf_test_data/synthetic/synthetic_data.py b/mdtf_test_data/synthetic/synthetic_data.py index 93c7117..6f99bad 100755 --- a/mdtf_test_data/synthetic/synthetic_data.py +++ b/mdtf_test_data/synthetic/synthetic_data.py @@ -11,7 +11,8 @@ import cftime import xarray as xr import numpy as np -from mdtf_test_data.util.rectilinear import construct_rect_grid +from mdtf_test_data.synthetic.horizontal import construct_rect_grid +from mdtf_test_data.synthetic.horizontal import construct_tripolar_grid import mdtf_test_data.generators as generators from mdtf_test_data.synthetic.time import generate_monthly_time_axis @@ -21,6 +22,7 @@ from mdtf_test_data.synthetic.vertical import gfdl_plev19_vertical_coord from mdtf_test_data.synthetic.vertical import gfdl_vertical_coord from mdtf_test_data.synthetic.vertical import ncar_hybrid_coord +from mdtf_test_data.synthetic.vertical import mom6_z_coord def dataset_stats(filename, var=None, limit=None): @@ -68,6 +70,9 @@ def generate_synthetic_dataset( generator="normal", generator_kwargs=None, stats=None, + static=False, + data=None, + grid="standard", ): """Generates xarray dataset of syntheic data in NCAR format @@ -91,6 +96,11 @@ def generate_synthetic_dataset( Variable attributes, by default None stats : tuple or list of tuples Array statistics in the format of [(mean,stddev)] + static : bool + Flag denoting if variable is static + grid : str + Type of output grid, either "standard" or "tripolar", + by default 
"standard" Returns ------- @@ -100,45 +110,68 @@ def generate_synthetic_dataset( attrs = {} if attrs is None else attrs + # some logical control flags do_bounds = True if fmt == "cmip" else False - dset = construct_rect_grid( - dlon, dlat, add_attrs=True, attr_fmt=fmt, bounds=do_bounds - ) - lat = dset.lat - lon = dset.lon - xyshape = (len(dset["lat"]), len(dset["lon"])) - - if timeres == "mon": - ds_time = generate_monthly_time_axis(startyear, nyears, timefmt=fmt) - elif timeres == "day": - ds_time = generate_daily_time_axis(startyear, nyears, timefmt=fmt) - elif timeres == "3hr": - ds_time = generate_hourly_time_axis(startyear, nyears, 3, timefmt=fmt) - elif timeres == "1hr": - ds_time = generate_hourly_time_axis(startyear, nyears, 1, timefmt=fmt) + # Step 1: set up the horizontal grid + if grid == "tripolar": + dset = construct_tripolar_grid( + attr_fmt="ncar", retain_coords=True, add_attrs=True + ) + xyshape = dset["mask"].shape + lat = dset.nlat + lon = dset.nlon else: - print(timeres) - raise ValueError("Unknown time resolution requested") - - dset = ds_time.merge(dset) - time = dset["time"] + dset = construct_rect_grid( + dlon, dlat, add_attrs=True, attr_fmt=fmt, bounds=do_bounds + ) + lat = dset.lat + lon = dset.lon + xyshape = (len(dset["lat"]), len(dset["lon"])) + + # Step 2: set up the time axis + if static is False: + if timeres == "mon": + ds_time = generate_monthly_time_axis(startyear, nyears, timefmt=fmt) + elif timeres == "day": + ds_time = generate_daily_time_axis(startyear, nyears, timefmt=fmt) + elif timeres == "3hr": + ds_time = generate_hourly_time_axis(startyear, nyears, 3, timefmt=fmt) + elif timeres == "1hr": + ds_time = generate_hourly_time_axis(startyear, nyears, 1, timefmt=fmt) + else: + print(timeres) + raise ValueError("Unknown time resolution requested") - generator_kwargs = {} if generator_kwargs is None else generator_kwargs + dset = ds_time.merge(dset) + time = dset["time"] + ntimes = len(time) + else: + ntimes = 1 + # Step 3: 
generate the vertical coordinate if stats is not None: stats = [stats] if not isinstance(stats, list) else stats if len(stats) > 1: if fmt == "ncar": dset = dset.merge(ncar_hybrid_coord()) lev = dset.lev - elif fmt == "gfdl": + elif fmt == "gfdl": if len(stats) == 19: dset = dset.merge(gfdl_plev19_vertical_coord()) lev = dset.plev19 else: dset = dset.merge(gfdl_vertical_coord()) lev = dset.pfull + elif fmt == "cmip" and grid == "tripolar": + dset = dset.merge(mom6_z_coord()) + lev = dset.lev + assert len(stats) == len(lev),\ + f' Length of stats {len(stats)} must match number of levels {len(lev)}.' + + # Step 4: define the synthetic data generator kernel + generator_kwargs = {} if generator_kwargs is None else generator_kwargs + if stats is not None: generator_kwargs["stats"] = stats assert generator in list( @@ -146,16 +179,40 @@ ), f"Unknown generator method: {generator}" generator = generators.__dict__[generator] - data = generators.generate_random_array( - xyshape, len(time), generator=generator, generator_kwargs=generator_kwargs + # Step 5: generate the synthetic data array + data = ( + generators.generate_random_array( + xyshape, ntimes, generator=generator, generator_kwargs=generator_kwargs + ) + if data is None + else data ) data = data.squeeze() - if len(data.shape) == 4: - assert data.shape[1] == len(lev), "Length of stats must match number of levels" - dset[varname] = xr.DataArray(data, coords=(time, lev, lat, lon), attrs=attrs) + # Step 6: convert to Xarray DataArray by assigning coords + mask = dset["mask"].values if "mask" in dset.variables else 1.0 + data = np.array(data * mask, dtype=np.float32) + + if static is True: + if len(data.shape) == 4: + assert data.shape[1] == len( + lev + ), f' Length of stats {data.shape[1]} must match number of levels {len(lev)}.' 
+ dset[varname] = xr.DataArray(data, coords=(lev, lat, lon), attrs=attrs) + else: + dset[varname] = xr.DataArray(data, coords=(lat, lon), attrs=attrs) + else: - dset[varname] = xr.DataArray(data, coords=(time, lat, lon), attrs=attrs) + if len(data.shape) == 4: + # 4-D (time, lev, lat, lon) data: verify the level count below + assert data.shape[1] == len( + lev + ), f' Length of stats {data.shape[1]} must match number of levels {len(lev)}.' + dset[varname] = xr.DataArray( + data, coords=(time, lev, lat, lon), attrs=attrs + ) + else: + dset[varname] = xr.DataArray(data, coords=(time, lat, lon), attrs=attrs) + dset = dset.set_coords(("lat", "lon")) if coords is not None: dset[coords["name"]] = xr.DataArray(coords["value"], attrs=coords["atts"]) @@ -164,7 +221,8 @@ dset.attrs["convention"] = fmt if fmt == "cmip": - dset["bnds"].attrs = {"long_name": "vertex number"} + if "bnds" in dset.variables: + dset["bnds"].attrs = {"long_name": "vertex number"} cmip_global_atts = [ "external_variables", "history", @@ -215,6 +273,10 @@ cmip_global_atts = {x: "" for x in cmip_global_atts} dset.attrs = {**dset.attrs, **cmip_global_atts} + # remove unused fields + if grid == "tripolar": + dset = dset.drop(["mask", "wet", "depth"]) + return dset diff --git a/mdtf_test_data/synthetic/synthetic_setup.py b/mdtf_test_data/synthetic/synthetic_setup.py index 1693c0f..8512b50 100755 --- a/mdtf_test_data/synthetic/synthetic_setup.py +++ b/mdtf_test_data/synthetic/synthetic_setup.py @@ -1,5 +1,9 @@ #!/usr/bin/env python +import warnings +import xarray as xr +import pkg_resources as pkgr + __all__ = ["create_output_dirs", "synthetic_main"] """ Script to generate synthetic GFDL CM4 output """ import os @@ -7,11 +11,8 @@ from .synthetic_data import write_to_netcdf -def generate_date_string(STARTYEAR=1, - NYEARS=1, - TIME_RES="" - ): - """ formulate the date string in the file name""" +def generate_date_string(STARTYEAR=1, NYEARS=1, TIME_RES=""): + """formulate the date string in the file 
name""" date_string = ( str(STARTYEAR).zfill(4), str(STARTYEAR + NYEARS - 1).zfill(4), @@ -20,36 +21,36 @@ def generate_date_string(STARTYEAR=1, date_string = (date_string[0] + "01", date_string[1] + "12") elif TIME_RES == "day": date_string = (date_string[0] + "0101", date_string[1] + "1231") - date_string = ("-").join(list(date_string)) + date_string = ("-").join(list(date_string)) return date_string -def create_output_dirs(CASENAME="", - STARTYEAR=1, - NYEARS=10, - TIME_RES="day" - ): + +def create_output_dirs(CASENAME="", STARTYEAR=1, NYEARS=10, TIME_RES="day"): """Create output data directories""" + if "cmip" in str.lower(CASENAME): + # formulate the date string in the file name + date_string = generate_date_string( + STARTYEAR=STARTYEAR, NYEARS=NYEARS, TIME_RES="day" + ) + # output root directory and file name base must match + out_dir_root = f"{CASENAME.replace('.', '_')}_r1i1p1f1_gr1_{date_string}" + else: + out_dir_root = CASENAME - out_dir_root = CASENAME print("Creating output data directories") - if "CMIP" in CASENAME: - date_string = generate_date_string(STARTYEAR=STARTYEAR, - NYEARS=NYEARS, - TIME_RES=TIME_RES - ) - out_dir_root= f"{CASENAME.replace('.','_')}_r1i1p1f1_gr1_{date_string}" if not os.path.exists(f"{out_dir_root}/day"): os.makedirs(f"{out_dir_root}/day") - if "NCAR" in CASENAME: - if not os.path.exists(f"{out_dir_root}/mon"): - os.makedirs(f"{out_dir_root}/mon") + if not os.path.exists(f"{out_dir_root}/mon"): + os.makedirs(f"{out_dir_root}/mon") + if "ncar" in str.lower(out_dir_root): if not os.path.exists(f"{out_dir_root}/3hr"): os.makedirs(f"{out_dir_root}/3hr") if not os.path.exists(f"{out_dir_root}/1hr"): os.makedirs(f"{out_dir_root}/1hr") + def synthetic_main( yaml_dict={}, DLAT=20.0, @@ -61,12 +62,17 @@ def synthetic_main( DATA_FORMAT="", ): """Main script to generate synthetic data using GFDL naming conventions""" - create_output_dirs(CASENAME) + create_output_dirs(CASENAME, STARTYEAR=STARTYEAR, NYEARS=NYEARS) # parse the yaml 
dictionary var_names = yaml_dict["variables.name"] # -- Create Data print("Generating data with time resolution of ", TIME_RES) for v in var_names: + static = ( + yaml_dict[v + ".static"] + if str(v + ".static") in list(yaml_dict.keys()) + else False + ) stats = ( yaml_dict[v + ".stats"] if str(v + ".stats") in list(yaml_dict.keys()) @@ -82,8 +88,16 @@ def synthetic_main( if str(v + ".generator.args") in list(yaml_dict.keys()) else {} ) - # vinfo = yaml_dict[v] - # print(vinfo) + grid = ( + yaml_dict[v + ".grid"] + if str(v + ".grid") in list(yaml_dict.keys()) + else "standard" + ) + + assert grid in [ + "tripolar", + "standard", + ], f"Unknown grid `{grid}` specified for variable `{v}`" coords = ( yaml_dict[v]["coordinates"] @@ -91,6 +105,30 @@ def synthetic_main( else None ) + def _load_default_static(): + """Function to read packaged static file""" + _ds = pkgr.resource_filename( + "mdtf_test_data", f"resources/ocean_static_5deg.nc" + ) + return xr.open_dataset(_ds)["areacello"].values + + # Load the ocean static file + if static: + if str(v + ".source") in list(yaml_dict.keys()): + staticfilepath = yaml_dict[v + ".source.filename"] + if os.path.exists(staticfilepath): + _ds = xr.open_dataset(staticfilepath) + data = _ds[yaml_dict[v + ".source.variable"]].values + else: + raise ValueError( + f"Specified ocean static file does not exist: {staticfilepath}" + ) + else: + warnings.warn("Using default 5-degree ocean static file for grid") + data = _load_default_static() + else: + data = None + dset_out = generate_synthetic_dataset( DLON, DLAT, @@ -102,16 +140,18 @@ def synthetic_main( fmt=DATA_FORMAT, generator=generator, stats=stats, + static=static, coords=coords, + data=data, generator_kwargs=generator_kwargs, + grid=grid, ) if DATA_FORMAT == "cmip": # formulate the date string in the file name - date_string = generate_date_string(STARTYEAR=STARTYEAR, - NYEARS=NYEARS, - TIME_RES=TIME_RES - ) + date_string = generate_date_string( + STARTYEAR=STARTYEAR, 
NYEARS=NYEARS, TIME_RES="day" + ) outname = f"{CASENAME.replace('.','_')}_r1i1p1f1_gr1_{date_string}.{v}.{TIME_RES}.nc" # output root directory and file name base must match diff --git a/mdtf_test_data/synthetic/vertical/__init__.py b/mdtf_test_data/synthetic/vertical/__init__.py index fca56a5..47a9b2a 100644 --- a/mdtf_test_data/synthetic/vertical/__init__.py +++ b/mdtf_test_data/synthetic/vertical/__init__.py @@ -1,3 +1,5 @@ from .gfdl_plev19_vertical_coord import gfdl_plev19_vertical_coord from .gfdl_vertical_coord import gfdl_vertical_coord +from .mom6_z_coord import mom6_z_coord +from .mom6_rho2_coord import mom6_rho2_coord from .ncar_hybrid_coord import ncar_hybrid_coord diff --git a/mdtf_test_data/synthetic/vertical/gfdl_plev19_vertical_coord.py b/mdtf_test_data/synthetic/vertical/gfdl_plev19_vertical_coord.py index 4d2e605..50fc800 100755 --- a/mdtf_test_data/synthetic/vertical/gfdl_plev19_vertical_coord.py +++ b/mdtf_test_data/synthetic/vertical/gfdl_plev19_vertical_coord.py @@ -7,7 +7,6 @@ import cftime import xarray as xr import numpy as np -from mdtf_test_data.util.rectilinear import construct_rect_grid import mdtf_test_data.generators as generators diff --git a/mdtf_test_data/synthetic/vertical/gfdl_vertical_coord.py b/mdtf_test_data/synthetic/vertical/gfdl_vertical_coord.py index 747bc6f..ebcd5d1 100755 --- a/mdtf_test_data/synthetic/vertical/gfdl_vertical_coord.py +++ b/mdtf_test_data/synthetic/vertical/gfdl_vertical_coord.py @@ -7,7 +7,6 @@ import cftime import xarray as xr import numpy as np -from mdtf_test_data.util.rectilinear import construct_rect_grid import mdtf_test_data.generators as generators diff --git a/mdtf_test_data/synthetic/vertical/mom6_rho2_coord.py b/mdtf_test_data/synthetic/vertical/mom6_rho2_coord.py new file mode 100755 index 0000000..f5379e7 --- /dev/null +++ b/mdtf_test_data/synthetic/vertical/mom6_rho2_coord.py @@ -0,0 +1,126 @@ +""" Module for generating synthetic datasets """ + +__all__ = [ + "mom6_rho2_coord", +] + 
+import cftime +import xarray as xr +import numpy as np +import mdtf_test_data.generators as generators + +from mdtf_test_data.synthetic.time import generate_monthly_time_axis +from mdtf_test_data.synthetic.time import generate_daily_time_axis +from mdtf_test_data.synthetic.time import generate_hourly_time_axis + + +def mom6_rho2_coord(): + """Generates GFDL AM4 pressure coordinate + + Returns + ------- + xarray.DataArray + GFDL AM4 pressure levels and half levels + """ + + rho2_l = [ + 1013.75, + 1028.5, + 1029.2421875, + 1029.71875, + 1030.1796875, + 1030.625, + 1031.0546875, + 1031.46875, + 1031.8671875, + 1032.25, + 1032.6171875, + 1032.96875, + 1033.3046875, + 1033.625, + 1033.9296875, + 1034.21875, + 1034.4921875, + 1034.75, + 1034.9921875, + 1035.21875, + 1035.4296875, + 1035.625, + 1035.8046875, + 1035.96875, + 1036.1171875, + 1036.25, + 1036.375, + 1036.5, + 1036.625, + 1036.75, + 1036.875, + 1037, + 1037.125, + 1037.25, + 1037.65625, + ] + + rho2_i = [ + 999.5, + 1028, + 1029, + 1029.484375, + 1029.953125, + 1030.40625, + 1030.84375, + 1031.265625, + 1031.671875, + 1032.0625, + 1032.4375, + 1032.796875, + 1033.140625, + 1033.46875, + 1033.78125, + 1034.078125, + 1034.359375, + 1034.625, + 1034.875, + 1035.109375, + 1035.328125, + 1035.53125, + 1035.71875, + 1035.890625, + 1036.046875, + 1036.1875, + 1036.3125, + 1036.4375, + 1036.5625, + 1036.6875, + 1036.8125, + 1036.9375, + 1037.0625, + 1037.1875, + 1037.3125, + 1038, + ] + + rho2_i_attrs = { + "long_name": "Target Potential Density at interface", + "units": "kg m-3", + "axis": "Z", + "positive": "down", + } + + rho2_l_attrs = { + "long_name": "Target Potential Density at cell center", + "units": "kg m-3", + "axis": "Z", + "positive": "down", + "edges": "rho2_i", + } + + dset_out = xr.Dataset() + dset_out["rho2_l"] = xr.DataArray( + rho2_l, dims={"rho2_l": rho2_l}, coords={"rho2_l": rho2_l}, attrs=rho2_l_attrs + ) + dset_out["rho2_i"] = xr.DataArray( + rho2_i, dims={"rho2_i": rho2_i}, coords={"rho2_i": 
rho2_i}, attrs=rho2_i_attrs + ) + + return dset_out diff --git a/mdtf_test_data/synthetic/vertical/mom6_z_coord.py b/mdtf_test_data/synthetic/vertical/mom6_z_coord.py new file mode 100755 index 0000000..846b762 --- /dev/null +++ b/mdtf_test_data/synthetic/vertical/mom6_z_coord.py @@ -0,0 +1,140 @@ +""" Module for generating synthetic datasets """ + +__all__ = [ + "mom6_z_coord", +] + +import cftime +import xarray as xr +import numpy as np +import mdtf_test_data.generators as generators + +from mdtf_test_data.synthetic.time import generate_monthly_time_axis +from mdtf_test_data.synthetic.time import generate_daily_time_axis +from mdtf_test_data.synthetic.time import generate_hourly_time_axis + + +def mom6_z_coord(): + """Generates MOM6 depth (z) coordinate + + Returns + ------- + xarray.Dataset + MOM6 depth levels and interface levels + """ + + z_l = [ + 2.5, + 10, + 20, + 32.5, + 51.25, + 75, + 100, + 125, + 156.25, + 200, + 250, + 312.5, + 400, + 500, + 600, + 700, + 800, + 900, + 1000, + 1100, + 1200, + 1300, + 1400, + 1537.5, + 1750, + 2062.5, + 2500, + 3000, + 3500, + 4000, + 4500, + 5000, + 5500, + 6000, + 6500, + ] + + z_i = [ + 0, + 5, + 15, + 25, + 40, + 62.5, + 87.5, + 112.5, + 137.5, + 175, + 225, + 275, + 350, + 450, + 550, + 650, + 750, + 850, + 950, + 1050, + 1150, + 1250, + 1350, + 1450, + 1625, + 1875, + 2250, + 2750, + 3250, + 3750, + 4250, + 4750, + 5250, + 5750, + 6250, + 6750, + ] + + z_i_attrs = { + "standard_name": "depth_at_cell_interface", + "long_name": "Depth at interface", + "units": "meters", + "axis": "Z", + "positive": "down", + } + + z_l_attrs = { + "standard_name": "depth_at_cell_center", + "long_name": "Depth at cell center", + "units": "meters", + "axis": "Z", + "positive": "down", + "edges": "z_i", + } + + # duplicate of z_l with CMIP standard attributes + lev_attrs = { + "standard_name": "depth", + "long_name": "depth", + "units": "meters", + "axis": "Z", + "positive": "down" + } + + dset_out = xr.Dataset() + 
dset_out["z_l"] = xr.DataArray( + z_l, dims={"z_l": z_l}, coords={"z_l": z_l}, attrs=z_l_attrs + ) + dset_out["z_i"] = xr.DataArray( + z_i, dims={"z_i": z_i}, coords={"z_i": z_i}, attrs=z_i_attrs + ) + dset_out["lev"] = xr.DataArray( + z_l, dims={"lev": z_l}, coords={"lev": z_l}, attrs=lev_attrs + ) + + return dset_out diff --git a/mdtf_test_data/synthetic/vertical/ncar_hybrid_coord.py b/mdtf_test_data/synthetic/vertical/ncar_hybrid_coord.py index 35d866f..00b9d08 100755 --- a/mdtf_test_data/synthetic/vertical/ncar_hybrid_coord.py +++ b/mdtf_test_data/synthetic/vertical/ncar_hybrid_coord.py @@ -7,7 +7,6 @@ import cftime import xarray as xr import numpy as np -from mdtf_test_data.util.rectilinear import construct_rect_grid import mdtf_test_data.generators as generators diff --git a/mdtf_test_data/tests/test_synthetic_data.py b/mdtf_test_data/tests/test_synthetic_data.py index 0ed9a63..74b24ae 100644 --- a/mdtf_test_data/tests/test_synthetic_data.py +++ b/mdtf_test_data/tests/test_synthetic_data.py @@ -18,6 +18,8 @@ from mdtf_test_data.synthetic.vertical import gfdl_plev19_vertical_coord from mdtf_test_data.synthetic.vertical import gfdl_vertical_coord from mdtf_test_data.synthetic.vertical import ncar_hybrid_coord +from mdtf_test_data.synthetic.vertical import mom6_z_coord +from mdtf_test_data.synthetic.vertical import mom6_rho2_coord __all__ = [ "test_xr_times_from_tuples_ncar", @@ -25,6 +27,7 @@ "test_ncar_hybrid_coord", "test_gfdl_plev19_vertical_coord", "test_gfdl_vertical_coord", + "test_mom6_z_coord", "test_generate_random_array", "test_generate_daily_time_axis", "test_generate_hourly_time_axis", @@ -104,6 +107,20 @@ def test_gfdl_vertical_coord(): pytest.gfdl_vert = result +def test_mom6_z_coord(): + result = mom6_z_coord() + assert isinstance(result, xr.Dataset) + assert np.allclose(float(result.z_l.sum()), 57085.0) + assert np.allclose(float(result.z_i.sum()), 60460.0) + + +def test_mom6_rho2_coord(): + result = mom6_rho2_coord() + assert isinstance(result, 
xr.Dataset) + assert np.allclose(float(result.rho2_l.sum()), 36176.125) + assert np.allclose(float(result.rho2_i.sum()), 37194.875) + + def test_generate_daily_time_axis(): result = generate_daily_time_axis(1850, 2) assert isinstance(result, xr.Dataset) diff --git a/mdtf_test_data/util/__init__.py b/mdtf_test_data/util/__init__.py index 048bdc9..2c904c9 100755 --- a/mdtf_test_data/util/__init__.py +++ b/mdtf_test_data/util/__init__.py @@ -1,4 +1,2 @@ """ init file for util directory """ -from .rectilinear import (regrid_lat_lon_dataset, - construct_rect_grid -) \ No newline at end of file +from .rectilinear import regrid_lat_lon_dataset diff --git a/mdtf_test_data/util/rectilinear.py b/mdtf_test_data/util/rectilinear.py index 4634d6c..be1d028 100755 --- a/mdtf_test_data/util/rectilinear.py +++ b/mdtf_test_data/util/rectilinear.py @@ -1,151 +1,21 @@ #!/usr/bin/env python """ Collection of tools to coarsen model data """ -__all__ = ["construct_rect_grid", "regrid_lat_lon_dataset"] +__all__ = ["regrid_lat_lon_dataset"] import warnings import numpy as np import xarray as xr +from mdtf_test_data.synthetic.horizontal import construct_rect_grid + try: import xesmf as xe except: warnings.warn("Unable to load `xesmf`. Regridding functionality will not work.") -def construct_rect_grid(dlon, dlat, add_attrs=False, attr_fmt="ncar", bounds=False): - """Generate a rectilinear grid based on values of dx and dy - - Parameters - ---------- - dlon : float - Grid spacing in the x-dimension (longitude) - dlat : float - Grid spacing in the y-dimension (latitude) - add_attrs : bool, optional - Include lat and lon variable attributes, by default False - - Returns - ------- - xarray.Dataset - Empty shell dataset with lat and lon dimensions - """ - - if 180.0 % dlat != 0: - dlat = 180.0 / np.floor(180.0 / dlat) - warnings.warn( - f"180 degrees does not divide evenly by dlat. 
Adjusting dlat to {dlat}" - ) - - if 360.0 % dlon != 0: - dlon = 360.0 / np.floor(360.0 / dlon) - warnings.warn( - f"360 degrees does not divide evenly by dlon. Adjusting dlon to {dlon}" - ) - - lat = np.arange(-90.0 + (dlat / 2.0), 90.0, dlat) - lon = np.arange(0.0 + (dlon / 2.0), 360.0, dlon) - - dset = xr.Dataset({"lat": (["lat"], lat), "lon": (["lon"], lon)}) - - if bounds: - lat_bnds = np.arange(-90.0, 90.0 + (dlat / 2.0), dlat) - lon_bnds = np.arange(0.0, 360.0 + (dlon / 2.0), dlon) - - lat_bnds = np.array(list(zip(lat_bnds[0:-1], lat_bnds[1::]))) - lon_bnds = np.array(list(zip(lon_bnds[0:-1], lon_bnds[1::]))) - - bnds = np.array([0.0, 1.0]) - - dset["lat_bnds"] = xr.DataArray(lat_bnds, coords=(dset.lat, ("bnds", bnds))) - dset["lon_bnds"] = xr.DataArray(lon_bnds, coords=(dset.lon, ("bnds", bnds))) - - dset = dset.drop("bnds") - - if attr_fmt == "ncar": - dset["lat"].attrs = ( - {"long_name": "latitude", "units": "degrees_north"} if add_attrs else {} - ) - dset["lon"].attrs = ( - {"long_name": "longitude", "units": "degrees_east"} if add_attrs else {} - ) - - elif attr_fmt == "gfdl": - dset["lat"].attrs = ( - {"long_name": "latitude", "units": "degrees_N", "cartesian_axis": "Y"} - if add_attrs - else {} - ) - dset["lon"].attrs = ( - {"long_name": "longitude", "units": "degrees_E", "cartesian_axis": "X"} - if add_attrs - else {} - ) - - if bounds: - dset["lat"].attrs["bounds"] = "lat_bnds" - dset["lon"].attrs["bounds"] = "lon_bnds" - - dset["lat_bnds"].attrs = ( - {"long_name": "latitude bounds", "cartesian_axis": "Y"} - if add_attrs - else {} - ) - dset["lon_bnds"].attrs = ( - {"long_name": "longitude bounds", "cartesian_axis": "X"} - if add_attrs - else {} - ) - - elif attr_fmt == "cmip": - dset["lat"].attrs = ( - { - "long_name": "latitude", - "units": "degrees_north", - "axis": "Y", - "standard_name": "latitude", - "cell_methods": "time: point", - } - if add_attrs - else {} - ) - dset["lon"].attrs = ( - { - "long_name": "longitude", - "units": 
"degrees_east", - "axis": "X", - "standard_name": "longitude", - "cell_methods": "time: point", - } - if add_attrs - else {} - ) - - if bounds: - dset["bnds"] = xr.DataArray(bnds, dims={"bnds": bnds}) - dset["bnds"].attrs["long_name"] = "vertex number" - - dset["lat"].attrs["bounds"] = "lat_bnds" - dset["lon"].attrs["bounds"] = "lon_bnds" - - dset["lat_bnds"].attrs = ( - {"long_name": "latitude bounds", "axis": "Y", "units": "degrees_north"} - if add_attrs - else {} - ) - dset["lon_bnds"].attrs = ( - {"long_name": "longitude bounds", "axis": "X", "units": "degrees_east"} - if add_attrs - else {} - ) - - else: - raise ValueError("Unknown model attribute format") - - return dset - - def regrid_lat_lon_dataset(dset, dlon=10.0, dlat=10.0, method="bilinear"): """Regrids xarray dataset to a standard lat-lon grid diff --git a/scripts/mdtf_synthetic.py b/scripts/mdtf_synthetic.py index 3a3cb55..ee1fa70 100755 --- a/scripts/mdtf_synthetic.py +++ b/scripts/mdtf_synthetic.py @@ -143,20 +143,24 @@ def main(): ) if cli_info.convention == "CMIP": print("Importing CMIP variable information") - input_data = pkgr.resource_filename("mdtf_test_data", "config/cmip_day.yml") - input_data = read_yaml(input_data) + time_res = ["mon", "day"] + for t in time_res: + input_data = pkgr.resource_filename( + "mdtf_test_data", f"config/cmip_{t}.yml" + ) + input_data = read_yaml(input_data) - print("Calling Synthetic Data Generator for CMIP data") - synthetic_main( - input_data, - DLAT=cli_info.dlat, - DLON=cli_info.dlon, - STARTYEAR=cli_info.startyear, - NYEARS=cli_info.nyears, - CASENAME="CMIP.Synthetic", - TIME_RES="day", - DATA_FORMAT="cmip", - ) + print("Calling Synthetic Data Generator for CMIP data") + synthetic_main( + input_data, + DLAT=cli_info.dlat, + DLON=cli_info.dlon, + STARTYEAR=cli_info.startyear, + NYEARS=cli_info.nyears, + CASENAME="CMIP.Synthetic", + TIME_RES=t, + DATA_FORMAT="cmip", + ) if __name__ == "__main__": diff --git a/setup.cfg b/setup.cfg index f0bc74d..d8e5a51 100644 
--- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = mdtf_test_data -version = 1.0.3.post2 +version = 1.0.4rc1 description = Tools for working with MDTF Diagnostics test data sets url = https://github.com/jkrasting/mdtf_test_data author = MDTF Development Team @@ -49,3 +49,4 @@ install_requires = mdtf_test_data = config/* tests/* + resources/*