From f956dd2b64bc029d77237cf978f1cdebfab6e11f Mon Sep 17 00:00:00 2001
From: Fabian Gans
Date: Tue, 25 Aug 2020 17:36:06 +0200
Subject: [PATCH] Make NetCDF dataset not return handles

---
 Manifest.toml          |  5 +++++
 Project.toml           |  4 ++--
 src/datasets/netcdf.jl | 26 +++++++++++++++++++++++---
 3 files changed, 30 insertions(+), 5 deletions(-)

diff --git a/Manifest.toml b/Manifest.toml
index b05ba51..e319b54 100644
--- a/Manifest.toml
+++ b/Manifest.toml
@@ -13,6 +13,11 @@ version = "0.17.11"
 deps = ["Printf"]
 uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
 
+[[DiskArrays]]
+git-tree-sha1 = "3bfd0eb19711297e1b3656b7d4709f7b4b240195"
+uuid = "3c3547ce-8d99-4f5e-a174-61eb10b00ae3"
+version = "0.2.4"
+
 [[Distributed]]
 deps = ["Random", "Serialization", "Sockets"]
 uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
diff --git a/Project.toml b/Project.toml
index ee7797c..09f9dfb 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "YAXArrayBase"
 uuid = "90b8fcef-0c2d-428d-9c56-5f86629e9d14"
 authors = ["Fabian Gans "]
-version = "0.1.0"
+version = "0.2.0"
 
 [deps]
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
@@ -9,9 +9,9 @@ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 
 [compat]
-julia = "^1"
 DataStructures = "0.17, 0.18"
 Requires = "1"
+julia = "^1"
 
 [extras]
 ArchGDAL = "c9ce4bd3-c3d5-55b8-8973-c0e20141b8c3"
diff --git a/src/datasets/netcdf.jl b/src/datasets/netcdf.jl
index ef383ce..0d0861b 100644
--- a/src/datasets/netcdf.jl
+++ b/src/datasets/netcdf.jl
@@ -16,11 +16,31 @@ struct NetCDFDataset
 end
 NetCDFDataset(filename) = NetCDFDataset(filename,NC_NOWRITE)
 
+import .NetCDF: AbstractDiskArray, readblock!, writeblock!, haschunks, eachchunk
+
+struct NetCDFVariable{T,N} <: AbstractDiskArray{T,N}
+  filename::String
+  varname::String
+  size::NTuple{N,Int}
+end
+#Define method forwarding for DiskArray methods
+for m in [:haschunks, :eachchunk]
+  eval(:(function $(m)(v::NetCDFVariable,args...;kwargs...)
+    NetCDF.open(a->$(m)(a,args...;kwargs...), v.filename, v.varname)
+  end
+  ))
+end
+writeblock!(v::NetCDFVariable, aout, r::AbstractUnitRange...) = NetCDF.open(a->writeblock!(a,aout,r...), v.filename, v.varname, mode=NC_WRITE)
+readblock!(v::NetCDFVariable, aout, r::AbstractUnitRange...) = NetCDF.open(a->readblock!(a,aout,r...), v.filename, v.varname)
+
+Base.size(v::NetCDFVariable) = v.size
+
 get_var_dims(ds::NetCDFDataset,name) = NetCDF.open(v->map(i->i.name,v[name].dim),ds.filename)
 get_varnames(ds::NetCDFDataset) = NetCDF.open(v->collect(keys(v.vars)),ds.filename)
 get_var_attrs(ds::NetCDFDataset, name) = NetCDF.open(v->v[name].atts,ds.filename)
 function Base.getindex(ds::NetCDFDataset, i)
-  NetCDF.open(ds.filename,i,mode=ds.mode)
+  s,et = NetCDF.open(j->(size(j),eltype(j)),ds.filename,i)
+  NetCDFVariable{et,length(s)}(ds.filename, i, s)
 end
 
 Base.haskey(ds::NetCDFDataset,k) = NetCDF.open(nc->haskey(nc.vars,k),ds.filename)
@@ -28,12 +48,12 @@ function add_var(p::NetCDFDataset, T::Type, varname, s, dimnames, attr;
   chunksize=s, compress = -1)
   dimsdescr = Iterators.flatten(zip(dimnames,s))
   nccreate(p.filename, varname, dimsdescr..., atts = attr, t=T, chunksize=chunksize, compress=compress)
-  NetCDF.open(p.filename,varname,mode=p.mode)
+  NetCDFVariable{T,length(s)}(p.filename,varname,s)
 end
 
 function create_empty(::Type{NetCDFDataset}, path)
   NetCDF.create(path, NcVar[])
-  NetCDFDataset(path)
+  NetCDFDataset(path)
 end
 
 allow_parallel_write(::NetCDFDataset) = false
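
After this patch, indexing a `NetCDFDataset` no longer returns an open NetCDF handle but a lazy `NetCDFVariable` that stores only the file name, variable name, and size; every block read or write re-opens and closes the file on demand. Below is a minimal, hypothetical usage sketch of that behaviour. The file name `"data.nc"`, the variable name `"tos"`, and the assumption that the variable is two-dimensional are illustrative only and not part of the patch.

```julia
# Minimal usage sketch (assumes NetCDF.jl is loaded so the Requires hook
# activates src/datasets/netcdf.jl; "data.nc" and "tos" are hypothetical).
using NetCDF, YAXArrayBase

ds = YAXArrayBase.NetCDFDataset("data.nc")  # stores only the file name, keeps no handle open
v  = ds["tos"]                              # lazy NetCDFVariable, an AbstractDiskArray
size(v)                                     # size is cached in the struct, no file access needed
block = v[1:10, 1:10]                       # readblock! opens the file, reads the block, closes it again
```

Because the returned object carries no open handle, it stays valid across serialization to worker processes, which is the motivation for dropping the handle-returning API.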