From 6f9a8b4cbafafb6c3d2208000fb7f31dfc964380 Mon Sep 17 00:00:00 2001 From: rafaqz Date: Fri, 19 Jan 2024 11:24:49 +0100 Subject: [PATCH 1/8] fix common data model chunk propagation, and other things --- ext/RastersArchGDALExt/gdal_source.jl | 4 +- .../gribdatasets_source.jl | 6 +- ext/RastersHDF5Ext/smap_source.jl | 6 +- ext/RastersNCDatasetsExt/ncdatasets_source.jl | 28 ++-- src/array.jl | 4 +- src/sources/commondatamodel.jl | 121 ++++++++++-------- src/sources/grd.jl | 2 +- src/stack.jl | 2 +- test/sources/gribdatasets.jl | 13 +- test/sources/ncdatasets.jl | 7 +- 10 files changed, 105 insertions(+), 88 deletions(-) diff --git a/ext/RastersArchGDALExt/gdal_source.jl b/ext/RastersArchGDALExt/gdal_source.jl index a84a3d5fb..7e8aaa4ff 100644 --- a/ext/RastersArchGDALExt/gdal_source.jl +++ b/ext/RastersArchGDALExt/gdal_source.jl @@ -36,7 +36,7 @@ const GDAL_VIRTUAL_FILESYSTEMS = "/vsi" .* ( # Array ######################################################################## -function RA.FileArray(raster::AG.RasterDataset{T}, filename; kw...) where {T} +function RA.FileArray{GDALsource}(raster::AG.RasterDataset{T}, filename; kw...) where {T} eachchunk, haschunks = DA.eachchunk(raster), DA.haschunks(raster) RA.FileArray{GDALsource,T,3}(filename, size(raster); eachchunk, haschunks, kw...) end @@ -242,7 +242,7 @@ function RA.Raster(ds::AG.RasterDataset; filelist = AG.filelist(ds) raster = if lazy && length(filelist) > 0 filename = first(filelist) - A = Raster(FileArray(ds, filename), args...) + A = Raster(FileArray{GDALSource}(ds, filename), args...) else Raster(Array(ds), args...) end diff --git a/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl b/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl index 5d0d2c9df..b0dac9993 100644 --- a/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl +++ b/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl @@ -1,7 +1,5 @@ const GDS = GRIBDatasets -RA.FileStack{GRIBsource}(ds::AbstractDataset, filename::AbstractString; write=false, keys) = RA.FileStack(GRIBsource, ds, filename; write, keys) - function RA.OpenStack(fs::RA.FileStack{GRIBsource,K}) where K RA.OpenStack{GRIBsource,K}(GDS.GRIBDataset(RA.filename(fs))) end @@ -14,3 +12,7 @@ function RA._open(f, ::Type{GRIBsource}, filename::AbstractString; write=false, ds = GRIBDatasets.GRIBDataset(filename) RA._open(f, GRIBsource, ds; kw...) end + +# Hack to get the inner DiskArrays chunks as they are not exposed at the top level +RA._get_eachchunk(var::GDS.Variable) = DiskArrays.eachchunk(var.values) +RA._get_haschunks(var::GDS.Variable) = DiskArrays.haschunks(var.values) diff --git a/ext/RastersHDF5Ext/smap_source.jl b/ext/RastersHDF5Ext/smap_source.jl index 09515b8f0..ba8b1697b 100644 --- a/ext/RastersHDF5Ext/smap_source.jl +++ b/ext/RastersHDF5Ext/smap_source.jl @@ -87,10 +87,10 @@ DA.haschunks(var::SMAPvar) = DA.Unchunked() # Raster ###################################################################### -function RA.FileArray(ds::SMAPhdf5, filename::AbstractString; key, kw...) - RA.FileArray(ds[key], filename; key, kw...) +function RA.FileArray{SMAPsource}(ds::SMAPhdf5, filename::AbstractString; key, kw...) + RA.FileArray{SMAPsource}(ds[key], filename; key, kw...) end -function RA.FileArray(var::SMAPvar, filename::AbstractString; key, kw...) +function RA.FileArray{SMAPsource}(var::SMAPvar, filename::AbstractString; key, kw...) 
T = eltype(var) N = ndims(var) eachchunk = DA.eachchunk(var) diff --git a/ext/RastersNCDatasetsExt/ncdatasets_source.jl b/ext/RastersNCDatasetsExt/ncdatasets_source.jl index 82804f326..0fd194ad2 100644 --- a/ext/RastersNCDatasetsExt/ncdatasets_source.jl +++ b/ext/RastersNCDatasetsExt/ncdatasets_source.jl @@ -23,7 +23,7 @@ function Base.write(filename::AbstractString, ::Type{<:CDMsource}, A::AbstractRa mode = !isfile(filename) || !append ? "c" : "a"; ds = NCD.Dataset(filename, mode; attrib=RA._attribdict(metadata(A))) try - _ncdwritevar!(ds, A; kw...) + _cdmwritevar!(ds, A; kw...) finally close(ds) end @@ -65,15 +65,13 @@ function Base.write(filename::AbstractString, ::Type{<:CDMsource}, s::AbstractRa mode = !isfile(filename) || !append ? "c" : "a"; ds = NCD.Dataset(filename, mode; attrib=RA._attribdict(metadata(s))) try - map(key -> _ncdwritevar!(ds, s[key]), keys(s); kw...) + map(key -> _cdmwritevar!(ds, s[key]), keys(s); kw...) finally close(ds) end return filename end -RA.FileStack{NCDsource}(ds::AbstractDataset, filename::AbstractString; write=false, keys) = RA.FileStack(NCDsource, ds, filename; write, keys) - function RA.OpenStack(fs::RA.FileStack{NCDsource,K}) where K RA.OpenStack{NCDsource,K}(NCD.Dataset(RA.filename(fs))) end @@ -88,7 +86,7 @@ function RA._open(f, ::Type{NCDsource}, filename::AbstractString; write=false, k end # Add a var array to a dataset before writing it. -function _ncdwritevar!(ds::AbstractDataset, A::AbstractRaster{T,N}; kw...) where {T,N} +function _cdmwritevar!(ds::AbstractDataset, A::AbstractRaster{T,N}; kw...) where {T,N} _def_dim_var!(ds, A) attrib = RA._attribdict(metadata(A)) # Set _FillValue @@ -117,9 +115,8 @@ function _ncdwritevar!(ds::AbstractDataset, A::AbstractRaster{T,N}; kw...) where dimnames = lowercase.(string.(map(RA.name, dims(A)))) var = NCD.defVar(ds, key, eltyp, dimnames; attrib=attrib, kw...) - # NCDatasets needs Colon indices to write without allocations - # TODO do this with DiskArrays broadcast ?? - var[map(_ -> Colon(), axes(A))...] 
= parent(read(A)) + # Write with a DiskArays.jl broadcast + var .= A return nothing end @@ -132,13 +129,13 @@ function _def_dim_var!(ds::AbstractDataset, dim::Dimension) lookup(dim) isa NoLookup && return nothing # Shift index before conversion to Mapped - dim = RA._ncdshiftlocus(dim) + dim = RA._cdmshiftlocus(dim) if dim isa Y || dim isa X dim = convertlookup(Mapped, dim) end # Attributes attrib = RA._attribdict(metadata(dim)) - RA._ncd_set_axis_attrib!(attrib, dim) + RA._cdm_set_axis_attrib!(attrib, dim) # Bounds variables if sampling(dim) isa Intervals bounds = Dimensions.dim2boundsmatrix(dim) @@ -150,10 +147,13 @@ function _def_dim_var!(ds::AbstractDataset, dim::Dimension) return nothing end -const _NCDVar = NCDatasets.CFVariable{Union{Missing, Float32}, 3, NCDatasets.Variable{Float32, 3, NCDatasets.NCDataset}, NCDatasets.Attributes{NCDatasets.NCDataset{Nothing}}, NamedTuple{(:fillvalue, :scale_factor, :add_offset, :calendar, :time_origin, :time_factor), Tuple{Float32, Nothing, Nothing, Nothing, Nothing, Nothing}}} +# Hack to get the inner DiskArrays chunks as they are not exposed at the top level +RA._get_eachchunk(var::NCD.Variable) = DiskArrays.eachchunk(var) +RA._get_haschunks(var::NCD.Variable) = DiskArrays.haschunks(var) # precompilation +# const _NCDVar = NCDatasets.CFVariable{Union{Missing, Float32}, 3, NCDatasets.Variable{Float32, 3, NCDatasets.NCDataset}, NCDatasets.Attributes{NCDatasets.NCDataset{Nothing}}, NamedTuple{(:fillvalue, :scale_factor, :add_offset, :calendar, :time_origin, :time_factor), Tuple{Float32, Nothing, Nothing, Nothing, Nothing, Nothing}}} # function _precompile(::Type{NCDsource}) # ccall(:jl_generating_output, Cint, ()) == 1 || return nothing @@ -165,9 +165,9 @@ const _NCDVar = NCDatasets.CFVariable{Union{Missing, Float32}, 3, NCDatasets.Var # precompile(dims, (_NCDVar,Symbol,Nothing,EPSG)) # precompile(dims, (_NCDVar,Symbol,EPSG,EPSG)) # precompile(_firstkey, (NCDatasets.NCDataset{Nothing},)) -# precompile(_ncddim, (NCDatasets.NCDataset{Nothing}, Symbol, Nothing, Nothing)) -# precompile(_ncddim, (NCDatasets.NCDataset{Nothing}, Symbol, Nothing, EPSG)) -# precompile(_ncddim, (NCDatasets.NCDataset{Nothing}, Symbol, EPSG, EPSG)) +# precompile(_cdmdim, (NCDatasets.NCDataset{Nothing}, Symbol, Nothing, Nothing)) +# precompile(_cdmdim, (NCDatasets.NCDataset{Nothing}, Symbol, Nothing, EPSG)) +# precompile(_cdmdim, (NCDatasets.NCDataset{Nothing}, Symbol, EPSG, EPSG)) # precompile(Raster, (NCDatasets.NCDataset{Nothing}, String, Nothing)) # precompile(Raster, (NCDatasets.NCDataset{Nothing}, String, Symbol)) # precompile(Raster, (_NCDVar, String, Symbol)) diff --git a/src/array.jl b/src/array.jl index 7d033546b..82d46b450 100644 --- a/src/array.jl +++ b/src/array.jl @@ -251,7 +251,7 @@ function Raster(filename::AbstractString; source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) _open(filename; source) do ds key = filekey(ds, key) - Raster(ds, filename, key; kw...) + Raster(ds, filename, key; source, kw...) end end function Raster(ds, filename::AbstractString, key=nothing; @@ -265,7 +265,7 @@ function Raster(ds, filename::AbstractString, key=nothing; mappedcrs = defaultmappedcrs(source, mappedcrs) dims = dims isa Nothing ? 
DD.dims(ds, crs, mappedcrs) : dims data = if lazy - FileArray(ds, filename; key, write) + FileArray{source}(ds, filename; key, write) else _open(Array, source, ds; key) end diff --git a/src/sources/commondatamodel.jl b/src/sources/commondatamodel.jl index d687fe527..7e57ee91b 100644 --- a/src/sources/commondatamodel.jl +++ b/src/sources/commondatamodel.jl @@ -31,24 +31,38 @@ const CDM_STANDARD_NAME_MAP = Dict( "time" => Ti, ) +# CFVariable is imblemented again here because the one +# in CommonDataModel is not DiskArrays.jl compatible. +# TODO move this code to CommonDataModel.jl struct CFVariable{T,N,TV,TA,TSA} <: CDM.AbstractVariable{T,N} var::CDM.CFVariable{T,N,TV,TA,TSA} end @implement_diskarray CFVariable +# Base methods Base.parent(A::CFVariable) = A.var +Base.getindex(os::OpenStack{<:CDMsource}, key::Symbol) = CFVariable(dataset(os)[key]) +# DiskArrays.jl methods function DiskArrays.readblock!(A::CFVariable, aout, i::AbstractUnitRange...) - aout[i...] = getindex(parent(A), i...) + aout .= getindex(parent(A), i...) end - function DiskArrays.writeblock!(A::CFVariable, data, i::AbstractUnitRange...) setindex!(parent(A), data, i...) return data end _open(f, ::Type{<:CDMsource}, var::CFVariable; kw...) = cleanreturn(f(var)) -# Methods from CommonDataModel +# We have to dig down to find the chunks as they are not immplemented +# properly in the source packages, but they are in their internal objects. +# We just make it work here even though it doesn't work in NCDatasets.jl or GRIBDatasets.jl +DiskArrays.eachchunk(var::CFVariable) = _get_eachchunk(var.var.var) +DiskArrays.haschunks(var::CFVariable) = _get_haschunks(var.var.var) + +function _get_eachchunk end +function _get_haschunks end + +# CommonDataModel.jl methods for method in (:size, :name, :dimnames, :dataset, :attribnames) @eval begin CDM.$(method)(var::CFVariable) = CDM.$(method)(parent(var)) @@ -61,43 +75,40 @@ for method in (:attrib, :dim) end end -Base.getindex(os::OpenStack{<:CDMsource}, key::Symbol) = CFVariable(dataset(os)[key]) - -_dataset(var::AbstractVariable) = CDM.dataset(var) - +# Rasters methods haslayers(::Type{<:CDMsource}) = true defaultcrs(::Type{<:CDMsource}) = EPSG(4326) defaultmappedcrs(::Type{<:CDMsource}) = EPSG(4326) +_dataset(var::AbstractVariable) = CDM.dataset(var) + # Raster ######################################################################## -function Raster(ds::AbstractDataset, filename::AbstractString, key=nothing; kw...) +function Raster(ds::AbstractDataset, filename::AbstractString, key=nothing; source=nothing, kw...) + source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) if isnothing(key) # Find the first valid variable for key in layerkeys(ds) if ndims(ds[key]) > 0 @info "No `name` or `key` keyword provided, using first valid layer with name `:$key`" - return Raster(CFVariable(ds[key]), filename, key; source=CDMsource, kw...) + return Raster(CFVariable(ds[key]), filename, key; source, kw...) end end throw(ArgumentError("dataset at $filename has no array variables")) else - return Raster(CFVariable(ds[key]), filename, key; kw...) + return Raster(CFVariable(ds[key]), filename, key; source, kw...) end end _firstkey(ds::AbstractDataset, key::Nothing=nothing) = Symbol(first(layerkeys(ds))) _firstkey(ds::AbstractDataset, key) = Symbol(key) -function FileArray(var::AbstractVariable, filename::AbstractString; kw...) 
- source = _sourcetype(filename) - da = RasterDiskArray{source}(var) - size_ = size(da) - eachchunk = DA.eachchunk(da) - haschunks = DA.haschunks(da) +function FileArray{source}(var::AbstractVariable, filename::AbstractString; kw...) where source + eachchunk = DA.eachchunk(var) + haschunks = DA.haschunks(var) T = eltype(var) - N = length(size_) - FileArray{source,T,N}(filename, size_; eachchunk, haschunks, kw...) + N = ndims(var) + FileArray{source,T,N}(filename, size(var); eachchunk, haschunks, kw...) end function Base.open(f::Function, A::FileArray{source}; write=A.write, kw...) where source <: CDMsource @@ -125,13 +136,13 @@ end function DD.dims(ds::AbstractDataset, crs=nothing, mappedcrs=nothing) map(_dimkeys(ds)) do key - _ncddim(ds, key, crs, mappedcrs) + _cdmdim(ds, key, crs, mappedcrs) end |> Tuple end function DD.dims(var::AbstractVariable, crs=nothing, mappedcrs=nothing) names = CDM.dimnames(var) map(names) do name - _ncddim(_dataset(var), name, crs, mappedcrs) + _cdmdim(_dataset(var), name, crs, mappedcrs) end |> Tuple end @@ -148,7 +159,7 @@ function DD.layerdims(ds::AbstractDataset) end function DD.layerdims(var::AbstractVariable) map(CDM.dimnames(var)) do dimname - _ncddim(_dataset(var), dimname) + _cdmdim(_dataset(var), dimname) end |> Tuple end @@ -194,11 +205,13 @@ function layerkeys(ds::AbstractDataset) nondim = setdiff(nondim, grid_mapping) end -function FileStack(source::Type{<:CDMsource}, ds::AbstractDataset, filename::AbstractString; write, keys) +function FileStack{source}( + ds::AbstractDataset, filename::AbstractString; write=false, keys +) where source<:CDMsource keys = map(Symbol, keys isa Nothing ? layerkeys(ds) : keys) |> Tuple type_size_ec_hc = map(keys) do key var = ds[string(key)] - Union{Missing,eltype(var)}, size(var), _ncd_eachchunk(var), _ncd_haschunks(var) + Union{Missing,eltype(var)}, size(var), _cdm_eachchunk(var), _cdm_haschunks(var) end layertypes = map(x->x[1], type_size_ec_hc) layersizes = map(x->x[2], type_size_ec_hc) @@ -219,11 +232,11 @@ cleanreturn(A::CFVariable) = Array(A) # Utils ######################################################################## -function _ncddim(ds, dimname::Key, crs=nothing, mappedcrs=nothing) +function _cdmdim(ds, dimname::Key, crs=nothing, mappedcrs=nothing) if haskey(ds, dimname) var = ds[dimname] - D = _ncddimtype(_attrib(var), dimname) - lookup = _ncdlookup(ds, dimname, D, crs, mappedcrs) + D = _cdmdimtype(_attrib(var), dimname) + lookup = _cdmlookup(ds, dimname, D, crs, mappedcrs) return D(lookup) else # The var doesn't exist. Maybe its `complex` or some other marker, @@ -248,7 +261,7 @@ end # Find the matching dimension constructor. If its an unknown name # use the generic Dim with the dim name as type parameter -function _ncddimtype(attrib, dimname) +function _cdmdimtype(attrib, dimname) if haskey(attrib, "axis") k = attrib["axis"] if haskey(CDM_AXIS_MAP, k) @@ -267,22 +280,22 @@ function _ncddimtype(attrib, dimname) return DD.basetypeof(DD.key2dim(Symbol(dimname))) end -# _ncdlookup +# _cdmlookup # Generate a `LookupArray` from a netcdf dim. 
-function _ncdlookup(ds::AbstractDataset, dimname, D::Type, crs, mappedcrs) +function _cdmlookup(ds::AbstractDataset, dimname, D::Type, crs, mappedcrs) dvar = ds[dimname] index = dvar[:] metadata = _metadatadict(CDMsource, _attrib(dvar)) - return _ncdlookup(ds, dimname, D, index, metadata, crs, mappedcrs) + return _cdmlookup(ds, dimname, D, index, metadata, crs, mappedcrs) end # For unknown types we just make a Categorical lookup -function _ncdlookup(ds::AbstractDataset, dimname, D::Type, index::AbstractArray, metadata, crs, mappedcrs) +function _cdmlookup(ds::AbstractDataset, dimname, D::Type, index::AbstractArray, metadata, crs, mappedcrs) Categorical(index; order=Unordered(), metadata=metadata) end # For Number and AbstractTime we generate order/span/sampling # We need to include `Missing` in unions in case `_FillValue` is used # on coordinate variables in a file and propagates here. -function _ncdlookup( +function _cdmlookup( ds::AbstractDataset, dimname, D::Type, index::AbstractArray{<:Union{Missing,Number,Dates.AbstractTime}}, metadata, crs, mappedcrs ) @@ -294,17 +307,17 @@ function _ncdlookup( boundskey = var.attrib["bounds"] boundsmatrix = Array(ds[boundskey]) span, sampling = Explicit(boundsmatrix), Intervals(Center()) - return _ncdlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) + return _cdmlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) elseif eltype(index) <: Union{Missing,Dates.AbstractTime} - span, sampling = _ncdperiod(index, metadata) - return _ncdlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) + span, sampling = _cdmperiod(index, metadata) + return _cdmlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) else - span, sampling = _ncdspan(index, order), Points() - return _ncdlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) + span, sampling = _cdmspan(index, order), Points() + return _cdmlookup(D, index, order, span, sampling, metadata, crs, mappedcrs) end end # For X and Y use a Mapped <: AbstractSampled lookup -function _ncdlookup( +function _cdmlookup( D::Type{<:Union{<:XDim,<:YDim}}, index, order::Order, span, sampling, metadata, crs, mappedcrs ) # If the index is regularly spaced and there is no crs @@ -318,15 +331,15 @@ function _ncdlookup( return Mapped(index, order, span, sampling, metadata, crs, mappedcrs, dim) end # Band dims have a Categorical lookup, with order -function _ncdlookup(D::Type{<:Band}, index, order::Order, span, sampling, metadata, crs, mappedcrs) +function _cdmlookup(D::Type{<:Band}, index, order::Order, span, sampling, metadata, crs, mappedcrs) Categorical(index, order, metadata) end # Otherwise use a regular Sampled lookup -function _ncdlookup(D::Type, index, order::Order, span, sampling, metadata, crs, mappedcrs) +function _cdmlookup(D::Type, index, order::Order, span, sampling, metadata, crs, mappedcrs) Sampled(index, order, span, sampling, metadata) end -function _ncdspan(index, order) +function _cdmspan(index, order) # Handle a length 1 index length(index) == 1 && return Regular(zero(eltype(index))) step = index[2] - index[1] @@ -352,7 +365,7 @@ function _ncdspan(index, order) end # delta_t and ave_period are not CF standards, but CDC -function _ncdperiod(index, metadata::Metadata{<:CDMsource}) +function _cdmperiod(index, metadata::Metadata{<:CDMsource}) if haskey(metadata, "delta_t") period = _parse_period(metadata["delta_t"]) period isa Nothing || return Regular(period), Points() @@ -392,15 +405,15 @@ _dimkeys(ds::AbstractDataset) = CDM.dimnames(ds) # Add 
axis and standard name attributes to dimension variabls # We need to get better at guaranteeing if X/Y is actually measured in `longitude/latitude` # CF standards requires that we specify "units" if we use these standard names -_ncd_set_axis_attrib!(atr, dim::X) = atr["axis"] = "X" # at["standard_name"] = "longitude"; -_ncd_set_axis_attrib!(atr, dim::Y) = atr["axis"] = "Y" # at["standard_name"] = "latitude"; -_ncd_set_axis_attrib!(atr, dim::Z) = (atr["axis"] = "Z"; atr["standard_name"] = "depth") -_ncd_set_axis_attrib!(atr, dim::Ti) = (atr["axis"] = "T"; atr["standard_name"] = "time") -_ncd_set_axis_attrib!(atr, dim) = nothing - -_ncdshiftlocus(dim::Dimension) = _ncdshiftlocus(lookup(dim), dim) -_ncdshiftlocus(::LookupArray, dim::Dimension) = dim -function _ncdshiftlocus(lookup::AbstractSampled, dim::Dimension) +_cdm_set_axis_attrib!(atr, dim::X) = atr["axis"] = "X" # at["standard_name"] = "longitude"; +_cdm_set_axis_attrib!(atr, dim::Y) = atr["axis"] = "Y" # at["standard_name"] = "latitude"; +_cdm_set_axis_attrib!(atr, dim::Z) = (atr["axis"] = "Z"; atr["standard_name"] = "depth") +_cdm_set_axis_attrib!(atr, dim::Ti) = (atr["axis"] = "T"; atr["standard_name"] = "time") +_cdm_set_axis_attrib!(atr, dim) = nothing + +_cdmshiftlocus(dim::Dimension) = _cdmshiftlocus(lookup(dim), dim) +_cdmshiftlocus(::LookupArray, dim::Dimension) = dim +function _cdmshiftlocus(lookup::AbstractSampled, dim::Dimension) if span(lookup) isa Regular && sampling(lookup) isa Intervals # We cant easily shift a DateTime value if eltype(dim) isa Dates.AbstractDateTime @@ -416,16 +429,16 @@ function _ncdshiftlocus(lookup::AbstractSampled, dim::Dimension) end end -_unuseddimerror(dimname) = error("Netcdf contains unused dimension $dimname") +_unuseddimerror(dimname) = error("Dataset contains unused dimension $dimname") -function _ncd_eachchunk(var) +function _cdm_eachchunk(var) # chunklookup, chunkvec = NCDatasets.chunking(var) # chunksize = chunklookup == :chunked ? Tuple(chunkvec) : chunksize = size(var) DA.GridChunks(var, chunksize) end -function _ncd_haschunks(var) +function _cdm_haschunks(var) # chunklookup, _ = NCDatasets.chunking(var) # chunklookup == :chunked ? DA.Chunked() : DA.Unchunked() diff --git a/src/sources/grd.jl b/src/sources/grd.jl index 918366b94..5a10a5d56 100644 --- a/src/sources/grd.jl +++ b/src/sources/grd.jl @@ -128,7 +128,7 @@ Base.Array(grd::GRDattrib) = _mmapgrd(Array, grd) # Array ######################################################################## -function FileArray(grd::GRDattrib, filename=filename(grd); kw...) +function FileArray{GRDsource}(grd::GRDattrib, filename=filename(grd); kw...) 
filename = first(splitext(filename)) size_ = size(grd) eachchunk = DiskArrays.GridChunks(size_, size_) diff --git a/src/stack.jl b/src/stack.jl index 66e52a47c..81f74baea 100644 --- a/src/stack.jl +++ b/src/stack.jl @@ -178,7 +178,7 @@ function RasterStack(filenames::NamedTuple{K,<:Tuple{<:AbstractString,Vararg}}; mappedcrs = defaultmappedcrs(source, mappedcrs) _open(source, fn; key) do ds data = if lazy - FileArray(ds, fn; key) + FileArray{source}(ds, fn; key) else _open(Array, source, ds; key) end diff --git a/test/sources/gribdatasets.jl b/test/sources/gribdatasets.jl index 8eb1529e9..5192b9b6b 100644 --- a/test/sources/gribdatasets.jl +++ b/test/sources/gribdatasets.jl @@ -28,26 +28,27 @@ era5 = joinpath(gribexamples_dir, "era5-levels-members.grib") @testset "Raster" begin @time gribarray = Raster(era5) @time lazyarray = Raster(era5; lazy=true); - @time lazystack= RasterStack(era5; lazy=true); + @time lazystack = RasterStack(era5; lazy=true); @time eagerstack = RasterStack(era5; lazy=false); - @time ds = GRIBDataset(era5) + @time ds = GRIBDataset(era5); @testset "lazyness" begin - @time read(Raster(era5)); + @time Raster(era5); @test parent(gribarray) isa Array @test parent(lazyarray) isa FileArray end @testset "read" begin - @time A = read(gribarray); + @time A = read(lazyarray); @test A isa Raster @test parent(A) isa Array A2 = zero(A) - @time read!(gribarray, A2); + @time read!(lazyarray, A2); A3 = zero(A) @time read!(gribarray, A3) @test all(A .=== A2) @test all(A .=== A3) + @time st = read(lazystack); end @testset "stack, compare to GRIBDataset" begin @@ -143,4 +144,4 @@ era5 = joinpath(gribexamples_dir, "era5-levels-members.grib") a = gribarray[X(At(21.0)), Y(Between(50, 52)), Ti(Near(DateTime(2002, 12)))] @test Rasters.bounds(a) == ((51.0, 51.0), (500, 850), (0, 9)) end -end \ No newline at end of file +end diff --git a/test/sources/ncdatasets.jl b/test/sources/ncdatasets.jl index 78bfde2c0..33d50358b 100644 --- a/test/sources/ncdatasets.jl +++ b/test/sources/ncdatasets.jl @@ -44,6 +44,7 @@ end @testset "Raster" begin @time ncarray = Raster(ncsingle) @time lazyarray = Raster(ncsingle; lazy=true); + DiskArrays.eachchunk(lazyarray) @time eagerarray = Raster(ncsingle; lazy=false); @test_throws ArgumentError Raster("notafile.nc") @@ -57,9 +58,9 @@ end @testset "from url" begin # TODO we need a permanent url here that doesn't end in .nc - # url = "http://apdrc.soest.hawaii.edu:80/dods/public_data/Reanalysis_Data/NCEP/NCEP2/daily/surface/mslp" - # r = Raster(url; name=:mslp, source=:netcdf, lazy=true) - # @test sum(r[Ti(1)]) == 1.0615972f9 + url = "http://apdrc.soest.hawaii.edu:80/dods/public_data/Reanalysis_Data/NCEP/NCEP2/daily/surface/mslp" + r = Raster(url; name=:mslp, source=:netcdf, lazy=true) + @test sum(r[Ti(1)]) == 1.0615972f9 end @testset "open" begin From f37f76db25a1cc321136414ad3e0a5511134c27e Mon Sep 17 00:00:00 2001 From: rafaqz Date: Tue, 23 Jan 2024 23:09:00 +0100 Subject: [PATCH 2/8] typeo --- ext/RastersArchGDALExt/gdal_source.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/RastersArchGDALExt/gdal_source.jl b/ext/RastersArchGDALExt/gdal_source.jl index 7e8aaa4ff..8f7d5b83c 100644 --- a/ext/RastersArchGDALExt/gdal_source.jl +++ b/ext/RastersArchGDALExt/gdal_source.jl @@ -242,7 +242,7 @@ function RA.Raster(ds::AG.RasterDataset; filelist = AG.filelist(ds) raster = if lazy && length(filelist) > 0 filename = first(filelist) - A = Raster(FileArray{GDALSource}(ds, filename), args...) + A = Raster(FileArray{GDALsource}(ds, filename), args...) 
else Raster(Array(ds), args...) end From 652baeb20c04caa30f0fa1132291eb27ef0e17ff Mon Sep 17 00:00:00 2001 From: rafaqz Date: Wed, 24 Jan 2024 20:22:29 +0100 Subject: [PATCH 3/8] remove cruft --- test/sources/ncdatasets.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/test/sources/ncdatasets.jl b/test/sources/ncdatasets.jl index 33d50358b..9097560a9 100644 --- a/test/sources/ncdatasets.jl +++ b/test/sources/ncdatasets.jl @@ -44,7 +44,6 @@ end @testset "Raster" begin @time ncarray = Raster(ncsingle) @time lazyarray = Raster(ncsingle; lazy=true); - DiskArrays.eachchunk(lazyarray) @time eagerarray = Raster(ncsingle; lazy=false); @test_throws ArgumentError Raster("notafile.nc") From 23333c46976b350d02c0219321d8b26b6efd44d9 Mon Sep 17 00:00:00 2001 From: rafaqz Date: Sun, 10 Mar 2024 02:20:55 +0100 Subject: [PATCH 4/8] bugfix and update for DD changes --- Project.toml | 2 +- ext/RastersArchGDALExt/RastersArchGDALExt.jl | 4 +- .../RastersCoordinateTransformationsExt.jl | 4 +- .../affineprojected.jl | 2 +- .../RastersGRIBDatasetsExt.jl | 4 +- ext/RastersHDF5Ext/RastersHDF5Ext.jl | 4 +- .../RastersNCDatasetsExt.jl | 4 +- ext/RastersNCDatasetsExt/ncdatasets_source.jl | 6 +- .../RastersRasterDataSourcesExt.jl | 2 +- src/Rasters.jl | 8 +-- src/extensions.jl | 2 +- src/lookup.jl | 28 ++++---- src/methods/aggregate.jl | 14 ++-- src/methods/burning/line.jl | 4 +- src/methods/extract.jl | 2 +- src/methods/mask.jl | 4 +- src/methods/mosaic.jl | 12 ++-- src/methods/rasterize.jl | 17 ++--- src/methods/reproject.jl | 4 +- src/methods/slice_combine.jl | 2 +- src/methods/zonal.jl | 2 +- src/plotrecipes.jl | 2 +- src/show.jl | 67 +++++++++---------- src/sources/commondatamodel.jl | 43 +++++------- src/stack.jl | 2 +- src/utils.jl | 5 +- test/adapt.jl | 2 +- test/aggregate.jl | 2 +- test/array.jl | 2 +- test/cellsize.jl | 2 +- test/methods.jl | 4 +- test/plotrecipes.jl | 2 +- test/rasterize.jl | 8 +-- test/reproject.jl | 2 +- test/resample.jl | 2 +- test/series.jl | 2 +- test/set.jl | 2 +- test/sources/gdal.jl | 9 ++- test/sources/grd.jl | 6 +- test/sources/gribdatasets.jl | 2 +- test/sources/ncdatasets.jl | 2 +- test/sources/smap.jl | 2 +- test/stack.jl | 36 +++++----- 43 files changed, 157 insertions(+), 180 deletions(-) diff --git a/Project.toml b/Project.toml index 02e6b0852..887d1861a 100644 --- a/Project.toml +++ b/Project.toml @@ -54,7 +54,7 @@ ConstructionBase = "1" CoordinateTransformations = "0.6.2" DataFrames = "1" DimensionalData = "0.25.1" -DiskArrays = "^0.3.3" +DiskArrays = "0.3, 0.4" Extents = "0.1" FillArrays = "0.12, 0.13, 1" Flatten = "0.4" diff --git a/ext/RastersArchGDALExt/RastersArchGDALExt.jl b/ext/RastersArchGDALExt/RastersArchGDALExt.jl index 63dce90e2..315cfb19e 100644 --- a/ext/RastersArchGDALExt/RastersArchGDALExt.jl +++ b/ext/RastersArchGDALExt/RastersArchGDALExt.jl @@ -14,7 +14,7 @@ using DimensionalData, GeoFormatTypes, GeoInterface -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions using Rasters: GDALsource, AbstractProjected, RasterStackOrArray, FileArray, RES_KEYWORD, SIZE_KEYWORD, CRS_KEYWORD, FILENAME_KEYWORD, SUFFIX_KEYWORD, EXPERIMENTAL, @@ -26,7 +26,7 @@ const RA = Rasters const DD = DimensionalData const DA = DiskArrays const GI = GeoInterface -const LA = LookupArrays +const LA = Lookups include("cellsize.jl") include("gdal_source.jl") diff --git a/ext/RastersCoordinateTransformationsExt/RastersCoordinateTransformationsExt.jl b/ext/RastersCoordinateTransformationsExt/RastersCoordinateTransformationsExt.jl index 2530261cb..39d0ee745 100644 --- 
a/ext/RastersCoordinateTransformationsExt/RastersCoordinateTransformationsExt.jl +++ b/ext/RastersCoordinateTransformationsExt/RastersCoordinateTransformationsExt.jl @@ -7,14 +7,14 @@ else end using DimensionalData -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions import Rasters: AffineProjected, GDAL_EMPTY_TRANSFORM, GDAL_TOPLEFT_X, GDAL_WE_RES, GDAL_ROT1, GDAL_TOPLEFT_Y, GDAL_ROT2, GDAL_NS_RES const RA = Rasters const DD = DimensionalData -const LA = LookupArrays +const LA = Lookups include("affineprojected.jl") diff --git a/ext/RastersCoordinateTransformationsExt/affineprojected.jl b/ext/RastersCoordinateTransformationsExt/affineprojected.jl index 62cf18c6d..949ca1ab6 100644 --- a/ext/RastersCoordinateTransformationsExt/affineprojected.jl +++ b/ext/RastersCoordinateTransformationsExt/affineprojected.jl @@ -1,5 +1,5 @@ function AffineProjected(f; - data=LA.AutoIndex(), metadata=DD.NoMetadata(), crs=nothing, mappedcrs=nothing, paired_lookup, dim=RA.AutoDim() + data=LA.AutoValues(), metadata=DD.NoMetadata(), crs=nothing, mappedcrs=nothing, paired_lookup, dim=RA.AutoDim() ) AffineProjected(f, data, metadata, crs, mappedcrs, paired_lookup, dim) end diff --git a/ext/RastersGRIBDatasetsExt/RastersGRIBDatasetsExt.jl b/ext/RastersGRIBDatasetsExt/RastersGRIBDatasetsExt.jl index 4c2de53f0..59c69048f 100644 --- a/ext/RastersGRIBDatasetsExt/RastersGRIBDatasetsExt.jl +++ b/ext/RastersGRIBDatasetsExt/RastersGRIBDatasetsExt.jl @@ -16,7 +16,7 @@ using Dates, DimensionalData, GeoFormatTypes -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions using Rasters: GRIBsource @@ -26,7 +26,7 @@ const RA = Rasters const DD = DimensionalData const DA = DiskArrays const GI = GeoInterface -const LA = LookupArrays +const LA = Lookups include("gribdatasets_source.jl") diff --git a/ext/RastersHDF5Ext/RastersHDF5Ext.jl b/ext/RastersHDF5Ext/RastersHDF5Ext.jl index aadb54a7d..e9f42c8d3 100644 --- a/ext/RastersHDF5Ext/RastersHDF5Ext.jl +++ b/ext/RastersHDF5Ext/RastersHDF5Ext.jl @@ -17,7 +17,7 @@ using Dates, GeoInterface, Rasters -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions using Rasters: SMAPsource @@ -27,7 +27,7 @@ const RA = Rasters const DD = DimensionalData const DA = DiskArrays const GI = GeoInterface -const LA = LookupArrays +const LA = Lookups include("smap_source.jl") diff --git a/ext/RastersNCDatasetsExt/RastersNCDatasetsExt.jl b/ext/RastersNCDatasetsExt/RastersNCDatasetsExt.jl index 6bb1ef7b9..5c323974d 100644 --- a/ext/RastersNCDatasetsExt/RastersNCDatasetsExt.jl +++ b/ext/RastersNCDatasetsExt/RastersNCDatasetsExt.jl @@ -16,7 +16,7 @@ using Dates, DimensionalData, GeoFormatTypes -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions using Rasters: CDMsource, NCDsource @@ -26,7 +26,7 @@ const RA = Rasters const DD = DimensionalData const DA = DiskArrays const GI = GeoInterface -const LA = LookupArrays +const LA = Lookups include("ncdatasets_source.jl") diff --git a/ext/RastersNCDatasetsExt/ncdatasets_source.jl b/ext/RastersNCDatasetsExt/ncdatasets_source.jl index 0fd194ad2..73034af16 100644 --- a/ext/RastersNCDatasetsExt/ncdatasets_source.jl +++ b/ext/RastersNCDatasetsExt/ncdatasets_source.jl @@ -23,7 +23,7 @@ function Base.write(filename::AbstractString, ::Type{<:CDMsource}, A::AbstractRa mode = !isfile(filename) || !append ? "c" : "a"; ds = NCD.Dataset(filename, mode; attrib=RA._attribdict(metadata(A))) try - _cdmwritevar!(ds, A; kw...) + _writevar!(ds, A; kw...) 
finally close(ds) end @@ -65,7 +65,7 @@ function Base.write(filename::AbstractString, ::Type{<:CDMsource}, s::AbstractRa mode = !isfile(filename) || !append ? "c" : "a"; ds = NCD.Dataset(filename, mode; attrib=RA._attribdict(metadata(s))) try - map(key -> _cdmwritevar!(ds, s[key]), keys(s); kw...) + map(key -> _writevar!(ds, s[key]), keys(s); kw...) finally close(ds) end @@ -86,7 +86,7 @@ function RA._open(f, ::Type{NCDsource}, filename::AbstractString; write=false, k end # Add a var array to a dataset before writing it. -function _cdmwritevar!(ds::AbstractDataset, A::AbstractRaster{T,N}; kw...) where {T,N} +function _writevar!(ds::AbstractDataset, A::AbstractRaster{T,N}; kw...) where {T,N} _def_dim_var!(ds, A) attrib = RA._attribdict(metadata(A)) # Set _FillValue diff --git a/ext/RastersRasterDataSourcesExt/RastersRasterDataSourcesExt.jl b/ext/RastersRasterDataSourcesExt/RastersRasterDataSourcesExt.jl index 52738e2e4..126916606 100644 --- a/ext/RastersRasterDataSourcesExt/RastersRasterDataSourcesExt.jl +++ b/ext/RastersRasterDataSourcesExt/RastersRasterDataSourcesExt.jl @@ -7,7 +7,7 @@ else end # using RasterDataSources: RasterDataSource -using Rasters.LookupArrays +using Rasters.Lookups using Rasters.Dimensions const RA = Rasters diff --git a/src/Rasters.jl b/src/Rasters.jl index 1858ae513..64cbd3f14 100644 --- a/src/Rasters.jl +++ b/src/Rasters.jl @@ -37,13 +37,13 @@ end Reexport.@reexport using DimensionalData, GeoFormatTypes using DimensionalData.Tables, - DimensionalData.LookupArrays, + DimensionalData.Lookups, DimensionalData.Dimensions - DimensionalData.LookupArrays.IntervalSets + DimensionalData.Lookups.IntervalSets using DimensionalData: Name, NoName using .Dimensions: StandardIndices, DimTuple -using .LookupArrays: LookupArrayTuple +using .Lookups: LookupTuple using RecipesBase: @recipe, @series using Base: tail, @propagate_inbounds @@ -74,7 +74,7 @@ export reproject, convertlookup const DD = DimensionalData const DA = DiskArrays const GI = GeoInterface -const LA = LookupArrays +const LA = Lookups # DimensionalData documentation urls const DDdocs = "https://rafaqz.github.io/DimensionalData.jl/stable/api" diff --git a/src/extensions.jl b/src/extensions.jl index 4dff7411e..af21869b0 100644 --- a/src/extensions.jl +++ b/src/extensions.jl @@ -171,7 +171,7 @@ Run `using ArchGDAL` to make this method available. ## Example ```julia -using Rasters, ArchGDAL, Rasters.LookupArrays +using Rasters, ArchGDAL, Rasters.Lookups dimz = X(Projected(90.0:10.0:120; sampling=Intervals(Start()), order=ForwardOrdered(), span=Regular(10.0), crs=EPSG(4326))), Y(Projected(0.0:10.0:50; sampling=Intervals(Start()), order=ForwardOrdered(), span=Regular(10.0), crs=EPSG(4326))) diff --git a/src/lookup.jl b/src/lookup.jl index 8b058b098..0360777b4 100644 --- a/src/lookup.jl +++ b/src/lookup.jl @@ -13,8 +13,8 @@ function Adapt.adapt_structure(to, l::AbstractProjected) return Adapt.adapt_structure(to, sampled) end -GeoInterface.crs(lookup::LookupArray) = nothing -mappedcrs(lookup::LookupArray) = nothing +GeoInterface.crs(lookup::Lookup) = nothing +mappedcrs(lookup::Lookup) = nothing # When the lookup is formatted with an array we match the `dim` field with the # wrapper dimension. We will need this later for e.g. projecting with GDAL, @@ -35,7 +35,7 @@ end Projected(order, span, sampling, crs, mappedcrs) Projected(; order=AutoOrder(), span=AutoSpan(), sampling=AutoSampling(), crs, mappedcrs=nothing) -An [`AbstractSampled`]($DDabssampleddocs) `LookupArray` with projections attached. 
+An [`AbstractSampled`]($DDabssampleddocs) `Lookup` with projections attached. Fields and behaviours are identical to [`Sampled`]($DDsampleddocs) with the addition of `crs` and `mappedcrs` fields. @@ -100,7 +100,7 @@ end Mapped(order, span, sampling, crs, mappedcrs) Mapped(; order=AutoOrder(), span=AutoSpan(), sampling=AutoSampling(), crs=nothing, mappedcrs) -An [`AbstractSampled`]($DDabssampleddocs) `LookupArray`, where the dimension index has +An [`AbstractSampled`]($DDabssampleddocs) `Lookup`, where the dimension index has been mapped to another projection, usually lat/lon or `EPSG(4326)`. `Mapped` matches the dimension format commonly used in netcdf files. @@ -138,19 +138,19 @@ mappedcrs(lookup::Mapped) = lookup.mappedcrs dim(lookup::Mapped) = lookup.dim """ - convertlookup(dstlookup::Type{<:LookupArray}, x) + convertlookup(dstlookup::Type{<:Lookup}, x) Convert the dimension lookup between `Projected` and `Mapped`. Other dimension lookups pass through unchanged. This is used to e.g. save a netcdf file to GeoTiff. """ -convertlookup(T::Type{<:LookupArray}, A::AbstractDimArray) = +convertlookup(T::Type{<:Lookup}, A::AbstractDimArray) = rebuild(A; dims=convertlookup(T, dims(A))) -convertlookup(T::Type{<:LookupArray}, dims::Tuple) = map(d -> convertlookup(T, d), dims) -convertlookup(T::Type{<:LookupArray}, d::Dimension) = rebuild(d, convertlookup(T, lookup(d))) -# Non-projected LookupArray lookupss pass through -convertlookup(::Type, lookup::LookupArray) = lookup +convertlookup(T::Type{<:Lookup}, dims::Tuple) = map(d -> convertlookup(T, d), dims) +convertlookup(T::Type{<:Lookup}, d::Dimension) = rebuild(d, convertlookup(T, lookup(d))) +# Non-projected Lookup lookupss pass through +convertlookup(::Type, lookup::Lookup) = lookup # AbstractProjected passes through if it's the same as dstlookup convertlookup(::Type{T1}, lookup::T2) where {T1,T2<:T1} = lookup # Otherwise AbstractProjected needs ArchGDAL @@ -210,7 +210,7 @@ function mappedbounds end mappedbounds(dims::Tuple) = map(mappedbounds, dims) mappedbounds(dim::Dimension) = mappedbounds(parent(dim), dim) -mappedbounds(::LookupArray, dim) = bounds(dim) +mappedbounds(::Lookup, dim) = bounds(dim) mappedbounds(lookup::Projected, dim) = mappedbounds(mappedcrs(lookup), lookup, dim) mappedbounds(mappedcrs::Nothing, lookup::Projected, dim) = error("No mappedcrs attached to $(name(dim)) dimension") @@ -219,7 +219,7 @@ mappedbounds(mappedcrs::GeoFormat, lookup::Projected, dim) = projectedbounds(dims::Tuple) = map(projectedbounds, dims) projectedbounds(dim::Dimension) = projectedbounds(parent(dim), dim) -projectedbounds(::LookupArray, dim) = bounds(dim) +projectedbounds(::Lookup, dim) = bounds(dim) projectedbounds(lookup::Mapped, dim) = projectedbounds(crs(lookup), lookup, dim) projectedbounds(crs::Nothing, lookup::Mapped, dim) = error("No projection crs attached to $(name(dim)) dimension") @@ -240,7 +240,7 @@ function mappedindex end mappedindex(dims::Tuple) = map(mappedindex, dims) mappedindex(dim::Dimension) = _mappedindex(parent(dim), dim) -_mappedindex(::LookupArray, dim::Dimension) = index(dim) +_mappedindex(::Lookup, dim::Dimension) = index(dim) _mappedindex(lookup::Projected, dim::Dimension) = _mappedindex(mappedcrs(lookup), lookup, dim) _mappedindex(mappedcrs::Nothing, lookup::Projected, dim) = error("No mappedcrs attached to $(name(dim)) dimension") @@ -250,7 +250,7 @@ _mappedindex(mappedcrs::GeoFormat, lookup::Projected, dim) = projectedindex(dims::Tuple) = map(projectedindex, dims) projectedindex(dim::Dimension) = 
_projectedindex(parent(dim), dim) -_projectedindex(::LookupArray, dim::Dimension) = index(dim) +_projectedindex(::Lookup, dim::Dimension) = index(dim) _projectedindex(lookup::Mapped, dim::Dimension) = _projectedindex(crs(lookup), lookup, dim) _projectedindex(crs::Nothing, lookup::Mapped, dim::Dimension) = error("No projection crs attached to $(name(dim)) dimension") diff --git a/src/methods/aggregate.jl b/src/methods/aggregate.jl index ba7311bb8..77dea0793 100644 --- a/src/methods/aggregate.jl +++ b/src/methods/aggregate.jl @@ -83,7 +83,7 @@ function aggregate(method, src::AbstractRaster, scale; aggregate!(method, dst, src, scale; kw...) end aggregate(method, d::Dimension, scale) = rebuild(d, aggregate(method, lookup(d), scale)) -function aggregate(method, lookup::LookupArray, scale) +function aggregate(method, lookup::Lookup, scale) intscale = _scale2int(Ag(), lookup, scale) intscale == 1 && return lookup start, stop = _endpoints(method, lookup, intscale) @@ -223,7 +223,7 @@ end function disaggregate(locus::Locus, dim::Dimension, scale) rebuild(dim, disaggregate(locus, lookup(dim), scale)) end -function disaggregate(locus, lookup::LookupArray, scale) +function disaggregate(locus, lookup::Lookup, scale) intscale = _scale2int(DisAg(), lookup, scale) intscale == 1 && return lookup @@ -329,17 +329,17 @@ end _scale2int(x, dims::DimTuple, scale::Int) = map(d -> _scale2int(x, d, scale), dims) _scale2int(x, dims::DimTuple, scale::Colon) = map(d -> _scale2int(x, d, scale), dims) _scale2int(x, dim::Dimension, scale::Int) = _scale2int(x, lookup(dim), scale) -_scale2int(::Ag, l::LookupArray, scale::Int) = scale > length(l) ? length(l) : scale -_scale2int(::DisAg, l::LookupArray, scale::Int) = scale +_scale2int(::Ag, l::Lookup, scale::Int) = scale > length(l) ? length(l) : scale +_scale2int(::DisAg, l::Lookup, scale::Int) = scale _scale2int(x, dim::Dimension, scale::Colon) = 1 -_agoffset(locus::Locus, l::LookupArray, scale) = _agoffset(locus, scale) -_agoffset(method, l::LookupArray, scale) = _agoffset(locus(l), scale) +_agoffset(locus::Locus, l::Lookup, scale) = _agoffset(locus, scale) +_agoffset(method, l::Lookup, scale) = _agoffset(locus(l), scale) _agoffset(locus::Start, scale) = 0 _agoffset(locus::End, scale) = scale - 1 _agoffset(locus::Center, scale) = scale ÷ 2 -function _endpoints(method, l::LookupArray, scale) +function _endpoints(method, l::Lookup, scale) start = firstindex(l) + _agoffset(method, l, scale) stop = (length(l) ÷ scale) * scale return start, stop diff --git a/src/methods/burning/line.jl b/src/methods/burning/line.jl index 06fbacdde..f7932589d 100644 --- a/src/methods/burning/line.jl +++ b/src/methods/burning/line.jl @@ -81,8 +81,8 @@ function _burn_line!(A::AbstractRaster, line, fill) """ all(regular) || throw(ArgumentError(msg)) - @assert order(xdim) == order(ydim) == LookupArrays.ForwardOrdered() - @assert locus(xdim) == locus(ydim) == LookupArrays.Center() + @assert order(xdim) == order(ydim) == Lookups.ForwardOrdered() + @assert locus(xdim) == locus(ydim) == Lookups.Center() raster_x_step = abs(step(span(A, X))) raster_y_step = abs(step(span(A, Y))) diff --git a/src/methods/extract.jl b/src/methods/extract.jl index 30c88b86f..d285acdaf 100644 --- a/src/methods/extract.jl +++ b/src/methods/extract.jl @@ -19,7 +19,7 @@ sliced arrays or stacks will be returned instead of single values. - `index`: include an `:index` column of the `CartesianIndex` for each value, `false` by default. - `names`: `Tuple` of `Symbol` corresponding to layers of a `RasterStack`. 
All layers by default. - `skipmissing`: skip missing points automatically. -- `atol`: a tolorerance for floating point lookup values for when the `LookupArray` +- `atol`: a tolorerance for floating point lookup values for when the `Lookup` contains `Points`. `atol` is ignored for `Intervals`. # Example diff --git a/src/methods/mask.jl b/src/methods/mask.jl index 0f905c9ef..446e9ee19 100644 --- a/src/methods/mask.jl +++ b/src/methods/mask.jl @@ -252,7 +252,7 @@ function boolmask(stack::AbstractRasterStack; alllayers = true, to = dims(stack) if alllayers _mask_multilayer(stack, to; kw..., _dest_missingval = false) else - boolmask(first(stack); kw...) + boolmask(layers(stack, 1); kw...) end end @@ -345,7 +345,7 @@ function missingmask(stack::AbstractRasterStack; alllayers = true, to = dims(sta if alllayers _mask_multilayer(stack, to; kw..., _dest_missingval = missing) else - missingmask(first(stack); kw...) + missingmask(layers(stack, 1); kw...) end end diff --git a/src/methods/mosaic.jl b/src/methods/mosaic.jl index a3dd8e55e..dd6680a43 100644 --- a/src/methods/mosaic.jl +++ b/src/methods/mosaic.jl @@ -182,19 +182,19 @@ function _mosaic(dims::Dimension...) end return rebuild(first(dims), _mosaic(lookup(dims))) end -_mosaic(lookups::LookupArrayTuple) = _mosaic(first(lookups), lookups) -function _mosaic(lookup::Categorical, lookups::LookupArrayTuple) +_mosaic(lookups::LookupTuple) = _mosaic(first(lookups), lookups) +function _mosaic(lookup::Categorical, lookups::LookupTuple) newindex = union(lookups...) if order isa ForwardOrdered newindex = sort(newindex; order=LA.ordering(order(lookup))) end return rebuild(lookup; data=newindex) end -function _mosaic(lookup::AbstractSampled, lookups::LookupArrayTuple) +function _mosaic(lookup::AbstractSampled, lookups::LookupTuple) order(lookup) isa Unordered && throw(ArgumentError("Cant mozaic an Unordered lookup")) return _mosaic(span(lookup), lookup, lookups) end -function _mosaic(span::Regular, lookup::AbstractSampled, lookups::LookupArrayTuple) +function _mosaic(span::Regular, lookup::AbstractSampled, lookups::LookupTuple) newindex = if order(lookup) isa ForwardOrdered mi = minimum(map(first, lookups)) ma = maximum(map(last, lookups)) @@ -217,11 +217,11 @@ function _mosaic(span::Regular, lookup::AbstractSampled, lookups::LookupArrayTup return rebuild(lookup; data=newindex) end -function _mosaic(::Irregular, lookup::AbstractSampled, lookups::LookupArrayTuple) +function _mosaic(::Irregular, lookup::AbstractSampled, lookups::LookupTuple) newindex = sort(union(map(parent, lookups)...); order=LA.ordering(order(lookup))) return rebuild(lookup; data=newindex) end -function _mosaic(span::Explicit, lookup::AbstractSampled, lookups::LookupArrayTuple) +function _mosaic(span::Explicit, lookup::AbstractSampled, lookups::LookupTuple) # TODO make this less fragile to floating point innaccuracy newindex = sort(union(map(parent, lookups)...); order=LA.ordering(order(lookup))) bounds = map(val ∘ DD.span, lookups) diff --git a/src/methods/rasterize.jl b/src/methods/rasterize.jl index 916274eb8..7520a5f92 100644 --- a/src/methods/rasterize.jl +++ b/src/methods/rasterize.jl @@ -397,7 +397,7 @@ Rasterize a shapefile for China and plot, with a border. 
```jldoctest using Rasters, RasterDataSources, ArchGDAL, Plots, Dates, Shapefile, Downloads -using Rasters.LookupArrays +using Rasters.Lookups # Download a borders shapefile shapefile_url = "https://github.com/nvkelso/natural-earth-vector/raw/master/10m_cultural/ne_10m_admin_0_countries.shp" @@ -531,7 +531,7 @@ $GEOM_KEYWORDS ```jldoctest using Rasters, RasterDataSources, ArchGDAL, Plots, Dates, Shapefile, GeoInterface, Downloads -using Rasters.LookupArrays +using Rasters.Lookups # Download a borders shapefile shapefile_url = "https://github.com/nvkelso/natural-earth-vector/raw/master/10m_cultural/ne_10m_admin_0_countries.shp" @@ -600,7 +600,8 @@ function _rasterize!(A, ::GI.AbstractGeometryTrait, geom, fill, r::Rasterizer; a else ext = _extent(geom) V = view(A, Touches(ext)) - length(V) > 0 || return false + # TODO use length when this is fixed in DimensionalData + prod(size(V)) > 0 || return false bools = _init_bools(commondims(V, DEFAULT_POINT_ORDER), BitArray; metadata=metadata(A)) boolmask!(bools, geom; allocs, lock, shape, boundary, verbose, progress) @@ -744,7 +745,7 @@ end n = 0 for point in points I = dims2indices(A, (X(point[1]), Y(point[2]))) - _checkbounds(A, I...) || continue + checkbounds(Bool, A, I...) || continue _fill_func!(fillfunc, A, I) # Mark that we have written at least one index hasburned = true @@ -770,7 +771,7 @@ end for (point, fill) in points_fill I = dims2indices(A, (X(point[1]), Y(point[2]))) - _checkbounds(A, I...) || continue + checkbounds(Bool, A, I...) || continue _fill_op!(op, A, fill, init, missingval, I) # Mark that we have written at least one index hasburned = true @@ -806,7 +807,7 @@ type_length(tup::Type{T}) where {T<:Union{Tuple,NamedTuple}} = length(tup.types) continue # We will reduce all of these points together later on else I = dims2indices(A, (X(prevpoint[1]), Y(prevpoint[2]))) - _checkbounds(A, I...) || continue + checkbounds(Bool, A, I...) || continue startind = _fill_reduce!(reducer, A, I, points_fill, startind, n, missingval) end prevpoint = point # Update the previous point to the current @@ -815,7 +816,7 @@ type_length(tup::Type{T}) where {T<:Union{Tuple,NamedTuple}} = length(tup.types) # Fill the last points I = dims2indices(A, (X(prevpoint[1]), Y(prevpoint[2]))) n = lastindex(points_fill) + 1 - _checkbounds(A, I...) && _fill_reduce!(reducer, A, I, points_fill, startind, n, missingval) + checkbounds(Bool, A, I...) && _fill_reduce!(reducer, A, I, points_fill, startind, n, missingval) return hasburned end @@ -862,7 +863,7 @@ function _reduce_bitarray!(f, st::AbstractRasterStack, geoms, fill::NamedTuple, geom_axis = axes(masks, Dim{:geometry}()) fill = map(itr -> [v for (_, v) in zip(geom_axis, itr)], fill) T = NamedTuple{keys(st),Tuple{map(eltype, st)...}} - range = axes(first(st), Y()) + range = axes(st, Y()) _run(range, threaded, progress, "Reducing...") do y _reduce_bitarray_loop(f, st, T, fill, masks, y) end diff --git a/src/methods/reproject.jl b/src/methods/reproject.jl index 2c8ad74b0..81bcb422d 100644 --- a/src/methods/reproject.jl +++ b/src/methods/reproject.jl @@ -17,13 +17,13 @@ are silently returned without modification. # Arguments -- `obj`: a `LookupArray`, `Dimension`, `Tuple` of `Dimension`, `Raster` or `RasterStack`. +- `obj`: a `Lookup`, `Dimension`, `Tuple` of `Dimension`, `Raster` or `RasterStack`. 
$CRS_KEYWORD """ reproject(x; crs::GeoFormat) = reproject(crs, x) reproject(target::GeoFormat, x) = rebuild(x; dims=reproject(target, dims(x))) reproject(target::GeoFormat, dims::Tuple) = map(d -> reproject(target, d), dims) -reproject(target::GeoFormat, l::LookupArray) = l +reproject(target::GeoFormat, l::Lookup) = l reproject(target::GeoFormat, dim::Dimension) = rebuild(dim, reproject(target, lookup(dim))) function reproject(target::GeoFormat, l::AbstractProjected) source = crs(l) diff --git a/src/methods/slice_combine.jl b/src/methods/slice_combine.jl index a3b42bc5a..16e6fd55b 100644 --- a/src/methods/slice_combine.jl +++ b/src/methods/slice_combine.jl @@ -57,7 +57,7 @@ function combine(ser::AbstractRasterSeries{<:Any,N}) where N rD = map(d -> rebuild(d, :), DD.dims(r1)) source = ser[sD...] if dest isa RasterStack - foreach(source, dest) do source_r, dest_r + foreach(layers(source), layers(dest)) do source_r, dest_r view(dest_r, rD..., sD...) .= source_r end else diff --git a/src/methods/zonal.jl b/src/methods/zonal.jl index 06304a579..729d3718f 100644 --- a/src/methods/zonal.jl +++ b/src/methods/zonal.jl @@ -105,7 +105,7 @@ function _zonal(f, x::AbstractRaster, ::GI.AbstractGeometryTrait, geom; kw...) end function _zonal(f, st::AbstractRasterStack, ::GI.AbstractGeometryTrait, geom; kw...) cropped = crop(st; to=geom, touches=true) - prod(size(first(cropped))) > 0 || return map(_ -> missing, st) + prod(size(cropped)) > 0 || return map(_ -> missing, st) masked = mask(cropped; with=geom, kw...) return map(masked) do A prod(size(A)) > 0 || return missing diff --git a/src/plotrecipes.jl b/src/plotrecipes.jl index 38384014b..e93e74351 100644 --- a/src/plotrecipes.jl +++ b/src/plotrecipes.jl @@ -325,7 +325,7 @@ _maybe_shift(sampling, d) = d _maybe_mapped(dims::Tuple) = map(_maybe_mapped, dims) _maybe_mapped(dim::Dimension) = _maybe_mapped(lookup(dim), dim) -_maybe_mapped(lookup::LookupArray, dim::Dimension) = dim +_maybe_mapped(lookup::Lookup, dim::Dimension) = dim _maybe_mapped(lookup::Projected, dim::Dimension) = _maybe_mapped(mappedcrs(lookup), dim) _maybe_mapped(::Nothing, dim::Dimension) = dim _maybe_mapped(::GeoFormat, dim::Dimension) = convertlookup(Mapped, dim) diff --git a/src/show.jl b/src/show.jl index 4f04f8ff1..ce42ff8bd 100644 --- a/src/show.jl +++ b/src/show.jl @@ -1,40 +1,52 @@ -function DD.show_after(io::IO, mime::MIME"text/plain", A::AbstractRaster) - - printstyled(io, "extent: "; color=:light_black) - show(io, mime, Extents.extent(A)) - println() - if missingval(A) !== nothing - printstyled(io, "missingval: "; color=:light_black) - show(io, mime, missingval(A)) - println() - end - if crs(A) !== nothing - printstyled(io, "crs: "; color=:light_black) - print(io, convert(String, crs(A)), "\n") - end - if mappedcrs(A) !== nothing - printstyled(io, "mappedcrs: "; color=:light_black) - print(io, convert(String, mappedcrs(A)), "\n") - end +function DD.show_after(io::IO, mime::MIME"text/plain", A::AbstractRaster; kw...) + blockwidth = get(io, :blockwidth, 0) + print_geo(io, mime, A; blockwidth) + DD.print_block_close(io, blockwidth) + ndims(A) > 0 && println(io) if parent(A) isa DiskArrays.AbstractDiskArray if parent(A) isa FileArray printstyled(io, "from file:\n"; color=:light_black) print(io, filename(parent(A))) + else + show(io, mime, parent(A)) end else - printstyled(io, "parent:\n"; color=:light_black) DD.print_array(io, mime, A) end end -function DD.show_after(io, mime, stack::AbstractRasterStack) +function DD.show_after(io, mime, stack::AbstractRasterStack; kw...) 
+ blockwidth = get(io, :blockwidth, 0) + print_geo(io, mime, stack; blockwidth) if parent(stack) isa FileStack printstyled(io, "from file:\n"; color=:light_black) println(io, filename(stack)) end + DD.print_block_close(io, blockwidth) +end + +function print_geo(io, mime, A; blockwidth) + DD.print_block_separator(io, "raster", blockwidth) + printstyled(io, "\n extent: "; color=:light_black) + show(io, mime, Extents.extent(A)) + println(io) + if missingval(A) !== nothing + printstyled(io, " missingval: "; color=:light_black) + show(io, mime, missingval(A)) + end + if crs(A) !== nothing + printstyled(io, "\n crs: "; color=:light_black) + print(io, convert(String, crs(A))) + end + if mappedcrs(A) !== nothing + printstyled(io, "\n mappedcrs: "; color=:light_black) + print(io, convert(String, mappedcrs(A))) + end + println(io) end + # Stack types can be enourmous. Just use nameof(T) function Base.summary(io::IO, ser::AbstractRasterSeries{T,N}) where {T,N} if N == 0 @@ -58,18 +70,3 @@ function Base.show(io::IO, mime::MIME"text/plain", A::AbstractRasterSeries{T,N}) ds = displaysize(io) ioctx = IOContext(io, :displaysize => (ds[1] - lines, ds[2])) end - -function LA.show_properties(io::IO, lookup::AbstractProjected) - print(io, " ") - LA.print_order(io, lookup) - print(io, " ") - LA.print_span(io, lookup) - print(io, " ") - LA.print_sampling(io, lookup) - if !(crs(lookup) isa Nothing) - print(io, " crs: ", nameof(typeof(crs(lookup)))) - end - if !(mappedcrs(lookup) isa Nothing) - print(io, " mappedcrs: ", nameof(typeof(mappedcrs(lookup)))) - end -end diff --git a/src/sources/commondatamodel.jl b/src/sources/commondatamodel.jl index 7e57ee91b..7943fae15 100644 --- a/src/sources/commondatamodel.jl +++ b/src/sources/commondatamodel.jl @@ -56,11 +56,13 @@ _open(f, ::Type{<:CDMsource}, var::CFVariable; kw...) = cleanreturn(f(var)) # We have to dig down to find the chunks as they are not immplemented # properly in the source packages, but they are in their internal objects. # We just make it work here even though it doesn't work in NCDatasets.jl or GRIBDatasets.jl -DiskArrays.eachchunk(var::CFVariable) = _get_eachchunk(var.var.var) -DiskArrays.haschunks(var::CFVariable) = _get_haschunks(var.var.var) +DiskArrays.eachchunk(var::CFVariable) = _get_eachchunk(var) +DiskArrays.haschunks(var::CFVariable) = _get_haschunks(var) -function _get_eachchunk end -function _get_haschunks end +_get_eachchunk(var::CFVariable) = _get_eachchunk(var.var) +_get_eachchunk(var::CDM.CFVariable) = _get_eachchunk(var.var) +_get_haschunks(var::CFVariable) = _get_haschunks(var.var) +_get_haschunks(var::CDM.CFVariable) = _get_haschunks(var.var) # CommonDataModel.jl methods for method in (:size, :name, :dimnames, :dataset, :attribnames) @@ -140,9 +142,8 @@ function DD.dims(ds::AbstractDataset, crs=nothing, mappedcrs=nothing) end |> Tuple end function DD.dims(var::AbstractVariable, crs=nothing, mappedcrs=nothing) - names = CDM.dimnames(var) - map(names) do name - _cdmdim(_dataset(var), name, crs, mappedcrs) + map(CDM.dimnames(var)) do key + _cdmdim(_dataset(var), key, crs, mappedcrs) end |> Tuple end @@ -211,7 +212,7 @@ function FileStack{source}( keys = map(Symbol, keys isa Nothing ? 
layerkeys(ds) : keys) |> Tuple type_size_ec_hc = map(keys) do key var = ds[string(key)] - Union{Missing,eltype(var)}, size(var), _cdm_eachchunk(var), _cdm_haschunks(var) + Union{Missing,eltype(var)}, size(var), _get_eachchunk(var), _get_haschunks(var) end layertypes = map(x->x[1], type_size_ec_hc) layersizes = map(x->x[2], type_size_ec_hc) @@ -241,14 +242,15 @@ function _cdmdim(ds, dimname::Key, crs=nothing, mappedcrs=nothing) else # The var doesn't exist. Maybe its `complex` or some other marker, # so make it a custom `Dim` with `NoLookup` - len = _ncfinddimlen(ds, dimname) + len = _cdmfinddimlen(ds, dimname) len === nothing && _unuseddimerror() lookup = NoLookup(Base.OneTo(len)) - return Dim{Symbol(dimname)}(lookup) + D = _cdmdimtype(NoMetadata(), dimname) + return D(lookup) end end -function _ncfinddimlen(ds, dimname) +function _cdmfinddimlen(ds, dimname) for key in keys(ds) var = ds[key] dimnames = CDM.dimnames(var) @@ -281,7 +283,7 @@ function _cdmdimtype(attrib, dimname) end # _cdmlookup -# Generate a `LookupArray` from a netcdf dim. +# Generate a `Lookup` from a netcdf dim. function _cdmlookup(ds::AbstractDataset, dimname, D::Type, crs, mappedcrs) dvar = ds[dimname] index = dvar[:] @@ -412,7 +414,7 @@ _cdm_set_axis_attrib!(atr, dim::Ti) = (atr["axis"] = "T"; atr["standard_name"] = _cdm_set_axis_attrib!(atr, dim) = nothing _cdmshiftlocus(dim::Dimension) = _cdmshiftlocus(lookup(dim), dim) -_cdmshiftlocus(::LookupArray, dim::Dimension) = dim +_cdmshiftlocus(::Lookup, dim::Dimension) = dim function _cdmshiftlocus(lookup::AbstractSampled, dim::Dimension) if span(lookup) isa Regular && sampling(lookup) isa Intervals # We cant easily shift a DateTime value @@ -430,18 +432,3 @@ function _cdmshiftlocus(lookup::AbstractSampled, dim::Dimension) end _unuseddimerror(dimname) = error("Dataset contains unused dimension $dimname") - -function _cdm_eachchunk(var) - # chunklookup, chunkvec = NCDatasets.chunking(var) - # chunksize = chunklookup == :chunked ? Tuple(chunkvec) : - chunksize = size(var) - DA.GridChunks(var, chunksize) -end - -function _cdm_haschunks(var) - # chunklookup, _ = NCDatasets.chunking(var) - # chunklookup == :chunked ? DA.Chunked() : - DA.Unchunked() -end - - diff --git a/src/stack.jl b/src/stack.jl index 81f74baea..7a4dc60a7 100644 --- a/src/stack.jl +++ b/src/stack.jl @@ -30,7 +30,7 @@ missingval(stack::AbstractRasterStack) = getfield(stack, :missingval) filename(stack::AbstractRasterStack) = filename(parent(stack)) missingval(s::AbstractRasterStack, key::Symbol) = _singlemissingval(missingval(s), key) -isdisk(A::AbstractRasterStack) = isdisk(first(A)) +isdisk(st::AbstractRasterStack) = isdisk(layers(st, 1)) setcrs(x::AbstractRasterStack, crs) = set(x, setcrs(dims(x), crs)...) setmappedcrs(x::AbstractRasterStack, mappedcrs) = set(x, setmappedcrs(dims(x), mappedcrs)...) diff --git a/src/utils.jl b/src/utils.jl index 3280fab59..4c434ef40 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -46,7 +46,7 @@ end # We often need to convert the locus and the lookup in the same step, # as doing it in the wrong order can give errors. -# function convert_locus_lookup(M1::Type{<:LookupArray}, L1::Type{<:Locus}, dim::Dimension) +# function convert_locus_lookup(M1::Type{<:Lookup}, L1::Type{<:Locus}, dim::Dimension) # _convert(S1, L1, sampling(dim), locus(dim), span(dim), dim) # end @@ -204,9 +204,6 @@ const URLREGEX = r"^[a-zA-Z][a-zA-Z\d+\-.]*:" _isurl(str::AbstractString) = !occursin(WINDOWSREGEX, str) && occursin(URLREGEX, str) -_checkbounds(A::AbstractRasterStack, I...) 
= checkbounds(Bool, first(A), I...) -_checkbounds(A::AbstractRaster, I...) = checkbounds(Bool, A, I...) - # Run `f` threaded or not, w function _run(f, range::OrdinalRange, threaded::Bool, progress::Bool, desc::String) p = progress ? _progress(length(range); desc) : nothing diff --git a/test/adapt.jl b/test/adapt.jl index dcd8f4abd..8ee759d5d 100644 --- a/test/adapt.jl +++ b/test/adapt.jl @@ -1,5 +1,5 @@ using Rasters, Adapt, Test -using Rasters.GeoFormatTypes, Rasters.LookupArrays +using Rasters.GeoFormatTypes, Rasters.Lookups struct CustomArray{T,N} <: AbstractArray{T,N} arr::Array diff --git a/test/aggregate.jl b/test/aggregate.jl index 5abec04ca..0c0d372f1 100644 --- a/test/aggregate.jl +++ b/test/aggregate.jl @@ -1,5 +1,5 @@ using Rasters, Test, Dates, Statistics -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Rasters: upsample, downsample @testset "upsample" begin diff --git a/test/array.jl b/test/array.jl index eb242237f..8118a0b42 100644 --- a/test/array.jl +++ b/test/array.jl @@ -1,5 +1,5 @@ using Rasters, Test, Dates, DiskArrays -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Rasters: isdisk, ismem, filename data1 = cumsum(cumsum(ones(10, 11); dims=1); dims=2) diff --git a/test/cellsize.jl b/test/cellsize.jl index ca1ab919a..2e1b079ad 100644 --- a/test/cellsize.jl +++ b/test/cellsize.jl @@ -1,4 +1,4 @@ -using Rasters, Rasters.LookupArrays, ArchGDAL +using Rasters, Rasters.Lookups, ArchGDAL using Test include(joinpath(dirname(pathof(Rasters)), "../test/test_utils.jl")) diff --git a/test/methods.jl b/test/methods.jl index e7132a435..0c095029c 100644 --- a/test/methods.jl +++ b/test/methods.jl @@ -1,6 +1,6 @@ using Rasters, Test, ArchGDAL, ArchGDAL.GDAL, Dates, Statistics, DataFrames, Extents, Shapefile, GeometryBasics import GeoInterface -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Rasters: bounds include(joinpath(dirname(pathof(Rasters)), "../test/test_utils.jl")) @@ -529,7 +529,7 @@ end A1 = Raster(ones(2, 2, 2), (X(2.0:-1.0:1.0), Y(5.0:1.0:6.0), Ti(DateTime(2001):Year(1):DateTime(2002)))) A2 = Raster(zeros(2, 2, 2), (X(3.0:-1.0:2.0), Y(4.0:1.0:5.0), Ti(DateTime(2002):Year(1):DateTime(2003)))) @test all(mosaic(mean, A1, A2) |> parent .=== - first(mosaic(mean, RasterStack(A1), RasterStack(A2))) .=== + mosaic(mean, RasterStack(A1), RasterStack(A2)).layer1 .=== cat([missing missing missing missing 1.0 1.0 missing 1.0 1.0 ], diff --git a/test/plotrecipes.jl b/test/plotrecipes.jl index b236bca82..2cb8ec6c0 100644 --- a/test/plotrecipes.jl +++ b/test/plotrecipes.jl @@ -24,7 +24,7 @@ plot(ga4x[X(At(0.0))]) heatmap(ga4x[X(At(0.0)), Y(At(0.0))]) # Cant plot 4d @test_throws ErrorException plot(ga4x) -# 3d plot by NoLookupArray X dim +# 3d plot by NoLookup X dim @test_broken plot(ga4x[Y(1)]) # 3d plot by Ti dim diff --git a/test/rasterize.jl b/test/rasterize.jl index 32407d876..bb88287cd 100644 --- a/test/rasterize.jl +++ b/test/rasterize.jl @@ -1,6 +1,6 @@ using Rasters, Test, ArchGDAL, ArchGDAL.GDAL, Dates, Statistics, DataFrames, Extents, Shapefile, GeometryBasics import GeoInterface -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Rasters: bounds include(joinpath(dirname(pathof(Rasters)), "../test/test_utils.jl")) @@ -192,7 +192,7 @@ end end @testset "NamedTuple of value fill makes a stack" begin rst = rasterize(sum, data; to=A, fill=(fill1=3, fill2=6.0f0)) - @test eltype(rst) == 
(fill1=Union{Missing,Int64}, fill2=Union{Missing,Float32}) + @test eltype(rst) == @NamedTuple{fill1::Union{Missing,Int64}, fill2::Union{Missing,Float32}} @test keys(rst) == (:fill1, :fill2) @test dims(rst) == dims(A) @test map(sum ∘ skipmissing, rst) === (fill1=15, fill2=30.0f0) @@ -399,12 +399,12 @@ end (12 * 1 + 8 * 2 + 8 * 3 + 12 * 4) + (4 * 1 * 2 + 4 * 2 * 3 + 4 * 3 * 4) prod_st = rasterize(prod, polygons; res=5, fill=(a=1:4, b=4:-1:1), missingval=missing, boundary=:center, threaded) - @test all(prod_st.a .=== rot180(prod_st.b)) + @test all(prod_st.a .=== rot180(parent(prod_st.b))) @test all(prod_r .=== prod_st.a) prod_r_m = rasterize(prod, polygons; res=5, fill=1:4, missingval=-1, boundary=:center, threaded) prod_st_m = rasterize(prod, polygons; res=5, fill=(a=1:4, b=4.0:-1.0:1.0), missingval=(a=-1, b=-1.0), boundary=:center, threaded) @test all(prod_st_m.a .=== prod_r_m) - @test all( prod_st_m.b .=== rot180(Float64.(prod_r_m))) + @test all(prod_st_m.b .=== rot180(parent(Float64.(prod_r_m)))) r = rasterize(last, polygons; res=5, fill=(a=1, b=2), boundary=:center, threaded) @test all(r.a .* 2 .=== r.b) diff --git a/test/reproject.jl b/test/reproject.jl index 01cb07723..740df14bc 100644 --- a/test/reproject.jl +++ b/test/reproject.jl @@ -1,5 +1,5 @@ using Rasters, Test, ArchGDAL -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Rasters: reproject, convertlookup @testset "reproject" begin diff --git a/test/resample.jl b/test/resample.jl index 09ebbe470..26d5d8436 100644 --- a/test/resample.jl +++ b/test/resample.jl @@ -1,6 +1,6 @@ using Rasters, ArchGDAL, GeoInterface, Extents using Test -using Rasters.LookupArrays +using Rasters.Lookups include(joinpath(dirname(pathof(Rasters)), "../test/test_utils.jl")) diff --git a/test/series.jl b/test/series.jl index 67e5d8ff7..69beb01af 100644 --- a/test/series.jl +++ b/test/series.jl @@ -1,5 +1,5 @@ using Rasters, Test, Dates, DimensionalData -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions include(joinpath(dirname(pathof(Rasters)), "../test/test_utils.jl")) # RasterSeries from Raster/RasterStack components diff --git a/test/set.jl b/test/set.jl index 314e32f94..d8bc2e6ab 100644 --- a/test/set.jl +++ b/test/set.jl @@ -1,5 +1,5 @@ using Rasters, Test -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions @testset "set" begin A = [missing 7; 2 missing] diff --git a/test/sources/gdal.jl b/test/sources/gdal.jl index bb3e4402e..a79597d16 100644 --- a/test/sources/gdal.jl +++ b/test/sources/gdal.jl @@ -1,5 +1,5 @@ using Rasters, Test, Statistics, Dates, Plots, DiskArrays, RasterDataSources, CoordinateTransformations, Extents -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions import ArchGDAL, NCDatasets using Rasters: FileArray, GDALsource, crs, bounds @@ -448,7 +448,6 @@ gdalpath = maybedownload(url) @test occursin("Raster", sh) @test occursin("Y", sh) @test occursin("X", sh) - @test occursin("Band", sh) end @testset "plot" begin # TODO write some tests for this @@ -467,7 +466,7 @@ gdalpath = maybedownload(url) xap = Rasters.AffineProjected(am; crs=crs(gdalarray), paired_lookup=parent(lookup(gdalarray, X))) yap = Rasters.AffineProjected(am; crs=crs(gdalarray), paired_lookup=parent(lookup(gdalarray, Y))) twoband = cat(gdalarray, gdalarray; dims=Band(1:2)) - affine_dims = DimensionalData.format((X(xap), Y(yap), Band(1:2)), twoband) + affine_dims = DimensionalData.format((X(xap), Y(yap), 
Band(Categorical(1:2))), twoband) rotated = rebuild(twoband; dims=affine_dims); @test occursin("Extent", sprint(show, MIME"text/plain"(), rotated)) @test rotated[X=At(-1e4; atol=0.5), Y=Near(4.24e6), Band=1] == 0x8c @@ -481,7 +480,7 @@ gdalpath = maybedownload(url) write("rotated.tif", rotated; force=true) newrotated = Raster("rotated.tif") plot(newrotated) - @test rotated == newrotated + @test dims(rotated) == dims(newrotated) @test lookup(rotated, X).affinemap.linear == lookup(newrotated, X).affinemap.linear @test lookup(rotated, X).affinemap.translation == lookup(newrotated, X).affinemap.translation rm("rotated.tif") @@ -520,7 +519,7 @@ end @testset "RasterStack" begin @time gdalstack = RasterStack((a=gdalpath, b=gdalpath)) - @test length(gdalstack) == 2 + @test length(layers(gdalstack)) == 2 @test dims(gdalstack) isa Tuple{<:X,<:Y} @testset "read" begin diff --git a/test/sources/grd.jl b/test/sources/grd.jl index 97b78de77..6abc210e3 100644 --- a/test/sources/grd.jl +++ b/test/sources/grd.jl @@ -1,5 +1,5 @@ using Rasters, Test, Statistics, Dates, Plots -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions import NCDatasets, ArchGDAL using Rasters: FileArray, GRDsource, GDALsource, metadata @@ -291,7 +291,7 @@ end @test parent(eagerstack[:a]) isa Array end - @test length(grdstack) == 2 + @test length(layers(grdstack)) == 2 @test dims(grdstack) isa Tuple{<:X,<:Y,<:Band} @testset "read" begin @@ -358,7 +358,7 @@ end @test keys(RasterStack(grdpath)) == (Symbol("red:green:blue"),) grdstack = RasterStack(grdpath; layersfrom=Band) - @test length(grdstack) == 3 + @test length(layers(grdstack)) == 3 @test dims(grdstack) isa Tuple{<:X,<:Y} @testset "read" begin diff --git a/test/sources/gribdatasets.jl b/test/sources/gribdatasets.jl index 5192b9b6b..b7770feb4 100644 --- a/test/sources/gribdatasets.jl +++ b/test/sources/gribdatasets.jl @@ -1,6 +1,6 @@ using Rasters, Test, GRIBDatasets using Rasters: FileArray, CDMsource -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions using Statistics using Dates diff --git a/test/sources/ncdatasets.jl b/test/sources/ncdatasets.jl index 9097560a9..11474d5da 100644 --- a/test/sources/ncdatasets.jl +++ b/test/sources/ncdatasets.jl @@ -1,5 +1,5 @@ using Rasters, DimensionalData, Test, Statistics, Dates, CFTime, Plots -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions import ArchGDAL, NCDatasets using Rasters: FileArray, FileStack, NCDsource, crs, bounds, name testdir = realpath(joinpath(dirname(pathof(Rasters)), "../test")) diff --git a/test/sources/smap.jl b/test/sources/smap.jl index 6445a7324..1f63be69e 100644 --- a/test/sources/smap.jl +++ b/test/sources/smap.jl @@ -1,5 +1,5 @@ using Rasters, Test, Statistics, Dates, Plots -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions import ArchGDAL, NCDatasets, HDF5, CFTime using Rasters: layerkeys, SMAPsource, FileArray diff --git a/test/stack.jl b/test/stack.jl index 2806fe510..f86af66d8 100644 --- a/test/stack.jl +++ b/test/stack.jl @@ -1,5 +1,5 @@ using Rasters, DimensionalData, Test, Statistics, Dates, ArchGDAL -using Rasters.LookupArrays, Rasters.Dimensions +using Rasters.Lookups, Rasters.Dimensions data1 = cumsum(cumsum(ones(10, 11); dims=1); dims=2) data2 = 2cumsum(cumsum(ones(10, 11, 1); dims=1); dims=2) @@ -22,11 +22,9 @@ ga2 = Raster(data2, dims2) st3 = RasterStack((data1, data2), dims2; keys=[:ga1, :ga2]) st4 = RasterStack(st3) st5 = RasterStack(st3, 
dims2) - st6 = RasterStack(RasterStack(ga1)..., RasterStack(ga2)...; name=(:ga1, :ga2)) - st7 = RasterStack(RasterStack(ga1; name=:ga1)..., RasterStack(ga2; name=:ga2)...) - st8 = RasterStack((; ga1, ga2)) - st9 = RasterStack((ga1=data1, ga2=data2), dims2) - @test st1 == st2 == st3 == st4 == st5 == st6 == st7 == st8 == st9 + st6 = RasterStack((ga1=data1, ga2=data2), dims2) + st7 = RasterStack((; ga1, ga2)) + @test st1 == st2 == st3 == st4 == st5 == st6 == st7 # The dimension differences are lost because the table # is tidy - every column is the same length @@ -37,9 +35,9 @@ end st = RasterStack((ga1, ga2); name=(:ga1, :ga2)) @testset "stack layers" begin - @test length(st) == 2 - @test first(st) == ga1 - @test last(st) == ga2 + @test length(layers(st)) == 2 + @test first(layers(st)) == ga1 + @test last(layers(st)) == ga2 @test DimensionalData.layers(st) isa NamedTuple @test st.ga1 == ga1 @test st[:ga2] == ga2 @@ -49,8 +47,6 @@ st = RasterStack((ga1, ga2); name=(:ga1, :ga2)) @test haskey(st, :ga1) @test names(st) == (:ga1, :ga2) @test collect(values(st)) == [ga1, ga2] - # We can iterate/splat stacks as GeArrays - @test st == RasterStack(st...) end @testset "st fields " begin @@ -130,21 +126,21 @@ end @testset "concatenate stacks" begin dims1b = X(110:10:200), Y(-50:10:50) dims2b = (dims1b..., Ti([DateTime(2019)])) - stack_a = RasterStack((ga1=ga1, ga2=ga2)) - stack_b = RasterStack((ga1=Raster(data1 .+ 10, dims1b), ga2=Raster(data2 .+ 20, dims2b))) + stack_a = RasterStack((l1=ga1, l2=ga2)) + stack_b = RasterStack((l1=Raster(data1 .+ 10, dims1b), l2=Raster(data2 .+ 20, dims2b))) catstack = cat(stack_a, stack_b; dims=X) - @test size(first(catstack)) == (20, 11) - @test size(last(catstack)) == (20, 11, 1) + @test size(catstack.l1) == (20, 11) + @test size(catstack.l2) == (20, 11, 1) @test val(dims(catstack, X)) ≈ 10.0:10.0:200.0 #@test step(dims(first(catstack), X())) == 10.0 - @test DimensionalData.bounds(dims(first(catstack), X)) == (10.0, 200.0) - @test catstack[:ga1][Y(1)] == 1.0:20.0 - @test catstack[:ga2][Y(1), Ti(1)] == 2.0:2.0:40.0 + @test DimensionalData.bounds(dims(layers(catstack, 1), X)) == (10.0, 200.0) + @test catstack.l1[Y(1)] == 1.0:20.0 + @test catstack.l2[Y(1), Ti(1)] == 2.0:2.0:40.0 dims2c = (dims1b..., Ti([DateTime(2019)])) stack_c = set(stack_b, X=>110:10:200, Y=>60:10:160) catstack = cat(stack_a, stack_c; dims=(X, Y)) - @test size(first(catstack)) == (20, 22) - @test size(last(catstack)) == (20, 22, 1) + @test size(catstack.l1) == (20, 22) + @test size(catstack.l2) == (20, 22, 1) @test dims(catstack) == (X(Sampled(10:10.0:200, ForwardOrdered(), Regular(10.0), Points(), NoMetadata())), Y(Sampled(-50:10.0:160, ForwardOrdered(), Regular(10.0), Points(), NoMetadata())), From a4191007b6c48f1dab31c69e0e6b2653999090c7 Mon Sep 17 00:00:00 2001 From: rafaqz Date: Sun, 10 Mar 2024 17:14:59 +0100 Subject: [PATCH 5/8] require DD 0.26 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 887d1861a..78c6397a0 100644 --- a/Project.toml +++ b/Project.toml @@ -53,7 +53,7 @@ CommonDataModel = "0.2.3" ConstructionBase = "1" CoordinateTransformations = "0.6.2" DataFrames = "1" -DimensionalData = "0.25.1" +DimensionalData = "0.26" DiskArrays = "0.3, 0.4" Extents = "0.1" FillArrays = "0.12, 0.13, 1" From 7b1dd6cf4e20e4a4bd2f2f50405c828c8edf7b90 Mon Sep 17 00:00:00 2001 From: rafaqz Date: Sun, 10 Mar 2024 17:23:47 +0100 Subject: [PATCH 6/8] limit to 1.9 and above --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c88d0fadc..ef6d6e085 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,7 @@ jobs: fail-fast: true matrix: version: - - '1.8' + - '1.9' - '1' os: - ubuntu-latest From 39123174337040e1a63beed3d85002ae215f39ab Mon Sep 17 00:00:00 2001 From: rafaqz Date: Sun, 10 Mar 2024 17:48:28 +0100 Subject: [PATCH 7/8] remove Requires --- Project.toml | 4 +--- src/Rasters.jl | 5 ----- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/Project.toml b/Project.toml index 78c6397a0..1552923c0 100644 --- a/Project.toml +++ b/Project.toml @@ -22,7 +22,6 @@ OffsetArrays = "6fe1bfb0-de20-5000-8ca7-80f57d26f881" ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca" RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" Reexport = "189a3867-3050-52da-a836-e630ba90ab69" -Requires = "ae029012-a4dd-5104-9daa-d747884805df" Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46" [weakdeps] @@ -71,13 +70,12 @@ ProgressMeter = "1" RasterDataSources = "0.5.7" RecipesBase = "0.7, 0.8, 1.0" Reexport = "0.2, 1.0" -Requires = "0.5, 1" SafeTestsets = "0.1" Setfield = "0.6, 0.7, 0.8, 1" Shapefile = "0.10, 0.11" Statistics = "1" Test = "1" -julia = "1.8" +julia = "1.9" [extras] Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" diff --git a/src/Rasters.jl b/src/Rasters.jl index 64cbd3f14..8dc9f885c 100644 --- a/src/Rasters.jl +++ b/src/Rasters.jl @@ -29,11 +29,6 @@ import Adapt, Reexport, Setfield -# This symbol is only defined on Julia versions that support extensions. -@static if !isdefined(Base, :get_extension) - using Requires -end - Reexport.@reexport using DimensionalData, GeoFormatTypes using DimensionalData.Tables, From 9dd6d0e5ed9cd1aa4eaeb488c233d17440cab329 Mon Sep 17 00:00:00 2001 From: rafaqz Date: Sun, 10 Mar 2024 18:02:08 +0100 Subject: [PATCH 8/8] no AutoIndex --- src/lookup.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lookup.jl b/src/lookup.jl index 0360777b4..388c21a17 100644 --- a/src/lookup.jl +++ b/src/lookup.jl @@ -64,7 +64,7 @@ struct Projected{T,A<:AbstractVector{T},O<:Order,Sp<:Span,Sa<:Sampling,MD,PC,MC, mappedcrs::MC dim::D end -function Projected(data=AutoIndex(); +function Projected(data=AutoValues(); order=AutoOrder(), span=AutoSpan(), sampling=AutoSampling(), metadata=NoMetadata(), crs, mappedcrs=nothing, dim=AutoDim() ) @@ -120,7 +120,7 @@ struct Mapped{T,A<:AbstractVector{T},O<:Order,Sp<:Span,Sa<:Sampling,MD,PC,MC,D} mappedcrs::MC dim::D end -function Mapped(data=AutoIndex(); +function Mapped(data=AutoValues(); order=AutoOrder(), span=AutoSpan(), sampling=AutoSampling(), metadata=NoMetadata(), crs=nothing, mappedcrs, dim=AutoDim() )
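
A minimal sketch of how a `Projected` lookup can still be built explicitly after the `AutoIndex` to `AutoValues` default change, with only the values and the required `crs` supplied and the remaining fields left to their `Auto*` defaults. This is illustrative only and not part of the patch; the EPSG code and the 1-degree grid are assumptions for the example.

using Rasters, Rasters.Lookups, Rasters.GeoFormatTypes

# Explicit Projected lookups wrapped in dimensions; `data` is the positional
# argument whose default is now AutoValues().
lon = X(Projected(0.5:1.0:359.5;
    order=ForwardOrdered(), span=Regular(1.0), sampling=Points(), crs=EPSG(4326)))
lat = Y(Projected(89.5:-1.0:-89.5;
    order=ReverseOrdered(), span=Regular(-1.0), sampling=Points(), crs=EPSG(4326)))

# A Raster built from these dimensions keeps the crs on both lookups.
A = Raster(rand(Float32, 360, 180), (lon, lat))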