diff --git a/src/compression.jl b/src/compression.jl
index b0d0fea..b056e04 100644
--- a/src/compression.jl
+++ b/src/compression.jl
@@ -10,33 +10,32 @@ const BLOSC_MAX_BUFFERSIZE = typemax(Cint) - BLOSC_MAX_OVERHEAD
 import TranscodingStreams, CodecZlib
 
 """
-Return the uncompressed data.
-May return src if no compression was used, or buffer, if compression was used.
-
-`src` is the compressed data.
-`buffer` is a buffer used to avoid allocations, it may be resized and returned.
+Uncompress `src` into the `n` bytes starting at `p`.
 """
-function decompress!(buffer::Vector{UInt8}, src::Vector{UInt8}, metadata::ParsedMetaData)::Vector{UInt8}
-    expected_output_size = prod(metadata.chunks)*metadata.dtype.zarr_size
-    @argcheck expected_output_size > 0
-    if isnothing(metadata.compressor)
-        return src
-    end
-    id = metadata.compressor.id
-    if id == "blosc"
+@noinline function unsafe_decompress!(p::Ptr{UInt8}, n::Int, src::Vector{UInt8}, compressor)::Nothing
+    @argcheck n > 0
+    if !isnothing(compressor) && compressor.id == "blosc"
         numinternalthreads = 1
-        buffer = Vector{UInt8}(undef, expected_output_size)
         sz = ccall((:blosc_decompress_ctx,Blosc_jll.libblosc), Cint,
-            (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Cint), src, buffer, expected_output_size, numinternalthreads)
-        sz == expected_output_size || error("Blosc decompress error, compressed data is corrupted")
-        buffer
-    elseif id == "zlib"
-        TranscodingStreams.transcode(CodecZlib.ZlibDecompressor, src)
-    elseif id == "gzip"
-        TranscodingStreams.transcode(CodecZlib.GzipDecompressor, src)
+            (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Cint), src, p, n, numinternalthreads)
+        sz == n || error("Blosc decompress error, compressed data is corrupted")
+        return
+    end
+    r = if isnothing(compressor)
+        src
     else
-        error("$(id) compressor not supported yet")
+        id = compressor.id
+        if id == "zlib"
+            TranscodingStreams.transcode(CodecZlib.ZlibDecompressor, src)
+        elseif id == "gzip"
+            TranscodingStreams.transcode(CodecZlib.GzipDecompressor, src)
+        else
+            error("$(id) compressor not supported yet")
+        end
     end
+    @argcheck length(r) == n
+    GC.@preserve r Base.unsafe_copyto!(p, Base.unsafe_convert(Ptr{UInt8}, r), n)
+    nothing
 end
 
 """
diff --git a/src/loading.jl b/src/loading.jl
index 5012141..6142da8 100644
--- a/src/loading.jl
+++ b/src/loading.jl
@@ -42,7 +42,7 @@ function load_dir(reader::AbstractReader)::ZGroup
     output = ZGroup()
     keynames = key_names(reader)
     splitkeys = map(x->split(x,'/';keepempty=false), keynames)
-    keyname_dict = Dict(zip(keynames,eachindex(keynames)))
+    keyname_dict::Dict{String, Int} = Dict{String, Int}(zip(keynames,eachindex(keynames)))
     try_add_attrs!(output, reader, keyname_dict, "")
     for splitkey in sort(splitkeys)
         if length(splitkey) < 2
@@ -56,69 +56,21 @@
         arrayname = join(splitkey[begin:end-1],'/')
         arrayidx = keyname_dict[arrayname*"/.zarray"]
         metadata = parse_zarr_metadata(JSON3.read(read_key_idx(reader, arrayidx)))
-        fill_value = reinterpret(metadata.dtype.julia_type, metadata.fill_value)[1]
-        shape, chunks = if metadata.is_column_major
-            metadata.shape, metadata.chunks
-        else
-            reverse(metadata.shape), reverse(metadata.chunks)
-        end
-        array = fill(fill_value, shape...)
-        zarr_size = metadata.dtype.zarr_size
-        julia_size = metadata.dtype.julia_size
+        fill_value = metadata.fill_value
+        zarray = load_array(
+            fill_value,
+            Tuple(metadata.shape),
+            Tuple(metadata.chunks),
+            arrayname,
+            metadata.dimension_separator,
+            keyname_dict,
+            reader,
+            metadata.dtype.in_native_order,
+            metadata.is_column_major,
+            metadata.compressor,
+        )
-        # If there is no actual data don't load chunks
-        if !(any(==(0), shape) || julia_size == 0 || zarr_size == 0)
-            # load chunks
-            for chunkidx in CartesianIndices(Tuple(cld.(shape,chunks)))
-                chunktuple = Tuple(chunkidx) .- 1
-                chunknametuple = if metadata.is_column_major
-                    chunktuple
-                else
-                    #shape and chunks have been pre reversed so reverse chunkidx as well.
-                    reverse(chunktuple)
-                end
-                # empty chunk has name "0" this is the case for zero dim arrays
-                chunkname = arrayname*"/"*(isempty(chunknametuple) ? "0" : join(chunknametuple, metadata.dimension_separator))
-                chunknameidx = get(Returns(0), keyname_dict, chunkname)
-                if chunknameidx > 0
-                    rawchunkdata = read_key_idx(reader, chunknameidx)
-                    decompressed_chunkdata = decompress!(Vector{UInt8}(), rawchunkdata, metadata)
-                    chunkstart = chunktuple .* chunks .+ 1
-                    chunkstop = min.(chunkstart .+ chunks .- 1, shape)
-                    real_chunksize = chunkstop .- chunkstart .+ 1
-                    if julia_size == 1
-                        @assert zarr_size == 1
-                        shaped_chunkdata = reshape(decompressed_chunkdata, chunks...)
-                        shaped_array = reinterpret(UInt8, array)
-                        array_view = view(shaped_array, (range.(chunkstart, chunkstop))...)
-                        chunk_view = view(shaped_chunkdata, (range.(1, real_chunksize))...)
-                        array_view .= chunk_view
-                    else
-                        shaped_chunkdata = reshape(decompressed_chunkdata, zarr_size, chunks...)
-                        shaped_array = reinterpret(reshape, UInt8, array)
-                        # now create overlapping views
-                        array_view = view(shaped_array, :, (range.(chunkstart, chunkstop))...)
-                        chunk_view = view(shaped_chunkdata, :, (range.(1, real_chunksize))...)
-                        # TODO check if the data can just be directly copied.
-                        for (zarr_byte, julia_byte) in enumerate(metadata.dtype.byteorder)
-                            selectdim(array_view, 1, julia_byte) .= selectdim(chunk_view, 1, zarr_byte)
-                        end
-                    end
-                end
-            end
-        end
-        zarray = if metadata.is_column_major || ndims(array) ≤ 1
-            ZArray(array;
-                chunks = Tuple(chunks),
-                compressor = metadata.compressor,
-            )
-        else
-            ZArray(permutedims(array,reverse(1:length(shape)));
-                chunks = Tuple(reverse(chunks)),
-                compressor = metadata.compressor,
-            )
-        end
         output[arrayname] = zarray
         try_add_attrs!(zarray, reader, keyname_dict, arrayname*"/")
@@ -126,3 +78,64 @@ function load_dir(reader::AbstractReader)::ZGroup
     end
     output
 end
+
+
+function load_array(
+        fill_value::T,
+        shape::NTuple{N, Int},
+        chunks::NTuple{N, Int},
+        arrayname::String,
+        dimension_separator::Char,
+        keyname_dict::Dict{String,Int},
+        reader,
+        in_native_order::Bool,
+        is_column_major::Bool,
+        compressor,
+    )::ZArray{T, N} where {T, N}
+    array = fill(fill_value, shape...)
+    # If there is no actual data don't load chunks
+    if !(any(==(0), shape) || sizeof(T) == 0)
+        # load chunks
+        for chunkidx in CartesianIndices(Tuple(cld.(shape,chunks)))
+            chunktuple = Tuple(chunkidx) .- 1
+            # empty chunk has name "0" this is the case for zero dim arrays
+            chunkname = arrayname*"/"*(isempty(chunktuple) ? "0" : join(chunktuple, dimension_separator))
+            chunknameidx = get(Returns(0), keyname_dict, chunkname)
+            if chunknameidx > 0
+                rawchunkdata = read_key_idx(reader, chunknameidx)
+                decompressed_chunkdata = Vector{T}(undef, prod(chunks))
+                GC.@preserve decompressed_chunkdata unsafe_decompress!(
+                    Base.unsafe_convert(Ptr{UInt8}, decompressed_chunkdata),
+                    sizeof(decompressed_chunkdata),
+                    rawchunkdata,
+                    compressor,
+                )
+                if !in_native_order
+                    for i in eachindex(decompressed_chunkdata)
+                        decompressed_chunkdata[i] = htol(ntoh(decompressed_chunkdata[i]))
+                    end
+                end
+                chunkstart = chunktuple .* chunks .+ 1
+                chunkstop = min.(chunkstart .+ chunks .- 1, shape)
+                real_chunksize = chunkstop .- chunkstart .+ 1
+
+                shaped_chunkdata = if is_column_major || N ≤ 1
+                    reshape(decompressed_chunkdata, chunks...)
+                else
+                    permutedims(reshape(decompressed_chunkdata, reverse(chunks)...), ((N:-1:1)...,))
+                end
+                copyto!(
+                    array,
+                    CartesianIndices(((range.(chunkstart, chunkstop))...,)),
+                    shaped_chunkdata,
+                    CartesianIndices(((range.(1, real_chunksize))...,))
+                )
+            end
+        end
+    end
+
+    ZArray(array;
+        chunks,
+        compressor,
+    )
+end
\ No newline at end of file
diff --git a/src/saving.jl b/src/saving.jl
index 35c3122..ca0c678 100644
--- a/src/saving.jl
+++ b/src/saving.jl
@@ -83,15 +83,16 @@ function _save_zarray(writer::AbstractWriter, key_prefix::String, z::ZArray)
     dtype_str::String = sprint(write_type, eltype(data))
     dtype::ParsedType = parse_zarr_type(JSON3.read(dtype_str))
     @assert dtype.julia_type == eltype(data)
+    @assert dtype.in_native_order
     shape = size(data)
-    zarr_size = dtype.zarr_size
+    zarr_size = dtype.type_size
     norm_compressor = normalize_compressor(z.compressor)
     if zarr_size != 0 && !any(iszero, shape)
         chunks = Tuple(z.chunks)
         # store chunks
         shaped_chunkdata = zeros(UInt8, zarr_size, reverse(chunks)...)
         permuted_shaped_chunkdata = PermutedDimsArray(shaped_chunkdata, (1, ndims(z)+1:-1:2...))
-        shaped_array = if dtype.julia_size == 1
+        shaped_array = if zarr_size == 1
             reshape(reinterpret(reshape, UInt8, data), 1, shape...)
         else
             reinterpret(reshape, UInt8, data)
@@ -104,10 +105,7 @@ function _save_zarray(writer::AbstractWriter, key_prefix::String, z::ZArray)
             # now create overlapping views
             array_view = view(shaped_array, :, (range.(chunkstart, chunkstop))...)
             chunk_view = view(permuted_shaped_chunkdata, :, (range.(1, real_chunksize))...)
-            # TODO check if the data can just be directly copied.
-            for (zarr_byte, julia_byte) in enumerate(dtype.byteorder)
-                selectdim(chunk_view, 1, zarr_byte) .= selectdim(array_view, 1, julia_byte)
-            end
+            copy!(chunk_view, array_view)
             compressed_chunkdata = compress(norm_compressor, reshape(shaped_chunkdata,:), zarr_size)
             # empty chunk has name "0" this is the case for zero dim arrays
             chunkname = key_prefix*(isempty(chunktuple) ? "0" : join(chunktuple, '.'))
diff --git a/src/zarr-meta-parsing.jl b/src/zarr-meta-parsing.jl
index 5720e69..0875f50 100644
--- a/src/zarr-meta-parsing.jl
+++ b/src/zarr-meta-parsing.jl
@@ -4,11 +4,6 @@ using ArgCheck
 import JSON3
 import Base64
 
-"Usually 8, maybe this could be 4 on a 32 bit machine?"
-const DOUBLE_ALIGN = (sizeof(Tuple{Float64,Int8}) == 16) ? 3 : 2
-
-const ALIGNMENT_LOOKUP = (0, 1, 2, DOUBLE_ALIGN)
-
 "Character for native byte order"
 const NATIVE_ORDER = ENDIAN_BOM == 0x04030201 ? '<' : '>'
 "Character for other byte order"
 const OTHER_ORDER = ENDIAN_BOM == 0x04030201 ? '>' : '<'
@@ -17,27 +12,13 @@
 Base.@kwdef struct ParsedType
     "Julia type that this type represents.
     This must be an isbits type"
-    julia_type::Type
-
-    "Number of bytes julia type takes."
-    julia_size::Int64
-
-    "Number of bytes the type takes up in zarr.
-    This can differ from `julia_size` because in zarr structs are packed,
-    In Julia structs are aligned. See:
-    https://en.wikipedia.org/wiki/Data_structure_alignment"
-    zarr_size::Int64 = julia_size
-
-    "How bytes should be copied from the zarr type to the julia type.
-    Has length equal to `zarr_size`"
-    byteorder::Vector{Int64}
+    julia_type::DataType
 
-    "Alignment requirements for this type:
-    0 is 1 byte, 1 is 2 byte, 2 is 4 byte, 3 is 8 byte"
-    alignment::Int
+    "Number of bytes the type takes."
+    type_size::Int64
 
-    # "`zarr_size == julia_size && byteorder == 1:julia_size`"
-    # just_copy::Bool
+    "Whether the stored byte order is the native byte order (if false, the bytes must be swapped)"
+    in_native_order::Bool
 end
 
 function Base.:(==)(a::ParsedType, b::ParsedType)
@@ -62,9 +43,8 @@ function parse_zarr_type(typestr::String; silence_warnings=false)::ParsedType
         @argcheck numthings == 1
         return ParsedType(;
             julia_type = Bool,
-            julia_size = 1,
-            byteorder = [1],
-            alignment = 0,
+            type_size = 1,
+            in_native_order = true,
         )
     elseif typechar == 'i'
         @argcheck numthings in 1:8
@@ -74,9 +54,8 @@ function parse_zarr_type(typestr::String; silence_warnings=false)::ParsedType
         tz = trailing_zeros(numthings)
         return ParsedType(;
             julia_type = (Int8, Int16, Int32, Int64)[tz+1],
-            julia_size = numthings,
-            byteorder = in_native_order ? (1:numthings) : (numthings:-1:1),
-            alignment = ALIGNMENT_LOOKUP[tz+1],
+            type_size = numthings,
+            in_native_order,
         )
     elseif typechar == 'u'
         @argcheck numthings in 1:8
@@ -86,9 +65,8 @@ function parse_zarr_type(typestr::String; silence_warnings=false)::ParsedType
         tz = trailing_zeros(numthings)
         return ParsedType(;
             julia_type = (UInt8, UInt16, UInt32, UInt64)[tz+1],
-            julia_size = numthings,
-            byteorder = in_native_order ? (1:numthings) : (numthings:-1:1),
-            alignment = ALIGNMENT_LOOKUP[tz+1],
+            type_size = numthings,
+            in_native_order,
         )
     elseif typechar == 'f'
         @argcheck numthings in 2:8
@@ -98,9 +76,8 @@ function parse_zarr_type(typestr::String; silence_warnings=false)::ParsedType
         tz = trailing_zeros(numthings)
         return ParsedType(;
             julia_type = (Float16, Float32, Float64)[tz],
-            julia_size = numthings,
-            byteorder = in_native_order ? (1:numthings) : (numthings:-1:1),
-            alignment = ALIGNMENT_LOOKUP[tz+1],
+            type_size = numthings,
+            in_native_order,
         )
     elseif typechar == 'c'
         @argcheck numthings in 4:16
@@ -110,16 +87,14 @@ function parse_zarr_type(typestr::String; silence_warnings=false)::ParsedType
         tz = trailing_zeros(numthings)
         return ParsedType(;
             julia_type = (ComplexF16, ComplexF32, ComplexF64)[tz - 1],
-            julia_size = numthings,
-            byteorder = in_native_order ? (1:numthings) : [numthings÷2:-1:1; numthings:-1:numthings÷2+1;],
-            alignment = ALIGNMENT_LOOKUP[tz],
+            type_size = numthings,
+            in_native_order,
         )
     elseif typechar == 'V'
         return ParsedType(;
             julia_type = NTuple{numthings, UInt8},
-            julia_size = numthings,
-            byteorder = 1:numthings,
-            alignment = 0,
+            type_size = numthings,
+            in_native_order = true,
         )
     else
         error("Unreachable")
@@ -135,32 +110,30 @@ end
 
 """
-Return the fill value in bytes that should be copied to the julia type.
+Return the fill value in the julia type.
""" -function parse_zarr_fill_value(fill_value::Union{String,Nothing}, dtype::ParsedType)::Vector{UInt8} - if isnothing(fill_value) - zeros(UInt8, dtype.julia_size) - elseif (fill_value in ("NaN","Infinity","-Infinity")) && (dtype.julia_type <: AbstractFloat) - reinterpret(UInt8,[parse(dtype.julia_type, fill_value)]) +function parse_zarr_fill_value(fill_value::String, dtype::ParsedType) + if (fill_value in ("NaN","Infinity","-Infinity")) && (dtype.julia_type <: AbstractFloat) + parse(dtype.julia_type, fill_value) else zarr_bytes = Base64.base64decode(fill_value) - @argcheck length(zarr_bytes) == dtype.zarr_size - output = zeros(UInt8, dtype.julia_size) - for i in 1:dtype.zarr_size - output[dtype.byteorder[i]] = zarr_bytes[i] + @argcheck length(zarr_bytes) == dtype.type_size + v = reinterpret(dtype.julia_type, zarr_bytes)[1] + if dtype.in_native_order + v + else + htol(ntoh(v)) end - output end end - -""" -Return the fill value in bytes that should be copied to the julia type. -""" -function parse_zarr_fill_value(fill_value::Union{Bool,Float64,Int64}, dtype::ParsedType)::Vector{UInt8} +function parse_zarr_fill_value(fill_value::Nothing, dtype::ParsedType) + reinterpret(dtype.julia_type, zeros(UInt8, dtype.type_size))[1] +end +function parse_zarr_fill_value(fill_value::Union{Bool,Float64,Int64}, dtype::ParsedType) if iszero(fill_value) # If its zero just set all bytes to zero. - zeros(UInt8, dtype.julia_size) + reinterpret(dtype.julia_type, zeros(UInt8, dtype.type_size))[1] else - reinterpret(UInt8,[convert(dtype.julia_type, fill_value)]) + convert(dtype.julia_type, fill_value) end end @@ -174,7 +147,7 @@ Base.@kwdef struct ParsedMetaData chunks::Vector{Int} dtype::ParsedType compressor::Union{Nothing, JSON3.Object} - fill_value::Vector{UInt8} + fill_value::Any is_column_major::Bool dimension_separator::Char='.' end diff --git a/test/test_read-write-fixture.jl b/test/test_read-write-fixture.jl index b52f986..191744c 100644 --- a/test/test_read-write-fixture.jl +++ b/test/test_read-write-fixture.jl @@ -70,21 +70,14 @@ function compare_jl_py_zarray(jl_zarray::SmallZarrGroups.ZArray, py_zarray) # compare shapes @test size(jl_zarray.data) == pyconvert(Tuple,py_zarray.shape) # test values equal - py_data, ok = try - Array(PyArray(py_zarray.get_basic_selection())), true - catch e - if endswith(e.msg, "cannot convert this Python 'ndarray' to a 'PyArray'") - Array(PyArray(py_zarray.get_basic_selection().tobytes())), false - else - rethrow() - end - end - if ok - if eltype(py_data) <: PythonCall.Utils.StaticString{UInt32} - @test rstrip.(String.(jl_zarray.data), '\0') == rstrip.(String.(py_data), '\0') - else - @test isequal(py_data,jl_zarray.data) - end + py_data = Array(PyArray(py_zarray.get_basic_selection())) + if isequal(py_data, jl_zarray.data) + @test isequal(py_data, jl_zarray.data) + else + @error "mismatch between python data and julia data" + @error jl_zarray.compressor + @error jl_zarray.chunks + @test false end end diff --git a/test/test_zarr-meta-parsing.jl b/test/test_zarr-meta-parsing.jl index 1e53410..9b4c5e6 100644 --- a/test/test_zarr-meta-parsing.jl +++ b/test/test_zarr-meta-parsing.jl @@ -11,10 +11,8 @@ const OTHER_ORDER = (ENDIAN_BOM == 0x04030201) ? '>' : '<' @testset "zero byte types" begin zerobytetype(t) = SmallZarrGroups.ParsedType( julia_type = t, - julia_size = 0, - zarr_size = 0, - byteorder = [], - alignment = 0, + type_size = 0, + in_native_order = true, ) tests = [ "V0"=>NTuple{0,UInt8}, @@ -29,10 +27,8 @@ const OTHER_ORDER = (ENDIAN_BOM == 0x04030201) ? 
'>' : '<' @testset "one byte types" begin onebytetype(t) = SmallZarrGroups.ParsedType( julia_type = t, - julia_size = 1, - zarr_size = 1, - byteorder = [1], - alignment = 0, + type_size = 1, + in_native_order = true, ) tests = [ "b1"=>Bool, @@ -64,15 +60,13 @@ const OTHER_ORDER = (ENDIAN_BOM == 0x04030201) ? '>' : '<' s = sizeof(t) @test SmallZarrGroups.parse_zarr_type(NATIVE_ORDER*pair[1]) == SmallZarrGroups.ParsedType( julia_type = t, - julia_size = s, - byteorder = 1:s, - alignment = trailing_zeros(s), + type_size = s, + in_native_order = true, ) @test SmallZarrGroups.parse_zarr_type(OTHER_ORDER*pair[1]) == SmallZarrGroups.ParsedType( julia_type = t, - julia_size = s, - byteorder = s:-1:1, - alignment = trailing_zeros(s), + type_size = s, + in_native_order = false, ) end end @@ -88,24 +82,21 @@ const OTHER_ORDER = (ENDIAN_BOM == 0x04030201) ? '>' : '<' hs = s÷2 @test SmallZarrGroups.parse_zarr_type(NATIVE_ORDER*pair[1]) == SmallZarrGroups.ParsedType( julia_type = t, - julia_size = s, - byteorder = 1:s, - alignment = trailing_zeros(hs), + type_size = s, + in_native_order = true, ) @test SmallZarrGroups.parse_zarr_type(OTHER_ORDER*pair[1]) == SmallZarrGroups.ParsedType( julia_type = t, - julia_size = s, - byteorder = [(hs:-1:1); (s:-1:(hs+1));], - alignment = trailing_zeros(hs), + type_size = s, + in_native_order = false, ) end end @testset "static bytes types" begin staticstringtype(t,n) = SmallZarrGroups.ParsedType( julia_type = t{n}, - julia_size = n, - byteorder = 1:n, - alignment = 0, + type_size = n, + in_native_order = true, ) for (typestr, t) in ("V" => (NTuple{N,UInt8} where N),) for n in 0:1050 @@ -120,42 +111,42 @@ end @testset "parsing fill value" begin - tests = [ - (nothing, "$(NATIVE_ORDER)f8") => zeros(UInt8, 8), - (nothing, "|u1") => zeros(UInt8, 1), - ("NaN", "$(NATIVE_ORDER)f8") => collect(reinterpret(UInt8,[NaN64])), - ("NaN", "$(NATIVE_ORDER)f4") => collect(reinterpret(UInt8,[NaN32])), - ("NaN", "$(NATIVE_ORDER)f2") => collect(reinterpret(UInt8,[NaN16])), - ("NaN", "$(OTHER_ORDER)f8") => collect(reinterpret(UInt8,[NaN64])), - ("NaN", "$(OTHER_ORDER)f4") => collect(reinterpret(UInt8,[NaN32])), - ("NaN", "$(OTHER_ORDER)f2") => collect(reinterpret(UInt8,[NaN16])), + tests = Any[ + (nothing, "$(NATIVE_ORDER)f8") => 0.0, + (nothing, "|u1") => 0x00, + ("NaN", "$(NATIVE_ORDER)f8") => NaN64, + ("NaN", "$(NATIVE_ORDER)f4") => NaN32, + ("NaN", "$(NATIVE_ORDER)f2") => NaN16, + ("NaN", "$(OTHER_ORDER)f8") => NaN64, + ("NaN", "$(OTHER_ORDER)f4") => NaN32, + ("NaN", "$(OTHER_ORDER)f2") => NaN16, - ("Infinity", "$(NATIVE_ORDER)f8") => collect(reinterpret(UInt8,[Inf64])), - ("Infinity", "$(NATIVE_ORDER)f4") => collect(reinterpret(UInt8,[Inf32])), - ("Infinity", "$(NATIVE_ORDER)f2") => collect(reinterpret(UInt8,[Inf16])), - ("Infinity", "$(OTHER_ORDER)f8") => collect(reinterpret(UInt8,[Inf64])), - ("Infinity", "$(OTHER_ORDER)f4") => collect(reinterpret(UInt8,[Inf32])), - ("Infinity", "$(OTHER_ORDER)f2") => collect(reinterpret(UInt8,[Inf16])), + ("Infinity", "$(NATIVE_ORDER)f8") => Inf64, + ("Infinity", "$(NATIVE_ORDER)f4") => Inf32, + ("Infinity", "$(NATIVE_ORDER)f2") => Inf16, + ("Infinity", "$(OTHER_ORDER)f8") => Inf64, + ("Infinity", "$(OTHER_ORDER)f4") => Inf32, + ("Infinity", "$(OTHER_ORDER)f2") => Inf16, - ("-Infinity", "$(NATIVE_ORDER)f8") => collect(reinterpret(UInt8,[-Inf64])), - ("-Infinity", "$(NATIVE_ORDER)f4") => collect(reinterpret(UInt8,[-Inf32])), - ("-Infinity", "$(NATIVE_ORDER)f2") => collect(reinterpret(UInt8,[-Inf16])), - ("-Infinity", "$(OTHER_ORDER)f8") => 
collect(reinterpret(UInt8,[-Inf64])), - ("-Infinity", "$(OTHER_ORDER)f4") => collect(reinterpret(UInt8,[-Inf32])), - ("-Infinity", "$(OTHER_ORDER)f2") => collect(reinterpret(UInt8,[-Inf16])), + ("-Infinity", "$(NATIVE_ORDER)f8") => -Inf64, + ("-Infinity", "$(NATIVE_ORDER)f4") => -Inf32, + ("-Infinity", "$(NATIVE_ORDER)f2") => -Inf16, + ("-Infinity", "$(OTHER_ORDER)f8") => -Inf64, + ("-Infinity", "$(OTHER_ORDER)f4") => -Inf32, + ("-Infinity", "$(OTHER_ORDER)f2") => -Inf16, - ("BBB=", "$(NATIVE_ORDER)f2") => [0x04, 0x10], - ("BBB=", "$(OTHER_ORDER)f2") => [0x10, 0x04], + ("BBB=", "$(NATIVE_ORDER)f2") => reinterpret(Float16,[0x04, 0x10])[1], + ("BBB=", "$(OTHER_ORDER)f2") => reinterpret(Float16,[0x10, 0x04])[1], - (0, "$(NATIVE_ORDER)f2") => [0x00, 0x00], - (1, "$(NATIVE_ORDER)u2") => [0x01, 0x00], - (1.0, "$(NATIVE_ORDER)f2") => [0x00, 0x3c], - (1, "$(OTHER_ORDER)u2") => [0x01, 0x00], - (1.0, "$(OTHER_ORDER)f2") => [0x00, 0x3c], - (1.5, "$(OTHER_ORDER)f2") => [0x00, 0x3e], + (0, "$(NATIVE_ORDER)f2") => Float16(0.0), + (1, "$(NATIVE_ORDER)u2") => 0x0001, + (1.0, "$(NATIVE_ORDER)f2") => Float16(1.0), + (1, "$(OTHER_ORDER)u2") => 0x0001, + (1.0, "$(OTHER_ORDER)f2") => Float16(1.0), + (1.5, "$(OTHER_ORDER)f2") => Float16(1.5), ] for testpair in tests dtype = SmallZarrGroups.parse_zarr_type(testpair[1][2]) - @test SmallZarrGroups.parse_zarr_fill_value(testpair[1][1], dtype) == testpair[2] + @test SmallZarrGroups.parse_zarr_fill_value(testpair[1][1], dtype) === testpair[2] end end \ No newline at end of file