From 44e0ff10656a4a29643768ee2342c7b74be451e7 Mon Sep 17 00:00:00 2001 From: Sam Ansmink Date: Tue, 27 Aug 2024 19:17:11 +0200 Subject: [PATCH 1/3] Fix CI (#375) * add time option to satisfy clippy * attempt to satisfy clippy --- crates/duckdb/Cargo.toml | 3 ++ crates/duckdb/src/column.rs | 2 +- crates/duckdb/src/types/mod.rs | 73 ---------------------------------- 3 files changed, 4 insertions(+), 74 deletions(-) diff --git a/crates/duckdb/Cargo.toml b/crates/duckdb/Cargo.toml index d2783176..e04260e4 100644 --- a/crates/duckdb/Cargo.toml +++ b/crates/duckdb/Cargo.toml @@ -32,6 +32,9 @@ extensions-full = ["json", "parquet", "vtab-full"] buildtime_bindgen = ["libduckdb-sys/buildtime_bindgen"] modern-full = ["chrono", "serde_json", "url", "r2d2", "uuid", "polars"] polars = ["dep:polars"] +# FIXME: These were added to make clippy happy: these features appear unused and should perhaps be removed +column_decltype = [] +extra_check = [] [dependencies] libduckdb-sys = { workspace = true } diff --git a/crates/duckdb/src/column.rs b/crates/duckdb/src/column.rs index 8e898fef..8d4c8935 100644 --- a/crates/duckdb/src/column.rs +++ b/crates/duckdb/src/column.rs @@ -146,7 +146,7 @@ impl Statement<'_> { #[cfg(feature = "column_decltype")] pub fn columns(&self) -> Vec { let n = self.column_count(); - let mut cols = Vec::with_capacity(n as usize); + let mut cols = Vec::with_capacity(n); for i in 0..n { let name = self.column_name_unwrap(i); let slice = self.stmt.column_decltype(i); diff --git a/crates/duckdb/src/types/mod.rs b/crates/duckdb/src/types/mod.rs index 6b65a9c1..f8ae5c58 100644 --- a/crates/duckdb/src/types/mod.rs +++ b/crates/duckdb/src/types/mod.rs @@ -1,68 +1,3 @@ -//! Traits dealing with DuckDB data types. -//! -//! DuckDB uses a [dynamic type system](https://www.sqlite.org/datatype3.html). Implementations of -//! the [`ToSql`] and [`FromSql`] traits are provided for the basic types that -//! DuckDB provides methods for: -//! -//! * Strings (`String` and `&str`) -//! * Blobs (`Vec` and `&[u8]`) -//! * Numbers -//! -//! The number situation is a little complicated due to the fact that all -//! numbers in DuckDB are stored as `INTEGER` (`i64`) or `REAL` (`f64`). -//! -//! [`ToSql`] and [`FromSql`] are implemented for all primitive number types. -//! [`FromSql`] has different behaviour depending on the SQL and Rust types, and -//! the value. -//! -//! * `INTEGER` to integer: returns an -//! [`Error::IntegralValueOutOfRange`](crate::Error::IntegralValueOutOfRange) -//! error if the value does not fit in the Rust type. -//! * `REAL` to integer: always returns an -//! [`Error::InvalidColumnType`](crate::Error::InvalidColumnType) error. -//! * `INTEGER` to float: casts using `as` operator. Never fails. -//! * `REAL` to float: casts using `as` operator. Never fails. -//! -//! [`ToSql`] always succeeds except when storing a `u64` or `usize` value that -//! cannot fit in an `INTEGER` (`i64`). Also note that DuckDB ignores column -//! types, so if you store an `i64` in a column with type `REAL` it will be -//! stored as an `INTEGER`, not a `REAL`. -//! -//! If the `time` feature is enabled, implementations are -//! provided for `time::OffsetDateTime` that use the RFC 3339 date/time format, -//! `"%Y-%m-%dT%H:%M:%S.%fZ"`, to store time values as strings. These values -//! can be parsed by SQLite's builtin -//! [datetime](https://www.sqlite.org/lang_datefunc.html) functions. If you -//! want different storage for datetimes, you can use a newtype. 
-#![cfg_attr( - feature = "time", - doc = r##" -For example, to store datetimes as `i64`s counting the number of seconds since -the Unix epoch: - -``` -use duckdb::types::{FromSql, FromSqlResult, ToSql, ToSqlOutput, ValueRef}; -use duckdb::Result; - -pub struct DateTimeSql(pub time::OffsetDateTime); - -impl FromSql for DateTimeSql { - fn column_result(value: ValueRef) -> FromSqlResult { - i64::column_result(value).map(|as_i64| { - DateTimeSql(time::OffsetDateTime::from_unix_timestamp(as_i64)) - }) - } -} - -impl ToSql for DateTimeSql { - fn to_sql(&self) -> Result { - Ok(self.0.timestamp().into()) - } -} -``` - -"## -)] //! [`ToSql`] and [`FromSql`] are also implemented for `Option` where `T` //! implements [`ToSql`] or [`FromSql`] for the cases where you want to know if //! a value was NULL (which gets translated to `None`). @@ -395,10 +330,6 @@ mod test { assert!(is_invalid_column_type(row.get::<_, i64>(0).err().unwrap())); assert!(is_invalid_column_type(row.get::<_, c_double>(0).err().unwrap())); assert!(is_invalid_column_type(row.get::<_, String>(0).err().unwrap())); - #[cfg(feature = "time")] - assert!(is_invalid_column_type( - row.get::<_, time::OffsetDateTime>(0).err().unwrap() - )); assert!(is_invalid_column_type(row.get::<_, Option>(0).err().unwrap())); // 1 is actually a text (String) @@ -426,10 +357,6 @@ mod test { assert!(is_invalid_column_type(row.get::<_, c_double>(4).err().unwrap())); assert!(is_invalid_column_type(row.get::<_, String>(4).err().unwrap())); assert!(is_invalid_column_type(row.get::<_, Vec>(4).err().unwrap())); - #[cfg(feature = "time")] - assert!(is_invalid_column_type( - row.get::<_, time::OffsetDateTime>(4).err().unwrap() - )); Ok(()) } From 02a0f3e78d0e168373e5ef4a408b238418f86790 Mon Sep 17 00:00:00 2001 From: yfu Date: Fri, 30 Aug 2024 21:21:07 +1000 Subject: [PATCH 2/3] Set nulls correctly for all type of arrays/vectors (#344) * Set nulls for all possible arrays * set nulls for all possible array to vectors * add more set nulls * wip * only change flat vector * Revert "only change flat vector" This reverts commit 90c9d75dc0448d49d621df61c9b27bfeb529178f. * add list vector nulls * add tests to cover set_nulls * fix test * fix clippy * clippy --------- Co-authored-by: peasee <98815791+peasee@users.noreply.github.com> --- crates/duckdb/src/core/data_chunk.rs | 1 + crates/duckdb/src/core/vector.rs | 29 +++++- crates/duckdb/src/vtab/arrow.rs | 130 ++++++++++++++++++++++----- crates/libduckdb-sys/Cargo.toml | 1 + 4 files changed, 137 insertions(+), 24 deletions(-) diff --git a/crates/duckdb/src/core/data_chunk.rs b/crates/duckdb/src/core/data_chunk.rs index 7b6d2e2c..3ef35992 100644 --- a/crates/duckdb/src/core/data_chunk.rs +++ b/crates/duckdb/src/core/data_chunk.rs @@ -26,6 +26,7 @@ impl Drop for DataChunkHandle { } impl DataChunkHandle { + #[allow(dead_code)] pub(crate) unsafe fn new_unowned(ptr: duckdb_data_chunk) -> Self { Self { ptr, owned: false } } diff --git a/crates/duckdb/src/core/vector.rs b/crates/duckdb/src/core/vector.rs index befda697..92e5622a 100644 --- a/crates/duckdb/src/core/vector.rs +++ b/crates/duckdb/src/core/vector.rs @@ -173,6 +173,15 @@ impl ListVector { self.entries.as_mut_slice::()[idx].length = length as u64; } + /// Set row as null + pub fn set_null(&mut self, row: usize) { + unsafe { + duckdb_vector_ensure_validity_writable(self.entries.ptr); + let idx = duckdb_vector_get_validity(self.entries.ptr); + duckdb_validity_set_row_invalid(idx, row as u64); + } + } + /// Reserve the capacity for its child node. 
fn reserve(&self, capacity: usize) { unsafe { @@ -190,7 +199,6 @@ impl ListVector { /// A array vector. (fixed-size list) pub struct ArrayVector { - /// ArrayVector does not own the vector pointer. ptr: duckdb_vector, } @@ -223,11 +231,19 @@ impl ArrayVector { pub fn set_child(&self, data: &[T]) { self.child(data.len()).copy(data); } + + /// Set row as null + pub fn set_null(&mut self, row: usize) { + unsafe { + duckdb_vector_ensure_validity_writable(self.ptr); + let idx = duckdb_vector_get_validity(self.ptr); + duckdb_validity_set_row_invalid(idx, row as u64); + } + } } /// A struct vector. pub struct StructVector { - /// ListVector does not own the vector pointer. ptr: duckdb_vector, } @@ -277,4 +293,13 @@ impl StructVector { let logical_type = self.logical_type(); unsafe { duckdb_struct_type_child_count(logical_type.ptr) as usize } } + + /// Set row as null + pub fn set_null(&mut self, row: usize) { + unsafe { + duckdb_vector_ensure_validity_writable(self.ptr); + let idx = duckdb_vector_get_validity(self.ptr); + duckdb_validity_set_row_invalid(idx, row as u64); + } + } } diff --git a/crates/duckdb/src/vtab/arrow.rs b/crates/duckdb/src/vtab/arrow.rs index 0dbbd7f5..1e985146 100644 --- a/crates/duckdb/src/vtab/arrow.rs +++ b/crates/duckdb/src/vtab/arrow.rs @@ -268,13 +268,7 @@ pub fn record_batch_to_duckdb_data_chunk( fn primitive_array_to_flat_vector(array: &PrimitiveArray, out_vector: &mut FlatVector) { // assert!(array.len() <= out_vector.capacity()); out_vector.copy::(array.values()); - if let Some(nulls) = array.nulls() { - for (i, null) in nulls.into_iter().enumerate() { - if !null { - out_vector.set_null(i); - } - } - } + set_nulls_in_flat_vector(array, out_vector); } fn primitive_array_to_flat_vector_cast( @@ -285,13 +279,7 @@ fn primitive_array_to_flat_vector_cast( let array = arrow::compute::kernels::cast::cast(array, &data_type).unwrap(); let out_vector: &mut FlatVector = out_vector.as_mut_any().downcast_mut().unwrap(); out_vector.copy::(array.as_primitive::().values()); - if let Some(nulls) = array.nulls() { - for (i, null) in nulls.iter().enumerate() { - if !null { - out_vector.set_null(i); - } - } - } + set_nulls_in_flat_vector(&array, out_vector); } fn primitive_array_to_vector(array: &dyn Array, out: &mut dyn Vector) -> Result<(), Box> { @@ -441,13 +429,7 @@ fn decimal_array_to_vector(array: &Decimal128Array, out: &mut FlatVector, width: } // Set nulls - if let Some(nulls) = array.nulls() { - for (i, null) in nulls.into_iter().enumerate() { - if !null { - out.set_null(i); - } - } - } + set_nulls_in_flat_vector(array, out); } /// Convert Arrow [BooleanArray] to a duckdb vector. 
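The remaining hunks route the string, binary, list, fixed-size-list and struct conversions through shared `set_nulls_in_*` helpers, so Arrow validity masks now reach DuckDB for those types as well. As a minimal sketch of what that buys (illustration only, not part of the diff; the function name is made up, and it assumes the `Appender::append_record_batch` path exercised by the tests further down, which may be feature-gated in some builds):

```rust
use std::sync::Arc;

use arrow::array::StringArray;
use arrow::datatypes::{DataType, Field, Schema};
use arrow::record_batch::RecordBatch;
use duckdb::Connection;

// Hypothetical helper showing that a NULL in a nullable Utf8 column survives an
// Arrow record-batch append once the validity mask is copied into the vector.
fn nulls_survive_append() -> Result<(), Box<dyn std::error::Error>> {
    let db = Connection::open_in_memory()?;
    db.execute_batch("CREATE TABLE t (v VARCHAR)")?;

    // A nullable Utf8 column with an explicit NULL in the middle.
    let col = StringArray::from(vec![Some("a"), None, Some("c")]);
    let schema = Schema::new(vec![Field::new("v", DataType::Utf8, true)]);
    let rb = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(col)])?;

    let mut app = db.appender("t")?;
    app.append_record_batch(rb)?;
    drop(app); // dropping the appender flushes pending rows

    // With set_nulls_in_flat_vector wired into string_array_to_vector,
    // the NULL row stays NULL on the DuckDB side.
    let nulls: i64 = db.query_row("SELECT count(*) FROM t WHERE v IS NULL", [], |r| r.get(0))?;
    assert_eq!(nulls, 1);
    Ok(())
}
```

Without the helper, a null slot in such a column would previously lose its validity bit and come through as a non-NULL default value.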
@@ -457,6 +439,7 @@ fn boolean_array_to_vector(array: &BooleanArray, out: &mut FlatVector) { for i in 0..array.len() { out.as_mut_slice()[i] = array.value(i); } + set_nulls_in_flat_vector(array, out); } fn string_array_to_vector(array: &GenericStringArray, out: &mut FlatVector) { @@ -467,6 +450,7 @@ fn string_array_to_vector(array: &GenericStringArray, out let s = array.value(i); out.insert(i, s); } + set_nulls_in_flat_vector(array, out); } fn binary_array_to_vector(array: &BinaryArray, out: &mut FlatVector) { @@ -476,6 +460,7 @@ fn binary_array_to_vector(array: &BinaryArray, out: &mut FlatVector) { let s = array.value(i); out.insert(i, s); } + set_nulls_in_flat_vector(array, out); } fn list_array_to_vector>( @@ -504,6 +489,8 @@ fn list_array_to_vector>( let length = array.value_length(i); out.set_entry(i, offset.as_(), length.as_()); } + set_nulls_in_list_vector(array, out); + Ok(()) } @@ -528,6 +515,8 @@ fn fixed_size_list_array_to_vector( } } + set_nulls_in_array_vector(array, out); + Ok(()) } @@ -575,6 +564,7 @@ fn struct_array_to_vector(array: &StructArray, out: &mut StructVector) -> Result } } } + set_nulls_in_struct_vector(array, out); Ok(()) } @@ -611,6 +601,46 @@ pub fn arrow_ffi_to_query_params(array: FFI_ArrowArray, schema: FFI_ArrowSchema) [arr as *mut _ as usize, sch as *mut _ as usize] } +fn set_nulls_in_flat_vector(array: &dyn Array, out_vector: &mut FlatVector) { + if let Some(nulls) = array.nulls() { + for (i, null) in nulls.into_iter().enumerate() { + if !null { + out_vector.set_null(i); + } + } + } +} + +fn set_nulls_in_struct_vector(array: &dyn Array, out_vector: &mut StructVector) { + if let Some(nulls) = array.nulls() { + for (i, null) in nulls.into_iter().enumerate() { + if !null { + out_vector.set_null(i); + } + } + } +} + +fn set_nulls_in_array_vector(array: &dyn Array, out_vector: &mut ArrayVector) { + if let Some(nulls) = array.nulls() { + for (i, null) in nulls.into_iter().enumerate() { + if !null { + out_vector.set_null(i); + } + } + } +} + +fn set_nulls_in_list_vector(array: &dyn Array, out_vector: &mut ListVector) { + if let Some(nulls) = array.nulls() { + for (i, null) in nulls.into_iter().enumerate() { + if !null { + out_vector.set_null(i); + } + } + } +} + #[cfg(test)] mod test { use super::{arrow_recordbatch_to_query_params, ArrowVTab}; @@ -705,6 +735,44 @@ mod test { Ok(()) } + #[test] + fn test_append_struct_contains_null() -> Result<(), Box> { + let db = Connection::open_in_memory()?; + db.execute_batch("CREATE TABLE t1 (s STRUCT(v VARCHAR, i INTEGER))")?; + { + let struct_array = StructArray::try_new( + vec![ + Arc::new(Field::new("v", DataType::Utf8, true)), + Arc::new(Field::new("i", DataType::Int32, true)), + ] + .into(), + vec![ + Arc::new(StringArray::from(vec![Some("foo"), Some("bar")])) as ArrayRef, + Arc::new(Int32Array::from(vec![Some(1), Some(2)])) as ArrayRef, + ], + Some(vec![true, false].into()), + )?; + + let schema = Schema::new(vec![Field::new( + "s", + DataType::Struct(Fields::from(vec![ + Field::new("v", DataType::Utf8, true), + Field::new("i", DataType::Int32, true), + ])), + true, + )]); + + let record_batch = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(struct_array)])?; + let mut app = db.appender("t1")?; + app.append_record_batch(record_batch)?; + } + let mut stmt = db.prepare("SELECT s FROM t1 where s IS NOT NULL")?; + let rbs: Vec = stmt.query_arrow([])?.collect(); + assert_eq!(rbs.iter().map(|op| op.num_rows()).sum::(), 1); + + Ok(()) + } + fn check_rust_primitive_array_roundtrip( input_array: PrimitiveArray, 
expected_array: PrimitiveArray, @@ -762,7 +830,7 @@ mod test { db.register_table_function::("arrow")?; // Roundtrip a record batch from Rust to DuckDB and back to Rust - let schema = Schema::new(vec![Field::new("a", arry.data_type().clone(), false)]); + let schema = Schema::new(vec![Field::new("a", arry.data_type().clone(), true)]); let rb = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(arry.clone())])?; let param = arrow_recordbatch_to_query_params(rb); @@ -910,6 +978,24 @@ mod test { Ok(()) } + #[test] + fn test_check_generic_array_roundtrip_contains_null() -> Result<(), Box> { + check_generic_array_roundtrip(ListArray::new( + Arc::new(Field::new("item", DataType::Utf8, true)), + OffsetBuffer::new(ScalarBuffer::from(vec![0, 2, 4, 5])), + Arc::new(StringArray::from(vec![ + Some("foo"), + Some("baz"), + Some("bar"), + Some("foo"), + Some("baz"), + ])), + Some(vec![true, false, true].into()), + ))?; + + Ok(()) + } + #[test] fn test_utf8_roundtrip() -> Result<(), Box> { check_generic_byte_roundtrip( diff --git a/crates/libduckdb-sys/Cargo.toml b/crates/libduckdb-sys/Cargo.toml index d02c0ea0..8e6804bd 100644 --- a/crates/libduckdb-sys/Cargo.toml +++ b/crates/libduckdb-sys/Cargo.toml @@ -22,6 +22,7 @@ buildtime_bindgen = ["bindgen", "pkg-config", "vcpkg"] json = ["bundled"] parquet = ["bundled"] extensions-full = ["json", "parquet"] +winduckdb = [] [dependencies] From 36b83bcc912ec69583ea41280fcd585bdc3472e9 Mon Sep 17 00:00:00 2001 From: yfu Date: Fri, 30 Aug 2024 21:21:54 +1000 Subject: [PATCH 3/3] add support of fixedsizebinary, duration, interval support in arrow (#374) * add support of fixedsizebinary, duration, interval support in arrow * chore: Cargo fmt * fix: clippy::expect-fun-call --------- Co-authored-by: peasee <98815791+peasee@users.noreply.github.com> --- crates/duckdb/src/vtab/arrow.rs | 130 +++++++++++++++++++++++++++++--- 1 file changed, 119 insertions(+), 11 deletions(-) diff --git a/crates/duckdb/src/vtab/arrow.rs b/crates/duckdb/src/vtab/arrow.rs index 1e985146..219f6f71 100644 --- a/crates/duckdb/src/vtab/arrow.rs +++ b/crates/duckdb/src/vtab/arrow.rs @@ -2,10 +2,14 @@ use super::{BindInfo, DataChunkHandle, Free, FunctionInfo, InitInfo, LogicalType use std::ptr::null_mut; use crate::core::{ArrayVector, FlatVector, Inserter, ListVector, StructVector, Vector}; -use arrow::array::{ - as_boolean_array, as_generic_binary_array, as_large_list_array, as_list_array, as_primitive_array, as_string_array, - as_struct_array, Array, ArrayData, AsArray, BinaryArray, BooleanArray, Decimal128Array, FixedSizeListArray, - GenericListArray, GenericStringArray, LargeStringArray, OffsetSizeTrait, PrimitiveArray, StructArray, +use arrow::{ + array::{ + as_boolean_array, as_generic_binary_array, as_large_list_array, as_list_array, as_primitive_array, + as_string_array, as_struct_array, Array, ArrayData, AsArray, BinaryArray, BooleanArray, Decimal128Array, + FixedSizeBinaryArray, FixedSizeListArray, GenericListArray, GenericStringArray, IntervalMonthDayNanoArray, + LargeBinaryArray, LargeStringArray, OffsetSizeTrait, PrimitiveArray, StructArray, + }, + compute::cast, }; use arrow::{ @@ -194,9 +198,12 @@ pub fn to_duckdb_logical_type(data_type: &DataType) -> Result { - Ok(LogicalTypeHandle::from(to_duckdb_type_id(data_type)?)) - } + DataType::Boolean + | DataType::Utf8 + | DataType::LargeUtf8 + | DataType::Binary + | DataType::LargeBinary + | DataType::FixedSizeBinary(_) => Ok(LogicalTypeHandle::from(to_duckdb_type_id(data_type)?)), dtype if dtype.is_primitive() => 
Ok(LogicalTypeHandle::from(to_duckdb_type_id(data_type)?)), _ => Err(format!( "Unsupported data type: {data_type}, please file an issue https://github.com/wangfenjin/duckdb-rs" @@ -238,6 +245,18 @@ pub fn record_batch_to_duckdb_data_chunk( DataType::Binary => { binary_array_to_vector(as_generic_binary_array(col.as_ref()), &mut chunk.flat_vector(i)); } + DataType::FixedSizeBinary(_) => { + fixed_size_binary_array_to_vector(col.as_ref().as_fixed_size_binary(), &mut chunk.flat_vector(i)); + } + DataType::LargeBinary => { + large_binary_array_to_vector( + col.as_ref() + .as_any() + .downcast_ref::() + .ok_or_else(|| Box::::from("Unable to downcast to LargeBinaryArray"))?, + &mut chunk.flat_vector(i), + ); + } DataType::List(_) => { list_array_to_vector(as_list_array(col.as_ref()), &mut chunk.list_vector(i))?; } @@ -276,7 +295,7 @@ fn primitive_array_to_flat_vector_cast( array: &dyn Array, out_vector: &mut dyn Vector, ) { - let array = arrow::compute::kernels::cast::cast(array, &data_type).unwrap(); + let array = cast(array, &data_type).unwrap_or_else(|_| panic!("array is casted into {data_type}")); let out_vector: &mut FlatVector = out_vector.as_mut_any().downcast_mut().unwrap(); out_vector.copy::(array.as_primitive::().values()); set_nulls_in_flat_vector(&array, out_vector); @@ -354,7 +373,21 @@ fn primitive_array_to_vector(array: &dyn Array, out: &mut dyn Vector) -> Result< *width, ); } - + DataType::Interval(_) | DataType::Duration(_) => { + let array = IntervalMonthDayNanoArray::from( + cast(array, &DataType::Interval(IntervalUnit::MonthDayNano)) + .expect("array is casted into IntervalMonthDayNanoArray") + .as_primitive::() + .values() + .iter() + .map(|a| IntervalMonthDayNanoType::make_value(a.months, a.days, a.nanoseconds / 1000)) + .collect::>(), + ); + primitive_array_to_flat_vector::( + as_primitive_array(&array), + out.as_mut_any().downcast_mut().unwrap(), + ); + } // DuckDB Only supports timetamp_tz in microsecond precision DataType::Timestamp(_, Some(tz)) => primitive_array_to_flat_vector_cast::( DataType::Timestamp(TimeUnit::Microsecond, Some(tz.clone())), @@ -463,6 +496,28 @@ fn binary_array_to_vector(array: &BinaryArray, out: &mut FlatVector) { set_nulls_in_flat_vector(array, out); } +fn fixed_size_binary_array_to_vector(array: &FixedSizeBinaryArray, out: &mut FlatVector) { + assert!(array.len() <= out.capacity()); + + for i in 0..array.len() { + let s = array.value(i); + out.insert(i, s); + } + // Put this back once the other PR # + // set_nulls_in_flat_vector(array, out); +} + +fn large_binary_array_to_vector(array: &LargeBinaryArray, out: &mut FlatVector) { + assert!(array.len() <= out.capacity()); + + for i in 0..array.len() { + let s = array.value(i); + out.insert(i, s); + } + // Put this back once the other PR # + // set_nulls_in_flat_vector(array, out); +} + fn list_array_to_vector>( array: &GenericListArray, out: &mut ListVector, @@ -648,12 +703,16 @@ mod test { use arrow::{ array::{ Array, ArrayRef, AsArray, BinaryArray, Date32Array, Date64Array, Decimal128Array, Decimal256Array, - FixedSizeListArray, GenericByteArray, GenericListArray, Int32Array, LargeStringArray, ListArray, + DurationSecondArray, FixedSizeListArray, GenericByteArray, GenericListArray, Int32Array, + IntervalDayTimeArray, IntervalMonthDayNanoArray, IntervalYearMonthArray, LargeStringArray, ListArray, OffsetSizeTrait, PrimitiveArray, StringArray, StructArray, Time32SecondArray, Time64MicrosecondArray, TimestampMicrosecondArray, TimestampMillisecondArray, TimestampNanosecondArray, TimestampSecondArray, 
}, buffer::{OffsetBuffer, ScalarBuffer}, - datatypes::{i256, ArrowPrimitiveType, ByteArrayType, DataType, Field, Fields, Schema}, + datatypes::{ + i256, ArrowPrimitiveType, ByteArrayType, DataType, DurationSecondType, Field, Fields, IntervalDayTimeType, + IntervalMonthDayNanoType, IntervalYearMonthType, Schema, + }, record_batch::RecordBatch, }; use std::{error::Error, sync::Arc}; @@ -1088,6 +1147,55 @@ mod test { Ok(()) } + #[test] + fn test_interval_roundtrip() -> Result<(), Box> { + let array: PrimitiveArray = IntervalMonthDayNanoArray::from(vec![ + IntervalMonthDayNanoType::make_value(1, 1, 1000), + IntervalMonthDayNanoType::make_value(2, 2, 2000), + IntervalMonthDayNanoType::make_value(3, 3, 3000), + ]); + check_rust_primitive_array_roundtrip(array.clone(), array)?; + + let array: PrimitiveArray = IntervalYearMonthArray::from(vec![ + IntervalYearMonthType::make_value(1, 10), + IntervalYearMonthType::make_value(2, 20), + IntervalYearMonthType::make_value(3, 30), + ]); + let expected_array: PrimitiveArray = IntervalMonthDayNanoArray::from(vec![ + IntervalMonthDayNanoType::make_value(22, 0, 0), + IntervalMonthDayNanoType::make_value(44, 0, 0), + IntervalMonthDayNanoType::make_value(66, 0, 0), + ]); + check_rust_primitive_array_roundtrip(array, expected_array)?; + + let array: PrimitiveArray = IntervalDayTimeArray::from(vec![ + IntervalDayTimeType::make_value(1, 1), + IntervalDayTimeType::make_value(2, 2), + IntervalDayTimeType::make_value(3, 3), + ]); + let expected_array: PrimitiveArray = IntervalMonthDayNanoArray::from(vec![ + IntervalMonthDayNanoType::make_value(0, 1, 1_000_000), + IntervalMonthDayNanoType::make_value(0, 2, 2_000_000), + IntervalMonthDayNanoType::make_value(0, 3, 3_000_000), + ]); + check_rust_primitive_array_roundtrip(array, expected_array)?; + + Ok(()) + } + + #[test] + fn test_duration_roundtrip() -> Result<(), Box> { + let array: PrimitiveArray = DurationSecondArray::from(vec![1, 2, 3]); + let expected_array: PrimitiveArray = IntervalMonthDayNanoArray::from(vec![ + IntervalMonthDayNanoType::make_value(0, 0, 1_000_000_000), + IntervalMonthDayNanoType::make_value(0, 0, 2_000_000_000), + IntervalMonthDayNanoType::make_value(0, 0, 3_000_000_000), + ]); + check_rust_primitive_array_roundtrip(array, expected_array)?; + + Ok(()) + } + #[test] fn test_timestamp_tz_insert() -> Result<(), Box> { // TODO: This test should be reworked once we support TIMESTAMP_TZ properly
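Taken together, the interval and duration tests pin down the normalization rule introduced above: every Arrow `Interval(_)` or `Duration(_)` flavour is first widened to `IntervalMonthDayNano`, and its nanosecond component is then divided by 1000 because DuckDB's INTERVAL carries microseconds. Below is a small sketch of that widening step on its own, using only arrow-rs calls the new code path itself relies on (illustrative, not part of the patch):

```rust
use arrow::array::{AsArray, DurationSecondArray, IntervalYearMonthArray};
use arrow::compute::cast;
use arrow::datatypes::{DataType, IntervalMonthDayNanoType, IntervalUnit, IntervalYearMonthType};

fn main() {
    // Year-month intervals are widened to MonthDayNano: 1 year + 10 months = 22 months.
    let ym = IntervalYearMonthArray::from(vec![IntervalYearMonthType::make_value(1, 10)]);
    let widened = cast(&ym, &DataType::Interval(IntervalUnit::MonthDayNano)).unwrap();
    let widened = widened.as_primitive::<IntervalMonthDayNanoType>();
    assert_eq!((widened.value(0).months, widened.value(0).days), (22, 0));

    // Durations are widened the same way: 3 seconds become 3_000_000_000 nanoseconds,
    // which the conversion then truncates to microseconds before handing to DuckDB.
    let dur = DurationSecondArray::from(vec![3]);
    let widened = cast(&dur, &DataType::Interval(IntervalUnit::MonthDayNano)).unwrap();
    let widened = widened.as_primitive::<IntervalMonthDayNanoType>();
    assert_eq!(widened.value(0).nanoseconds, 3_000_000_000);
}
```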