Skip to content

Commit

Permalink
cleanup, test
Browse files Browse the repository at this point in the history
  • Loading branch information
a10y committed Jun 5, 2024
1 parent 75b8781 commit b74b219
Show file tree
Hide file tree
Showing 4 changed files with 70 additions and 15 deletions.
4 changes: 0 additions & 4 deletions vortex-array/src/array/struct/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,6 @@ impl StructArray {
}

if fields.iter().any(|a| a.with_dyn(|a| a.len()) != length) {
println!(
"FIELD LENGTHS: {:?}",
fields.iter().map(|field| field.len()).collect::<Vec<_>>()
);
vortex_bail!("Expected all struct fields to have length {}", length);
}

Expand Down
11 changes: 2 additions & 9 deletions vortex-array/src/compute/as_contiguous.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@ use vortex_error::{vortex_bail, vortex_err, VortexResult};

use crate::{Array, ArrayDType};

/// Trait for typed array variants which support the process of unfurling to somewhere else.
/// Trait that exposes an operation for repacking (and possibly decompressing) an [Array] into
/// a new Array that occupies a contiguous memory range.
pub trait AsContiguousFn {
fn as_contiguous(&self, arrays: &[Array]) -> VortexResult<Array>;
}

/// Macro that
#[macro_export]
macro_rules! impl_default_as_contiguous_fn {
($typ:ty) => {
Expand Down Expand Up @@ -45,13 +45,6 @@ pub fn as_contiguous(arrays: &[Array]) -> VortexResult<Array> {
vortex_bail!(ComputeError: "No arrays to concatenate");
}
if !arrays.iter().map(|chunk| chunk.encoding().id()).all_equal() {
println!(
"ENCODINGS: {:?}",
arrays
.iter()
.map(|chunk| chunk.encoding().id())
.collect_vec()
);
vortex_bail!(ComputeError: "Chunks have differing encodings");
}
if !arrays.iter().map(|chunk| chunk.dtype()).all_equal() {
Expand Down
68 changes: 68 additions & 0 deletions vortex-datetime-parts/src/compute.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,3 +184,71 @@ impl AsContiguousFn for DateTimePartsArray {
as_contiguous(chunks.as_slice())
}
}

#[cfg(test)]
mod test {
    use vortex::array::datetime::{LocalDateTimeArray, TimeUnit};
    use vortex::array::primitive::PrimitiveArray;
    use vortex::compute::scalar_at::scalar_at;
    use vortex::validity::Validity;
    use vortex::IntoArray;
    use vortex_dtype::{DType, ExtDType, ExtID, Nullability};

    use crate::compute::decode_to_localdatetime;
    use crate::DateTimePartsArray;

    /// Nanoseconds in one second, for building expected timestamp values.
    const NANOS_PER_SECOND: i64 = 1_000_000_000;
    /// Seconds in one day.
    const SECONDS_PER_DAY: i64 = 86_400;

    /// Round-trips a DateTimeParts array (days / seconds / subseconds) through
    /// `decode_to_localdatetime` and checks each decoded timestamp equals the
    /// sum of its parts expressed in nanoseconds.
    #[test]
    fn test_decode_to_localdatetime() {
        let nanos = TimeUnit::Ns;

        // Element i has days == seconds == subseconds == (i + 2).
        let days = PrimitiveArray::from_vec(vec![2i64, 3], Validity::NonNullable).into_array();
        let seconds = PrimitiveArray::from_vec(vec![2i64, 3], Validity::NonNullable).into_array();
        let subsecond = PrimitiveArray::from_vec(vec![2i64, 3], Validity::NonNullable).into_array();

        let date_times = DateTimePartsArray::try_new(
            DType::Extension(
                ExtDType::new(
                    ExtID::from(LocalDateTimeArray::ID),
                    Some(nanos.metadata().clone()),
                ),
                Nullability::NonNullable,
            ),
            days,
            seconds,
            subsecond,
        )
        .unwrap();

        let local = decode_to_localdatetime(&date_times.into_array()).unwrap();

        // Extract decoded element `idx` as a raw i64 nanosecond timestamp.
        // unwrap() is acceptable here: any failure is a test failure.
        let timestamp_at = |idx: usize| -> i64 {
            scalar_at(&local.timestamps(), idx)
                .unwrap()
                .value()
                .as_pvalue()
                .unwrap()
                .unwrap()
                .try_into()
                .unwrap()
        };

        // expected(v) = v days + v seconds + v subsecond-units, all in ns.
        let expected =
            |v: i64| v * SECONDS_PER_DAY * NANOS_PER_SECOND + v * NANOS_PER_SECOND + v;

        assert_eq!(timestamp_at(0), expected(2));
        assert_eq!(timestamp_at(1), expected(3));
    }
}
2 changes: 0 additions & 2 deletions vortex-dict/src/compress.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,8 +149,6 @@ pub fn dict_encode_typed_primitive<T: NativePType>(
Validity::NonNullable
};

println!("values_validity: {:?}", values_validity);

(
PrimitiveArray::from(codes),
PrimitiveArray::from_vec(values, values_validity),
Expand Down

0 comments on commit b74b219

Please sign in to comment.