diff --git a/ciborium/Cargo.toml b/ciborium/Cargo.toml index acb54d4..e49a7dc 100644 --- a/ciborium/Cargo.toml +++ b/ciborium/Cargo.toml @@ -34,7 +34,6 @@ hex = "0.4" [features] default = ["std"] std = ["ciborium-io/std", "serde/std"] -canonical = ["std"] [package.metadata.docs.rs] all-features = true diff --git a/ciborium/README.md b/ciborium/README.md index 1549c28..d5c0277 100644 --- a/ciborium/README.md +++ b/ciborium/README.md @@ -11,20 +11,13 @@ Ciborium contains CBOR serialization and deserialization implementations for ser ## Quick Start -You're probably looking for [`from_reader()`](crate::de::from_reader) -and [`into_writer()`](crate::ser::into_writer), which are -the main functions. Note that byte slices are also readers and writers and can be -passed to these functions just as streams can. +You're probably looking for [`from_reader()`](crate::de::from_reader), +[`to_vec()`](crate::ser::to_vec), and [`into_writer()`](crate::ser::into_writer), +which are the main functions. Note that byte slices are also readers and writers +and can be passed to these functions just as streams can. For dynamic CBOR value creation/inspection, see [`Value`](crate::value::Value). -## Features -- `std`: enabled by default. -- `canonical`: allows serializing with a `CanonicalizationScheme` for deterministic - outputs. Incurs a small performance penalty (~20% slower) when serializing - without a canonicalization scheme, and a large penalty (~100% slower) when - serializing with a canonicalization scheme. - ## Design Decisions ### Always Serialize Numeric Values to the Smallest Size @@ -96,4 +89,23 @@ be avoided because it can be fragile as it exposes invariants of your Rust code to remote actors. We might consider adding this in the future. If you are interested in this, please contact us. +### Canonical Encodings + +The ciborium crate has support for various canonical encodings during +serialization. 
+ +- [`NoCanonicalization`](crate::canonical::NoCanonicalization): the default, + numbers are still encoded in their smallest form, but map keys are not + sorted for maximum serialization speed. +- [`Rfc7049`](crate::canonical::Rfc7049): the canonicalization scheme from + RFC 7049 that sorts map keys in a length-first order. Eg. + `["a", "b", "aa"]`. +- [`Rfc8949`](crate::canonical::Rfc8949): the canonicalization scheme from + RFC 8949 that sorts map keys in a bytewise lexicographic order. Eg. + `["a", "aa", "b"]`. + +To use canonicalization, you must enable the `std` feature. See the examples +in [`to_vec_canonical`](crate::ser::to_vec_canonical) and +[`into_writer_canonical`](crate::ser::into_writer_canonical) for more. + License: Apache-2.0 diff --git a/ciborium/src/canonical.rs b/ciborium/src/canonical.rs new file mode 100644 index 0000000..6b6104f --- /dev/null +++ b/ciborium/src/canonical.rs @@ -0,0 +1,73 @@ +//! Canonicalization support for CBOR serialization. +//! +//! Supports various canonicalization schemes for deterministic CBOR serialization. The default is +//! [NoCanonicalization] for the fastest serialization. Canonical serialization is around 2x slower. + +/// Which canonicalization scheme to use for CBOR serialization. +/// +/// Can only be initialized with the `std` feature enabled. +#[doc(hidden)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum CanonicalizationScheme { + /// Sort map keys in output according to [RFC 7049]'s deterministic encoding spec. + /// + /// Also aligns with [RFC 8949 4.2.3]'s backwards compatibility sort order. + /// + /// Uses length-first map key ordering. Eg. `["a", "b", "aa"]`. + #[cfg(feature = "std")] + Rfc7049, + + /// Sort map keys in output according to [RFC 8949]'s deterministic encoding spec. + /// + /// Uses bytewise lexicographic map key ordering. Eg. `["a", "aa", "b"]`. + #[cfg(feature = "std")] + Rfc8949, +} + +/// Don't sort map key output. 
+pub struct NoCanonicalization; + +/// Sort map keys in output according to [RFC 7049]'s deterministic encoding spec. +/// +/// Also aligns with [RFC 8949 4.2.3]'s backwards compatibility sort order. +/// +/// Uses length-first map key ordering. Eg. `["a", "b", "aa"]`. +#[cfg(feature = "std")] +pub struct Rfc7049; + +/// Sort map keys in output according to [RFC 8949]'s deterministic encoding spec. +/// +/// Uses bytewise lexicographic map key ordering. Eg. `["a", "aa", "b"]`. +#[cfg(feature = "std")] +pub struct Rfc8949; + +/// Trait for canonicalization schemes. +/// +/// See implementors: +/// - [NoCanonicalization] for no canonicalization (fastest). +/// - [Rfc7049] for length-first map key sorting. +/// - [Rfc8949] for bytewise lexicographic map key sorting. +pub trait Canonicalization { + /// True if keys should be cached and sorted. + const IS_CANONICAL: bool; + + /// Determines which sorting implementation to use. + const SCHEME: Option<CanonicalizationScheme>; +} + +impl Canonicalization for NoCanonicalization { + const IS_CANONICAL: bool = false; + const SCHEME: Option<CanonicalizationScheme> = None; +} + +#[cfg(feature = "std")] +impl Canonicalization for Rfc7049 { + const IS_CANONICAL: bool = true; + const SCHEME: Option<CanonicalizationScheme> = Some(CanonicalizationScheme::Rfc7049); +} + +#[cfg(feature = "std")] +impl Canonicalization for Rfc8949 { + const IS_CANONICAL: bool = true; + const SCHEME: Option<CanonicalizationScheme> = Some(CanonicalizationScheme::Rfc8949); +} diff --git a/ciborium/src/lib.rs b/ciborium/src/lib.rs index 3852212..432675a 100644 --- a/ciborium/src/lib.rs +++ b/ciborium/src/lib.rs @@ -6,20 +6,13 @@ //! //! # Quick Start //! -//! You're probably looking for [`from_reader()`](crate::de::from_reader) -//! and [`into_writer()`](crate::ser::into_writer), which are -//! the main functions. Note that byte slices are also readers and writers and can be -//! passed to these functions just as streams can. +//! You're probably looking for [`from_reader()`](crate::de::from_reader), +//! 
[`to_vec()`](crate::ser::to_vec), and [`into_writer()`](crate::ser::into_writer), +//! which are the main functions. Note that byte slices are also readers and writers +//! and can be passed to these functions just as streams can. //! //! For dynamic CBOR value creation/inspection, see [`Value`](crate::value::Value). //! -//! # Features -//! - `std`: enabled by default. -//! - `canonical`: allows serializing with a `CanonicalizationScheme` for deterministic -//! outputs. Incurs a small performance penalty (~20% slower) when serializing -//! without a canonicalization scheme, and a large penalty (~100% slower) when -//! serializing with a canonicalization scheme. -//! //! # Design Decisions //! //! ## Always Serialize Numeric Values to the Smallest Size @@ -90,6 +83,25 @@ //! be avoided because it can be fragile as it exposes invariants of your Rust //! code to remote actors. We might consider adding this in the future. If you //! are interested in this, please contact us. +//! +//! ## Canonical Encodings +//! +//! The ciborium crate has support for various canonical encodings during +//! serialization. +//! +//! - [`NoCanonicalization`](crate::canonical::NoCanonicalization): the default, +//! numbers are still encoded in their smallest form, but map keys are not +//! sorted for maximum serialization speed. +//! - [`Rfc7049`](crate::canonical::Rfc7049): the canonicalization scheme from +//! RFC 7049 that sorts map keys in a length-first order. Eg. +//! `["a", "b", "aa"]`. +//! - [`Rfc8949`](crate::canonical::Rfc8949): the canonicalization scheme from +//! RFC 8949 that sorts map keys in a bytewise lexicographic order. Eg. +//! `["a", "aa", "b"]`. +//! +//! To use canonicalization, you must enable the `std` feature. See the examples +//! in [`to_vec_canonical`](crate::ser::to_vec_canonical) and +//! [`into_writer_canonical`](crate::ser::into_writer_canonical) for more. 
#![cfg_attr(not(feature = "std"), no_std)] #![deny(missing_docs)] @@ -99,6 +111,7 @@ extern crate alloc; +pub mod canonical; pub mod de; pub mod ser; pub mod tag; @@ -113,11 +126,7 @@ pub use crate::ser::{into_writer, Serializer}; #[doc(inline)] #[cfg(feature = "std")] -pub use crate::ser::to_vec; - -#[doc(inline)] -#[cfg(feature = "canonical")] -pub use crate::ser::{into_writer_canonical, to_vec_canonical}; +pub use crate::ser::{into_writer_canonical, to_vec, to_vec_canonical}; #[cfg(feature = "std")] #[doc(inline)] diff --git a/ciborium/src/ser/mod.rs b/ciborium/src/ser/mod.rs index ff6705a..7fbe31b 100644 --- a/ciborium/src/ser/mod.rs +++ b/ciborium/src/ser/mod.rs @@ -9,27 +9,10 @@ pub use error::Error; use alloc::string::ToString; use ciborium_io::Write; use ciborium_ll::*; +use core::marker::PhantomData; use serde::ser; -/// Which canonicalization scheme to use for CBOR serialization. -/// -/// Can only be initialized with the `std` feature enabled. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum CanonicalizationScheme { - /// Sort map keys in output according to [RFC 7049]'s deterministic encoding spec. - /// - /// Also aligns with [RFC 8949 4.2.3]'s backwards compatibility sort order. - /// - /// Uses length-first map key ordering. Eg. `["a", "b", "aa"]`. - #[cfg(feature = "canonical")] - Rfc7049, - - /// Sort map keys in output according to [RFC 8949]'s deterministic encoding spec. - /// - /// Uses bytewise lexicographic map key ordering. Eg. `["a", "aa", "b"]`. - #[cfg(feature = "canonical")] - Rfc8049, -} +use crate::canonical::{Canonicalization, NoCanonicalization}; /// A serializer for CBOR. 
/// @@ -49,77 +32,75 @@ pub enum CanonicalizationScheme { /// /// let mut buffer = Vec::with_capacity(1024); /// -/// #[cfg(feature = "canonical")] { -/// let mut serializer = Serializer::new(&mut buffer, Some(ciborium::ser::CanonicalizationScheme::Rfc8049)); +/// #[cfg(feature = "std")] { +/// use ciborium::canonical::Rfc8949; +/// let mut serializer = Serializer::<_, Rfc8949>::new(&mut buffer); /// example.serialize(&mut serializer).unwrap(); /// assert_eq!(hex::encode(&buffer), "a36161182a61621910686261611901a4"); /// } /// -/// #[cfg(not(feature = "canonical"))] { -/// let mut serializer = Serializer::new(&mut buffer, None); // uses no canonicalization +/// #[cfg(not(feature = "std"))] { +/// use ciborium::canonical::NoCanonicalization; +/// let mut serializer = Serializer::<_, NoCanonicalization>::new(&mut buffer); // uses no canonicalization /// example.serialize(&mut serializer).unwrap(); /// assert_eq!(hex::encode(&buffer), "a36161182a6261611901a46162191068"); /// } /// ``` -pub struct Serializer { +pub struct Serializer { encoder: Encoder, - /// Whether to canonically sort map keys in output according a particular - /// [CanonicalizationScheme] map key sort ordering. - canonicalization: Option, + /// PhantomData to allow for type parameterization based on canonicalization scheme. + canonicalization: PhantomData, } -impl Serializer { +impl Serializer { /// Create a new CBOR serializer. /// /// `canonicalization` can be used to change the [CanonicalizationScheme] used for sorting /// output map and struct keys to ensure deterministic outputs. 
#[inline] - pub fn new( - encoder: impl Into>, - canonicalization: Option, - ) -> Self { + pub fn new(encoder: impl Into>) -> Self { Self { encoder: encoder.into(), - canonicalization, + canonicalization: PhantomData, } } } -impl From for Serializer { +impl From for Serializer { #[inline] fn from(writer: W) -> Self { Self { encoder: writer.into(), - canonicalization: None, + canonicalization: PhantomData, } } } -impl From> for Serializer { +impl From> for Serializer { #[inline] fn from(writer: Encoder) -> Self { Self { encoder: writer, - canonicalization: None, + canonicalization: PhantomData, } } } -impl<'a, W: Write> ser::Serializer for &'a mut Serializer +impl<'a, W: Write, C: Canonicalization> ser::Serializer for &'a mut Serializer where W::Error: core::fmt::Debug, { type Ok = (); type Error = Error; - type SerializeSeq = CollectionSerializer<'a, W>; - type SerializeTuple = CollectionSerializer<'a, W>; - type SerializeTupleStruct = CollectionSerializer<'a, W>; - type SerializeTupleVariant = CollectionSerializer<'a, W>; - type SerializeMap = CollectionSerializer<'a, W>; - type SerializeStruct = CollectionSerializer<'a, W>; - type SerializeStructVariant = CollectionSerializer<'a, W>; + type SerializeSeq = CollectionSerializer<'a, W, C>; + type SerializeTuple = CollectionSerializer<'a, W, C>; + type SerializeTupleStruct = CollectionSerializer<'a, W, C>; + type SerializeTupleVariant = CollectionSerializer<'a, W, C>; + type SerializeMap = CollectionSerializer<'a, W, C>; + type SerializeStruct = CollectionSerializer<'a, W, C>; + type SerializeStructVariant = CollectionSerializer<'a, W, C>; #[inline] fn serialize_bool(self, v: bool) -> Result<(), Self::Error> { @@ -378,7 +359,7 @@ macro_rules! end { #[allow(unused_mut)] #[inline] fn end(mut self) -> Result<(), Self::Error> { - match self.serializer.canonicalization { + match C::SCHEME { None => { if self.length.is_none() { // Not canonical and no length => indefinite length break. @@ -386,10 +367,10 @@ macro_rules! 
end { } } - #[cfg(not(feature = "canonical"))] + #[cfg(not(feature = "std"))] Some(_) => {} - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] Some(_scheme) => { // Canonical serialization holds back writing headers, as it doesn't allow // indefinite length structs. This allows us to always compute the length. @@ -411,7 +392,7 @@ macro_rules! end_map { #[allow(unused_mut)] #[inline] fn end(mut self) -> Result<(), Self::Error> { - match self.serializer.canonicalization { + match C::SCHEME { None => { if self.length.is_none() { // Not canonical and no length => indefinite length break. @@ -419,18 +400,20 @@ macro_rules! end_map { } } - #[cfg(not(feature = "canonical"))] + #[cfg(not(feature = "std"))] Some(_) => unreachable!(), - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] Some(scheme) => { + use crate::canonical::CanonicalizationScheme; + // Canonical serialization holds back writing headers, as it doesn't allow // indefinite length structs. This allows us to always compute the length. self.push_header(Some(self.cache_keys.len()))?; // Sort our cached output and write it to the encoder. match scheme { - CanonicalizationScheme::Rfc8049 => { + CanonicalizationScheme::Rfc8949 => { // keys get sorted in lexicographical byte order let keys = self.cache_keys; let values = self.cache_values; @@ -494,8 +477,8 @@ enum CollectionType { /// /// Not to be used externally, only exposed as part of the [Serializer] type. #[doc(hidden)] -pub struct CollectionSerializer<'a, W> { - serializer: &'a mut Serializer, +pub struct CollectionSerializer<'a, W, C: Canonicalization> { + serializer: &'a mut Serializer, collection_type: CollectionType, /// None if the collection is indefinite length. Canonical serialization will ignore this. @@ -505,19 +488,19 @@ pub struct CollectionSerializer<'a, W> { /// been written yet. Only relevant for tag collections. 
tag_written: bool, - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] cache_keys: Vec>, - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] cache_values: Vec>, } -impl<'a, W: Write> CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { #[inline(always)] fn new( - serializer: &'a mut Serializer, + serializer: &'a mut Serializer, collection_type: CollectionType, length: Option, ) -> Result> { @@ -526,13 +509,13 @@ where collection_type, length, tag_written: false, - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] cache_keys: Vec::new(), - #[cfg(feature = "canonical")] + #[cfg(feature = "std")] cache_values: Vec::new(), }; - if collection_serializer.serializer.canonicalization.is_none() { + if !C::IS_CANONICAL { collection_serializer.push_header(length)?; } @@ -555,16 +538,17 @@ where &mut self, key: &U, ) -> Result<(), Error> { - match self.serializer.canonicalization { - None => key.serialize(&mut *self.serializer), + match C::IS_CANONICAL { + false => key.serialize(&mut *self.serializer), - #[cfg(not(feature = "canonical"))] - Some(_) => unreachable!(), + #[cfg(not(feature = "std"))] + true => unreachable!(), - #[cfg(feature = "canonical")] - Some(_) => { - let key_bytes = to_vec_small(key, self.serializer.canonicalization) - .map_err(|e| Error::Value(e.to_string()))?; + #[cfg(feature = "std")] + true => { + // use to_vec_small, we expect keys to be smaller than values + let key_bytes = + to_vec_small::<_, C>(key).map_err(|e| Error::Value(e.to_string()))?; self.cache_keys.push(key_bytes); Ok(()) } @@ -576,17 +560,17 @@ where &mut self, value: &U, ) -> Result<(), Error> { - match self.serializer.canonicalization { - None => value.serialize(&mut *self.serializer), + match C::IS_CANONICAL { + false => value.serialize(&mut *self.serializer), - #[cfg(not(feature = "canonical"))] - Some(_) => unreachable!(), + #[cfg(not(feature = "std"))] + true => unreachable!(), - 
#[cfg(feature = "canonical")] - Some(_) => { + #[cfg(feature = "std")] + true => { // use to_vec_canonical, we expect values to be bigger than keys - let value_bytes = to_vec_canonical(value, self.serializer.canonicalization) - .map_err(|e| Error::Value(e.to_string()))?; + let value_bytes = + to_vec_canonical::<_, C>(value).map_err(|e| Error::Value(e.to_string()))?; self.cache_values.push(value_bytes); Ok(()) } @@ -594,7 +578,7 @@ where } } -impl<'a, W: Write> ser::SerializeSeq for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeSeq for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -612,7 +596,7 @@ where end!(); } -impl<'a, W: Write> ser::SerializeTuple for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeTuple for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -630,7 +614,7 @@ where end!(); } -impl<'a, W: Write> ser::SerializeTupleStruct for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeTupleStruct for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -648,7 +632,8 @@ where end!(); } -impl<'a, W: Write> ser::SerializeTupleVariant for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeTupleVariant + for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -676,7 +661,7 @@ where end!(); } -impl<'a, W: Write> ser::SerializeMap for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeMap for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -699,7 +684,7 @@ where end_map!(); } -impl<'a, W: Write> ser::SerializeStruct for CollectionSerializer<'a, W> +impl<'a, W: Write, C: Canonicalization> ser::SerializeStruct for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -720,7 +705,8 @@ where end_map!(); } -impl<'a, W: Write> ser::SerializeStructVariant for CollectionSerializer<'a, W> 
+impl<'a, W: Write, C: Canonicalization> ser::SerializeStructVariant + for CollectionSerializer<'a, W, C> where W::Error: core::fmt::Debug, { @@ -747,14 +733,13 @@ where /// /// We use a very small buffer (2 words) to ensure it's cheap to initialize the Vec. Often the keys /// and values may only be a couple bytes long such as with integer values. -#[cfg(feature = "canonical")] +#[cfg(feature = "std")] #[inline] -fn to_vec_small( +fn to_vec_small( value: &T, - canonicalization_scheme: Option, ) -> Result, Error> { let mut buffer = Vec::with_capacity(256); - let mut serializer = Serializer::new(&mut buffer, canonicalization_scheme); + let mut serializer: Serializer<_, C> = Serializer::new(&mut buffer); value.serialize(&mut serializer)?; Ok(buffer) } @@ -781,7 +766,7 @@ fn to_vec_small( #[inline] pub fn to_vec(value: &T) -> Result, Error> { let mut buffer = Vec::with_capacity(1024); - let mut serializer = Serializer::new(&mut buffer, None); + let mut serializer: Serializer<_, NoCanonicalization> = Serializer::new(&mut buffer); value.serialize(&mut serializer)?; Ok(buffer) } @@ -791,7 +776,7 @@ pub fn to_vec(value: &T) -> Result, Error(value: &T) -> Result, Error(&example).unwrap(); /// /// assert_eq!(hex::encode(&bytes), "a36161182a61621910686261611901a4"); /// ``` -#[cfg(feature = "canonical")] +#[cfg(feature = "std")] #[inline] -pub fn to_vec_canonical( +pub fn to_vec_canonical( value: &T, - scheme: Option, ) -> Result, Error> { let mut buffer = Vec::with_capacity(1024); - let mut serializer = Serializer::new(&mut buffer, scheme); + let mut serializer: Serializer<_, C> = Serializer::new(&mut buffer); value.serialize(&mut serializer)?; Ok(buffer) } @@ -845,7 +829,7 @@ pub fn into_writer( where W::Error: core::fmt::Debug, { - let mut encoder = Serializer::from(writer); + let mut encoder = Serializer::<_, NoCanonicalization>::from(writer); value.serialize(&mut encoder) } @@ -856,7 +840,7 @@ where /// # Example /// ```rust /// use ciborium::into_writer_canonical; -/// 
use ciborium::ser::CanonicalizationScheme; +/// use ciborium::canonical::Rfc8949; /// /// #[derive(serde::Serialize)] /// struct Example { @@ -868,20 +852,19 @@ where /// let example = Example { a: 42, aa: 420, b: 4200 }; /// /// let mut bytes = Vec::new(); -/// into_writer_canonical(&example, &mut bytes, Some(CanonicalizationScheme::Rfc8049)).unwrap(); +/// into_writer_canonical::<_, _, Rfc8949>(&example, &mut bytes).unwrap(); /// /// assert_eq!(hex::encode(&bytes), "a36161182a61621910686261611901a4"); /// ``` -#[cfg(feature = "canonical")] +#[cfg(feature = "std")] #[inline] -pub fn into_writer_canonical( +pub fn into_writer_canonical( value: &T, writer: W, - scheme: Option, ) -> Result<(), Error> where W::Error: core::fmt::Debug, { - let mut encoder = Serializer::new(writer, scheme); + let mut encoder: Serializer = Serializer::new(writer); value.serialize(&mut encoder) } diff --git a/ciborium/tests/canonical.rs b/ciborium/tests/canonical.rs index 30101cb..dcf9a54 100644 --- a/ciborium/tests/canonical.rs +++ b/ciborium/tests/canonical.rs @@ -65,9 +65,9 @@ fn map_old() { /// Use length-first ordering for keys. 
#[test] -#[cfg(feature = "canonical")] +#[cfg(feature = "std")] fn map_rfc7049() { - use ciborium::ser::CanonicalizationScheme; + use ciborium::canonical::Rfc7049; let mut map = BTreeMap::new(); map.insert(cval!(false), val!(2)); @@ -79,8 +79,7 @@ fn map_rfc7049() { map.insert(cval!("z"), val!(4)); map.insert(cval!("aa"), val!(6)); - let bytes1 = - ciborium::ser::to_vec_canonical(&map, Some(CanonicalizationScheme::Rfc7049)).unwrap(); + let bytes1 = ciborium::ser::to_vec_canonical::<_, Rfc7049>(&map).unwrap(); assert_eq!( hex::encode(bytes1), @@ -94,9 +93,9 @@ fn map_rfc7049() { /// /// [RFC 8949]: https://www.rfc-editor.org/rfc/rfc8949.html#name-core-deterministic-encoding #[test] -#[cfg(feature = "canonical")] +#[cfg(feature = "std")] fn map_rfc8949() { - use ciborium::ser::CanonicalizationScheme; + use ciborium::canonical::Rfc8949; let mut map = BTreeMap::new(); map.insert(cval!(false), val!(2)); @@ -108,8 +107,7 @@ fn map_rfc8949() { map.insert(cval!("z"), val!(4)); map.insert(cval!("aa"), val!(6)); - let bytes1 = - ciborium::ser::to_vec_canonical(&map, Some(CanonicalizationScheme::Rfc8049)).unwrap(); + let bytes1 = ciborium::ser::to_vec_canonical::<_, Rfc8949>(&map).unwrap(); assert_eq!( hex::encode(bytes1),