From 13afd8cc7588abb4adcf584f7983c1b93ae70f9d Mon Sep 17 00:00:00 2001
From: iliana etaoin
Date: Thu, 31 Oct 2024 14:51:50 -0700
Subject: [PATCH] replace ring with sha2 (#67)

---
 Cargo.toml    |  2 +-
 src/blob.rs   | 10 +++++-----
 src/digest.rs | 12 +++++++-----
 3 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 565e5d3..a6f130e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -25,11 +25,11 @@ futures = "0.3"
 futures-util = "0.3"
 hex = "0.4.3"
 reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "stream"] }
-ring = "0.16.20"
 semver = { version = "1.0.17", features = ["std", "serde"] }
 serde = { version = "1.0", features = [ "derive" ] }
 serde_derive = "1.0"
 serde_json = "1.0"
+sha2 = "0.10.8"
 slog = "2.7"
 tar = "0.4"
 thiserror = "1.0"
diff --git a/src/blob.rs b/src/blob.rs
index 042443f..86b86e7 100644
--- a/src/blob.rs
+++ b/src/blob.rs
@@ -9,8 +9,8 @@ use camino::{Utf8Path, Utf8PathBuf};
 use chrono::{DateTime, FixedOffset, Utc};
 use futures_util::StreamExt;
 use reqwest::header::{CONTENT_LENGTH, LAST_MODIFIED};
-use ring::digest::{Context as DigestContext, Digest, SHA256};
 use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
 use std::str::FromStr;
 use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
 
@@ -173,13 +173,13 @@ pub async fn download(
     Ok(())
 }
 
-async fn get_sha256_digest(path: &Utf8Path) -> Result<Digest> {
+async fn get_sha256_digest(path: &Utf8Path) -> Result<[u8; 32]> {
     let mut reader = BufReader::new(
         tokio::fs::File::open(path)
             .await
             .with_context(|| format!("could not open {path:?}"))?,
     );
-    let mut context = DigestContext::new(&SHA256);
+    let mut hasher = Sha256::new();
     let mut buffer = [0; 1024];
 
     loop {
@@ -190,10 +190,10 @@ async fn get_sha256_digest(path: &Utf8Path) -> Result<Digest> {
         if count == 0 {
             break;
         } else {
-            context.update(&buffer[..count]);
+            hasher.update(&buffer[..count]);
         }
     }
-    Ok(context.finish())
+    Ok(hasher.finalize().into())
 }
 
 #[test]
diff --git a/src/digest.rs b/src/digest.rs
index 27b3705..25cc255 100644
--- a/src/digest.rs
+++ b/src/digest.rs
@@ -9,8 +9,8 @@ use async_trait::async_trait;
 use blake3::{Hash as BlakeDigest, Hasher as BlakeHasher};
 use camino::Utf8Path;
 use hex::ToHex;
-use ring::digest::{Context as DigestContext, Digest as ShaDigest, SHA256};
 use serde::{Deserialize, Serialize};
+use sha2::{Digest as _, Sha256};
 use tokio::io::{AsyncReadExt, BufReader};
 
 // The buffer size used to hash smaller files.
@@ -22,6 +22,8 @@ const HASH_BUFFER_SIZE: usize = 16 * (1 << 10);
 // NOTE: This is currently only blake3-specific.
 const LARGE_HASH_SIZE: usize = 1 << 20;
 
+struct ShaDigest([u8; 32]);
+
 /// Implemented by algorithms which can take digests of files.
 #[async_trait]
 pub trait FileDigester {
@@ -36,7 +38,7 @@ impl FileDigester for ShaDigest {
                 .await
                 .with_context(|| format!("could not open {path:?}"))?,
         );
-        let mut context = DigestContext::new(&SHA256);
+        let mut hasher = Sha256::new();
         let mut buffer = [0; HASH_BUFFER_SIZE];
         loop {
             let count = reader
@@ -46,10 +48,10 @@ impl FileDigester for ShaDigest {
             if count == 0 {
                 break;
             } else {
-                context.update(&buffer[..count]);
+                hasher.update(&buffer[..count]);
             }
         }
-        let digest = context.finish().into();
+        let digest = ShaDigest(hasher.finalize().into()).into();
 
         Ok(digest)
     }
@@ -106,7 +108,7 @@ pub enum Digest {
 
 impl From<ShaDigest> for Digest {
     fn from(digest: ShaDigest) -> Self {
-        Self::Sha2(digest.as_ref().encode_hex::<String>())
+        Self::Sha2(digest.0.as_ref().encode_hex::<String>())
     }
 }
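
For reference, below is a minimal standalone sketch of the streaming sha2 pattern this patch adopts (Sha256::new(), update() in a read loop, finalize().into() to get a [u8; 32]). The helper name sha256_file, the synchronous std::io read loop, and the example input path are illustrative assumptions, not code from this repository; the patch itself uses tokio's async reads.

use sha2::{Digest, Sha256};
use std::io::Read;

// Hypothetical helper: stream a file through Sha256 in fixed-size chunks,
// mirroring the hasher.update(..) / hasher.finalize() calls in the patch.
fn sha256_file(path: &str) -> std::io::Result<[u8; 32]> {
    let mut file = std::fs::File::open(path)?;
    let mut hasher = Sha256::new();
    let mut buffer = [0u8; 1024];
    loop {
        let count = file.read(&mut buffer)?;
        if count == 0 {
            break;
        }
        hasher.update(&buffer[..count]);
    }
    // finalize() yields a GenericArray<u8, U32>; .into() converts it to [u8; 32].
    Ok(hasher.finalize().into())
}

fn main() -> std::io::Result<()> {
    // "Cargo.toml" is just an example input file.
    let digest = sha256_file("Cargo.toml")?;
    println!("{}", hex::encode(digest));
    Ok(())
}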