diff --git a/Cargo.lock b/Cargo.lock
index 7de8be0..90f0492 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1151,9 +1151,7 @@ dependencies = [
 
 [[package]]
 name = "me3-coalesced-parser"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e90a2e0071fb00bee14818c88c51a5f4e116d1aecaf60218af06bf1fe7be109"
+version = "0.2.0"
 dependencies = [
  "bitvec",
  "serde",
diff --git a/Cargo.toml b/Cargo.toml
index 3dc28d3..e5c6c3f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,7 +18,7 @@ serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 
 # Coalesced parser
-me3-coalesced-parser = "0.1.0"
+me3-coalesced-parser = { version = "0.2.0", path = "../coalesced-parser" }
 
 # Logging facade
 log = { version = "0.4", features = ["serde"] }
diff --git a/src/services/config.rs b/src/services/config.rs
new file mode 100644
index 0000000..8afdb70
--- /dev/null
+++ b/src/services/config.rs
@@ -0,0 +1,59 @@
+use embeddy::Embedded;
+use log::error;
+use me3_coalesced_parser::Coalesced;
+use std::path::Path;
+
+/// Embedded copy of the default known talk files
+#[derive(Embedded)]
+#[folder = "src/resources/data/tlk"]
+struct DefaultTlkFiles;
+
+/// Attempts to load a talk file from a local file
+pub async fn local_talk_file(lang: &str) -> std::io::Result<Vec<u8>> {
+    let file_name = format!("{}.tlk", lang);
+    let local_path = format!("data/{}", file_name);
+    let local_path = Path::new(&local_path);
+    tokio::fs::read(local_path).await
+}
+
+/// Loads a fallback talk file from the embedded talk files list
+/// using the specified language. Will fallback to default if the
+/// language is not found.
+pub fn fallback_talk_file(lang: &str) -> &'static [u8] {
+    let file_name = format!("{}.tlk", lang);
+
+    // Fallback to embedded tlk files
+    DefaultTlkFiles::get(&file_name)
+        // Fallback to default tlk
+        .unwrap_or_else(|| {
+            DefaultTlkFiles::get("default.tlk").expect("Server missing default embedded tlk file")
+        })
+}
+
+/// Embedded default coalesced
+static DEFAULT_COALESCED: &[u8] = include_bytes!("../resources/data/coalesced.json");
+
+/// Attempts to load the local coalesced file from the data folder
+pub async fn local_coalesced_file() -> std::io::Result<Coalesced> {
+    let local_path = Path::new("data/coalesced.json");
+    let bytes = tokio::fs::read(local_path).await?;
+
+    match serde_json::from_slice(&bytes) {
+        Ok(value) => Ok(value),
+        Err(err) => {
+            error!("Failed to parse server coalesced: {}", err);
+
+            Err(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "Failed to parse server coalesced",
+            ))
+        }
+    }
+}
+
+/// Loads the fallback coalesced from the embedded bytes
+pub fn fallback_coalesced_file() -> Coalesced {
+    serde_json::from_slice(DEFAULT_COALESCED)
+        // Game cannot run without a proper coalesced
+        .expect("Server fallback coalesced is malformed")
+}
diff --git a/src/services/mod.rs b/src/services/mod.rs
index 32d4f8a..0ec8166 100644
--- a/src/services/mod.rs
+++ b/src/services/mod.rs
@@ -1,3 +1,4 @@
+pub mod config;
 pub mod game;
 pub mod retriever;
 pub mod sessions;
diff --git a/src/session/routes/util.rs b/src/session/routes/util.rs
index ccf947b..1c62bf4 100644
--- a/src/session/routes/util.rs
+++ b/src/session/routes/util.rs
@@ -1,6 +1,9 @@
 use crate::{
     config::{RuntimeConfig, VERSION},
     database::entities::PlayerData,
+    services::config::{
+        fallback_coalesced_file, fallback_talk_file, local_coalesced_file, local_talk_file,
+    },
     session::{
         models::{
             errors::{BlazeError, GlobalError, ServerResult},
@@ -10,22 +13,18 @@ use crate::{
         router::{Blaze, Extension, SessionAuth},
         SessionLink,
     },
+    utils::encoding::{create_base64_map, generate_coalesced, ChunkMap},
 };
-use base64ct::{Base64, Encoding};
-use embeddy::Embedded;
-use flate2::{write::ZlibEncoder, Compression};
 use log::{debug, error};
 use me3_coalesced_parser::{serialize_coalesced, Coalesced};
 use sea_orm::DatabaseConnection;
 use std::{
+    borrow::Cow,
     cmp::Ordering,
-    io::Write,
-    path::Path,
     sync::Arc,
     time::{Duration, SystemTime, UNIX_EPOCH},
 };
 use tdf::TdfMap;
-use tokio::fs::read;
 
 /// Handles retrieving the details about the telemetry server
 ///
@@ -196,41 +195,29 @@ fn load_entitlements() -> TdfMap<String, String> {
         .collect()
 }
 
-async fn load_coalesced() -> std::io::Result<Coalesced> {
-    let local_path = Path::new("data/coalesced.json");
-
-    if local_path.is_file() {
-        if let Ok(value) = read(local_path).await.and_then(|bytes| {
-            serde_json::from_slice(&bytes).map_err(|_| {
-                std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    "Failed to parse server coalesced",
-                )
-            })
-        }) {
-            return Ok(value);
-        }
+async fn load_coalesced() -> Coalesced {
+    match local_coalesced_file().await {
+        Ok(result) => result,
+        Err(err) => {
+            // Log errors if the file existed
+            if !matches!(err.kind(), std::io::ErrorKind::NotFound) {
+                error!(
+                    "Unable to load local coalesced file falling back to default: {}",
+                    err
+                );
+            }
 
-        error!(
-            "Unable to compress local coalesced from data/coalesced.json falling back to default."
-        );
+            // Fallback to default
+            fallback_coalesced_file()
+        }
     }
-
-    // Fallback to embedded default coalesced.bin
-    let bytes: &[u8] = include_bytes!("../../resources/data/coalesced.json");
-    serde_json::from_slice(bytes).map_err(|_| {
-        std::io::Error::new(
-            std::io::ErrorKind::Other,
-            "Failed to parse server coalesced",
-        )
-    })
 }
 
 /// Loads the local coalesced if one is present falling back
 /// to the default one on error or if its missing
 async fn create_coalesced_map() -> std::io::Result<ChunkMap> {
     // Load the coalesced from JSON
-    let coalesced = load_coalesced().await?;
+    let coalesced = load_coalesced().await;
 
     // Serialize the coalesced to bytes
     let serialized = serialize_coalesced(&coalesced);
@@ -239,93 +226,28 @@ async fn create_coalesced_map() -> std::io::Result<ChunkMap> {
     generate_coalesced(&serialized)
 }
 
-/// Generates a compressed coalesced from the provided bytes
-///
-/// `bytes` The coalesced bytes
-fn generate_coalesced(bytes: &[u8]) -> std::io::Result<ChunkMap> {
-    let compressed: Vec<u8> = {
-        let mut encoder = ZlibEncoder::new(Vec::new(), Compression::new(6));
-        encoder.write_all(bytes)?;
-        encoder.finish()?
-    };
-
-    let mut encoded = Vec::with_capacity(16 + compressed.len());
-    encoded.extend_from_slice(b"NIBC");
-    encoded.extend_from_slice(&1u32.to_le_bytes());
-    encoded.extend_from_slice(&(compressed.len() as u32).to_le_bytes());
-    encoded.extend_from_slice(&(bytes.len() as u32).to_le_bytes());
-    encoded.extend_from_slice(&compressed);
-    Ok(create_base64_map(&encoded))
-}
-
-/// Type of a base64 chunks map
-type ChunkMap = TdfMap<String, String>;
-
-/// Converts to provided slice of bytes into an ordered TdfMap where
-/// the keys are the chunk index and the values are the bytes encoded
-/// as base64 chunks. The map contains a CHUNK_SIZE key which states
-/// how large each chunk is and a DATA_SIZE key indicating the total
-/// length of the chunked value
-///
-/// `bytes` The bytes to convert
-fn create_base64_map(bytes: &[u8]) -> ChunkMap {
-    // The size of the chunks
-    const CHUNK_LENGTH: usize = 255;
-
-    let encoded: String = Base64::encode_string(bytes);
-    let length = encoded.len();
-    let mut output: ChunkMap = TdfMap::with_capacity((length / CHUNK_LENGTH) + 2);
-
-    let mut index = 0;
-    let mut offset = 0;
-
-    while offset < length {
-        let o1 = offset;
-        offset += CHUNK_LENGTH;
-
-        let slice = if offset < length {
-            &encoded[o1..offset]
-        } else {
-            &encoded[o1..]
-        };
-
-        output.insert(format!("CHUNK_{}", index), slice.to_string());
-        index += 1;
-    }
-
-    output.insert("CHUNK_SIZE".to_string(), CHUNK_LENGTH.to_string());
-    output.insert("DATA_SIZE".to_string(), length.to_string());
-    output
-}
-
-/// Default talk file values
-#[derive(Embedded)]
-#[folder = "src/resources/data/tlk"]
-struct DefaultTlkFiles;
-
 /// Retrieves a talk file for the specified language code falling back
 /// to the `ME3_TLK_DEFAULT` default talk file if it could not be found
 ///
 /// `lang` The talk file language
 async fn talk_file(lang: &str) -> ChunkMap {
-    let file_name = format!("{}.tlk", lang);
-
-    let local_path = format!("data/{}", file_name);
-    let local_path = Path::new(&local_path);
-    if local_path.is_file() {
-        if let Ok(map) = read(local_path)
-            .await
-            .map(|bytes| create_base64_map(&bytes))
-        {
-            return map;
-        }
-        error!("Unable to load local talk file falling back to default.");
-    }
+    let bytes: Cow<'static, [u8]> = match local_talk_file(lang).await {
+        Ok(result) => Cow::Owned(result),
+        Err(err) => {
+            // Log errors if the file existed
+            if !matches!(err.kind(), std::io::ErrorKind::NotFound) {
+                error!(
+                    "Unable to load local talk file falling back to default: {}",
+                    err
+                );
+            }
 
-    let bytes = DefaultTlkFiles::get(&file_name)
-        .unwrap_or(include_bytes!("../../resources/data/tlk/default.tlk"));
+            // Fallback to default
+            Cow::Borrowed(fallback_talk_file(lang))
+        }
+    };
 
-    create_base64_map(bytes)
+    create_base64_map(&bytes)
 }
 
 /// Loads the messages that should be displayed to the client and
diff --git a/src/utils/encoding.rs b/src/utils/encoding.rs
new file mode 100644
index 0000000..9fa2bf0
--- /dev/null
+++ b/src/utils/encoding.rs
@@ -0,0 +1,60 @@
+use std::io::Write;
+
+use base64ct::{Base64, Encoding};
+use flate2::{write::ZlibEncoder, Compression};
+use tdf::TdfMap;
+
+/// Type of a base64 chunks map
+pub type ChunkMap = TdfMap<String, String>;
+
+/// Converts to provided slice of bytes into an ordered TdfMap where
+/// the keys are the chunk index and the values are the bytes encoded
+/// as base64 chunks. The map contains a CHUNK_SIZE key which states
+/// how large each chunk is and a DATA_SIZE key indicating the total
+/// length of the chunked value
+pub fn create_base64_map(bytes: &[u8]) -> ChunkMap {
+    // The size of the chunks
+    const CHUNK_LENGTH: usize = 255;
+
+    let encoded: String = Base64::encode_string(bytes);
+    let length = encoded.len();
+    let mut output: ChunkMap = TdfMap::with_capacity((length / CHUNK_LENGTH) + 2);
+
+    let mut index = 0;
+    let mut offset = 0;
+
+    while offset < length {
+        let o1 = offset;
+        offset += CHUNK_LENGTH;
+
+        let slice = if offset < length {
+            &encoded[o1..offset]
+        } else {
+            &encoded[o1..]
+        };
+
+        output.insert(format!("CHUNK_{}", index), slice.to_string());
+        index += 1;
+    }
+
+    output.insert("CHUNK_SIZE".to_string(), CHUNK_LENGTH.to_string());
+    output.insert("DATA_SIZE".to_string(), length.to_string());
+    output
+}
+
+/// Generates a compressed coalesced from the provided bytes
+pub fn generate_coalesced(bytes: &[u8]) -> std::io::Result<ChunkMap> {
+    let compressed: Vec<u8> = {
+        let mut encoder = ZlibEncoder::new(Vec::new(), Compression::new(6));
+        encoder.write_all(bytes)?;
+        encoder.finish()?
+    };
+
+    let mut encoded = Vec::with_capacity(16 + compressed.len());
+    encoded.extend_from_slice(b"NIBC");
+    encoded.extend_from_slice(&1u32.to_le_bytes());
+    encoded.extend_from_slice(&(compressed.len() as u32).to_le_bytes());
+    encoded.extend_from_slice(&(bytes.len() as u32).to_le_bytes());
+    encoded.extend_from_slice(&compressed);
+    Ok(create_base64_map(&encoded))
+}
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index 79849c6..191c985 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -1,4 +1,5 @@
 pub mod components;
+pub mod encoding;
 pub mod hashing;
 pub mod lock;
 pub mod logging;
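
Note on the chunk map format: `create_base64_map` in `src/utils/encoding.rs` base64-encodes the input, splits the resulting string into 255-character values keyed `CHUNK_0`, `CHUNK_1`, ..., then adds `CHUNK_SIZE` (the chunk length) and `DATA_SIZE` (the length of the full base64 string). The sketch below is a hypothetical inverse of that layout, shown only to document it: `reassemble_chunk_map` is not part of this change, and a plain `HashMap` stands in for `ChunkMap` (`tdf::TdfMap<String, String>`) so the example stays self-contained.

```rust
use std::collections::HashMap;

use base64ct::{Base64, Encoding};

/// Hypothetical inverse of `create_base64_map`, for illustration only.
/// Rebuilds the original bytes from a CHUNK_N / CHUNK_SIZE / DATA_SIZE map.
fn reassemble_chunk_map(map: &HashMap<String, String>) -> Option<Vec<u8>> {
    // DATA_SIZE holds the length of the full base64 string before chunking
    let data_size: usize = map.get("DATA_SIZE")?.parse().ok()?;

    // Concatenate CHUNK_0, CHUNK_1, ... until a chunk index is missing
    let mut encoded = String::with_capacity(data_size);
    let mut index = 0;
    while let Some(chunk) = map.get(&format!("CHUNK_{}", index)) {
        encoded.push_str(chunk);
        index += 1;
    }

    // The concatenated chunks should add back up to the advertised length
    if encoded.len() != data_size {
        return None;
    }

    // Undo the base64 encoding applied by `create_base64_map`
    Base64::decode_vec(&encoded).ok()
}
```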
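
Note on the `NIBC` container written by `generate_coalesced`: the serialized coalesced is zlib-compressed at level 6 and prefixed with a 16-byte header consisting of the ASCII magic `NIBC`, a little-endian `u32` version (always 1 here), the compressed length, and the uncompressed length. The decoder below is a rough sketch of the inverse under those assumptions; `decode_coalesced` is illustrative only and not part of the patch.

```rust
use std::io::Read;

use flate2::read::ZlibDecoder;

/// Illustrative decoder for the `NIBC` container produced by `generate_coalesced`.
fn decode_coalesced(encoded: &[u8]) -> std::io::Result<Vec<u8>> {
    let invalid = |msg: &str| std::io::Error::new(std::io::ErrorKind::InvalidData, msg.to_string());

    // 4-byte magic followed by three little-endian u32 fields
    if encoded.len() < 16 || &encoded[..4] != b"NIBC" {
        return Err(invalid("Missing NIBC header"));
    }
    let read_u32 =
        |offset: usize| u32::from_le_bytes(encoded[offset..offset + 4].try_into().unwrap()) as usize;
    let _version = read_u32(4);
    let compressed_len = read_u32(8);
    let decompressed_len = read_u32(12);

    if encoded.len() < 16 + compressed_len {
        return Err(invalid("Truncated NIBC payload"));
    }

    // Inflate the zlib payload back into the serialized coalesced bytes
    let mut decoder = ZlibDecoder::new(&encoded[16..16 + compressed_len]);
    let mut output = Vec::with_capacity(decompressed_len);
    decoder.read_to_end(&mut output)?;

    if output.len() != decompressed_len {
        return Err(invalid("Decompressed size does not match NIBC header"));
    }

    Ok(output)
}
```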