feat: cleanup and separate tlk and coalesced loading logic
jacobtread committed Jun 30, 2024
1 parent e152e0a commit 9c72e13
Showing 7 changed files with 158 additions and 117 deletions.
4 changes: 1 addition & 3 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -18,7 +18,7 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1"

# Coalesced parser
me3-coalesced-parser = "0.1.0"
me3-coalesced-parser = { version = "0.2.0", path = "../coalesced-parser" }

# Logging facade
log = { version = "0.4", features = ["serde"] }
59 changes: 59 additions & 0 deletions src/services/config.rs
@@ -0,0 +1,59 @@
use embeddy::Embedded;
use log::error;
use me3_coalesced_parser::Coalesced;
use std::path::Path;

/// Embedded copy of the default known talk files
#[derive(Embedded)]
#[folder = "src/resources/data/tlk"]
struct DefaultTlkFiles;

/// Attempts to load a talk file from a local file
pub async fn local_talk_file(lang: &str) -> std::io::Result<Vec<u8>> {
let file_name = format!("{}.tlk", lang);
let local_path = format!("data/{}", file_name);
let local_path = Path::new(&local_path);
tokio::fs::read(local_path).await
}

/// Loads a fallback talk file from the embedded talk files list
/// using the specified language. Falls back to the default talk
/// file if the language is not found.
pub fn fallback_talk_file(lang: &str) -> &'static [u8] {
let file_name = format!("{}.tlk", lang);

// Fallback to embedded tlk files
DefaultTlkFiles::get(&file_name)
// Fallback to default tlk
.unwrap_or_else(|| {
DefaultTlkFiles::get("default.tlk").expect("Server missing default embedded tlk file")
})
}

/// Embedded default coalesced
static DEFAULT_COALESCED: &[u8] = include_bytes!("../resources/data/coalesced.json");

/// Attempts to load the local coalesced file from the data folder
pub async fn local_coalesced_file() -> std::io::Result<Coalesced> {
let local_path = Path::new("data/coalesced.json");
let bytes = tokio::fs::read(local_path).await?;

match serde_json::from_slice(&bytes) {
Ok(value) => Ok(value),
Err(err) => {
error!("Failed to parse server coalesced: {}", err);

Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Failed to parse server coalesced",
))
}
}
}

/// Loads the fallback coalesced from the embedded bytes
pub fn fallback_coalesced_file() -> Coalesced {
serde_json::from_slice(DEFAULT_COALESCED)
// Game cannot run without a proper coalesced
.expect("Server fallback coalesced is malformed")
}
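
Taken together, these helpers give call sites a simple local-first, embedded-fallback pattern. A minimal sketch of how they might be combined, assuming the functions above are in scope and a Tokio runtime is available (`lang` is just an example value):

```rust
use std::borrow::Cow;

/// Sketch only: prefer the talk file from `data/`, falling back to the
/// embedded copy when it is missing or unreadable.
async fn talk_file_bytes(lang: &str) -> Cow<'static, [u8]> {
    match local_talk_file(lang).await {
        Ok(bytes) => Cow::Owned(bytes),
        // Any read error (including NotFound) drops back to the embedded data
        Err(_) => Cow::Borrowed(fallback_talk_file(lang)),
    }
}
```

The actual handler in `src/session/routes/util.rs` follows the same shape, but additionally logs errors other than `NotFound` before falling back.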
1 change: 1 addition & 0 deletions src/services/mod.rs
@@ -1,3 +1,4 @@
pub mod config;
pub mod game;
pub mod retriever;
pub mod sessions;
148 changes: 35 additions & 113 deletions src/session/routes/util.rs
@@ -1,6 +1,9 @@
use crate::{
config::{RuntimeConfig, VERSION},
database::entities::PlayerData,
services::config::{
fallback_coalesced_file, fallback_talk_file, local_coalesced_file, local_talk_file,
},
session::{
models::{
errors::{BlazeError, GlobalError, ServerResult},
@@ -10,22 +13,18 @@ use crate::{
router::{Blaze, Extension, SessionAuth},
SessionLink,
},
utils::encoding::{create_base64_map, generate_coalesced, ChunkMap},
};
use base64ct::{Base64, Encoding};
use embeddy::Embedded;
use flate2::{write::ZlibEncoder, Compression};
use log::{debug, error};
use me3_coalesced_parser::{serialize_coalesced, Coalesced};
use sea_orm::DatabaseConnection;
use std::{
borrow::Cow,
cmp::Ordering,
io::Write,
path::Path,
sync::Arc,
time::{Duration, SystemTime, UNIX_EPOCH},
};
use tdf::TdfMap;
use tokio::fs::read;

/// Handles retrieving the details about the telemetry server
///
@@ -196,41 +195,29 @@ fn load_entitlements() -> TdfMap<String, String> {
.collect()
}

async fn load_coalesced() -> std::io::Result<Coalesced> {
let local_path = Path::new("data/coalesced.json");

if local_path.is_file() {
if let Ok(value) = read(local_path).await.and_then(|bytes| {
serde_json::from_slice(&bytes).map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::Other,
"Failed to parse server coalesced",
)
})
}) {
return Ok(value);
}
async fn load_coalesced() -> Coalesced {
match local_coalesced_file().await {
Ok(result) => result,
Err(err) => {
// Log errors if the file existed
if !matches!(err.kind(), std::io::ErrorKind::NotFound) {
error!(
"Unable to load local coalesced file falling back to default: {}",
err
);
}

error!(
"Unable to compress local coalesced from data/coalesced.json falling back to default."
);
// Fallback to default
fallback_coalesced_file()
}
}

// Fallback to embedded default coalesced.bin
let bytes: &[u8] = include_bytes!("../../resources/data/coalesced.json");
serde_json::from_slice(bytes).map_err(|_| {
std::io::Error::new(
std::io::ErrorKind::Other,
"Failed to parse server coalesced",
)
})
}

/// Loads the local coalesced if one is present, falling back
/// to the default one on error or if it's missing
async fn create_coalesced_map() -> std::io::Result<ChunkMap> {
// Load the coalesced from JSON
let coalesced = load_coalesced().await?;
let coalesced = load_coalesced().await;

// Serialize the coalesced to bytes
let serialized = serialize_coalesced(&coalesced);
@@ -239,93 +226,28 @@ async fn create_coalesced_map() -> std::io::Result<ChunkMap> {
generate_coalesced(&serialized)
}

/// Generates a compressed coalesced from the provided bytes
///
/// `bytes` The coalesced bytes
fn generate_coalesced(bytes: &[u8]) -> std::io::Result<ChunkMap> {
let compressed: Vec<u8> = {
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::new(6));
encoder.write_all(bytes)?;
encoder.finish()?
};

let mut encoded = Vec::with_capacity(16 + compressed.len());
encoded.extend_from_slice(b"NIBC");
encoded.extend_from_slice(&1u32.to_le_bytes());
encoded.extend_from_slice(&(compressed.len() as u32).to_le_bytes());
encoded.extend_from_slice(&(bytes.len() as u32).to_le_bytes());
encoded.extend_from_slice(&compressed);
Ok(create_base64_map(&encoded))
}

/// Type of a base64 chunks map
type ChunkMap = TdfMap<String, String>;

/// Converts to provided slice of bytes into an ordered TdfMap where
/// the keys are the chunk index and the values are the bytes encoded
/// as base64 chunks. The map contains a CHUNK_SIZE key which states
/// how large each chunk is and a DATA_SIZE key indicating the total
/// length of the chunked value
///
/// `bytes` The bytes to convert
fn create_base64_map(bytes: &[u8]) -> ChunkMap {
// The size of the chunks
const CHUNK_LENGTH: usize = 255;

let encoded: String = Base64::encode_string(bytes);
let length = encoded.len();
let mut output: ChunkMap = TdfMap::with_capacity((length / CHUNK_LENGTH) + 2);

let mut index = 0;
let mut offset = 0;

while offset < length {
let o1 = offset;
offset += CHUNK_LENGTH;

let slice = if offset < length {
&encoded[o1..offset]
} else {
&encoded[o1..]
};

output.insert(format!("CHUNK_{}", index), slice.to_string());
index += 1;
}

output.insert("CHUNK_SIZE".to_string(), CHUNK_LENGTH.to_string());
output.insert("DATA_SIZE".to_string(), length.to_string());
output
}

/// Default talk file values
#[derive(Embedded)]
#[folder = "src/resources/data/tlk"]
struct DefaultTlkFiles;

/// Retrieves a talk file for the specified language code falling back
/// to the `ME3_TLK_DEFAULT` default talk file if it could not be found
///
/// `lang` The talk file language
async fn talk_file(lang: &str) -> ChunkMap {
let file_name = format!("{}.tlk", lang);

let local_path = format!("data/{}", file_name);
let local_path = Path::new(&local_path);
if local_path.is_file() {
if let Ok(map) = read(local_path)
.await
.map(|bytes| create_base64_map(&bytes))
{
return map;
}
error!("Unable to load local talk file falling back to default.");
}
let bytes: Cow<'static, [u8]> = match local_talk_file(lang).await {
Ok(result) => Cow::Owned(result),
Err(err) => {
// Log errors if the file existed
if !matches!(err.kind(), std::io::ErrorKind::NotFound) {
error!(
"Unable to load local talk file falling back to default: {}",
err
);
}

let bytes = DefaultTlkFiles::get(&file_name)
.unwrap_or(include_bytes!("../../resources/data/tlk/default.tlk"));
// Fallback to default
Cow::Borrowed(fallback_talk_file(lang))
}
};

create_base64_map(bytes)
create_base64_map(&bytes)
}

/// Loads the messages that should be displayed to the client and
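
With the helpers split out, the coalesced path in this handler reduces to a short pipeline: load (with fallback), serialize, then compress and chunk. A condensed sketch of that flow, assuming `load_coalesced`, `generate_coalesced`, and `ChunkMap` are in scope as in the diff above:

```rust
use me3_coalesced_parser::serialize_coalesced;

/// Sketch of the coalesced pipeline after the refactor: the load step can no
/// longer fail (it falls back to the embedded default), so only the
/// compression/encoding step returns an io::Result.
async fn coalesced_chunks() -> std::io::Result<ChunkMap> {
    // Local data/coalesced.json if present, otherwise the embedded default
    let coalesced = load_coalesced().await;
    // Binary coalesced format expected by the game client
    let serialized = serialize_coalesced(&coalesced);
    // Zlib-compress, prepend the NIBC header, and split into base64 chunks
    generate_coalesced(&serialized)
}
```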
60 changes: 60 additions & 0 deletions src/utils/encoding.rs
@@ -0,0 +1,60 @@
use std::io::Write;

use base64ct::{Base64, Encoding};
use flate2::{write::ZlibEncoder, Compression};
use tdf::TdfMap;

/// Type of a base64 chunks map
pub type ChunkMap = TdfMap<String, String>;

/// Converts the provided slice of bytes into an ordered TdfMap where
/// the keys are the chunk index and the values are the bytes encoded
/// as base64 chunks. The map contains a CHUNK_SIZE key which states
/// how large each chunk is and a DATA_SIZE key indicating the total
/// length of the chunked value
pub fn create_base64_map(bytes: &[u8]) -> ChunkMap {
// The size of the chunks
const CHUNK_LENGTH: usize = 255;

let encoded: String = Base64::encode_string(bytes);
let length = encoded.len();
let mut output: ChunkMap = TdfMap::with_capacity((length / CHUNK_LENGTH) + 2);

let mut index = 0;
let mut offset = 0;

while offset < length {
let o1 = offset;
offset += CHUNK_LENGTH;

let slice = if offset < length {
&encoded[o1..offset]
} else {
&encoded[o1..]
};

output.insert(format!("CHUNK_{}", index), slice.to_string());
index += 1;
}

output.insert("CHUNK_SIZE".to_string(), CHUNK_LENGTH.to_string());
output.insert("DATA_SIZE".to_string(), length.to_string());
output
}

/// Generates a compressed coalesced from the provided bytes
pub fn generate_coalesced(bytes: &[u8]) -> std::io::Result<ChunkMap> {
let compressed: Vec<u8> = {
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::new(6));
encoder.write_all(bytes)?;
encoder.finish()?
};

let mut encoded = Vec::with_capacity(16 + compressed.len());
encoded.extend_from_slice(b"NIBC");
encoded.extend_from_slice(&1u32.to_le_bytes());
encoded.extend_from_slice(&(compressed.len() as u32).to_le_bytes());
encoded.extend_from_slice(&(bytes.len() as u32).to_le_bytes());
encoded.extend_from_slice(&compressed);
Ok(create_base64_map(&encoded))
}
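
For a sense of the output shape, here is a worked example of `create_base64_map` on a tiny payload; the values in the comments are computed from the logic above, not taken from real game data:

```rust
fn chunk_map_example() {
    // 11 raw bytes -> a 16-character base64 string ("aGVsbG8gd29ybGQ="),
    // which fits inside a single 255-character chunk, so the resulting map
    // holds three entries:
    //   "CHUNK_0"    -> "aGVsbG8gd29ybGQ="
    //   "CHUNK_SIZE" -> "255"
    //   "DATA_SIZE"  -> "16"  (length of the base64 string, not the raw input)
    let _map = create_base64_map(b"hello world");
}
```

`generate_coalesced` feeds the same chunking step, but first wraps the zlib stream in a 16-byte header: the `NIBC` magic, a version of 1, the compressed length, and the uncompressed length, each as a little-endian u32.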
1 change: 1 addition & 0 deletions src/utils/mod.rs
@@ -1,4 +1,5 @@
pub mod components;
pub mod encoding;
pub mod hashing;
pub mod lock;
pub mod logging;