Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Example of eyre use for extract command #170

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ edition = "2021"
bytes = "1.5.0"
chrono = "0.4.34"
clap = { version = "4.5.1", features = ["derive", "string"] }
color-eyre = { version = "0.6.2", default-features = false }
crossterm = "0.27.0"
derive_more = "0.99"
dialoguer = "0.11.0"
Expand Down
1 change: 1 addition & 0 deletions moss/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ vfs = { path = "../crates/vfs" }

bytes.workspace = true
chrono.workspace = true
color-eyre.workspace = true
clap.workspace = true
derive_more.workspace = true
itertools.workspace = true
Expand Down
223 changes: 119 additions & 104 deletions moss/src/cli/extract.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,17 @@ use std::{
fs::{create_dir_all, hard_link, remove_dir_all, remove_file, File},
io::{copy, Read, Seek, SeekFrom},
os::unix::fs::symlink,
path::PathBuf,
path::{Path, PathBuf},
};

use clap::{arg, ArgMatches, Command};
use moss::package::{self, MissingMetaFieldError};
use color_eyre::{
eyre::{eyre, Context},
Result, Section,
};
use moss::package;
use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
use stone::{payload::layout, read::PayloadKind};
use thiserror::{self, Error};
use tui::{ProgressBar, ProgressStyle};

pub fn command() -> Command {
Expand All @@ -24,127 +27,139 @@ pub fn command() -> Command {
}

/// Handle the `extract` command
pub fn handle(args: &ArgMatches) -> Result<(), Error> {
pub fn handle(args: &ArgMatches) -> Result<()> {
let paths = args
.get_many::<PathBuf>("PATH")
.into_iter()
.flatten()
.cloned()
.collect::<Vec<_>>();

// Begin unpack
create_dir_all(".stoneStore")?;
let content_store = Path::new(".stoneStore");

let content_store = PathBuf::from(".stoneStore");
// Begin unpack
create_dir_all(content_store)
.context("create temporary extract directory")
.suggestion("is the current directory writable?")?;

for path in paths {
println!("Extract: {:?}", path);
extract(&path, content_store).with_context(|| eyre!("extract {path:?}"))?;
}

let rdr = File::open(path).map_err(Error::IO)?;
let mut reader = stone::read(rdr).map_err(Error::Format)?;
// Clean up.
remove_dir_all(content_store).context("remove temporary extract directory")?;

let payloads = reader.payloads()?.collect::<Result<Vec<_>, _>>()?;
let content = payloads.iter().find_map(PayloadKind::content);
let layouts = payloads.iter().find_map(PayloadKind::layout);
let meta = payloads.iter().find_map(PayloadKind::meta).ok_or(Error::MissingMeta)?;
Ok(())
}

let pkg = package::Meta::from_stone_payload(&meta.body).map_err(Error::MalformedMeta)?;
let extraction_root = PathBuf::from(pkg.id().to_string());
fn extract(path: &Path, content_store: &Path) -> Result<()> {
println!("Extract: {:?}", path);

let rdr = File::open(path)
.context("open file")
.suggestion("does the file exist?")?;
let mut reader = stone::read(rdr)
.context("read stone file")
.suggestion("is this a valid stone file?")?;

let payloads = reader
.payloads()
.context("seeking payloads")?
.collect::<Result<Vec<_>, _>>()
.context("decode payload")?;
let content = payloads.iter().find_map(PayloadKind::content);
let layouts = payloads.iter().find_map(PayloadKind::layout);
let meta = payloads
.iter()
.find_map(PayloadKind::meta)
.ok_or_else(|| eyre!("missing metadata payload"))?;

let pkg = package::Meta::from_stone_payload(&meta.body).context("metadata payload is malformed")?;
let extraction_root = PathBuf::from(pkg.id().to_string());

// Cleanup old extraction root
if extraction_root.exists() {
remove_dir_all(&extraction_root).context("remove temporary stone extract directory")?;
}

// Cleanup old extraction root
if extraction_root.exists() {
remove_dir_all(&extraction_root)?;
}
if let Some(content) = content {
let content_file = File::options()
.read(true)
.write(true)
.create(true)
.open(".stoneContent")
.context("open temporary content extract file")?;

let progress = ProgressBar::new(content.header.plain_size).with_style(
ProgressStyle::with_template("|{bar:20.cyan/bue}| {percent}%")
.unwrap()
.progress_chars("■≡=- "),
);
reader
.unpack_content(content, &mut progress.wrap_write(&content_file))
.context("unpacking stone content payload")?;

// Extract all indices from the `.stoneContent` into hash-indexed unique files
payloads
.par_iter()
.filter_map(PayloadKind::index)
.flat_map(|p| &p.body)
.map(|idx| {
// Split file reader over index range
let mut file = &content_file;
file.seek(SeekFrom::Start(idx.start))
.with_context(|| eyre!("seek to byte {}", idx.start))?;
let mut split_file = (&mut file).take(idx.end - idx.start);

let mut output = File::create(format!(".stoneStore/{:02x}", idx.digest))
.with_context(|| eyre!("create output file .stoneStore/{:02x}", idx.digest))?;

copy(&mut split_file, &mut output).with_context(|| eyre!("copy bytes {} to {}", idx.start, idx.end))?;

Ok(())
})
.collect::<Result<Vec<_>>>()
.context("unpack file from content payload")?;

remove_file(".stoneContent").context("remove temporary content extract file")?;
}

if let Some(content) = content {
let content_file = File::options()
.read(true)
.write(true)
.create(true)
.open(".stoneContent")?;

let progress = ProgressBar::new(content.header.plain_size).with_style(
ProgressStyle::with_template("|{bar:20.cyan/bue}| {percent}%")
.unwrap()
.progress_chars("■≡=- "),
);
reader.unpack_content(content, &mut progress.wrap_write(&content_file))?;

// Extract all indices from the `.stoneContent` into hash-indexed unique files
payloads
.par_iter()
.filter_map(PayloadKind::index)
.flat_map(|p| &p.body)
.map(|idx| {
// Split file reader over index range
let mut file = &content_file;
file.seek(SeekFrom::Start(idx.start))?;
let mut split_file = (&mut file).take(idx.end - idx.start);

let mut output = File::create(format!(".stoneStore/{:02x}", idx.digest))?;

copy(&mut split_file, &mut output)?;

Ok(())
})
.collect::<Result<Vec<_>, Error>>()?;

remove_file(".stoneContent")?;
}
if let Some(layouts) = layouts {
for layout in &layouts.body {
match &layout.entry {
layout::Entry::Regular(id, target) => {
let store_path = content_store.join(format!("{:02x}", id));
let target_disk = extraction_root.join("usr").join(target);

// drop it into a valid dir
// TODO: Fix the permissions & mask
let directory_target = target_disk.parent().unwrap();
create_dir_all(directory_target).context("create extract directory")?;

// link from CA store
hard_link(&store_path, &target_disk)
.with_context(|| eyre!("hardlink from {store_path:?} to {target_disk:?}"))?;
}
layout::Entry::Symlink(source, target) => {
let target_disk = extraction_root.join("usr").join(target);
let directory_target = target_disk.parent().unwrap();

// ensure dumping ground exists
create_dir_all(directory_target).context("create extract directory")?;

if let Some(layouts) = layouts {
for layout in &layouts.body {
match &layout.entry {
layout::Entry::Regular(id, target) => {
let store_path = content_store.join(format!("{:02x}", id));
let target_disk = extraction_root.join("usr").join(target);

// drop it into a valid dir
// TODO: Fix the permissions & mask
let directory_target = target_disk.parent().unwrap();
create_dir_all(directory_target)?;

// link from CA store
hard_link(store_path, target_disk)?;
}
layout::Entry::Symlink(source, target) => {
let target_disk = extraction_root.join("usr").join(target);
let directory_target = target_disk.parent().unwrap();

// ensure dumping ground exists
create_dir_all(directory_target)?;

// join the link path to the directory target for relative joinery
symlink(source, target_disk)?;
}
layout::Entry::Directory(target) => {
let target_disk = extraction_root.join("usr").join(target);
// TODO: Fix perms!
create_dir_all(target_disk)?;
}
_ => unreachable!(),
// join the link path to the directory target for relative joinery
symlink(source, &target_disk)
.with_context(|| eyre!("hardlink from {source:?} to {target_disk:?}"))?;
}
layout::Entry::Directory(target) => {
let target_disk = extraction_root.join("usr").join(target);
// TODO: Fix perms!
create_dir_all(target_disk).context("create extract directory")?;
}
_ => unreachable!(),
}
}
}

// Clean up.
remove_dir_all(content_store)?;

Ok(())
}

/// Errors produced by the `extract` command.
///
/// NOTE(review): this is the pre-eyre error enum (the deleted side of this
/// diff); the eyre-based rewrite of `handle`/`extract` no longer uses it.
#[derive(Debug, Error)]
pub enum Error {
    /// The stone archive contained no metadata payload.
    #[error("Missing metadata")]
    MissingMeta,

    /// The metadata payload was present but a required field was missing.
    #[error("malformed meta")]
    MalformedMeta(#[from] MissingMetaFieldError),

    /// Underlying filesystem/I/O failure.
    #[error("io")]
    IO(#[from] std::io::Error),

    /// Failure decoding the stone container format.
    #[error("stone format")]
    Format(#[from] stone::read::Error),
}
Loading
Loading