Commit

fixes #7
sam0x17 committed Sep 14, 2023
1 parent 79b45e4 commit ce074bf
Showing 3 changed files with 47 additions and 24 deletions.
2 changes: 1 addition & 1 deletion examples/integration.rs
@@ -59,7 +59,7 @@ pub struct TestWithWeirdOrderingAndKeywords;
pub struct OliverExample2;

/// This example is from Liam
#[doc = docify::embed!("examples/samples.rs", CurrentAndPreviousValue)]
#[doc = docify::embed!("examples/samples.rs", LiamIssue7)]
pub struct LiamExample;

/// This will compile all markdown files in the `markdown_source` directory to `markdown_bin`
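For context on the one-line change above: `docify::embed!` expands to a doc string containing the source of a named, `#[docify::export]`-marked item, so this hunk points `LiamExample`'s embedded doc at the `LiamIssue7` sample rather than `CurrentAndPreviousValue`. A minimal sketch of the pattern, with illustrative item names rather than ones from this commit:

```rust
// In a samples file: mark an item for export (illustrative name).
#[docify::export]
/// Doc comments on the exported item travel with it.
pub struct ExportedSample;

// Elsewhere: embed that item's source into another item's docs.
/// The source of `ExportedSample` is rendered below as a Rust code block:
#[doc = docify::embed!("examples/samples.rs", ExportedSample)]
pub struct HostItem;
```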
2 changes: 1 addition & 1 deletion examples/samples.rs
@@ -128,9 +128,9 @@ mod some_module {
}
}

#[docify::export]
/// Example struct holding the most recently set [`u32`] and the second
/// most recently set [`u32`] (if one existed).
#[docify::export]
struct LiamIssue7;

fn main() {}
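The hunk above swaps the position of `#[docify::export]` relative to the doc comments on `LiamIssue7`, which appears to be the attribute/doc-comment ordering that issue #7 exercises. Side by side, the two orderings in play look like this (a sketch; the `A`/`B` suffixes are added only so both structs can coexist, the original sample uses a single `LiamIssue7`):

```rust
// Ordering 1: export attribute above the doc comments.
#[docify::export]
/// Example struct holding the most recently set [`u32`] and the second
/// most recently set [`u32`] (if one existed).
struct LiamIssue7A;

// Ordering 2: doc comments first, export attribute directly above the item.
/// Example struct holding the most recently set [`u32`] and the second
/// most recently set [`u32`] (if one existed).
#[docify::export]
struct LiamIssue7B;

fn main() {}
```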
67 changes: 45 additions & 22 deletions macros/src/lib.rs
@@ -85,16 +85,24 @@ fn caller_crate_root() -> Option<PathBuf> {
if !entry.file_type().is_file() {
continue;
}
let Some(file_name) = entry.path().file_name() else { continue };
let Some(file_name) = entry.path().file_name() else {
continue;
};
if !file_name.eq_ignore_ascii_case("Cargo.toml") {
continue;
}
let Ok(cargo_toml) = std::fs::read_to_string(&entry.path()) else {
continue
continue;
};
let Ok(table) = Table::from_str(cargo_toml.as_str()) else {
continue;
};
let Some(package) = table.get("package") else {
continue;
};
let Some(Value::String(package_name)) = package.get("name") else {
continue;
};
let Ok(table) = Table::from_str(cargo_toml.as_str()) else { continue };
let Some(package) = table.get("package") else { continue };
let Some(Value::String(package_name)) = package.get("name") else { continue };
if package_name.eq_ignore_ascii_case(&crate_name) {
return Some(entry.path().parent().unwrap().to_path_buf());
}
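Most of the edits in this file simply expand single-line `let ... else { ... }` statements into the multi-line form with a trailing semicolon, presumably to match rustfmt's let-else formatting, which landed in rustfmt shortly before this commit. A self-contained sketch of the pattern on made-up data:

```rust
fn first_even(values: &[&str]) -> Option<i64> {
    for v in values {
        // Previously written as: `let Ok(n) = v.parse::<i64>() else { continue };`
        let Ok(n) = v.parse::<i64>() else {
            continue;
        };
        if n % 2 == 0 {
            return Some(n);
        }
    }
    None
}

fn main() {
    assert_eq!(first_even(&["x", "3", "4"]), Some(4));
}
```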
@@ -394,7 +402,9 @@ struct EmbedArgs
impl ToTokens for EmbedArgs {
fn to_tokens(&self, tokens: &mut TokenStream2) {
tokens.extend(self.file_path.to_token_stream());
let Some(item_ident) = &self.item_ident else { return };
let Some(item_ident) = &self.item_ident else {
return;
};
tokens.extend(quote!(,));
tokens.extend(item_ident.to_token_stream());
}
@@ -458,12 +468,18 @@ impl<'ast> Visit<'ast> for ItemVisitor {
let attrs = item_attributes(node);
for attr in attrs {
i += 1; // note, 1-based
let AttrStyle::Outer = attr.style else { continue };
let Some(last_seg) = attr.path().segments.last() else { continue };
let AttrStyle::Outer = attr.style else {
continue;
};
let Some(last_seg) = attr.path().segments.last() else {
continue;
};
if last_seg.ident != "export" {
continue;
}
let Some(second_to_last_seg) = attr.path().segments.iter().rev().nth(1) else { continue };
let Some(second_to_last_seg) = attr.path().segments.iter().rev().nth(1) else {
continue;
};
if second_to_last_seg.ident != last_seg.ident && second_to_last_seg.ident != "docify" {
continue;
}
@@ -574,7 +590,8 @@ impl CompressedString {
static DOCIFY_ATTRIBUTES: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\#\[(?:\w+::)*export(?:\s*\(\s*(\w+)\s*\))?\]").unwrap());
static DOC_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"///.*").unwrap());
static DOC_COMMENT_ATTR: Lazy<Regex> = Lazy::new(|| Regex::new(r#"#\[doc = ".*"]"#).unwrap());
static DOC_COMMENT_ATTR: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"#\[doc\s*=\s*".*"\s*]"#).unwrap());
static LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"//.*").unwrap());
static MULTI_LINE_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"/\*[\s\S]*?\*/").unwrap());
static HTML_COMMENT: Lazy<Regex> = Lazy::new(|| Regex::new(r"<!--[\s\S]*?-->").unwrap());
@@ -652,8 +669,8 @@ fn source_excerpt<'a>(source: &'a String, item: &'a Item) -> Result<String> {
"You have found a bug in docify! Please submit a new GitHub issue at \
https://github.com/sam0x17/docify/issues/new?title=%60source_excerpt\
%60%3A%20can%27t%20find%20item%20in%20source with a sample of the item \
you are trying to embed."
))
you are trying to embed.",
));
};
let start_c = compressed_source.chars[&found_start];
let start_pos = start_c.original_pos;
@@ -678,7 +695,9 @@ fn embed_internal_str(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<String> {
fn embed_internal_str(tokens: impl Into<TokenStream2>, lang: MarkdownLanguage) -> Result<String> {
let args = parse2::<EmbedArgs>(tokens.into())?;
// return blank result if we can't properly resolve `caller_crate_root`
let Some(root) = caller_crate_root() else { return Ok(String::from("")) };
let Some(root) = caller_crate_root() else {
return Ok(String::from(""));
};
let file_path = root.join(args.file_path.value());
let source_code = match fs::read_to_string(&file_path) {
Ok(src) => src,
@@ -748,7 +767,9 @@ fn compile_markdown_internal(tokens: impl Into<TokenStream2>) -> Result<TokenStr
}
let input_path = std::path::PathBuf::from(&args.input.value());
// return blank result if we can't properly resolve `caller_crate_root`
let Some(root) = caller_crate_root() else { return Ok(quote!()) };
let Some(root) = caller_crate_root() else {
return Ok(quote!());
};
let input_path = root.join(input_path);
if !input_path.exists() {
return Err(Error::new(
@@ -782,14 +803,14 @@ fn compile_markdown_internal(tokens: impl Into<TokenStream2>) -> Result<TokenStr
let Ok(source) = fs::read_to_string(&input_path) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to read markdown file at '{}'", input_path.display())
format!("Failed to read markdown file at '{}'", input_path.display()),
));
};
let compiled = compile_markdown_source(source.as_str())?;
let Ok(_) = overwrite_file(&output, &compiled) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to write to '{}'", output.display())
format!("Failed to write to '{}'", output.display()),
));
};
}
@@ -804,7 +825,7 @@ fn compile_markdown_internal(tokens: impl Into<TokenStream2>) -> Result<TokenStr
let Ok(source) = fs::read_to_string(&input_path) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to read markdown file at '{}'", input_path.display())
format!("Failed to read markdown file at '{}'", input_path.display()),
));
};
let compiled = compile_markdown_source(source.as_str())?;
@@ -861,7 +882,9 @@ fn compile_markdown_dir<P1: AsRef<Path>, P2: AsRef<Path>>(
if !e.file_type().is_file() && !e.file_type().is_symlink() {
return false;
}
let Some(ext) = e.path().extension() else { return false };
let Some(ext) = e.path().extension() else {
return false;
};
if ext.eq_ignore_ascii_case("md") {
return true;
}
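The predicate above acts as the directory walker's filter: keep regular files and symlinks whose extension is `md`, case-insensitively. A standalone sketch of the same filter, assuming the `walkdir` crate (which the `e.file_type()`/`e.path()` calls suggest this function already uses):

```rust
use walkdir::WalkDir;

fn markdown_files(dir: &str) -> Vec<std::path::PathBuf> {
    WalkDir::new(dir)
        .into_iter()
        .filter_map(Result::ok)
        .filter(|e| {
            // Only regular files and symlinks are candidates.
            if !e.file_type().is_file() && !e.file_type().is_symlink() {
                return false;
            }
            // Skip anything without an extension.
            let Some(ext) = e.path().extension() else {
                return false;
            };
            ext.eq_ignore_ascii_case("md")
        })
        .map(|e| e.path().to_path_buf())
        .collect()
}

fn main() {
    for p in markdown_files("markdown_source") {
        println!("{}", p.display());
    }
}
```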
@@ -883,29 +906,29 @@
let Ok(_) = fs::create_dir_all(parent) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to create output directory '{}'", parent.display())
format!("Failed to create output directory '{}'", parent.display()),
));
};
}
let Ok(source) = fs::read_to_string(src_path) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to read markdown file at '{}'", src_path.display())
format!("Failed to read markdown file at '{}'", src_path.display()),
));
};
let compiled = compile_markdown_source(source.as_str())?;
if let Some(parent) = dest_path.parent() {
let Ok(_) = fs::create_dir_all(parent) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to create directory '{}'", parent.display())
format!("Failed to create directory '{}'", parent.display()),
));
};
}
let Ok(_) = overwrite_file(&dest_path, &compiled) else {
return Err(Error::new(
Span::call_site(),
format!("Failed to write to '{}'", dest_path.display())
format!("Failed to write to '{}'", dest_path.display()),
));
};
}
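Finally, the `compile_markdown_internal`/`compile_markdown_dir` paths touched above sit behind the crate's markdown-compiling macro, which the integration example's doc comment describes as compiling everything in `markdown_source` into `markdown_bin`. A hedged sketch of such an invocation; the macro name `compile_markdown!` and the relative paths are inferred from the function names and that doc comment, not quoted from this commit:

```rust
// Expands docify embed markers in every `.md` file under `markdown_source`
// and writes the compiled results to `markdown_bin`.
docify::compile_markdown!("markdown_source", "markdown_bin");
```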
