🎨 - Make sourcedir creation optional
Roland Peelen committed May 6, 2024
1 parent 91f41a7 commit bad85d4
Showing 7 changed files with 59 additions and 15 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "rewatch"
-version = "1.0.5"
+version = "1.0.6"
 edition = "2021"

 [dependencies]
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "@rolandpeelen/rewatch",
-  "version": "1.0.5",
+  "version": "1.0.6",
   "license": "BSD-3-Clause",
   "bin": {
     "rewatch": "rewatch"
14 changes: 11 additions & 3 deletions src/build.rs
@@ -252,6 +252,7 @@ pub fn incremental_build(
     default_timing: Option<Duration>,
     initial_build: bool,
     only_incremental: bool,
+    create_sourcedirs: bool,
 ) -> Result<(), IncrementalBuildError> {
     logs::initialize(&build_state.packages);
     let num_dirty_modules = build_state.modules.values().filter(|m| is_dirty(m)).count() as u64;
@@ -345,7 +346,9 @@ pub fn incremental_build(
     let compile_duration = start_compiling.elapsed();

     logs::finalize(&build_state.packages);
-    sourcedirs::print(&build_state);
+    if create_sourcedirs {
+        sourcedirs::print(&build_state);
+    }
     pb.finish();
     if !compile_errors.is_empty() {
         if helpers::contains_ascii_characters(&compile_warnings) {
@@ -404,7 +407,12 @@ impl fmt::Display for BuildError {
     }
 }

-pub fn build(filter: &Option<regex::Regex>, path: &str, no_timing: bool) -> Result<BuildState, BuildError> {
+pub fn build(
+    filter: &Option<regex::Regex>,
+    path: &str,
+    no_timing: bool,
+    create_sourcedirs: bool,
+) -> Result<BuildState, BuildError> {
     let default_timing: Option<std::time::Duration> = if no_timing {
         Some(std::time::Duration::new(0.0 as u64, 0.0 as u32))
     } else {
@@ -414,7 +422,7 @@ pub fn build(filter: &Option<regex::Regex>, path: &str, no_timing: bool) -> Result<BuildState, BuildError> {
     let mut build_state =
         initialize_build(default_timing, filter, path).map_err(BuildError::InitializeBuild)?;

-    match incremental_build(&mut build_state, default_timing, true, false) {
+    match incremental_build(&mut build_state, default_timing, true, false, create_sourcedirs) {
        Ok(_) => {
            let timing_total_elapsed = timing_total.elapsed();
            println!(
19 changes: 17 additions & 2 deletions src/main.rs
@@ -40,6 +40,11 @@ struct Args {
     #[arg(short, long)]
     no_timing: Option<bool>,

+    /// This creates a source_dirs.json file at the root of the monorepo, which is needed when you
+    /// want to use Reanalyze
+    #[arg(short, long)]
+    create_sourcedirs: Option<bool>,
+
     #[arg(long)]
     compiler_args: Option<String>,

@@ -73,7 +78,12 @@ fn main() {
         lock::Lock::Aquired(_) => match command {
             Command::Clean => build::clean::clean(&folder),
             Command::Build => {
-                match build::build(&filter, &folder, args.no_timing.unwrap_or(false)) {
+                match build::build(
+                    &filter,
+                    &folder,
+                    args.no_timing.unwrap_or(false),
+                    args.create_sourcedirs.unwrap_or(false),
+                ) {
                     Err(e) => {
                         eprintln!("Error Building: {e}");
                         std::process::exit(1)
@@ -87,7 +97,12 @@ fn main() {
                 };
             }
             Command::Watch => {
-                watcher::start(&filter, &folder, args.after_build);
+                watcher::start(
+                    &filter,
+                    &folder,
+                    args.after_build,
+                    args.create_sourcedirs.unwrap_or(false),
+                );
             }
         },
     }
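A minimal standalone sketch (not part of this commit) of how an Option<bool> flag such as create_sourcedirs behaves, assuming clap v4 with the derive feature, which the #[arg(short, long)] attributes above suggest; the program below is illustrative only:

use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// Emit source_dirs.json at the monorepo root (used by Reanalyze)
    #[arg(short, long)]
    create_sourcedirs: Option<bool>,
}

fn main() {
    // Omitting the flag yields None; the call sites above turn that into `false`
    // via unwrap_or(false), so sourcedirs generation stays opt-in.
    let args = Args::parse();
    println!("create_sourcedirs = {}", args.create_sourcedirs.unwrap_or(false));
}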
12 changes: 9 additions & 3 deletions src/sourcedirs.rs
@@ -85,7 +85,7 @@ pub fn print(buildstate: &BuildState) {

             // Write sourcedirs.json
             write_sourcedirs_files(
-                package.get_build_path(),
+                package.get_bs_build_path(),
                 &SourceDirs {
                     dirs: &dirs.clone().into_iter().collect::<Vec<Dir>>(),
                     pkgs: &pkgs.clone().flatten().collect::<Vec<Pkg>>(),
@@ -101,12 +101,18 @@ pub fn print(buildstate: &BuildState) {
         })
         .unzip();

+    let mut merged_dirs: AHashSet<Dir> = AHashSet::new();
+    let mut merged_pkgs: AHashMap<PackageName, AbsolutePath> = AHashMap::new();
+
+    dirs.into_iter().for_each(|dir_set| merged_dirs.extend(dir_set));
+    pkgs.into_iter().for_each(|pkg_set| merged_pkgs.extend(pkg_set));
+
     // Write sourcedirs.json
     write_sourcedirs_files(
         root_package.get_bs_build_path(),
         &SourceDirs {
-            dirs: &dirs.into_iter().flatten().collect::<Vec<Dir>>(),
-            pkgs: &pkgs.into_iter().flatten().collect::<Vec<Pkg>>(),
+            dirs: &merged_dirs.into_iter().collect::<Vec<Dir>>(),
+            pkgs: &merged_pkgs.into_iter().collect::<Vec<Pkg>>(),
             generated: &vec![],
         },
     )
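The root-level change above swaps a plain flatten for an explicit merge, so directory and package entries that appear in several packages are deduplicated before the root source_dirs.json is written. A small illustrative sketch of that pattern, using std's HashSet/HashMap in place of ahash's AHashSet/AHashMap and made-up package data:

use std::collections::{HashMap, HashSet};

fn main() {
    // Per-package results, as produced by the unzip above (data here is invented).
    let per_package_dirs: Vec<HashSet<String>> = vec![
        HashSet::from(["src".to_string(), "test".to_string()]),
        HashSet::from(["src".to_string()]), // overlaps with the first package
    ];
    let per_package_pkgs: Vec<HashMap<String, String>> = vec![
        HashMap::from([("pkg-a".to_string(), "/repo/pkg-a".to_string())]),
        HashMap::from([("pkg-b".to_string(), "/repo/pkg-b".to_string())]),
    ];

    // Extending a set/map deduplicates entries; flattening straight into a Vec,
    // as the old code did, would keep the duplicate "src" directory.
    let mut merged_dirs: HashSet<String> = HashSet::new();
    let mut merged_pkgs: HashMap<String, String> = HashMap::new();
    per_package_dirs.into_iter().for_each(|dirs| merged_dirs.extend(dirs));
    per_package_pkgs.into_iter().for_each(|pkgs| merged_pkgs.extend(pkgs));

    println!("dirs: {:?}", merged_dirs);
    println!("pkgs: {:?}", merged_pkgs);
}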
23 changes: 19 additions & 4 deletions src/watcher.rs
@@ -51,6 +51,7 @@ async fn async_watch(
     path: &str,
     filter: &Option<regex::Regex>,
     after_build: Option<String>,
+    create_sourcedirs: bool,
 ) -> notify::Result<()> {
     let mut build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
     let mut needs_compile_type = CompileType::Incremental;
@@ -178,7 +179,15 @@
         match needs_compile_type {
             CompileType::Incremental => {
                 let timing_total = Instant::now();
-                if build::incremental_build(&mut build_state, None, initial_build, !initial_build).is_ok() {
+                if build::incremental_build(
+                    &mut build_state,
+                    None,
+                    initial_build,
+                    !initial_build,
+                    create_sourcedirs,
+                )
+                .is_ok()
+                {
                     if let Some(a) = after_build.clone() {
                         cmd::run(a)
                     }
@@ -197,7 +206,8 @@
             CompileType::Full => {
                 let timing_total = Instant::now();
                 build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
-                let _ = build::incremental_build(&mut build_state, None, initial_build, false);
+                let _ =
+                    build::incremental_build(&mut build_state, None, initial_build, false, create_sourcedirs);
                 if let Some(a) = after_build.clone() {
                     cmd::run(a)
                 }
@@ -220,7 +230,12 @@
     }
 }

-pub fn start(filter: &Option<regex::Regex>, folder: &str, after_build: Option<String>) {
+pub fn start(
+    filter: &Option<regex::Regex>,
+    folder: &str,
+    after_build: Option<String>,
+    create_sourcedirs: bool,
+) {
     futures::executor::block_on(async {
         let queue = Arc::new(FifoQueue::<Result<Event, Error>>::new());
         let producer = queue.clone();
@@ -232,7 +247,7 @@ pub fn start(filter: &Option<regex::Regex>, folder: &str, after_build: Option<String>) {
             .watch(folder.as_ref(), RecursiveMode::Recursive)
             .expect("Could not start watcher");

-        if let Err(e) = async_watch(consumer, folder, filter, after_build).await {
+        if let Err(e) = async_watch(consumer, folder, filter, after_build, create_sourcedirs).await {
             println!("error: {:?}", e)
         }
     })
