From 2a1d816a56c1c34306c787ca7d8e11aaade965f9 Mon Sep 17 00:00:00 2001 From: IsaacShelton Date: Mon, 9 Dec 2024 13:52:27 -0600 Subject: [PATCH] Refactored and cleaned up remaining workspace compilation code --- src/cli/build/invoke.rs | 38 +--- src/cli/build/mod.rs | 1 + src/cli/build/supported_targets.rs | 35 ++++ src/compiler/mod.rs | 2 +- src/workspace/export_and_link.rs | 65 ++++++ src/workspace/mod.rs | 325 ++++++++++++----------- 6 files changed, 242 insertions(+), 224 deletions(-) create mode 100644 src/cli/build/supported_targets.rs create mode 100644 src/workspace/export_and_link.rs diff --git a/src/cli/build/invoke.rs b/src/cli/build/invoke.rs index 9faafba..b427fd2 100644 --- a/src/cli/build/invoke.rs +++ b/src/cli/build/invoke.rs @@ -1,12 +1,11 @@ -use super::BuildCommand; +use super::{supported_targets::warn_if_unsupported_target, BuildCommand}; use crate::{ c, cli::CliInvoke, compiler::Compiler, - diagnostics::{DiagnosticFlags, Diagnostics, WarningDiagnostic}, + diagnostics::{DiagnosticFlags, Diagnostics}, single_file_only::compile_single_file_only, source_files::SourceFiles, - target::{Target, TargetArch, TargetOs}, text::{IntoText, IntoTextStream}, unerror::unerror, workspace::compile_workspace, @@ -26,7 +25,7 @@ impl CliInvoke for BuildCommand { return Err(()); }; - ensure_supported_target(&target, &diagnostics); + warn_if_unsupported_target(&target, &diagnostics); let mut compiler = Compiler { options, @@ -47,37 +46,6 @@ impl CliInvoke for BuildCommand { } } -fn ensure_supported_target(target: &Target, diagnostics: &Diagnostics) { - if target.arch().is_none() { - diagnostics.push(WarningDiagnostic::plain( - "Target architecture is not supported, falling back to best guess", - )); - } - - if target.os().is_none() { - diagnostics.push(WarningDiagnostic::plain( - "Target os is not supported, falling back to best guess", - )); - } - - match target.os().zip(target.arch()) { - Some((TargetOs::Windows, TargetArch::X86_64)) => (), - 
Some((TargetOs::Windows, TargetArch::Aarch64)) => (), - Some((TargetOs::Mac, TargetArch::X86_64)) => (), - Some((TargetOs::Mac, TargetArch::Aarch64)) => (), - Some((TargetOs::Linux, TargetArch::X86_64)) => (), - Some((TargetOs::Linux, TargetArch::Aarch64)) => (), - Some((TargetOs::FreeBsd, TargetArch::X86_64)) => (), - None => (), - #[allow(unreachable_patterns)] - _ => { - diagnostics.push(WarningDiagnostic::plain( - "Host os/architecture configuration is not officially supported, taking best guess", - )); - } - } -} - fn compile_header(compiler: &Compiler, filepath: &Path) -> Result<(), ()> { let source_files = compiler.source_files; diff --git a/src/cli/build/mod.rs b/src/cli/build/mod.rs index bfe4d84..21a0660 100644 --- a/src/cli/build/mod.rs +++ b/src/cli/build/mod.rs @@ -1,6 +1,7 @@ mod invoke; mod options; mod parse; +mod supported_targets; pub use options::BuildOptions; diff --git a/src/cli/build/supported_targets.rs b/src/cli/build/supported_targets.rs new file mode 100644 index 0000000..d1281e5 --- /dev/null +++ b/src/cli/build/supported_targets.rs @@ -0,0 +1,35 @@ +use crate::{ + diagnostics::{Diagnostics, WarningDiagnostic}, + target::{Target, TargetArch, TargetOs}, +}; + +pub fn warn_if_unsupported_target(target: &Target, diagnostics: &Diagnostics) { + if target.arch().is_none() { + diagnostics.push(WarningDiagnostic::plain( + "Target architecture is not supported, falling back to best guess", + )); + } + + if target.os().is_none() { + diagnostics.push(WarningDiagnostic::plain( + "Target os is not supported, falling back to best guess", + )); + } + + match target.os().zip(target.arch()) { + Some((TargetOs::Windows, TargetArch::X86_64)) => (), + Some((TargetOs::Windows, TargetArch::Aarch64)) => (), + Some((TargetOs::Mac, TargetArch::X86_64)) => (), + Some((TargetOs::Mac, TargetArch::Aarch64)) => (), + Some((TargetOs::Linux, TargetArch::X86_64)) => (), + Some((TargetOs::Linux, TargetArch::Aarch64)) => (), + Some((TargetOs::FreeBsd, TargetArch::X86_64)) 
=> (), + None => (), + #[allow(unreachable_patterns)] + _ => { + diagnostics.push(WarningDiagnostic::plain( + "Host os/architecture configuration is not officially supported, taking best guess", + )); + } + } +} diff --git a/src/compiler/mod.rs b/src/compiler/mod.rs index 9a75e7d..5f82082 100644 --- a/src/compiler/mod.rs +++ b/src/compiler/mod.rs @@ -25,7 +25,7 @@ impl<'a> Compiler<'a> { &self.options.target } - pub fn maybe_execute_result(&self, output_binary_filepath: &Path) -> Result<(), ()> { + pub fn execute_result(&self, output_binary_filepath: &Path) -> Result<(), ()> { if !self.options.excute_result { return Ok(()); } diff --git a/src/workspace/export_and_link.rs b/src/workspace/export_and_link.rs new file mode 100644 index 0000000..d687036 --- /dev/null +++ b/src/workspace/export_and_link.rs @@ -0,0 +1,65 @@ +use crate::{compiler::Compiler, ir, llvm_backend::llvm_backend, resolved, unerror::unerror}; +use std::{ + ffi::OsString, + fs::create_dir_all, + path::{Path, PathBuf}, + time::Duration, +}; + +#[derive(Clone, Debug)] +pub struct ExportDetails { + pub linking_duration: Duration, + pub executable_filepath: PathBuf, +} + +pub fn export_and_link( + compiler: &mut Compiler, + project_folder: &Path, + resolved_ast: &resolved::Ast, + ir_module: &ir::Module, +) -> Result { + let target = &compiler.options.target; + let project_name = project_name(project_folder); + + let binary_artifacts_folder = project_folder.join("bin"); + let object_files_folder = project_folder.join("obj"); + create_dir_all(&binary_artifacts_folder).expect("failed to create bin folder"); + create_dir_all(&object_files_folder).expect("failed to create obj folder"); + + let object_file_filepath = + object_files_folder.join(target.default_object_file_name(&project_name)); + + let executable_filepath = + binary_artifacts_folder.join(target.default_executable_name(&project_name)); + + let linking_duration = unerror( + unsafe { + llvm_backend( + compiler, + &ir_module, + &resolved_ast, + 
&object_file_filepath, + &executable_filepath, + &compiler.diagnostics, + ) + }, + compiler.source_files, + )?; + + Ok(ExportDetails { + linking_duration, + executable_filepath, + }) +} + +fn project_name(project_folder: &Path) -> OsString { + project_folder + .file_name() + .map(OsString::from) + .or_else(|| { + std::env::current_dir() + .ok() + .and_then(|dir| dir.file_name().map(OsString::from)) + }) + .unwrap_or_else(|| OsString::from("main")) +} diff --git a/src/workspace/mod.rs b/src/workspace/mod.rs index adfe36b..bcdaf3f 100644 --- a/src/workspace/mod.rs +++ b/src/workspace/mod.rs @@ -7,6 +7,7 @@ pub mod compile; mod explore; mod explore_within; +mod export_and_link; mod file; pub mod fs; mod module_file; @@ -21,13 +22,13 @@ use crate::{ inflow::Inflow, interpreter_env::{run_build_system_interpreter, setup_build_system_interpreter_symbols}, line_column::Location, - llvm_backend::llvm_backend, lower::lower, resolve::resolve, show::Show, source_files::{Source, SourceFileKey}, token::Token, unerror::unerror, + workspace::export_and_link::export_and_link, }; use compile::{ compile_code_file, @@ -36,7 +37,7 @@ use compile::{ use explore::{explore, ExploreResult}; use explore_within::{explore_within, ExploreWithinResult}; use file::CodeFile; -use fs::{Fs, FsNodeId}; +use fs::Fs; use indexmap::IndexMap; use module_file::ModuleFile; use path_absolutize::Absolutize; @@ -44,8 +45,6 @@ use queue::WorkspaceQueue; use stats::CompilationStats; use std::{ collections::HashMap, - ffi::OsString, - fs::create_dir_all, path::{Path, PathBuf}, sync::Barrier, }; @@ -53,83 +52,6 @@ use thousands::Separable; const NUM_THREADS: usize = 8; -fn queue_dependencies>( - compiler: &Compiler, - fs: &Fs, - mut settings: Settings, - source_file: SourceFileKey, - stats: &CompilationStats, - queue: &WorkspaceQueue, -) -> Settings { - for folder in settings.namespace_to_dependency.values().flatten() { - let infrastructure = compiler - .options - .infrastructure - .as_ref() - .expect("must have 
infrastructure specified in order to import") - .absolutize() - .expect("failed to get absolute path for compiler infrastructure"); - - let absolute_folder = infrastructure.join("import").join(&**folder); - let already_discovered = fs.find(&absolute_folder).is_some(); - - if !already_discovered { - let Some(ExploreResult { - module_files: new_module_files, - normal_files: new_normal_files, - }) = explore(&fs, &absolute_folder) - else { - ErrorDiagnostic::new( - format!("Dependency '{}' could not be found", &**folder), - Source::new(source_file, Location::new(0, 1)), - ) - .eprintln(compiler.source_files); - stats.fail_module_file(); - return settings; - }; - - queue.push_module_files(new_module_files.into_iter()); - queue.push_code_files(new_normal_files.into_iter().map(CodeFile::Normal)); - } - - let module_fs_node_id = fs.find(&absolute_folder).expect("module loaded"); - settings - .dependency_to_module - .insert(folder.to_string(), module_fs_node_id); - } - - settings -} - -fn process_module_file<'a, 'b: 'a, I: Inflow>( - compiler: &Compiler, - fs: &Fs, - module_file: ModuleFile, - compiled_module: CompiledModule<'a, I>, - stats: &CompilationStats, - queue: &WorkspaceQueue<'a, I>, -) { - let folder_fs_node_id = fs - .get(module_file.fs_node_id) - .parent - .expect("module file has parent"); - - let CompiledModule { - settings, - source_file, - total_file_size, - remaining_input, - } = compiled_module; - - let settings = queue_dependencies(compiler, fs, settings, source_file, stats, queue); - - queue.push_module_folder(folder_fs_node_id, settings); - queue.push_code_file(CodeFile::Module(module_file, remaining_input)); - - stats.process_file(); - stats.process_bytes(total_file_size); -} - pub fn compile_workspace( compiler: &mut Compiler, project_folder: &Path, @@ -138,6 +60,7 @@ pub fn compile_workspace( let stats = CompilationStats::start(); let fs = Fs::new(); + let source_files = compiler.source_files; let ExploreWithinResult { explored, entry } = 
explore_within(&fs, project_folder, single_file); let Some(ExploreResult { @@ -161,32 +84,29 @@ pub fn compile_workspace( scope.spawn(|| { // ===== Process module files ===== queue.for_module_files(|module_file| { - let compiled_module = match compile_module_file(compiler, &module_file.path) { - Ok(values) => values, - Err(err) => { - err.eprintln(compiler.source_files); + match compile_module_file(compiler, &module_file.path) { + Ok(compiled_module) => { + process_module_file( + compiler, + &fs, + module_file, + compiled_module, + &stats, + &queue, + ); + } + Err(failed_module_message) => { + failed_module_message.eprintln(source_files); stats.fail_module_file(); - return; } - }; - - process_module_file( - compiler, - &fs, - module_file, - compiled_module, - &stats, - &queue, - ); + } }); - // NOTE: This synchronizes the threads, and marks the end of module-related modifications/processing. - // `num_module_files_failed` can now be consistently read from... + // NOTE: This synchronizes the threads, and marks the end of module-related modifications/processing all_modules_done.wait(); // ==== Don't continue if module files had errors ===== - // SAFETY: This is okay, as all the modifications happened before we synchronized - // the modifying threads. 
+ // SAFETY: This is okay, as all the modifications happen before synchronizing the modifying threads if stats.failed_modules_estimate() != 0 { return; } @@ -198,8 +118,8 @@ pub fn compile_workspace( stats.process_file(); stats.process_bytes(did_bytes); } - Err(err) => { - err.eprintln(compiler.source_files); + Err(error_message) => { + error_message.eprintln(source_files); stats.fail_file(); } }; @@ -208,70 +128,25 @@ pub fn compile_workspace( } }); - let in_how_many_seconds = stats.seconds_elapsed(); - - // SAFETY: This is okay since all modifying threads were joined (and thereby synchronized) - let num_module_files_failed = stats.failed_modules_estimate(); - if num_module_files_failed != 0 { - eprintln!( - "error: {num_module_files_failed} module file(s) were determined to have errors in {in_how_many_seconds:.2} seconds", - ); - return Err(()); - } - - // SAFETY: This is okay since all modifying threads were joined (and thereby synchronized) - let num_files_failed = stats.failed_files_estimate(); - if num_files_failed != 0 { - eprintln!( - "error: {num_files_failed} file(s) were determined to have errors in {in_how_many_seconds:.2} seconds", - ); - return Err(()); - } - - let Some(_adept_version) = compiler.version.get() else { - eprintln!("error: No Adept version was specified! 
Use `pragma => adept(\"3.0\")` at the top of the module file"); - return Err(()); - }; + print_module_errors(compiler, &stats)?; - let module_folders = HashMap::::from_iter(queue.module_folders.into_iter()); + let module_folders = HashMap::from_iter(queue.module_folders.into_iter()); let mut files = IndexMap::from_iter(queue.ast_files.into_iter()); if compiler.options.interpret { - if let Some(guaranteed_entry) = entry { - setup_build_system_interpreter_symbols(files.get_mut(&guaranteed_entry).unwrap()); - } else { + let Some(guaranteed_entry) = entry else { eprintln!( - "error: experimental manual interpreter does not properly handle multiple files yet" + "error: experimental manually-invoked interpreter does not properly handle multiple files yet" ); return Err(()); - } + }; + + setup_build_system_interpreter_symbols(files.get_mut(&guaranteed_entry).unwrap()); } let workspace = AstWorkspace::new(fs, files, compiler.source_files, Some(module_folders)); - - let resolved_ast = unerror( - resolve(&workspace, &compiler.options), - compiler.source_files, - )?; - - let ir_module = unerror( - lower(&compiler.options, &resolved_ast), - compiler.source_files, - )?; - - let project_name = project_folder - .file_name() - .map(OsString::from) - .unwrap_or_else(|| { - std::env::current_dir() - .ok() - .map(|dir| { - dir.file_name() - .map(OsString::from) - .unwrap_or_else(|| OsString::from("main")) - }) - .unwrap_or_else(|| OsString::from("main")) - }); + let resolved_ast = unerror(resolve(&workspace, &compiler.options), source_files)?; + let ir_module = unerror(lower(&compiler.options, &resolved_ast), source_files)?; if compiler.options.interpret { return run_build_system_interpreter(&resolved_ast, &ir_module) @@ -279,43 +154,90 @@ pub fn compile_workspace( .map_err(|err| eprintln!("{}", err)); } - let bin_folder = project_folder.join("bin"); - let obj_folder = project_folder.join("obj"); + let export_details = export_and_link(compiler, project_folder, &resolved_ast, 
&ir_module)?; + print_summary(&stats); + compiler.execute_result(&export_details.executable_filepath) +} - create_dir_all(&bin_folder).expect("failed to create bin folder"); - create_dir_all(&obj_folder).expect("failed to create obj folder"); +fn process_module_file<'a, 'b: 'a, I: Inflow>( + compiler: &Compiler, + fs: &Fs, + module_file: ModuleFile, + compiled_module: CompiledModule<'a, I>, + stats: &CompilationStats, + queue: &WorkspaceQueue<'a, I>, +) { + let folder_fs_node_id = fs + .get(module_file.fs_node_id) + .parent + .expect("module file has parent"); - let exe_filepath = bin_folder.join( - compiler - .options - .target - .default_executable_name(&project_name), - ); - let obj_filepath = obj_folder.join( - compiler + let CompiledModule { + settings, + source_file, + total_file_size, + remaining_input, + } = compiled_module; + + let settings = queue_dependencies(compiler, fs, settings, source_file, stats, queue); + + queue.push_module_folder(folder_fs_node_id, settings); + queue.push_code_file(CodeFile::Module(module_file, remaining_input)); + + stats.process_file(); + stats.process_bytes(total_file_size); +} + +fn queue_dependencies>( + compiler: &Compiler, + fs: &Fs, + mut settings: Settings, + source_file: SourceFileKey, + stats: &CompilationStats, + queue: &WorkspaceQueue, +) -> Settings { + for folder in settings.namespace_to_dependency.values().flatten() { + let infrastructure = compiler .options - .target - .default_object_file_name(&project_name), - ); + .infrastructure + .as_ref() + .expect("must have infrastructure specified in order to import") + .absolutize() + .expect("failed to get absolute path for compiler infrastructure"); - let linking_duration = unerror( - unsafe { - llvm_backend( - compiler, - &ir_module, - &resolved_ast, - &obj_filepath, - &exe_filepath, - &compiler.diagnostics, - ) - }, - compiler.source_files, - )?; - - // Print summary: + let absolute_folder = infrastructure.join("import").join(&**folder); + let already_discovered = 
fs.find(&absolute_folder).is_some(); + if !already_discovered { + let Some(ExploreResult { + module_files: new_module_files, + normal_files: new_normal_files, + }) = explore(&fs, &absolute_folder) + else { + ErrorDiagnostic::new( + format!("Dependency '{}' could not be found", &**folder), + Source::new(source_file, Location::new(0, 1)), + ) + .eprintln(compiler.source_files); + stats.fail_module_file(); + return settings; + }; + + queue.push_module_files(new_module_files.into_iter()); + queue.push_code_files(new_normal_files.into_iter().map(CodeFile::Normal)); + } + + let module_fs_node_id = fs.find(&absolute_folder).expect("module loaded"); + settings + .dependency_to_module + .insert(folder.to_string(), module_fs_node_id); + } + + settings +} + +fn print_summary(stats: &CompilationStats) { let in_how_many_seconds = stats.seconds_elapsed(); - let _linking_took = linking_duration.as_millis() as f64 / 1000.0; // SAFETY: These are okay, as we synchronized by joining let files_processed = stats.files_processed_estimate().separate_with_commas(); @@ -326,6 +248,33 @@ pub fn compile_workspace( "Compiled {} from {} files in {:.2} seconds", bytes_processed, files_processed, in_how_many_seconds, ); +} + +fn print_module_errors(compiler: &Compiler, stats: &CompilationStats) -> Result<(), ()> { + let in_how_many_seconds = stats.seconds_elapsed(); + + // SAFETY: This is okay since all modifying threads were joined (and thereby synchronized) + let num_module_files_failed = stats.failed_modules_estimate(); + if num_module_files_failed != 0 { + eprintln!( + "error: {num_module_files_failed} module file(s) were determined to have errors in {in_how_many_seconds:.2} seconds", + ); + return Err(()); + } + + // SAFETY: This is okay since all modifying threads were joined (and thereby synchronized) + let num_files_failed = stats.failed_files_estimate(); + if num_files_failed != 0 { + eprintln!( + "error: {num_files_failed} file(s) were determined to have errors in 
{in_how_many_seconds:.2} seconds", + ); + return Err(()); + } + + let Some(_adept_version) = compiler.version.get() else { + eprintln!("error: No Adept version was specified! Use `pragma => adept(\"3.0\")` at the top of the module file"); + return Err(()); + }; - compiler.maybe_execute_result(&exe_filepath) + Ok(()) }