diff --git a/CMakeLists.txt b/CMakeLists.txt index 747826ac..590a8478 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -70,7 +70,8 @@ find_package(netCDF CONFIG REQUIRED) find_package(yaml-cpp CONFIG REQUIRED) set(EIGEN3_INCLUDES "${PROJECT_SOURCE_DIR}/src/support/eigen-git-mirror" CACHE PATH "Eigen include folder") - +# todo +# option(BUILD_WITH_OPENCL "Build with OPENCL support" OFF) option(BUILD_WITH_PYBIND11 "Build python binding with PyBind11" OFF) option(BUILD_WITH_ZMQ "Build with ZeroMQ" OFF) option(BUILD_FOR_PHI "Build for Intel Phi" OFF) @@ -468,6 +469,11 @@ IF (BUILD_WITH_TIRPC) target_link_libraries (xrf_maps LINK_PUBLIC libtirpc.so) ENDIF() +IF (BUILD_WITH_OPENCL) + find_package(CLBlast CONFIG REQUIRED) + target_link_libraries(xrf_maps PRIVATE clblast) +ENDIF() + #install(TARGETS xrf_maps libxrf_io libxrf_fit # EXPORT libxrf-export # LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} diff --git a/src/core/main.cpp b/src/core/main.cpp index e3de6839..c4b4df96 100644 --- a/src/core/main.cpp +++ b/src/core/main.cpp @@ -50,6 +50,9 @@ POSSIBILITY OF SUCH DAMAGE. #include "core/process_whole.h" #include + +#define MAX_DETECTORS 7 + // ---------------------------------------------------------------------------- void help() @@ -59,6 +62,7 @@ void help() logit_s<<"Options: \n"; logit_s<<"--nthreads : number of threads to use (default is all system threads) \n"; logit_s<<"--quantify-with : File to use as quantification standard \n"; + logit_s<<"--quantify-fit : If you want to perform quantification without having to re-fit all datasets. See --fit for routine options \n"; logit_s<<"--detectors : Detectors to process, Defaults to 0,1,2,3 for 4 detector \n"; logit_s<<"--generate-avg-h5 : Generate .h5 file which is the average of all detectors .h50 - h.53 or range specified. \n"; logit_s<<"--add-v9layout : Generate .h5 file which has v9 layout able to open in IDL MAPS software. \n"; @@ -74,7 +78,8 @@ void help() " 1 = matrix batch fit\n 2 = batch fit without tails\n 3 = batch fit with tails\n 4 = batch fit with free E, everything else fixed \n"; logit_s<<"--optimize-fit-routine : General (default): passes elements amplitudes as fit parameters. Hybrid only passes fit parameters and fits element amplitudes using NNLS\n"; logit_s<<"--optimizer : Choose which optimizer to use for --optimize-fit-override-params or matrix fit routine \n"; - logit_s<<"--optimize-rois : Looks in 'rois' directory and performs --optimize-fit-override-params on each roi separately. \n"; + logit_s<<"--optimizer-fxg-tols : Default is LM_FIT = " << DP_LM_USERTOL << " , MP_FIT = " << 1.192e-10 << "\n"; + logit_s<<"--optimize-rois : Looks in 'rois' directory and performs --optimize-fit-override-params on each roi separately. 
Needs to have --quantify-rois and --quantify-fit \n"; logit_s<<"Fitting Routines: \n"; logit_s<< "--fit comma seperated \n"; logit_s<<" roi : element energy region of interest \n"; @@ -97,6 +102,8 @@ void help() logit_s<<"xrf_maps --fit roi,matrix --dir /data/dataset1 --files scan1.mda,scan2.mda \n"; logit_s<<" Perform roi, matrix, and nnls analysis on the directory /data/dataset1, use maps_standard.txt information for quantification \n"; logit_s<<"xrf_maps --fit roi,matrix,nnls --quantify-with maps_standard.txt --dir /data/dataset1 \n"; + logit_s<<" Perform optimization of an integrated roi spectra \n"; + logit_s<<"xrf_maps --optimize-rois --quantify-rois maps_standardinfo.txt --quantify-fit roi,matrix,nnls --dir /data/dataset1 \n"; } // ---------------------------------------------------------------------------- @@ -115,9 +122,32 @@ void set_optimizer(Command_Line_Parser& clp, data_struct::Analysis_Job& std::string opt = clp.get_option("--optimize-fit-routine"); if (opt == "hybrid") { + logI << "Using Hybrid optimizer\n"; analysis_job.optimize_fit_routine = OPTIMIZE_FIT_ROUTINE::HYBRID; } } + + if (clp.option_exists("--optimizer-fxg-tols")) + { + T_real fxg_tol; + if (std::is_same::value) + { + fxg_tol = std::stof(clp.get_option("--optimizer-fxg-tols")); + } + else if (std::is_same::value) + { + fxg_tol = std::stod(clp.get_option("--optimizer-fxg-tols")); + } + + std::unordered_map opt_map; + opt_map[STR_OPT_FTOL] = fxg_tol; + opt_map[STR_OPT_XTOL] = fxg_tol; + opt_map[STR_OPT_GTOL] = fxg_tol; + + logI << "Setting FTOL, XTOL, GTOL to " << fxg_tol << "\n"; + + analysis_job.optimizer()->set_options(opt_map); + } } // ---------------------------------------------------------------------------- @@ -209,7 +239,7 @@ void set_detectors(Command_Line_Parser& clp, data_struct::Analysis_Job& } else { - for (size_t det = 0; det < 7; det++) + for (size_t det = 0; det < MAX_DETECTORS; det++) { analysis_job.detector_num_arr.push_back(det); } @@ -316,27 +346,28 @@ int set_dir_and_files(Command_Line_Parser& clp, data_struct::Analysis_Jobfind_all_dataset_files(dataset_dir, ".hdf5")) - { - analysis_job.dataset_files.push_back(itr); - } - for (auto& itr : io::file::File_Scan::inst()->find_all_dataset_files(dataset_dir, ".h5")) - { - analysis_job.dataset_files.push_back(itr); - } - for (auto& itr : io::file::File_Scan::inst()->find_all_dataset_files(dataset_dir, ".emd")) - { - analysis_job.dataset_files.push_back(itr); - } - for (auto& itr : io::file::File_Scan::inst()->find_all_dataset_files(dataset_dir, ".mda")) - { - analysis_job.dataset_files.push_back(itr); - } - for (auto& itr : io::file::File_Scan::inst()->find_all_dataset_files(dataset_dir + "mda" + DIR_END_CHAR, ".mda")) + std::vector search_ext_list; + std::vector search_ext_mda_list; + + search_ext_list.push_back(".hdf5"); + search_ext_list.push_back(".h5"); + search_ext_list.push_back(".emd"); + search_ext_list.push_back(".mda"); + + io::file::File_Scan::inst()->find_all_dataset_files_by_list(dataset_dir, search_ext_list, analysis_job.dataset_files); + + + search_ext_mda_list.push_back(".mda"); + search_ext_mda_list.push_back(".mca"); + for (int i = 0; i < MAX_DETECTORS; i++) { - analysis_job.dataset_files.push_back(itr); + std::string mca_str = ".mca" + std::to_string(i); + search_ext_mda_list.push_back(mca_str); } - // don't want to open h5 avg files for optimize + + io::file::File_Scan::inst()->find_all_dataset_files_by_list(dataset_dir + "mda" + DIR_END_CHAR, search_ext_mda_list, analysis_job.dataset_files); + + // don't want to open h5 avg 
files for optimize because we optimize by detector, not avg for (auto& itr : analysis_job.dataset_files) { analysis_job.optimize_dataset_files.push_back(itr); } @@ -499,16 +530,13 @@ int run_quantification(Command_Line_Parser& clp) if (analysis_job.quantification_standard_filename.length() > 0) { - perform_quantification(&analysis_job); + perform_quantification(&analysis_job, true); } else { logE << "Please specify filename with quantification information (usually maps_standardinfo.txt)\n"; return -1; } - - - //iterate_datasets_and_update(analysis_job); } else { @@ -524,12 +552,40 @@ int run_optimize_rois(Command_Line_Parser& clp) { data_struct::Analysis_Job analysis_job; + // Force perform quantification until we have proper loading quant from hdf5 finished if (set_general_options(clp, analysis_job) == -1) { return -1; } + set_fit_routines(clp, analysis_job); + /* + if (analysis_job.fitting_routines.size() == 0) + { + logE << "Please specify fit routines with --quantify-fit roi,nnls,matrix \n"; + return -1; + } + */ + //Check if we want to quantify with a standard + if (clp.option_exists("--quantify-rois")) + { + analysis_job.quantification_standard_filename = clp.get_option("--quantify-rois"); + } - optimize_rois(analysis_job); + if (io::file::init_analysis_job_detectors(&analysis_job)) + { + io::file::File_Scan::inst()->populate_netcdf_hdf5_files(analysis_job.dataset_directory); + + if (analysis_job.quantification_standard_filename.length() > 0) + { + perform_quantification(&analysis_job, false); + } + optimize_rois(analysis_job); + } + else + { + logE << "Error initializing detectors!\n"; + return -1; + } return 0; } diff --git a/src/core/process_whole.cpp b/src/core/process_whole.cpp index e39e8705..72a31365 100644 --- a/src/core/process_whole.cpp +++ b/src/core/process_whole.cpp @@ -68,8 +68,8 @@ bool optimize_integrated_fit_params(data_struct::Analysis_Job * analysis fitting::models::Range energy_range = data_struct::get_energy_range(int_spectra.size(), &(params_override->fit_params)); //Fitting routines - fitting::routines::Param_Optimized_Fit_Routine *fit_routine; - + fitting::routines::Param_Optimized_Fit_Routine* fit_routine; + if (analysis_job->optimize_fit_routine == OPTIMIZE_FIT_ROUTINE::HYBRID) { @@ -81,7 +81,7 @@ bool optimize_integrated_fit_params(data_struct::Analysis_Job * analysis } fit_routine->set_optimizer(analysis_job->optimizer()); - fit_routine->set_update_coherent_amplitude_on_fit(false); + fit_routine->set_update_coherent_amplitude_on_fit(false); //reset model fit parameters to defaults model.reset_to_default_fit_params(); @@ -95,6 +95,7 @@ bool optimize_integrated_fit_params(data_struct::Analysis_Job * analysis //Fit the spectra saving the element counts in element_fit_count_dict fitting::optimizers::OPTIMIZER_OUTCOME outcome = fit_routine->fit_spectra_parameters(&model, &int_spectra, &params_override->elements_to_fit, out_fitp); + out_fitp.add_parameter(data_struct::Fit_Param(STR_OUTCOME, double(outcome))); std::string result = optimizer_outcome_to_str(outcome); logI << "Outcome = " << result << "\n"; switch (outcome) @@ -429,7 +430,7 @@ void load_and_fit_quatification_datasets(data_struct::Analysis_Job* anal // ---------------------------------------------------------------------------- -bool perform_quantification(data_struct::Analysis_Job* analysis_job) +bool perform_quantification(data_struct::Analysis_Job* analysis_job, bool save_when_done) { quantification::models::Quantification_Model quantification_model; std::chrono::time_point start, end; @@ -480,8
+481,10 @@ bool perform_quantification(data_struct::Analysis_Job* analysis_job) } } - io::file::save_quantification_plots(analysis_job->dataset_directory, detector); - + if (save_when_done) + { + io::file::save_quantification_plots(analysis_job->dataset_directory, detector); + } } } else @@ -490,25 +493,27 @@ bool perform_quantification(data_struct::Analysis_Job* analysis_job) return false; } - //Save quantification to each file - for (auto& dataset_file : analysis_job->dataset_files) + if (save_when_done) { - for (size_t detector_num : analysis_job->detector_num_arr) + //Save quantification to each file + for (auto& dataset_file : analysis_job->dataset_files) { - data_struct::Detector* detector = analysis_job->get_detector(detector_num); - std::string str_detector_num = std::to_string(detector_num); - std::string full_save_path = analysis_job->dataset_directory + DIR_END_CHAR + "img.dat" + DIR_END_CHAR + dataset_file + ".h5" + str_detector_num; - if (io::file::HDF5_IO::inst()->start_save_seq(full_save_path, false, true)) + for (size_t detector_num : analysis_job->detector_num_arr) { - if(false ==io::file::HDF5_IO::inst()->save_quantification(detector)) + data_struct::Detector* detector = analysis_job->get_detector(detector_num); + std::string str_detector_num = std::to_string(detector_num); + std::string full_save_path = analysis_job->dataset_directory + DIR_END_CHAR + "img.dat" + DIR_END_CHAR + dataset_file + ".h5" + str_detector_num; + if (io::file::HDF5_IO::inst()->start_save_seq(full_save_path, false, true)) { - logE << "Error saving quantification to hdf5\n"; + if (false == io::file::HDF5_IO::inst()->save_quantification(detector)) + { + logE << "Error saving quantification to hdf5\n"; + } + io::file::HDF5_IO::inst()->end_save_seq(); } - io::file::HDF5_IO::inst()->end_save_seq(); } } } - end = std::chrono::system_clock::now(); std::chrono::duration elapsed_seconds = end-start; @@ -520,7 +525,11 @@ bool perform_quantification(data_struct::Analysis_Job* analysis_job) // ---------------------------------------------------------------------------- -void find_and_optimize_roi(data_struct::Analysis_Job& analysis_job, int detector_num, std::map>>& rois, std::string search_filename) +void find_and_optimize_roi(data_struct::Analysis_Job& analysis_job, + int detector_num, + std::map>>& rois, + std::string search_filename, + std::map>& out_roi_fit_params) { std::vector files = io::file::File_Scan::inst()->find_all_dataset_files(analysis_job.dataset_directory + "img.dat", search_filename); if (files.size() > 0) @@ -531,22 +540,59 @@ void find_and_optimize_roi(data_struct::Analysis_Job& analysis_job, int std::string file_path = analysis_job.dataset_directory + "img.dat" + DIR_END_CHAR + files[0]; if (io::file::HDF5_IO::inst()->load_integrated_spectra_analyzed_h5_roi(file_path, &int_spectra, roi_itr.second)) { - data_struct::Params_Override params_override; - //load override parameters - if (false == io::file::load_override_params(analysis_job.dataset_directory, detector_num, ¶ms_override)) + //io::file::HDF5_IO::inst()->load_scalers_analyzed_h5(); + + data_struct::Detector* detector = analysis_job.get_detector(detector_num); + if (detector == nullptr) { - if (false == io::file::load_override_params(analysis_job.dataset_directory, -1, ¶ms_override)) - { - logE << "Loading maps_fit_parameters_override.txt\n"; - return; - } + logE << "Detector " << detector_num << " is not initialized, skipping..\n"; + continue; } + + data_struct::Params_Override* params_override = 
&(detector->fit_params_override_dict); + + // Need to save more info from Quantification first, then we can finish implementing loading quant from hdf5 instead of rerunning each time roi's are done + /* + if (io::file::HDF5_IO::inst()->load_quantification_analyzed_h5(file_path, analysis_job.get_detector(detector_num))) + { + + } + */ + data_struct::Fit_Parameters out_fitp; std::string roi_name = std::to_string(roi_itr.first); - if (false == optimize_integrated_fit_params(&analysis_job, int_spectra, detector_num, ¶ms_override, files[0]+ "_roi_" + roi_name, out_fitp)) + if (false == optimize_integrated_fit_params(&analysis_job, int_spectra, detector_num, params_override, files[0]+ "_roi_" + roi_name, out_fitp)) { logE << "Failed to optimize ROI "<< file_path<<" : "<< roi_name<<".\n"; } + + double sr_current = 0.0; + double us_ic = 0.0; + double ds_ic = 0.0; + + + // add in other properties that will be saved to csv + out_fitp.add_parameter(data_struct::Fit_Param("real_time", int_spectra.elapsed_realtime())); + out_fitp.add_parameter(data_struct::Fit_Param("live_time", int_spectra.elapsed_livetime())); + out_fitp.add_parameter(data_struct::Fit_Param("SRcurrent", sr_current)); + out_fitp.add_parameter(data_struct::Fit_Param("us_IC", us_ic)); + out_fitp.add_parameter(data_struct::Fit_Param("ds_IC", ds_ic)); + //out_fitp.add_parameter(data_struct::Fit_Param("total_counts", )); + out_fitp.add_parameter(data_struct::Fit_Param("status", out_fitp.at(STR_OUTCOME).value)); + out_fitp.add_parameter(data_struct::Fit_Param("niter", out_fitp.at(STR_NUM_ITR).value)); + out_fitp.add_parameter(data_struct::Fit_Param("total_perror", out_fitp.at(STR_RESIDUAL).value)); + out_fitp.add_parameter(data_struct::Fit_Param("abs_error", std::abs(out_fitp.at(STR_RESIDUAL).value))); + //out_fitp.add_parameter(data_struct::Fit_Param("relative_error", )); + //out_fitp.add_parameter(data_struct::Fit_Param("roi_areas", )); + out_fitp.add_parameter(data_struct::Fit_Param("roi_pixels", rois.size())); + out_fitp.add_parameter(data_struct::Fit_Param("US_num", params_override->us_amp_sens_num)); + out_fitp.add_parameter(data_struct::Fit_Param("US_unit", io::file::translate_back_sens_unit(params_override->us_amp_sens_unit))); + out_fitp.add_parameter(data_struct::Fit_Param("US_sensfactor", 0)); + out_fitp.add_parameter(data_struct::Fit_Param("DS_num", params_override->ds_amp_sens_num)); + out_fitp.add_parameter(data_struct::Fit_Param("DS_unit", io::file::translate_back_sens_unit(params_override->ds_amp_sens_unit))); + out_fitp.add_parameter(data_struct::Fit_Param("DS_sensfactor", 0)); + out_roi_fit_params[files[0] + "_roi_" + roi_name] = out_fitp; + } } } @@ -558,7 +604,9 @@ void find_and_optimize_roi(data_struct::Analysis_Job& analysis_job, int // ---------------------------------------------------------------------------- -void optimize_single_roi(data_struct::Analysis_Job& analysis_job, std::string roi_file_name) +void optimize_single_roi(data_struct::Analysis_Job& analysis_job, + std::string roi_file_name, + std::map>> & out_roi_fit_params) { //std::map>> rois; std::map>> rois; @@ -570,7 +618,14 @@ void optimize_single_roi(data_struct::Analysis_Job& analysis_job, std::s //get dataset file number std::string dataset_num; std::istringstream strstream(roi_file_name); - std::getline(strstream, dataset_num, '_'); + if (roi_file_name.find('_') != std::string::npos) + { + std::getline(strstream, dataset_num, '_'); + } + else if (roi_file_name.find('-') != std::string::npos) + { + std::getline(strstream, dataset_num, '-'); + } if 
(dataset_num.length() > 0) { // iterate through all @@ -580,15 +635,18 @@ void optimize_single_roi(data_struct::Analysis_Job& analysis_job, std::s if (detector_num != -1) { std::string str_detector_num = std::to_string(detector_num); - search_filename = dataset_num + ".h5" + str_detector_num; - find_and_optimize_roi(analysis_job, detector_num, rois, search_filename); + search_filename = dataset_num + ".h5" + str_detector_num; // v9 save + find_and_optimize_roi(analysis_job, detector_num, rois, search_filename, out_roi_fit_params[detector_num]); + search_filename = dataset_num + ".mda.h5" + str_detector_num; + find_and_optimize_roi(analysis_job, detector_num, rois, search_filename, out_roi_fit_params[detector_num]); } } //now for the avg h5 - //detector = analysis_job.get_detector(-1); - search_filename = dataset_num + ".h5"; - dataset_num + ".h5"; - find_and_optimize_roi(analysis_job, -1, rois, search_filename); + //detector = analysis_job.get_detector(-1); + search_filename = dataset_num + ".h5"; // v9 save + find_and_optimize_roi(analysis_job, -1, rois, search_filename, out_roi_fit_params[-1]); + search_filename = dataset_num + ".mda.h5"; + find_and_optimize_roi(analysis_job, -1, rois, search_filename, out_roi_fit_params[-1]); } else { @@ -606,9 +664,24 @@ void optimize_single_roi(data_struct::Analysis_Job& analysis_job, std::s void optimize_rois(data_struct::Analysis_Job& analysis_job) { + // detector_num file_name_roi fit_params + std::map>> roi_fit_params; + for (auto& itr : io::file::File_Scan::inst()->find_all_dataset_files(analysis_job.dataset_directory + "rois", ".roi")) { - optimize_single_roi(analysis_job, itr); + optimize_single_roi(analysis_job, itr, roi_fit_params); + } + + // save all to csv + for (auto detector_num : analysis_job.detector_num_arr) + { + std::string save_path = analysis_job.dataset_directory + "output/specfit_results" + std::to_string(detector_num) + ".csv"; + std::string quant_save_path = analysis_job.dataset_directory + "output/specfit_results" + std::to_string(detector_num) + "_quantified.csv"; + io::file::csv::save_v9_specfit(save_path, roi_fit_params.at(detector_num)); + if (analysis_job.get_detector(detector_num)->quantification_standards.size() > 0) + { + io::file::csv::save_v9_specfit_quantified(quant_save_path, analysis_job.get_detector(detector_num), roi_fit_params.at(detector_num)); + } } } diff --git a/src/core/process_whole.h b/src/core/process_whole.h index b4df24bf..3fbdbfab 100644 --- a/src/core/process_whole.h +++ b/src/core/process_whole.h @@ -107,7 +107,7 @@ using namespace std::placeholders; //for _1, _2, // ---------------------------------------------------------------------------- -DLL_EXPORT bool perform_quantification(data_struct::Analysis_Job* analysis_job); +DLL_EXPORT bool perform_quantification(data_struct::Analysis_Job* analysis_job, bool save_when_done); DLL_EXPORT bool optimize_integrated_fit_params(data_struct::Analysis_Job* analysis_job, std::string dataset_filename, @@ -434,7 +434,10 @@ DLL_EXPORT void process_dataset_files(data_struct::Analysis_Job* analysi std::string fullpath; size_t dlen = dataset_file.length(); - if (dataset_file[dlen - 4] == '.' && dataset_file[dlen - 3] == 'm' && dataset_file[dlen - 2] == 'd' && dataset_file[dlen - 1] == 'a') + bool is_mda = (dataset_file[dlen - 4] == '.' && dataset_file[dlen - 3] == 'm' && dataset_file[dlen - 2] == 'd' && dataset_file[dlen - 1] == 'a'); + bool is_mca = (dataset_file[dlen - 4] == '.' 
&& dataset_file[dlen - 3] == 'm' && dataset_file[dlen - 2] == 'c' && dataset_file[dlen - 1] == 'a'); + bool is_mcad = (dataset_file[dlen - 5] == '.' && dataset_file[dlen - 4] == 'm' && dataset_file[dlen - 3] == 'c' && dataset_file[dlen - 2] == 'a'); + if (is_mda || is_mca || is_mcad) { std::string str_detector_num = ""; if (detector_num != -1) diff --git a/src/data_struct/analysis_job.cpp b/src/data_struct/analysis_job.cpp index 799c28b6..4b4d0226 100644 --- a/src/data_struct/analysis_job.cpp +++ b/src/data_struct/analysis_job.cpp @@ -164,10 +164,12 @@ void Analysis_Job::set_optimizer(std::string optimizer) { if(optimizer == "mpfit") { + logI << "Setting optimizer to MPFIT\n"; _optimizer = &_mpfit_optimizer; } else { + logI << "Setting optimizer to LMFIT\n"; _optimizer = &_lmfit_optimizer; } } diff --git a/src/data_struct/fit_parameters.h b/src/data_struct/fit_parameters.h index 768d64ee..5a5a30a3 100644 --- a/src/data_struct/fit_parameters.h +++ b/src/data_struct/fit_parameters.h @@ -200,8 +200,6 @@ class DLL_EXPORT Fit_Parameters Fit_Param& operator [](std::string name) { return _params[name]; } - //const Fit_Param& operator [](std::string name) const { return _params[name]; } - void add_parameter(Fit_Param param); void append_and_update(const Fit_Parameters& fit_params); diff --git a/src/fitting/routines/hybrid_param_nnls_fit_routine.cpp b/src/fitting/routines/hybrid_param_nnls_fit_routine.cpp index 32dcf8a9..131d09bd 100644 --- a/src/fitting/routines/hybrid_param_nnls_fit_routine.cpp +++ b/src/fitting/routines/hybrid_param_nnls_fit_routine.cpp @@ -109,6 +109,9 @@ OPTIMIZER_OUTCOME Hybrid_Param_NNLS_Fit_Routine::fit_spectra_parameters( Fit_Parameters fit_params = model->fit_parameters(); + fit_params.add_parameter(Fit_Param(STR_NUM_ITR, 0.0)); + fit_params.add_parameter(Fit_Param(STR_RESIDUAL, 0.0)); + if (fit_params.contains(STR_COMPTON_AMPLITUDE)) { fit_params[STR_COMPTON_AMPLITUDE].bound_type = E_Bound_Type::FIXED; diff --git a/src/io/file/aps/aps_roi.cpp b/src/io/file/aps/aps_roi.cpp index 1593dd02..2838c535 100644 --- a/src/io/file/aps/aps_roi.cpp +++ b/src/io/file/aps/aps_roi.cpp @@ -127,7 +127,6 @@ bool load_v9_rois(std::string path, std::map>& rois) i++; } } - fileStream.close(); } else { diff --git a/src/io/file/csv_io.h b/src/io/file/csv_io.h index 0b24bca8..8ef6ed4e 100644 --- a/src/io/file/csv_io.h +++ b/src/io/file/csv_io.h @@ -238,6 +238,180 @@ namespace csv // ---------------------------------------------------------------------------- + template + DLL_EXPORT bool save_v9_specfit(std::string path, + std::map>& roi_files_fits_map) + { + const std::vector e_list = { "Na", "Mg", "Al", "Si", "P", "S", "Cl", "Ar", "K", "Ca", "Sc", "Ti", "V", "Cr", "Mn", "Fe", "Co", "Ni", "Cu", "Zn", "Ga", "Ge", "As", "Se", "Br", "Kr", "Rb", "Sr", "Y", "Zr", "Nb", "Mo", "Tc", "Ru", "Rh", "Pd", "Ag", "Cd", "In", "Sn", "Sb", "Te", "I", "dummy", "dummy", "Mo_L", "Tc_L", "Ru_L", "Rh_L", "Pd_L", "Ag_L", "Cd_L", "In_L", "Sn_L", "Sb_L", "Te_L", "I_L", "Xe_L", " ", "Cs_L", "Ba_L", "La_L", "Ce_L", "Pr_L", "Nd_L", "Pm_L", "Sm_L", "Eu_L", "Gd_L", "Tb_L", "Dy_L", "Br_L", "Er_L", "Tm_L", "Yb_L", "Lu_L", "Hf_L", "Ta_L", "W_L", "Re_L", "Os_L", "Ir_L", "Pt_L", "Au_L", "Hg_L", "Tl_L", "Pb_L", "Bi_L", "Po_L", "At_L", "Rn_L", "Ho_L", "Ac_L", "Th_L", "Pa_L", "U_L", "Np_L", "Pu_L", "Zr_L", "Au_M", "Pb_M", "U_M", "Hg_M", "Pt_M", "Os_M", "Bi_M", "dummy", "dummy", "real_time", "live_time", "SRcurrent", "us_IC", "ds_IC", "total_counts", "status", "niter", "total_perror", "abs_error", "relative_error", "roi_areas", 
"roi_pixels", "US_num", "US_unit", "US_sensfactor", "DS_num", "DS_unit", "DS_sensfactor" }; + + const std::vector p_list = { "perror_Na", "perror_Mg", "perror_Al", "perror_Si", "perror_P", "perror_S", "perror_Cl", "perror_Ar", "perror_K", "perror_Ca", "perror_Sc", "perror_Ti", "perror_V", "perror_Cr", "perror_Mn", "perror_Fe", "perror_Co", "perror_Ni", "perror_Cu", "perror_Zn", "perror_Ga", "perror_Ge", "perror_As", "perror_Se", "perror_Br", "perror_Kr", "perror_Rb", "perror_Sr", "perror_Y", "perror_Zr", "perror_Nb", "perror_Mo", "perror_Tc", "perror_Ru", "perror_Rh", "perror_Pd", "perror_Ag", "perror_Cd", "perror_In", "perror_Sn", "perror_Sb", "perror_Te", "perror_I", "perror_dummy", "perror_dummy", "perror_Mo_L", "perror_Tc_L", "perror_Ru_L", "perror_Rh_L", "perror_Pd_L", "perror_Ag_L", "perror_Cd_L", "perror_In_L", "perror_Sn_L", "perror_Sb_L", "perror_Te_L", "perror_I_L", "perror_Xe_L", "", "perror_Cs_L", "perror_Ba_L", "perror_La_L", "perror_Ce_L", "perror_Pr_L", "perror_Nd_L", "perror_Pm_L", "perror_Sm_L", "perror_Eu_L", "perror_Gd_L", "perror_Tb_L", "perror_Dy_L", "perror_Br_L", "perror_Er_L", "perror_Tm_L", "perror_Yb_L", "perror_Lu_L", "perror_Hf_L", "perror_Ta_L", "perror_W_L", "perror_Re_L", "perror_Os_L", "perror_Ir_L", "perror_Pt_L", "perror_Au_L", "perror_Hg_L", "perror_Tl_L", "perror_Pb_L", "perror_Bi_L", "perror_Po_L", "perror_At_L", "perror_Rn_L", "perror_Ho_L", "perror_Ac_L", "perror_Th_L", "perror_Pa_L", "perror_U_L", "perror_Np_L", "perror_Pu_L", "perror_Zr_L", "perror_Au_M", "perror_Pb_M", "perror_U_M", "perror_Hg_M", "perror_Pt_M", "perror_Os_M", "perror_Bi_M", "perror_dummy", "perror_dummy" }; + + const std::vector l_list = { "chisquare", "chisqred", "gen_pars_at_bndry", "ele_pars_at_bndry", "free_pars" }; + + logI << "Exporting roi results to " << path << "\n"; + + std::ofstream file_stream(path); + if (file_stream.is_open()) + { + + file_stream << "spec_name, e_offset, e_linear, e_quadratic, fwhm_offset, fwhm_fanoprime, coherent_sct_energy, coherent_sct_amplitude, compton_angle, compton_fwhm_corr, compton_amplitude, compton_f_step, compton_f_tail, compton_gamma, compton_hi_f_tail, compton_hi_gamma, snip_width, si_escape, ge_escape, linear, pileup, pileup, pileup, pileup, pileup, pileup, pileup, pileup, pileup, f_step_offset, f_step_linear, f_step_quadratic, f_tail_offset, f_tail_linear, f_tail_quadratic, gamma_offset, gamma_linear, gamma_quadratic, kb_f_tail_offset, kb_f_tail_linear, kb_f_tail_quadratic, Na, Mg, Al, Si, P, S, Cl, Ar, K, Ca, Sc, Ti, V, Cr, Mn, Fe, Co, Ni, Cu, Zn, Ga, Ge, As, Se, Br, Kr, Rb, Sr, Y, Zr, Nb, Mo, Tc, Ru, Rh, Pd, Ag, Cd, In, Sn, Sb, Te, I, dummy, dummy, Mo_L, Tc_L, Ru_L, Rh_L, Pd_L, Ag_L, Cd_L, In_L, Sn_L, Sb_L, Te_L, I_L, Xe_L, , Cs_L, Ba_L, La_L, Ce_L, Pr_L, Nd_L, Pm_L, Sm_L, Eu_L, Gd_L, Tb_L, Dy_L, Br_L, Er_L, Tm_L, Yb_L, Lu_L, Hf_L, Ta_L, W_L, Re_L, Os_L, Ir_L, Pt_L, Au_L, Hg_L, Tl_L, Pb_L, Bi_L, Po_L, At_L, Rn_L, Ho_L, Ac_L, Th_L, Pa_L, U_L, Np_L, Pu_L, Zr_L, Au_M, Pb_M, U_M, Hg_M, Pt_M, Os_M, Bi_M, dummy, dummy, real_time, live_time, SRcurrent, us_IC, ds_IC, total_counts, status, niter, total_perror, abs_error, relative_error, roi_areas, roi_pixels, US_num, US_unit, US_sensfactor, DS_num, DS_unit, DS_sensfactor, perror_Na, perror_Mg, perror_Al, perror_Si, perror_P, perror_S, perror_Cl, perror_Ar, perror_K, perror_Ca, perror_Sc, perror_Ti, perror_V, perror_Cr, perror_Mn, perror_Fe, perror_Co, perror_Ni, perror_Cu, perror_Zn, perror_Ga, perror_Ge, perror_As, perror_Se, perror_Br, perror_Kr, perror_Rb, perror_Sr, perror_Y, perror_Zr, 
perror_Nb, perror_Mo, perror_Tc, perror_Ru, perror_Rh, perror_Pd, perror_Ag, perror_Cd, perror_In, perror_Sn, perror_Sb, perror_Te, perror_I, perror_dummy, perror_dummy, perror_Mo_L, perror_Tc_L, perror_Ru_L, perror_Rh_L, perror_Pd_L, perror_Ag_L, perror_Cd_L, perror_In_L, perror_Sn_L, perror_Sb_L, perror_Te_L, perror_I_L, perror_Xe_L, , perror_Cs_L, perror_Ba_L, perror_La_L, perror_Ce_L, perror_Pr_L, perror_Nd_L, perror_Pm_L, perror_Sm_L, perror_Eu_L, perror_Gd_L, perror_Tb_L, perror_Dy_L, perror_Br_L, perror_Er_L, perror_Tm_L, perror_Yb_L, perror_Lu_L, perror_Hf_L, perror_Ta_L, perror_W_L, perror_Re_L, perror_Os_L, perror_Ir_L, perror_Pt_L, perror_Au_L, perror_Hg_L, perror_Tl_L, perror_Pb_L, perror_Bi_L, perror_Po_L, perror_At_L, perror_Rn_L, perror_Ho_L, perror_Ac_L, perror_Th_L, perror_Pa_L, perror_U_L, perror_Np_L, perror_Pu_L, perror_Zr_L, perror_Au_M, perror_Pb_M, perror_U_M, perror_Hg_M, perror_Pt_M, perror_Os_M, perror_Bi_M, perror_dummy, perror_dummy, chisquare, chisqred, gen_pars_at_bndry, ele_pars_at_bndry, free_pars,\n"; + for (const auto& itr : roi_files_fits_map) + { + file_stream << itr.first << "," << itr.second.at(STR_ENERGY_OFFSET).value << "," << itr.second.at(STR_ENERGY_SLOPE).value << "," << itr.second.at(STR_ENERGY_QUADRATIC).value << "," << itr.second.at(STR_FWHM_OFFSET).value + << "," << itr.second.at(STR_FWHM_FANOPRIME).value << "," << itr.second.at(STR_COHERENT_SCT_ENERGY).value << "," << itr.second.at(STR_COHERENT_SCT_AMPLITUDE).value << "," << itr.second.at(STR_COMPTON_ANGLE).value + << "," << itr.second.at(STR_COMPTON_FWHM_CORR).value << "," << itr.second.at(STR_COMPTON_AMPLITUDE).value << "," << itr.second.at(STR_COMPTON_F_STEP).value << "," << itr.second.at(STR_COMPTON_F_TAIL).value + << "," << itr.second.at(STR_COMPTON_GAMMA).value << "," << itr.second.at(STR_COMPTON_HI_F_TAIL).value << "," << itr.second.at(STR_COMPTON_HI_GAMMA).value << "," << itr.second.at(STR_SNIP_WIDTH).value + << ",0,0,0,0,0,0,0,0,0,0,0,0," << itr.second.at(STR_F_STEP_OFFSET).value << "," << itr.second.at(STR_F_STEP_LINEAR).value << "," << itr.second.at(STR_F_STEP_QUADRATIC).value << "," << itr.second.at(STR_F_TAIL_OFFSET).value + << "," << itr.second.at(STR_F_TAIL_LINEAR).value << "," << itr.second.at(STR_F_TAIL_QUADRATIC).value << "," << itr.second.at(STR_GAMMA_OFFSET).value << "," << itr.second.at(STR_GAMMA_LINEAR).value + << "," << itr.second.at(STR_GAMMA_QUADRATIC).value << "," << itr.second.at(STR_KB_F_TAIL_OFFSET).value << "," << itr.second.at(STR_KB_F_TAIL_LINEAR).value << "," << itr.second.at(STR_KB_F_TAIL_QUADRATIC).value << ","; + for (auto& e_itr : e_list) + { + if (itr.second.contains(e_itr)) + { + file_stream << itr.second.at(e_itr).value << ","; + } + else + { + file_stream << "1.0e-10, "; + } + } + + for (auto& p_itr : p_list) + { + file_stream << "1.0e-10, "; + } + + for (auto& l_itr : l_list) + { + if (itr.second.contains(l_itr)) + { + file_stream << itr.second.at(l_itr).value << ","; + } + else + { + file_stream << "1.0e-10, "; + } + } + + file_stream << "\n"; + } + + file_stream.close(); + } + else + { + logE << "Could not open file " << path << "\n"; + return false; + } + + return true; + } + + // ---------------------------------------------------------------------------- + + template + DLL_EXPORT bool save_v9_specfit_quantified(std::string path, + Detector* detector, + std::map>& roi_files_fits_map) + { + if (detector == nullptr) + { + logW << "standards or quants_map or detector are null. Cannot save csv " << path << ". 
\n"; + return false; + } + + std::ofstream file_stream(path); + if (file_stream.is_open()) + { + + for (const auto& itr : detector->quantification_standards) + { + file_stream << "Standard Filename: " << itr.first << "\n"; + file_stream << " SR_Current: " << itr.second.sr_current << "\n"; + file_stream << " US_IC: " << itr.second.US_IC << "\n"; + file_stream << " DS_IC: " << itr.second.DS_IC << "\n"; + file_stream << "\n\n"; + } + file_stream << "beryllium_window_thickness : " << detector->beryllium_window_thickness << "\n"; + file_stream << "germanium_dead_layer : " << detector->germanium_dead_layer << "\n"; + file_stream << "detector_chip_thickness : " << detector->detector_chip_thickness << "\n"; + file_stream << "incident_energy : " << detector->incident_energy << "\n"; + file_stream << "airpath : " << detector->airpath << "\n"; + file_stream << "detector_element : " << detector->detector_element->name << "\n"; + /* + if (detector->avg_quantification_scaler_map.count(quantifier_scaler_name) > 0) + { + file_stream << quantifier_scaler_name << ": " << detector->avg_quantification_scaler_map.at(quantifier_scaler_name) << "\n"; + } + + file_stream << "\n\n"; + + for (const auto& shell_itr : Shells_Quant_List) + { + file_stream << "\n\n"; + file_stream << "Element,Z,Counts,e_cal_ratio,absorption,transmission_Be,transmission_Ge,yield,transmission_through_Si_detector,transmission_through_air,weight \n"; + + for (const auto& itr : quants_map->curve_quant_map[shell_itr]) + { + string name = itr.name; + T_real counts = 0.0; + if (shell_itr == Electron_Shell::L_SHELL) + { + name += "_L"; + } + else if (shell_itr == Electron_Shell::M_SHELL) + { + name += "_M"; + } + + for (const auto& s_itr : *standards) + { + if (s_itr.second.element_counts.at(routine).count(name) > 0) + { + counts = s_itr.second.element_counts.at(routine).at(name); + break; + } + } + + file_stream << name << "," << + itr.Z << "," << + counts << "," << + itr.e_cal_ratio << "," << + itr.absorption << "," << + itr.transmission_Be << "," << + itr.transmission_Ge << "," << + itr.yield << "," << + itr.transmission_through_Si_detector << "," << + itr.transmission_through_air << "," << + itr.weight << "\n"; + } + } + file_stream << "\n\n"; + file_stream << "\n\n"; + + file_stream << "Element,Z,K Shell,L Shell,M Shell\n"; + for (int i = 0; i < quants_map->curve_quant_map[Electron_Shell::K_SHELL].size(); i++) + { + file_stream << quants_map->curve_quant_map[Electron_Shell::K_SHELL][i].name << "," + << i + 1 << "," + << quants_map->curve_quant_map[Electron_Shell::K_SHELL][i].calib_curve_val << "," + << quants_map->curve_quant_map[Electron_Shell::L_SHELL][i].calib_curve_val << "," + << quants_map->curve_quant_map[Electron_Shell::M_SHELL][i].calib_curve_val << "\n"; + } + file_stream << "\n\n"; + */ + file_stream.close(); + } + else + { + logE << "Could not open file " << path << "\n"; + return false; + } + return true; + } + + // ---------------------------------------------------------------------------- + template DLL_EXPORT void save_quantification(std::string path, Detector* detector) { diff --git a/src/io/file/file_scan.cpp b/src/io/file/file_scan.cpp index 90ea92c3..77c44b68 100644 --- a/src/io/file/file_scan.cpp +++ b/src/io/file/file_scan.cpp @@ -163,6 +163,42 @@ namespace io } // ---------------------------------------------------------------------------- + + void File_Scan::find_all_dataset_files_by_list(std::string dataset_directory, std::vector& search_strs, std::vector& out_dataset_files) + { + DIR* dir; + struct dirent* ent; + 
if ((dir = opendir(dataset_directory.c_str())) != NULL) + { + /* print all the files and directories within directory */ + while ((ent = readdir(dir)) != NULL) + { + std::string fname(ent->d_name); + // check if extension is .mda + if (fname.size() > 4) + { + for (auto& itr : search_strs) + { + size_t search_str_size = itr.length(); + if (fname.rfind(itr) == fname.size() - search_str_size) + { + out_dataset_files.push_back(fname); + } + } + } + } + closedir(dir); + } + else + { + /* could not open directory */ + logW << "Could not open directory " << dataset_directory << "\n"; + } + + } + + // ---------------------------------------------------------------------------- + /* void File_Scan::check_and_create_dirs(std::string dataset_directory) { diff --git a/src/io/file/file_scan.h b/src/io/file/file_scan.h index 56ad3fad..92a5160b 100644 --- a/src/io/file/file_scan.h +++ b/src/io/file/file_scan.h @@ -92,6 +92,8 @@ namespace io std::vector find_all_dataset_files(std::string dataset_directory, std::string search_str); + void find_all_dataset_files_by_list(std::string dataset_directory, std::vector& search_strs, std::vector& out_dataset_files); + void sort_dataset_files_by_size(std::string dataset_directory, std::vector* dataset_files); const std::vector& netcdf_files() { return _netcdf_files; } diff --git a/src/io/file/hdf5_io.cpp b/src/io/file/hdf5_io.cpp index 01c9e1fb..e9da68c3 100644 --- a/src/io/file/hdf5_io.cpp +++ b/src/io/file/hdf5_io.cpp @@ -363,6 +363,14 @@ void HDF5_IO::_close_h5_objects(std::stack > &clos logW<<"Could not close h5 attribute id "< 0) + { + logW << "Could not close h5 type id " << obj.first << "\n"; + } + } else if (obj.second == H5O_PROPERTY) { err = H5Pclose(obj.first); diff --git a/src/io/file/hdf5_io.h b/src/io/file/hdf5_io.h index e4c5fac9..ade53508 100644 --- a/src/io/file/hdf5_io.h +++ b/src/io/file/hdf5_io.h @@ -80,7 +80,7 @@ namespace file #define HDF5_EXCHANGE_VERSION 1.0 -enum H5_OBJECTS{H5O_FILE, H5O_GROUP, H5O_DATASPACE, H5O_DATASET, H5O_ATTRIBUTE, H5O_PROPERTY}; +enum H5_OBJECTS{H5O_FILE, H5O_GROUP, H5O_DATASPACE, H5O_DATASET, H5O_ATTRIBUTE, H5O_PROPERTY, H5O_DATATYPE}; enum H5_SPECTRA_LAYOUTS {MAPS_RAW, MAPS_V9, MAPS_V10, XSPRESS, APS_SEC20}; @@ -109,67 +109,67 @@ int parse_str_val_to_int(std::string start_delim, std::string end_delim, std::st } template -float translate_back_sens_num(int value) +T_real translate_back_sens_num(int value) { if (value == 1) { - return 0.; + return T_real(0.); } else if (value == 2) { - return 1.; + return T_real(1.); } else if (value == 5) { - return 2.; + return T_real(2.); } else if (value == 10) { - return 3.; + return T_real(3.); } else if (value == 20) { - return 4.; + return T_real(4.); } else if (value == 50) { - return 5.; + return T_real(5.); } else if (value == 100) { - return 6.; + return T_real(6.); } else if (value == 200) { - return 7.; + return T_real(7.); } else if (value == 500) { - return 8.; + return T_real(8.); } - return -1.; + return T_real(-1.); } template -float translate_back_sens_unit(string value) +T_real translate_back_sens_unit(string value) { if (value == "pA/V") { - return 0; + return T_real(0); } else if (value == "nA/V") { - return 1; + return T_real(1); } else if (value == "uA/V") { - return 2; + return T_real(2); } else if (value == "mA/V") { - return 3; + return T_real(3); } - return -1; + return T_real (-1); } template @@ -704,6 +704,7 @@ class DLL_EXPORT HDF5_IO char* acquisition[1]; hid_t ftype = H5Dget_type(acqui_id); + close_map.push({ ftype, H5O_DATATYPE }); hid_t type = 
H5Tget_native_type(ftype, H5T_DIR_ASCEND); error = H5Dread(acqui_id, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, acquisition); @@ -1010,6 +1011,7 @@ class DLL_EXPORT HDF5_IO char* acquisition[1]; hid_t ftype = H5Dget_type(acqui_id); + close_map.push({ ftype, H5O_DATATYPE }); hid_t type = H5Tget_native_type(ftype, H5T_DIR_ASCEND); error = H5Dread(acqui_id, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, acquisition); @@ -2889,6 +2891,7 @@ class DLL_EXPORT HDF5_IO dataspace_scaler_names = H5Dget_space(dset_scaler_names); close_map.push({ dataspace_scaler_names, H5O_DATASPACE }); hid_t memtype = H5Dget_type(dset_scaler_names); + close_map.push({ memtype, H5O_DATATYPE }); // read scaler names and search for elt1, ert1, incnt1, outcnt1 hsize_t dims_out[1]; @@ -3078,6 +3081,10 @@ class DLL_EXPORT HDF5_IO //----------------------------------------------------------------------------- + /** + * Loads only Upstream/Downstream Ion chambers and SR_Current + */ + template bool load_quantification_scalers_analyzed_h5(std::string path, data_struct::Params_Override *override_values) { @@ -3128,21 +3135,27 @@ class DLL_EXPORT HDF5_IO //read in scaler hid_t d_space = H5Dget_space(srcurrent_id); + close_map.push({ d_space, H5O_DATASPACE }); hid_t d_type = H5Dget_type(srcurrent_id); + close_map.push({ d_type, H5O_DATATYPE }); hid_t status = H5Dread(srcurrent_id, d_type, readwrite_space, d_space, H5P_DEFAULT, (void*)&srcurrent); if (status > -1) { override_values->sr_current = (srcurrent); } d_space = H5Dget_space(us_ic_id); + close_map.push({ d_space, H5O_DATASPACE }); d_type = H5Dget_type(us_ic_id); + close_map.push({ d_type, H5O_DATATYPE }); status = H5Dread(us_ic_id, d_type, readwrite_space, d_space, H5P_DEFAULT, (void*)&us_ic); if (status > -1) { override_values->US_IC = (us_ic); } d_space = H5Dget_space(ds_ic_id); + close_map.push({ d_space, H5O_DATASPACE }); d_type = H5Dget_type(ds_ic_id); + close_map.push({ d_type, H5O_DATATYPE }); status = H5Dread(ds_ic_id, d_type, readwrite_space, d_space, H5P_DEFAULT, (void*)&ds_ic); if (status > -1) { @@ -3160,6 +3173,245 @@ class DLL_EXPORT HDF5_IO return true; } + + //----------------------------------------------------------------------------- + + template + bool _load_calibration_curve_analyzed_h5(hid_t file_id, Fitting_Routines fitting_routine, data_struct::Detector* detector) + { + std::stack > close_map; + + hid_t cc_id; + hid_t cc_labels_id; + + std::string str_cc_ds = "/MAPS/"+ STR_QUANTIFICATION + "/" + STR_CALIBRATION + "/" + Fitting_Routine_To_Str.at(fitting_routine) + "/" + STR_CALIB_CURVE_DS_IC; + std::string str_cc_us = "/MAPS/" + STR_QUANTIFICATION + "/" + STR_CALIBRATION + "/" + Fitting_Routine_To_Str.at(fitting_routine) + "/" + STR_CALIB_CURVE_US_IC; + std::string str_cc_sr = "/MAPS/" + STR_QUANTIFICATION + "/" + STR_CALIBRATION + "/" + Fitting_Routine_To_Str.at(fitting_routine) + "/" + STR_CALIB_CURVE_SR_CUR; + std::string str_cc_labels = "/MAPS/" + STR_QUANTIFICATION + "/" + STR_CALIBRATION + "/" + Fitting_Routine_To_Str.at(fitting_routine) + "/" + STR_CALIB_LABELS; + + if (false == _open_h5_object(cc_labels_id, H5O_DATASET, close_map, str_cc_labels, file_id)) + { + logW << "Could not find labels " << str_cc_labels << ". 
Can not load the rest for " << Fitting_Routine_To_Str.at(fitting_routine) << "\n"; + return false; + } + + std::map ion_chambers = { {STR_DS_IC,str_cc_ds}, {STR_US_IC,str_cc_us}, {STR_SR_CURRENT,str_cc_sr} }; + + // read ion chambers DS_IC, US_IC, and SR_Current + for (auto& itr : ion_chambers) + { + //detector->fitting_quant_map.at(fitting_routine)[itr.first] = Quantification_Scaler_Struct(); + if (true == _open_h5_object(cc_id, H5O_DATASET, close_map, itr.second.c_str(), file_id, false, false)) + { + hsize_t offset[2] = { 0,0 }; + hsize_t count[2] = { 1,1 }; + hsize_t dims_in[2] = { 0,0 }; + hid_t d_space = H5Dget_space(cc_id); + close_map.push({ d_space, H5O_DATASPACE }); + hid_t d_type = H5Dget_type(cc_id); + close_map.push({ d_type, H5O_DATATYPE }); + + int rank = H5Sget_simple_extent_ndims(d_space); + if (rank != 2) + { + _close_h5_objects(close_map); + logE << itr.second <<" rank != 2. rank = " << rank << ". Can't load dataset. returning" << "\n"; + return false; + } + + int status_n = H5Sget_simple_extent_dims(d_space, &dims_in[0], nullptr); + if (status_n < 0) + { + _close_h5_objects(close_map); + logE << "getting dataset dims for "<< itr.second << "\n"; + return false; + } + + for (int i = 0; i < rank; i++) + { + + offset[i] = 0; + count[i] = dims_in[i]; + } + + //detector->fitting_quant_map.at(fitting_routine).at(itr.first).at(Electron_Shell::K_SHELL) + + //detector->fitting_quant_map.at(fitting_routine).at(itr.first][Electron_Shell::K_SHELL].resize(count[1]); + //detector->fitting_quant_map.at(fitting_routine).at(itr.first][Electron_Shell::L_SHELL].resize(count[1]); + //detector->fitting_quant_map.at(fitting_routine)[itr.first][Electron_Shell::M_SHELL].resize(count[1]); + // vector < Element_Quant + } + else + { + logW << "Could not find " << itr.second << "\n"; + } + } + + _close_h5_objects(close_map); + + return true; + } + + //----------------------------------------------------------------------------- + + /** + * Loads whole quantification info + */ + + // TODO FINISH before using + template + bool load_quantification_analyzed_h5(std::string path, data_struct::Detector* detector) + { + + std::lock_guard lock(_mutex); + + std::chrono::time_point start, end; + start = std::chrono::system_clock::now(); + + quantification::models::Quantification_Model quantification_model; + + int num_standards = 0; + + hid_t file_id = -1; + hid_t num_standards_id = -1; + hid_t name_id = -1; + hid_t ic_id = -1; + + herr_t status; + + char dset_name[2048] = { 0 }; + + hsize_t count[1] = { 1 }; + + std::vector< Fitting_Routines> fit_routes = { Fitting_Routines::NNLS, Fitting_Routines::GAUSS_MATRIX ,Fitting_Routines::ROI, Fitting_Routines::SVD }; + + std::stack > close_map; + + if (detector == nullptr) + { + logW << "Can not load to nullptr detector\n"; + return false; + } + + hid_t readwrite_space = H5Screate_simple(1, &count[0], &count[0]); + close_map.push({ readwrite_space, H5O_DATASPACE }); + + if (false == _open_h5_object(file_id, H5O_FILE, close_map, path, -1)) + { + logE << "Failed to open " << path << "\n"; + return false; + } + + if (_open_h5_object(num_standards_id, H5O_DATASET, close_map, "/MAPS/Quantification/Number_Of_Standards", file_id)) + { + hid_t d_space = H5Dget_space(num_standards_id); + close_map.push({ d_space, H5O_DATASPACE }); + hid_t d_type = H5Dget_type(num_standards_id); + close_map.push({ d_type, H5O_DATATYPE }); + status = H5Dread(num_standards_id, d_type, readwrite_space, d_space, H5P_DEFAULT, (void*)&num_standards); + } + else + { + logE << "Could not determine the 
number of standards! Returning\n"; + return false; + } + + // Init fit quant map with all fitting routines + for (auto& itr : fit_routes) + { + detector->fitting_quant_map[itr] = Fitting_Quantification_Struct(); + } + + // iterate over number of standards and load + for (int i = 0; i < num_standards; i++) + { + // name + std::string std_name = ""; + std::string str_std_name_loc = "/MAPS/Quantification/Standard" + std::to_string(0) + "/" + STR_STANDARD_NAME; + if (_open_h5_object(name_id, H5O_DATASET, close_map, str_std_name_loc, file_id, false, false)) + { + hid_t d_space = H5Dget_space(name_id); + close_map.push({ d_space, H5O_DATASPACE }); + hid_t d_type = H5Dget_type(name_id); + close_map.push({ d_type, H5O_DATATYPE }); + status = H5Dread(name_id, d_type, readwrite_space, d_space, H5P_DEFAULT, (void*)dset_name); + if (status > -1) + { + std_name = std::string(dset_name, 2048); + std_name.erase(std::remove_if(std_name.begin(), std_name.end(), ::isspace), std_name.end()); + } + } + + if (std_name.length() == 0) + { + std_name = std::to_string(i); + } + + detector->quantification_standards[std_name] = Quantification_Standard(); + + std::vector ion_chambers = { STR_DS_IC, STR_US_IC, STR_SR_CURRENT }; + + // read ion chambers DS_IC, US_IC, and SR_Current + for (auto& itr : ion_chambers) + { + std::string str_loc = "/MAPS/Quantification/Standard" + std::to_string(0) + "/" + STR_SCALERS + "/" + itr; + if (_open_h5_object(ic_id, H5O_DATASET, close_map, str_loc, file_id, false, false)) + { + + hid_t d_space = H5Dget_space(ic_id); + close_map.push({ d_space, H5O_DATASPACE }); + hid_t d_type = H5Dget_type(ic_id); + close_map.push({ d_type, H5O_DATATYPE }); + + + T_real* val_addr = nullptr; + + if (itr == STR_DS_IC) + val_addr = &(detector->quantification_standards[std_name].DS_IC); + else if (itr == STR_US_IC) + val_addr = &(detector->quantification_standards[std_name].US_IC); + else if (itr == STR_SR_CURRENT) + val_addr = &(detector->quantification_standards[std_name].sr_current); + else + val_addr = nullptr; + + + if (val_addr != nullptr) + { + //read value + status = _read_h5d(ic_id, readwrite_space, d_space, H5P_DEFAULT, (void*)val_addr); + if (status > -1) + { + Quantification_Standard* quantification_standard = &(detector->quantification_standards[std_name]); + for (auto& fit_itr : fit_routes) + { + detector->update_element_quants(fit_itr, itr, quantification_standard, &quantification_model, *val_addr); + } + } + else + { + logE << "Could not read in " << str_loc << "\n"; + } + } + } + } + } + + for (auto& itr : fit_routes) + { + _load_calibration_curve_analyzed_h5(file_id, itr, detector); + } + + _close_h5_objects(close_map); + + end = std::chrono::system_clock::now(); + std::chrono::duration elapsed_seconds = end - start; + + logI << "elapsed time: " << elapsed_seconds.count() << "s" << "\n"; + + return true; + } + //----------------------------------------------------------------------------- template @@ -3211,7 +3463,7 @@ class DLL_EXPORT HDF5_IO //read in scaler hid_t d_space = H5Dget_space(us_ic_id); - //hid_t d_type = H5Dget_type(us_ic_id); + close_map.push({ d_space, H5O_DATASPACE }); status = H5Sget_simple_extent_dims(d_space, &dims3[0], nullptr); if (status < 0) { @@ -3378,8 +3630,11 @@ class DLL_EXPORT HDF5_IO } hid_t name_type = H5Dget_type(scaler_name_id); + close_map.push({ name_type, H5O_DATATYPE }); hid_t scaler_type = H5Dget_type(scaler_val_id); + close_map.push({ scaler_type, H5O_DATATYPE }); hid_t scaler_val_space = H5Dget_space(scaler_val_id); + close_map.push({ 
scaler_val_space, H5O_DATASPACE }); hid_t val_rank = H5Sget_simple_extent_ndims(scaler_val_space); val_dims_in = new hsize_t[val_rank]; H5Sget_simple_extent_dims(scaler_val_space, &val_dims_in[0], NULL); @@ -3555,6 +3810,7 @@ class DLL_EXPORT HDF5_IO close_map.push({ mem_name_space, H5O_DATASPACE }); hid_t scalers_type = H5Dget_type(data2_id); + close_map.push({ scalers_type, H5O_DATATYPE }); err = H5Dread(data2_id, scalers_type, mem_name_space, dspace_id, H5P_DEFAULT, acqui_data); if (err > -1) @@ -3649,6 +3905,7 @@ class DLL_EXPORT HDF5_IO hid_t scaler_space = H5Dget_space(dsid); close_map.push({ scaler_space, H5O_DATASPACE }); hid_t scalers_type = H5Dget_type(dsid); + close_map.push({ scalers_type, H5O_DATATYPE }); if (first_save) { @@ -3672,6 +3929,7 @@ class DLL_EXPORT HDF5_IO close_map.push({ scaler_space, H5O_DATASPACE }); H5Sget_simple_extent_dims(scaler_space, &scalers_count[0], NULL); hid_t scalers_type = H5Dget_type(detectors_grp_id); + close_map.push({ scalers_type, H5O_DATATYPE }); hsize_t scaler_amt = scalers_count[2]; scalers_count[2] = 1; mem_count[0] = scalers_count[0]; @@ -3795,8 +4053,11 @@ class DLL_EXPORT HDF5_IO if (_open_h5_object(value_id, H5O_DATASET, close_map, "value", environ_grp_id, false, false)) { hid_t name_type = H5Dget_type(name_id); + close_map.push({ name_type, H5O_DATATYPE }); hid_t value_type = H5Dget_type(value_id); + close_map.push({ value_type, H5O_DATATYPE }); hid_t value_space = H5Dget_space(value_id); + close_map.push({ value_space, H5O_DATASPACE }); hid_t val_rank = H5Sget_simple_extent_ndims(value_space); val_dims_in = new hsize_t[val_rank]; H5Sget_simple_extent_dims(value_space, &val_dims_in[0], NULL); @@ -3926,8 +4187,11 @@ class DLL_EXPORT HDF5_IO } hid_t name_type = H5Dget_type(scaler_name_id); + close_map.push({ name_type, H5O_DATATYPE }); hid_t scaler_type = H5Dget_type(scaler_val_id); + close_map.push({ scaler_type, H5O_DATATYPE }); hid_t scaler_val_space = H5Dget_space(scaler_val_id); + close_map.push({ scaler_val_space, H5O_DATASPACE }); hid_t val_rank = H5Sget_simple_extent_ndims(scaler_val_space); val_dims_in = new hsize_t[val_rank]; H5Sget_simple_extent_dims(scaler_val_space, &val_dims_in[0], NULL); @@ -5410,6 +5674,7 @@ class DLL_EXPORT HDF5_IO _global_close_map.push({ ypos_dataspace_id, H5O_DATASPACE }); hid_t xy_type = H5Dget_type(xpos_id); + _global_close_map.push({ xy_type, H5O_DATATYPE }); det_rank = H5Sget_simple_extent_ndims(xpos_dataspace_id); det_dims_in = new hsize_t[det_rank]; H5Sget_simple_extent_dims(xpos_dataspace_id, &det_dims_in[0], NULL); @@ -5481,6 +5746,7 @@ class DLL_EXPORT HDF5_IO hid_t scaler_space = H5Dget_space(dsid); _global_close_map.push({ scaler_space, H5O_DATASPACE }); hid_t scalers_type = H5Dget_type(dsid); + _global_close_map.push({ scalers_type, H5O_DATATYPE }); names_off[0] = i; if (first_save) { @@ -5553,6 +5819,7 @@ class DLL_EXPORT HDF5_IO value_count[1] = scalers_count[0]; value_count[2] = scalers_count[1]; hid_t scalers_type = H5Dget_type(dset_detectors_id); + _global_close_map.push({ scalers_type, H5O_DATATYPE }); if (false == _open_h5_dataset(STR_VALUES, scalers_type, scalers_grp_id, 3, &value_count[0], &value_count[0], values_id, value_space)) { logE << "Error creating " << STR_NAMES << "\n"; @@ -5709,6 +5976,7 @@ class DLL_EXPORT HDF5_IO _global_close_map.push({ xypos_dataspace_id, H5O_DATASPACE }); hid_t xy_type = H5Dget_type(xypos_id); + _global_close_map.push({ xy_type, H5O_DATATYPE }); det_rank = H5Sget_simple_extent_ndims(xypos_dataspace_id); det_dims_in = new hsize_t[det_rank]; 
H5Sget_simple_extent_dims(xypos_dataspace_id, &det_dims_in[0], NULL); @@ -5988,6 +6256,7 @@ class DLL_EXPORT HDF5_IO _global_close_map.push({ xypos_dataspace_id, H5O_DATASPACE }); hid_t xy_type = H5Dget_type(xypos_id); + _global_close_map.push({ xy_type, H5O_DATATYPE }); det_rank = H5Sget_simple_extent_ndims(xypos_dataspace_id); det_dims_in = new hsize_t[det_rank]; H5Sget_simple_extent_dims(xypos_dataspace_id, &det_dims_in[0], NULL); @@ -6044,7 +6313,9 @@ class DLL_EXPORT HDF5_IO } hid_t name_type = H5Dget_type(scaler_name_id); + _global_close_map.push({ name_type, H5O_DATATYPE }); hid_t scaler_type = H5Dget_type(scaler_val_id); + _global_close_map.push({ scaler_type, H5O_DATATYPE }); hid_t scaler_val_space = H5Dget_space(scaler_val_id); hid_t val_rank = H5Sget_simple_extent_ndims(scaler_val_space); val_dims_in = new hsize_t[val_rank]; diff --git a/src/io/file/hl_file_io.h b/src/io/file/hl_file_io.h index 3f79ef1c..b790e40c 100644 --- a/src/io/file/hl_file_io.h +++ b/src/io/file/hl_file_io.h @@ -110,6 +110,8 @@ DLL_EXPORT bool load_quantification_standardinfo(std::string dataset_directory, DLL_EXPORT void save_optimized_fit_params(std::string dataset_dir, std::string dataset_filename, int detector_num, string result, data_struct::Fit_Parameters* fit_params, data_struct::Spectra* spectra, data_struct::Fit_Element_Map_Dict* elements_to_fit); +////DLL_EXPORT void save_roi_fit_params(std::string dataset_dir, std::string dataset_filename, int detector_num, string result, data_struct::Fit_Parameters* fit_params, data_struct::Spectra* spectra, data_struct::Fit_Element_Map_Dict* elements_to_fit); + //DLL_EXPORT void sort_dataset_files_by_size(std::string dataset_directory, std::vector* dataset_files); // ---------------------------------------------------------------------------- @@ -705,11 +707,16 @@ DLL_EXPORT bool load_spectra_volume(std::string dataset_directory, } bool ends_in_h5 = false; + bool ends_in_mca = false; size_t dlen = dataset_file.length(); if (dataset_file[dlen - 3] == '.' && dataset_file[dlen - 2] == 'h' && dataset_file[dlen - 1] == '5') { ends_in_h5 = true; } + else if (dataset_file[dlen - 4] == '.' && dataset_file[dlen - 3] == 'm' && dataset_file[dlen - 2] == 'c' && dataset_file[dlen - 1] == 'a') + { + ends_in_mca = true; + } else { for (int i = 0; i < 8; i++) @@ -720,6 +727,55 @@ DLL_EXPORT bool load_spectra_volume(std::string dataset_directory, ends_in_h5 = true; break; } + else if (dataset_file[dlen - 5] == '.' 
&& dataset_file[dlen - 4] == 'm' && dataset_file[dlen - 3] == 'c' && dataset_file[dlen - 2] == 'a' && dataset_file[dlen - 1] == s1[0]) + { + ends_in_mca = true; + break; + } + } + } + + if (ends_in_mca) + { + Spectra spec; + unordered_map pv_map; + if (true == io::file::mca::load_integrated_spectra(dataset_directory + "mda" + DIR_END_CHAR + dataset_file, &spec, pv_map)) + { + data_struct::Scan_Info scan_info; + + for (auto& itr : pv_map) + { + data_struct::Scaler_Map sm; + sm.name = itr.first; + sm.unit = " "; + sm.time_normalized = false; + sm.values.resize(1,1); + sm.values(0, 0) = itr.second; + scan_info.scaler_maps.push_back(sm); + } + + scan_info.meta_info.requested_rows = 1; + scan_info.meta_info.requested_cols = 1; + scan_info.meta_info.x_axis.push_back(0.0); + scan_info.meta_info.y_axis.push_back(0.0); + scan_info.meta_info.theta = 0.0; + + data_struct::Extra_PV ep; + ep.name = "Name"; + ep.description = "Filename"; + ep.unit = "Str"; + ep.value = dataset_file; + + scan_info.extra_pvs.push_back(ep); + + spectra_volume->resize_and_zero(1, 1, spec.size()); + (*spectra_volume)[0][0] = spec; + io::file::HDF5_IO::inst()->start_save_seq(true); + + // add ELT, ERT, INCNT, OUTCNT to scaler map + spectra_volume->generate_scaler_maps(&(scan_info.scaler_maps)); + io::file::HDF5_IO::inst()->save_scan_scalers(detector_num, &scan_info, params_override); + return true; } } diff --git a/src/io/file/mda_io.cpp b/src/io/file/mda_io.cpp index 6ab9537b..a99eb82b 100644 --- a/src/io/file/mda_io.cpp +++ b/src/io/file/mda_io.cpp @@ -285,7 +285,8 @@ bool MDA_IO::load_spectra_volume(std::string path, } else { - if(_mda_file->header->dimensions[1] == 2000) + // 2000 = APS step scan, 2048 = APS xanes scan + if(_mda_file->header->dimensions[1] == 2000 || _mda_file->header->dimensions[1] == 2048) { if((size_t)_mda_file->scan->sub_scans[0]->number_detectors-1 < detector_num) { @@ -544,7 +545,7 @@ bool MDA_IO::load_spectra_volume_with_callback(std::string path, } else { - if(_mda_file->header->dimensions[1] == 2000) + if(_mda_file->header->dimensions[1] == 2000 || _mda_file->header->dimensions[1] == 2048) { if((size_t)_mda_file->scan->sub_scans[0]->number_detectors-1 < max_detecotr_num) { @@ -802,7 +803,7 @@ bool MDA_IO::load_integrated_spectra(std::string path, } else { - if (_mda_file->header->dimensions[1] == 2000) + if (_mda_file->header->dimensions[1] == 2000 || _mda_file->header->dimensions[1] == 2048) { if ((size_t)_mda_file->scan->sub_scans[0]->number_detectors - 1 < detector_num) { @@ -1016,7 +1017,7 @@ void MDA_IO::_load_scalers(bool load_int_spec) if (_mda_file->header->data_rank == 2) { - if (_hasNetcdf == false && _mda_file->header->dimensions[1] == 2000) + if (_hasNetcdf == false && (_mda_file->header->dimensions[1] == 2000 || _mda_file->header->dimensions[1] == 2048)) { single_row_scan = true; } @@ -1301,7 +1302,7 @@ void MDA_IO::_load_meta_info() { if (_mda_file->header->data_rank == 2) { - if (_mda_file->header->dimensions[1] == 2000) + if (_mda_file->header->dimensions[1] == 2000 || _mda_file->header->dimensions[1] == 2048) { single_row_scan = true; } @@ -1321,16 +1322,36 @@ void MDA_IO::_load_meta_info() { _scan_info.meta_info.requested_rows = _mda_file->header->dimensions[0]; _scan_info.meta_info.requested_cols = _mda_file->header->dimensions[1]; - // save y axis - for (int32_t i = 0; i < _mda_file->scan->last_point; i++) + if (_mda_file->scan->number_positioners > 0) + { + // save y axis + for (int32_t i = 0; i < _mda_file->scan->last_point; i++) + { + 
_scan_info.meta_info.y_axis.push_back(_mda_file->scan->positioners_data[0][i]); + } + } + else + { + for (int32_t i = 0; i < _mda_file->scan->last_point; i++) + { + _scan_info.meta_info.y_axis.push_back(0); + } + } + + if (_mda_file->scan->sub_scans[0]->number_positioners > 0) { - _scan_info.meta_info.y_axis.push_back(_mda_file->scan->positioners_data[0][i]); + // save x axis + for (int32_t i = 0; i < _mda_file->scan->sub_scans[0]->last_point; i++) + { + _scan_info.meta_info.x_axis.push_back(_mda_file->scan->sub_scans[0]->positioners_data[0][i]); + } } - - // save x axis - for (int32_t i = 0; i < _mda_file->scan->sub_scans[0]->last_point; i++) + else { - _scan_info.meta_info.x_axis.push_back(_mda_file->scan->sub_scans[0]->positioners_data[0][i]); + for (int32_t i = 0; i < _mda_file->scan->sub_scans[0]->last_point; i++) + { + _scan_info.meta_info.x_axis.push_back(0); + } } } }
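Reviewer note (not part of the patch): the new File_Scan::find_all_dataset_files_by_list() helper and the per-detector ".mca0" through ".mca6" extension list built in set_dir_and_files() both rely on plain suffix matching against the file name. The standalone C++ sketch below illustrates that behavior under the assumption that a file qualifies only when a search string ends the file name exactly; the ends_with() helper, main() scaffolding, and sample file names are illustrative and do not appear in the patch.

#include <iostream>
#include <string>
#include <vector>

// Mirrors the check used in find_all_dataset_files_by_list():
// the search string must occur at the very end of the file name.
static bool ends_with(const std::string& fname, const std::string& ext)
{
    return fname.size() >= ext.size() &&
           fname.rfind(ext) == fname.size() - ext.size();
}

int main()
{
    const int MAX_DETECTORS = 7; // same value as the new #define in main.cpp

    // Extension list built for the "mda" sub-directory: .mda, .mca, and .mca0 .. .mca6
    std::vector<std::string> search_ext_mda_list = { ".mda", ".mca" };
    for (int i = 0; i < MAX_DETECTORS; i++)
    {
        search_ext_mda_list.push_back(".mca" + std::to_string(i));
    }

    // Hypothetical file names, just to show which ones would be picked up.
    std::vector<std::string> fnames = { "scan1.mda", "scan2.mca3", "scan3.mda.h50", "notes.txt" };
    for (const std::string& fname : fnames)
    {
        for (const std::string& ext : search_ext_mda_list)
        {
            if (ends_with(fname, ext))
            {
                std::cout << fname << " matches " << ext << "\n";
                break;
            }
        }
    }
    return 0;
}

Note that "scan3.mda.h50" is not picked up even though ".mda" appears in its name, because the match is anchored to the end of the file name; this is what keeps already-analyzed .h5 averages out of the optimize list.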