From 18cd472020f59085e749eb4833307cfa790b8561 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 30 Aug 2023 15:51:08 +0100 Subject: [PATCH 01/53] Added NN training script --- .../LOWQ2/training/TaggerRegressionEICrecon.C | 138 ++++++++++++++++++ 1 file changed, 138 insertions(+) create mode 100644 benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C new file mode 100644 index 00000000..48230414 --- /dev/null +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -0,0 +1,138 @@ +#include +#include +#include +#include + +#include "TChain.h" +#include "TFile.h" +#include "TTree.h" +#include "TString.h" +#include "TObjString.h" +#include "TSystem.h" +#include "TROOT.h" + +#include "TMVA/MethodDNN.h" +#include "TMVA/Reader.h" +#include "TMVA/Tools.h" +#include "TMVA/Factory.h" +#include "TMVA/DataLoader.h" +#include "TMVA/TMVARegGui.h" + +using namespace TMVA; + +void TaggerRegressionEICrecon( + TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", + TString outDataName = "/scratch/EIC/Results/ML-Out/trainedData.root", + TString outWeightName = "dataset/weights/weights.xml" + ) +{ + + Bool_t loadWeights = 0; + + //--------------------------------------------------------------- + // This loads the library + TMVA::Tools::Instance(); + + ROOT::EnableImplicitMT(8); + + // -------------------------------------------------------------------------------------------------- + // Here the preparation phase begins + // Create a new root output file + TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); + + // Create the factory object. 
Later you can choose the methods + TString typeName = "RealHits"; + + TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, + "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); + + TString dataFolderName = "dataset"; + TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); + + TString methodName = "DNN"; + + // Input TrackParameters variables from EICrecon - + TString collectionName = "LowQ2Tracks[0]"; + dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); + dataloader->AddVariable( collectionName+".loc.b", "fit_position_z", "units", 'F' ); + dataloader->AddVariable( "sin("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_x", "units", 'F' ); + dataloader->AddVariable( "cos("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_y", "units", 'F' ); + + // Regression target particle 3-momentum, normalised to beam energy. + // Takes second particle, in the test data this is the scattered electron + // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. 
+ dataloader->AddTarget( "MCParticles[2].momentum.x/18" ); + dataloader->AddTarget( "MCParticles[2].momentum.y/18" ); + dataloader->AddTarget( "MCParticles[2].momentum.z/18" ); + + std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; + + // Register the regression tree + + TChain* regChain = new TChain("events"); + regChain->Add(inDataNames); + regChain->SetEntries(8000); //Set smaller sample for tests + + // global event weights per tree (see below for setting event-wise weights) + Double_t regWeight = 1.0; + + // You can add an arbitrary number of regression trees + dataloader->AddRegressionTree( regChain, regWeight ); + + // This would set individual event weights (the variables defined in the + // expression need to exist in the original TTree) + // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); + // Apply additional cuts on the data + TCut mycut = "@LowQ2Tracks.size()==1"; //Make sure there's one reconstructed track in event + + dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V"); + + // TODO - Optimise layout and training more + TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); + + TString trainingStrategyString("TrainingStrategy="); + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=20,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + // trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + + // Transformation of data prior to training layers - decorrelate and normalise whole dataset + TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=D,N:WeightInitialization=XAVIERUNIFORM:Architecture=GPU"); + + nnOptions.Append(":"); + nnOptions.Append(layoutString); + nnOptions.Append(":"); + nnOptions.Append(trainingStrategyString); + + TMVA::MethodDNN* 
method = (MethodDNN*)factory->BookMethod(dataloader, TMVA::Types::kDL, methodName, nnOptions); // NN + + //std::istream xmlFile(xmlFileName); + if(loadWeights){ + TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" ); + reader->BookMVA( methodName, outWeightName); + TMVA::MethodDNN* kl = dynamic_cast(reader->FindMVA(methodName)); + method = kl; + } + // -------------------------------------------------------------------------------------------------- + + // Now you can tell the factory to train, test, and evaluate the MVAs + + // Train MVAs using the set of training events + factory->TrainAllMethods(); + + // Evaluate all MVAs using the set of test events + factory->TestAllMethods(); + + // Evaluate and compare performance of all configured MVAs + factory->EvaluateAllMethods(); + + // -------------------------------------------------------------- + + // Save the output + outputFile->Close(); + + std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl; + std::cout << "==> TMVARegression is done!" << std::endl; + + delete factory; + delete dataloader; + +} From 0c5ed318b6ea493f5aa2497855ac35fd7859be12 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 21 Sep 2023 16:02:57 +0100 Subject: [PATCH 02/53] updated training script for clarity --- .../LOWQ2/training/TaggerRegressionEICrecon.C | 47 ++++++++++++------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C index 48230414..819627cc 100644 --- a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -20,10 +20,14 @@ using namespace TMVA; +// The training currently requires files which only contained a single DIS scattered electron to have been simulated e.g. 
generated using GETaLM +// The scattered electron must be the 3rd particle in the file after the two beam particles +// At least one track reconstructed by EIC algorithms in the LOWQ2 tagger is needed. + void TaggerRegressionEICrecon( TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", TString outDataName = "/scratch/EIC/Results/ML-Out/trainedData.root", - TString outWeightName = "dataset/weights/weights.xml" + TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" ) { @@ -41,7 +45,7 @@ void TaggerRegressionEICrecon( TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); // Create the factory object. Later you can choose the methods - TString typeName = "RealHits"; + TString typeName = "LowQ2Reconstruction"; TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); @@ -60,10 +64,12 @@ void TaggerRegressionEICrecon( // Regression target particle 3-momentum, normalised to beam energy. // Takes second particle, in the test data this is the scattered electron - // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. - dataloader->AddTarget( "MCParticles[2].momentum.x/18" ); - dataloader->AddTarget( "MCParticles[2].momentum.y/18" ); - dataloader->AddTarget( "MCParticles[2].momentum.z/18" ); + // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. 
+ TString mcParticleName = "MCParticles[2]"; + TString mcBeamEnergy = "18"; + dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; @@ -71,7 +77,7 @@ void TaggerRegressionEICrecon( TChain* regChain = new TChain("events"); regChain->Add(inDataNames); - regChain->SetEntries(8000); //Set smaller sample for tests + //regChain->SetEntries(8000); // Set smaller sample for tests // global event weights per tree (see below for setting event-wise weights) Double_t regWeight = 1.0; @@ -81,22 +87,30 @@ void TaggerRegressionEICrecon( // This would set individual event weights (the variables defined in the // expression need to exist in the original TTree) - // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); + // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); // If MC event weights are kept use these // Apply additional cuts on the data - TCut mycut = "@LowQ2Tracks.size()==1"; //Make sure there's one reconstructed track in event + TCut mycut = "@LowQ2Tracks.size()==1"; // Make sure there's one reconstructed track in event - dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V"); + dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:SplitSeed=1:NormMode=NumEvents:!V"); // TODO - Optimise layout and training more TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); TString trainingStrategyString("TrainingStrategy="); - trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=20,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; - // trainingStrategyString 
+="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); + + // Use GPU if possible on the machine + TString architectureString("Architecture=GPU"); + // Transformation of data prior to training layers - decorrelate and normalise whole dataset - TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=D,N:WeightInitialization=XAVIERUNIFORM:Architecture=GPU"); + TString transformString("VarTransform=D,N"); + nnOptions.Append(":"); + nnOptions.Append(architectureString); + nnOptions.Append(":"); + nnOptions.Append(transformString); nnOptions.Append(":"); nnOptions.Append(layoutString); nnOptions.Append(":"); @@ -104,15 +118,16 @@ void TaggerRegressionEICrecon( TMVA::MethodDNN* method = (MethodDNN*)factory->BookMethod(dataloader, TMVA::Types::kDL, methodName, nnOptions); // NN - //std::istream xmlFile(xmlFileName); + // If loading previous model for further training if(loadWeights){ TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" ); - reader->BookMVA( methodName, outWeightName); + reader->BookMVA( methodName, inWeightName ); TMVA::MethodDNN* kl = dynamic_cast(reader->FindMVA(methodName)); method = kl; } + + // -------------------------------------------------------------------------------------------------- - // Now you can tell the factory to train, test, and evaluate the MVAs // Train MVAs using the set of training events From 82fefd961f9b1b3c234b2ad94c941582bb30ac16 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 4 Oct 2023 10:45:02 +0100 Subject: [PATCH 03/53] added simple first benchmarks --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C | 84 +++++++++++++++++++ benchmarks/LOWQ2/analysis/LOWQ2clusters.h | 
19 +++++ benchmarks/LOWQ2/analysis/LOWQ2hits.h | 93 +++++++++++++++++++++ benchmarks/LOWQ2/analysis/functors.h | 34 ++++++++ 4 files changed, 230 insertions(+) create mode 100755 benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C create mode 100644 benchmarks/LOWQ2/analysis/LOWQ2clusters.h create mode 100644 benchmarks/LOWQ2/analysis/LOWQ2hits.h create mode 100644 benchmarks/LOWQ2/analysis/functors.h diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C new file mode 100755 index 00000000..2e2102fe --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C @@ -0,0 +1,84 @@ +#include "TString.h" +#include "TCanvas.h" +#include "ROOT/RDataFrame.hxx" +#include +#include "TH1.h" +#include "TFile.h" +#include "LOWQ2hits.h" +#include "LOWQ2clusters.h" + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; + +using RVecS = ROOT::VecOps::RVec; +using RVecI = ROOT::VecOps::RVec; + +// std::map hHists1D; +// std::map hHists2D; + +std::map,std::map>> histMap; + + +// Create dataframe from input file(s) +RNode initialise( string fileName = "/scratch/EIC/ReconOut/tempEvents10x100-compare.root" ){ + + ROOT::RDataFrame d0("events",fileName); + return d0; + +} + + +// Format and write plots +void writePlots( TString outName = "LOWQ2Benchmarks.root"){ + + TFile* rootfile = new TFile(outName,"RECREATE"); + auto benchmarkdir = rootfile->mkdir("LOWQ2"); + + for(auto &[bmkey,hists]: histMap){ + + auto histsdir = benchmarkdir->mkdir(bmkey)->cd(); + + for(auto &[key,hist]: hists.first){ + hist->Write(); + } + + for(auto &[key,hist]: hists.second){ + + // Make projection of 2D pixel binned histogram + auto nBins = hist->GetNcells(); + TString pixelFluxName = TString(hist->GetTitle())+"Flux"; + TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,20,0,20); + for(int i=0; iFill(hist->GetBinContent(i)); + } + + hist->Write(); + pixelFlux->Write(); + + } + } + 
+ rootfile->Close(); + +} + +// Main method +void LOWQ2Benchmarks(){ + + auto node = initialise(); + + RVecS colNames = node.GetColumnNames(); + + if(Any(colNames=="TaggerTrackerHits")){ + histMap["SimDistributions"] = createHitPlots(node); + } + + if(Any(colNames=="TaggerTrackerClusterPositions")){ + histMap["ClusterDistributions"] = createClusterPlots(node); + } + + writePlots(); + +} diff --git a/benchmarks/LOWQ2/analysis/LOWQ2clusters.h b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h new file mode 100644 index 00000000..a7a90cdf --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h @@ -0,0 +1,19 @@ +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; + +// Lazy execution methods +std::pair,std::map> createClusterPlots( RNode d1 ){ + + std::map hHists1D; + std::map hHists2D; + + // auto d2 = d1.Define("ClusterSize",[](RVecI clu){return clu.size();},{"TaggerTrackerClusterPositions_0.index"}); + auto d2 = d1.Define("ClusterSize","TaggerTrackerClusterPositions.rawHits_end-TaggerTrackerClusterPositions.rawHits_begin"); + hHists1D["hClusterSize"] = d2.Histo1D({"hClusterSize","hClusterSize",100,0,100}, "ClusterSize"); + + return {hHists1D,hHists2D}; + +} diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h new file mode 100644 index 00000000..cc3ebb69 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -0,0 +1,93 @@ +#include "functors.h" + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; + +std::map xPixelMin = {{1,-1344},{2,-1344}}; +std::map xPixelMax = {{1, 1344},{2, 1344}}; + +std::map yPixelMin = {{1,-1818},{2,-1364}}; +std::map yPixelMax = {{1, 1818},{2, 1364}}; + +// Lazy execution methods +std::pair,std::map> createHitPlots( RNode d1 ){ + + std::map hHists1D; + std::map 
hHists2D; + + string compactName = "/home/simon/EIC/epic/epic_18x275.xml"; + dd4hep::Detector& detector = dd4hep::Detector::getInstance(); + detector.fromCompact(compactName); + //----------------------------------------- + // Hit detector IDs + //----------------------------------------- + auto ids = detector.readout("TaggerTrackerHits").idSpec().fields(); + for(auto &[key,id]: ids){ + TString colName = key+"ID"; + d1 = d1.Define(colName,getSubID(key,detector),{"TaggerTrackerHits"}); + } + + hHists1D["hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID", 3, 0, 3 }, "moduleID"); + + // Module Loop + for(int i=1; i<=2; i++){ + + double xMin = xPixelMin[i]; + double xMax = xPixelMax[i]; + double yMin = yPixelMin[i]; + double yMax = yPixelMax[i]; + int xRange = xMax-xMin; + int yRange = yMax-yMin; + + TString modNum = std::to_string(i); + TString moduleTag = "module"+modNum; + TString layerName = "layerID"+moduleTag; + TString layerHistName = "h"+layerName; + + + auto d2 = d1.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) + .Define(layerName,"layerID[ModuleFilter]"); + + hHists1D[layerHistName] = d2.Histo1D({layerHistName, layerHistName, 4, 0, 4 }, layerName ); + + // Layer Loop + for(int j=0; j<=3; j++){ + + TString layerNum = std::to_string(j); + TString layerTag = "layer"+layerNum; + + TString xName = "xID"+moduleTag+layerTag; + TString xHistName = "h"+xName; + + TString yName = "yID"+moduleTag+layerTag; + TString yHistName = "h"+yName; + + TString xyHistName = "h"+xName+yName; + + std::vector layerSizeInput = {xName.Data()}; + TString layerSizeName = "HitsPerEvent"+moduleTag+layerTag; + TString sizeHistName = "h"+layerSizeName; + + auto d3 = d2.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) + .Define(xName,"xID[LayerFilter&&ModuleFilter]") + .Define(yName,"yID[LayerFilter&&ModuleFilter]") + .Define(layerSizeName,[](RVecI lay){return lay.size();},layerSizeInput); + + + hHists1D[xHistName] = d3.Histo1D({xHistName, 
xHistName, xRange, xMin, xMax }, xName ); + hHists1D[yHistName] = d3.Histo1D({yHistName, yHistName, yRange, yMin, yMax }, yName ); + + hHists2D[xyHistName] = d3.Histo2D({xyHistName, xyHistName, xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); + + hHists1D[sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName, 100, 0, 100 }, layerSizeName ); + + } + + } + + return {hHists1D,hHists2D}; + +} diff --git a/benchmarks/LOWQ2/analysis/functors.h b/benchmarks/LOWQ2/analysis/functors.h new file mode 100644 index 00000000..c251f682 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/functors.h @@ -0,0 +1,34 @@ +#include "DD4hep/Detector.h" +#include "DDRec/CellIDPositionConverter.h" +#include "edm4hep/MCParticleCollection.h" +#include "edm4hep/SimTrackerHitCollection.h" +#include "edm4hep/SimCalorimeterHitCollection.h" + +//----------------------------------------------------------------------------------------- +// Grab Component functor +//----------------------------------------------------------------------------------------- +struct getSubID{ + getSubID(std::string cname, dd4hep::Detector& det, std::string rname = "TaggerTrackerHits") : componentName(cname), detector(det), readoutName(rname){} + + ROOT::VecOps::RVec operator()(const std::vector& evt) { + auto decoder = detector.readout(readoutName).idSpec().decoder(); + auto indexID = decoder->index(componentName); + ROOT::VecOps::RVec result; + for(const auto& h: evt) { + result.push_back(decoder->get(h.cellID,indexID)); + } + return result; + }; + + void SetComponent(std::string cname){ + componentName = cname; + } + void SetReadout(std::string rname){ + readoutName = rname; + } + + private: + std::string componentName; + dd4hep::Detector& detector; + std::string readoutName; +}; From a61cfaec8ccbcc9315b535389491d5039514d13a Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 9 Jan 2024 17:27:48 +0000 Subject: [PATCH 04/53] benchmark change --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C | 15 ++++++++++----- 1 file 
changed, 10 insertions(+), 5 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C index 2e2102fe..adb8ee91 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C @@ -22,7 +22,7 @@ std::map,std::mapmkdir("LOWQ2"); @@ -65,9 +65,10 @@ void writePlots( TString outName = "LOWQ2Benchmarks.root"){ } // Main method -void LOWQ2Benchmarks(){ +void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0_small.edm4hep.root", + TString outName = "LOWQ2Benchmarks.root" ){ - auto node = initialise(); + auto node = initialise( inName ); RVecS colNames = node.GetColumnNames(); @@ -79,6 +80,10 @@ void LOWQ2Benchmarks(){ histMap["ClusterDistributions"] = createClusterPlots(node); } - writePlots(); + if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + histMap["ReconstructedDistributions"] = createReconstructionPlots(node); + } + + writePlots( outName ); } From 1a259d531e8b45d3b95d17de6b9d6a116e9783a6 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 25 Jan 2024 15:32:54 +0000 Subject: [PATCH 05/53] Extended hit benchmarks --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C | 11 ++-- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 59 ++++++++++++++++----- benchmarks/LOWQ2/analysis/functors.h | 4 +- 3 files changed, 57 insertions(+), 17 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C index adb8ee91..e5ca17bd 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C @@ -5,6 +5,7 @@ #include "TH1.h" #include "TFile.h" #include "LOWQ2hits.h" +#include "LOWQ2acceptance.h" #include "LOWQ2clusters.h" // Define alias @@ -76,13 +77,17 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0 histMap["SimDistributions"] = createHitPlots(node); } + if(Any(colNames=="TaggerTrackerHits") && 
Any(colNames=="MCParticles")){ + histMap["AcceptanceDistributions"] = createAcceptancePlots(node); + } + if(Any(colNames=="TaggerTrackerClusterPositions")){ histMap["ClusterDistributions"] = createClusterPlots(node); } - if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ - histMap["ReconstructedDistributions"] = createReconstructionPlots(node); - } + // if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + // histMap["ReconstructedDistributions"] = createReconstructionPlots(node); + // } writePlots( outName ); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index cc3ebb69..bfc9101f 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -1,4 +1,7 @@ +#pragma once + #include "functors.h" +#include "DD4hep/Detector.h" // Add the missing include // Define alias using RNode = ROOT::RDF::RNode; @@ -6,19 +9,22 @@ using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; -std::map xPixelMin = {{1,-1344},{2,-1344}}; -std::map xPixelMax = {{1, 1344},{2, 1344}}; +// Lazy execution methods +std::pair,std::map> createHitPlots( RNode d1, string compactName = "/home/simong/EIC/epic/epic_18x275.xml" ){ + + int xChip = 448; + int yChip = 512; + std::map xPixelMin = {{1,-xChip*3},{2,-xChip*3}}; + std::map xPixelMax = {{1, xChip*3},{2, xChip*3}}; -std::map yPixelMin = {{1,-1818},{2,-1364}}; -std::map yPixelMax = {{1, 1818},{2, 1364}}; + std::map yPixelMin = {{1,-yChip*3},{2,-yChip*3}}; + std::map yPixelMax = {{1, yChip*3},{2, yChip*3}}; -// Lazy execution methods -std::pair,std::map> createHitPlots( RNode d1 ){ std::map hHists1D; std::map hHists2D; - - string compactName = "/home/simon/EIC/epic/epic_18x275.xml"; + + dd4hep::Detector& detector = dd4hep::Detector::getInstance(); detector.fromCompact(compactName); //----------------------------------------- @@ -30,6 +36,11 @@ std::pair,std::map> 
createHit d1 = d1.Define(colName,getSubID(key,detector),{"TaggerTrackerHits"}); } + d1 = d1.Define("boardID",[xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) + .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) + .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) + .Define("yChipID", [yChip](RVecI yID){ return (yID + 3*yChip) / (yChip); },{"yID"}); + hHists1D["hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID", 3, 0, 3 }, "moduleID"); // Module Loop @@ -65,16 +76,28 @@ std::pair,std::map> createHit TString yName = "yID"+moduleTag+layerTag; TString yHistName = "h"+yName; + TString xChipName = "xChipID"+moduleTag+layerTag; + TString yChipName = "yChipID"+moduleTag+layerTag; + TString xyHistName = "h"+xName+yName; + TString xColumnName = "xColumnID"+moduleTag+layerTag; + + TString boardName = "boardID"+moduleTag+layerTag; + std::vector layerSizeInput = {xName.Data()}; TString layerSizeName = "HitsPerEvent"+moduleTag+layerTag; TString sizeHistName = "h"+layerSizeName; auto d3 = d2.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) - .Define(xName,"xID[LayerFilter&&ModuleFilter]") - .Define(yName,"yID[LayerFilter&&ModuleFilter]") - .Define(layerSizeName,[](RVecI lay){return lay.size();},layerSizeInput); + .Define(xName,"xID[LayerFilter&&ModuleFilter]") + .Define(yName,"yID[LayerFilter&&ModuleFilter]") + .Define(boardName,"boardID[LayerFilter&&ModuleFilter]") + .Define(xChipName,"xChipID[LayerFilter&&ModuleFilter]") + .Define(yChipName,"yChipID[LayerFilter&&ModuleFilter]") + .Define(xColumnName,"xColumnID[LayerFilter&&ModuleFilter]") + .Define(layerSizeName,[](RVecI lay){return lay.size();},layerSizeInput); + hHists1D[xHistName] = d3.Histo1D({xHistName, xHistName, xRange, xMin, xMax }, xName ); @@ -83,9 +106,21 @@ std::pair,std::map> createHit hHists2D[xyHistName] = d3.Histo2D({xyHistName, xyHistName, xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); 
hHists1D[sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName, 100, 0, 100 }, layerSizeName ); + + // Plot number of hits per boardID for each layer + TString boardIDHistName = "hBoardID" +moduleTag + layerTag; + hHists1D[boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName, 3, 0, 3}, boardName); + + // Plot number of hits per chipID for each layer + TString chipIDHistName = "hChipID" +moduleTag + layerTag; + hHists2D[chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName, 6, 0, 6, 6, 0, 6}, xChipName, yChipName); + + // Plot number of hits per chipID for each layer + TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; + hHists2D[xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName, 3*xChip, 0, 3.0*xChip, 6, 0, 6}, xColumnName, yChipName); - } + } } return {hHists1D,hHists2D}; diff --git a/benchmarks/LOWQ2/analysis/functors.h b/benchmarks/LOWQ2/analysis/functors.h index c251f682..59964f76 100644 --- a/benchmarks/LOWQ2/analysis/functors.h +++ b/benchmarks/LOWQ2/analysis/functors.h @@ -1,8 +1,8 @@ #include "DD4hep/Detector.h" #include "DDRec/CellIDPositionConverter.h" #include "edm4hep/MCParticleCollection.h" -#include "edm4hep/SimTrackerHitCollection.h" -#include "edm4hep/SimCalorimeterHitCollection.h" +#include "edm4hep/SimTrackerHit.h" +#include "edm4hep/SimCalorimeterHit.h" //----------------------------------------------------------------------------------------- // Grab Component functor From 83eed26536b6403f83b48683fbc84617cf3eac34 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 25 Jan 2024 23:08:25 +0000 Subject: [PATCH 06/53] Added deeper directory structure for benchmarks --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C | 100 +++++++++++++++++--- 1 file changed, 86 insertions(+), 14 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C index e5ca17bd..c4b55232 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C +++ 
b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C @@ -6,6 +6,7 @@ #include "TFile.h" #include "LOWQ2hits.h" #include "LOWQ2acceptance.h" +// #include "LOWQ2rates.h" #include "LOWQ2clusters.h" // Define alias @@ -30,7 +31,6 @@ RNode initialise( string fileName ){ } - // Format and write plots void writePlots( TString outName ){ @@ -42,21 +42,68 @@ void writePlots( TString outName ){ auto histsdir = benchmarkdir->mkdir(bmkey)->cd(); for(auto &[key,hist]: hists.first){ - hist->Write(); + TDirectory* currentDir = gDirectory; + std::stringstream ss(key.Data()); + std::string part; + TDirectory* dir = currentDir; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // This is the last part, write the histogram + hist->Write(parts[i].c_str()); + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); + // hist->Write(key); } for(auto &[key,hist]: hists.second){ - // Make projection of 2D pixel binned histogram - auto nBins = hist->GetNcells(); - TString pixelFluxName = TString(hist->GetTitle())+"Flux"; - TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,20,0,20); - for(int i=0; iFill(hist->GetBinContent(i)); + TDirectory* currentDir = gDirectory; + std::stringstream ss(key.Data()); + std::string part; + TDirectory* dir = currentDir; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); } - - hist->Write(); - pixelFlux->Write(); + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // This is the last part, write the histogram + // hist->Write(parts[i].c_str()); + + // Make projection of 2D pixel binned histogram + auto nBins = hist->GetNcells(); + TString pixelFluxName = TString(parts[i].c_str())+"Flux"; + 
TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,20,0,20); + for(int i=0; iFill(hist->GetBinContent(i)); + } + + hist->Write(); + pixelFlux->Write(); + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); } } @@ -73,11 +120,29 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0 RVecS colNames = node.GetColumnNames(); - if(Any(colNames=="TaggerTrackerHits")){ - histMap["SimDistributions"] = createHitPlots(node); + std::string readoutName = "TaggerTrackerHits"; + + if(Any(colNames==readoutName)){ + std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; + dd4hep::Detector& detector = dd4hep::Detector::getInstance(); + detector.fromCompact(compactName); + //----------------------------------------- + // Hit detector IDs + //----------------------------------------- + auto ids = detector.readout(readoutName).idSpec().fields(); + for(auto &[key,id]: ids){ + TString colName = key+"ID"; + node = node.Define(colName,getSubID(key,detector),{readoutName}); + } + } + + //Create Plots + if(Any(colNames==readoutName)){ + histMap["SimDistributions"] = createHitPlots(node); + } - if(Any(colNames=="TaggerTrackerHits") && Any(colNames=="MCParticles")){ + if(Any(colNames==readoutName) && Any(colNames=="MCParticles")){ histMap["AcceptanceDistributions"] = createAcceptancePlots(node); } @@ -89,6 +154,13 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0 // histMap["ReconstructedDistributions"] = createReconstructionPlots(node); // } + //Postprocess plots + + // check histMap["SimDistributions"] exists + // if (histMap.count("SimDistributions") > 0) { + // histMap["RateDistributions"] = createRatePlots(histMap["SimDistributions"], 0.0001); + // } + writePlots( outName ); } From 
5c13f13c1e7600726dc89ec1ab64df237ac5996a Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 25 Jan 2024 23:09:36 +0000 Subject: [PATCH 07/53] Added directory structure and moved geometry id definitions to a separate file --- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 41 ++++++++++----------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index bfc9101f..b1b27482 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -1,7 +1,7 @@ #pragma once #include "functors.h" -#include "DD4hep/Detector.h" // Add the missing include +#include "DD4hep/Detector.h" // Define alias using RNode = ROOT::RDF::RNode; @@ -10,7 +10,7 @@ using H2ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createHitPlots( RNode d1, string compactName = "/home/simong/EIC/epic/epic_18x275.xml" ){ +std::pair,std::map> createHitPlots( RNode d1){ int xChip = 448; int yChip = 512; @@ -25,23 +25,12 @@ std::pair,std::map> createHit std::map hHists2D; - dd4hep::Detector& detector = dd4hep::Detector::getInstance(); - detector.fromCompact(compactName); - //----------------------------------------- - // Hit detector IDs - //----------------------------------------- - auto ids = detector.readout("TaggerTrackerHits").idSpec().fields(); - for(auto &[key,id]: ids){ - TString colName = key+"ID"; - d1 = d1.Define(colName,getSubID(key,detector),{"TaggerTrackerHits"}); - } - - d1 = d1.Define("boardID",[xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) - .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) - .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) - .Define("yChipID", [yChip](RVecI yID){ return (yID + 3*yChip) / (yChip); },{"yID"}); + d1 = d1.Define("boardID", [xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) + 
.Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) + .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) + .Define("yChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}); - hHists1D["hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID", 3, 0, 3 }, "moduleID"); + hHists1D["hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID"); // Module Loop for(int i=1; i<=2; i++){ @@ -62,7 +51,7 @@ std::pair,std::map> createHit auto d2 = d1.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) .Define(layerName,"layerID[ModuleFilter]"); - hHists1D[layerHistName] = d2.Histo1D({layerHistName, layerHistName, 4, 0, 4 }, layerName ); + hHists1D[moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName ); // Layer Loop for(int j=0; j<=3; j++){ @@ -100,24 +89,24 @@ std::pair,std::map> createHit - hHists1D[xHistName] = d3.Histo1D({xHistName, xHistName, xRange, xMin, xMax }, xName ); - hHists1D[yHistName] = d3.Histo1D({yHistName, yHistName, yRange, yMin, yMax }, yName ); + hHists1D[moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName ); + hHists1D[moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName ); - hHists2D[xyHistName] = d3.Histo2D({xyHistName, xyHistName, xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); + hHists2D[moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); - hHists1D[sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName, 100, 0, 100 }, layerSizeName ); + hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); // Plot number of hits per boardID for each layer TString 
boardIDHistName = "hBoardID" +moduleTag + layerTag; - hHists1D[boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName, 3, 0, 3}, boardName); + hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName); // Plot number of hits per chipID for each layer TString chipIDHistName = "hChipID" +moduleTag + layerTag; - hHists2D[chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName, 6, 0, 6, 6, 0, 6}, xChipName, yChipName); + hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); // Plot number of hits per chipID for each layer TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; - hHists2D[xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName, 3*xChip, 0, 3.0*xChip, 6, 0, 6}, xColumnName, yChipName); + hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Chip", 3*xChip, 0, 3.0*xChip, 6, 0, 6}, xColumnName, yChipName); } From ed0a7a5cdcfad5a8246cec4ea505cbde8a1e9714 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 25 Jan 2024 23:10:46 +0000 Subject: [PATCH 08/53] Added acceptance plots --- benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 123 ++++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 benchmarks/LOWQ2/analysis/LOWQ2acceptance.h diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h new file mode 100644 index 00000000..1944c098 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -0,0 +1,123 @@ + #pragma once + +// #include "functors.h" + + #include + #include + #include + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; +using RVecD = ROOT::VecOps::RVec; + +// Lazy execution methods 
+std::pair,std::map> createAcceptancePlots( RNode d1 ){ + + std::map hHists1D; + std::map hHists2D; + + int ePrimID = 2; + int pBeamID = 1; + int eBeamID = 0; + + // d1 = d1.Define("primMom","MCParticles[2].momentum.z"); + d1 = d1.Define("eBeam", [eBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[eBeamID].momentum.x,h[eBeamID].momentum.y,h[eBeamID].momentum.z,h[eBeamID].mass);}, + {"MCParticles"}) + .Define("pBeam", [pBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[pBeamID].momentum.x,h[pBeamID].momentum.y,h[pBeamID].momentum.z,h[pBeamID].mass);}, + {"MCParticles"}) + .Define("primMom", [ePrimID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[ePrimID].momentum.x,h[ePrimID].momentum.y,h[ePrimID].momentum.z,h[ePrimID].mass);}, + {"MCParticles"}) + .Define("primE","primMom.E()") + .Define("primTheta","1000*(TMath::Pi()-primMom.Theta())") + .Define("primEta","primMom.Eta()") + .Define("scatteredV","eBeam-primMom") + .Define("Q2","-scatteredV.M2()") + .Define("logQ2","log10(Q2)") + .Define("x","Q2/(2*scatteredV.Dot(pBeam))") + .Define("logx","log10(x)") + .Define("primPhi","primMom.Phi()") + .Define("W2","(pBeam+eBeam-primMom).M2()"); + // .Define("primTheta",[](std::vector part){return part[4].getMomentum().getTheta()},"{MCParticles}"); + + std::map filters; + filters["Raw"] = "true"; + filters["Any"] = "TaggerTrackerHits.size() > 0"; + filters["Mod1"] = "moduleID[moduleID==1].size()>0"; + filters["Mod2"] = "moduleID[moduleID==2].size()>0"; + filters["ThetaCut"] = "primTheta<11"; + + //Store the counts for each filter + RVecI filterCounts; + int rawCount = 0; + + for (auto& filter : filters) + { + RNode filterNode = d1.Filter(filter.second); + TString rawString = filter.first; + TString energyHistName = "h" + rawString + "Energy"; + TString thetaHistName = "h" + rawString + "Theta"; + TString etaHistName = "h" + rawString + "Eta"; + TString logQ2HistName = "h" + rawString + "logQ2"; + TString logxHistName = "h" + rawString + 
"logx"; + TString Q2HistName = "h" + rawString + "Q2"; + TString xHistName = "h" + rawString + "x"; + TString W2HistName = "h" + rawString + "W2"; + TString Q2xHistName = "h" + rawString + "Q2x"; + TString logQ2logxHistName = "h" + rawString + "logQ2logx"; + TString WlogQ2HistName = "h" + rawString + "WlogQ2"; + TString WlogxHistName = "h" + rawString + "Wlogx"; + TString primElogQ2HistName = "h" + rawString + "primElogQ2"; + + hHists1D[rawString+"/"+energyHistName] = filterNode.Histo1D({energyHistName, energyHistName + ";Energy [GeV]", 100, 0, 18}, "primE"); + hHists1D[rawString+"/"+thetaHistName] = filterNode.Histo1D({thetaHistName, thetaHistName + ";Theta [mrad]", 100, 0, 18}, "primTheta"); + hHists1D[rawString+"/"+etaHistName] = filterNode.Histo1D({etaHistName, etaHistName + ";Eta", 100, -15, 5}, "primEta"); + hHists1D[rawString+"/"+logQ2HistName] = filterNode.Histo1D({logQ2HistName, logQ2HistName + ";log(Q2) [GeV]", 100, -10, 4}, "logQ2"); + hHists1D[rawString+"/"+logxHistName] = filterNode.Histo1D({logxHistName, logxHistName + ";log(x) [GeV]", 100, -12, 0}, "logx"); + hHists1D[rawString+"/"+Q2HistName] = filterNode.Histo1D({Q2HistName, Q2HistName + ";Q2 [GeV]", 100, 0, 500}, "Q2"); + hHists1D[rawString+"/"+xHistName] = filterNode.Histo1D({xHistName, xHistName + ";x [GeV]", 100, 0, 1}, "x"); + hHists1D[rawString+"/"+W2HistName] = filterNode.Histo1D({W2HistName, W2HistName + ";W2 [GeV]", 100, 0, 500}, "W2"); + + hHists2D[rawString+"/"+Q2xHistName] = filterNode.Histo2D({Q2xHistName, Q2xHistName + ";Q2 [GeV];x [GeV]", 100, 0, 500, 100, 0, 1}, "Q2", "x"); + hHists2D[rawString+"/"+logQ2logxHistName] = filterNode.Histo2D({logQ2logxHistName, logQ2logxHistName + ";log(Q2) [GeV];log(x) [GeV]", 100, -10, 4, 100, -12, 0}, "logQ2", "logx"); + hHists2D[rawString+"/"+WlogQ2HistName] = filterNode.Histo2D({WlogQ2HistName, WlogQ2HistName + ";W2 [GeV];log(Q2) [GeV]", 100, 0, 500, 100, -10, 4}, "W2", "logQ2"); + hHists2D[rawString+"/"+WlogxHistName] = 
filterNode.Histo2D({WlogxHistName, WlogxHistName + ";W2 [GeV];log(x) [GeV]", 100, 0, 500, 100, -12, 0}, "W2", "logx"); + hHists2D[rawString+"/"+primElogQ2HistName] = filterNode.Histo2D({primElogQ2HistName, primElogQ2HistName + ";E [GeV];log(Q2) [GeV]", 100, 0, 18, 100, -10, 4}, "primE", "logQ2"); + + + int counts = *filterNode.Count(); + filterCounts.push_back(counts); + if (filter.first == "Raw") { + rawCount = counts; + } + } + + // Create a dataframe with one entry for each filter + int Nfilters = filterCounts.size(); + ROOT::RDataFrame df(Nfilters); + + // Create a vector of counts and fractions + RVecD fractions = filterCounts/rawCount; + + // Define the columns "count" and "fraction" in the dataframe + auto dfWithCounts = df.Define("count", [&filterCounts](ULong64_t entry) { return filterCounts[entry]; }, {"rdfentry_"}) + .Define("fraction", [&fractions](ULong64_t entry) { return fractions[entry]; }, {"rdfentry_"}); + + // Create the histograms + auto hTotalCounts = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;Filter;Count", Nfilters, 0, static_cast(Nfilters)}, "rdfentry_","count"); + auto hFractions = dfWithCounts.Histo1D({"hFractions", "Fractions;Filter;Fraction", Nfilters, 0, static_cast(Nfilters)}, "rdfentry_","fraction"); + + // Add the histograms to the map + hHists1D["TotalCounts"] = hTotalCounts; + hHists1D["Fractions"] = hFractions; + + return {hHists1D,hHists2D}; +} \ No newline at end of file From 132dfbcb96b0762b240d1ca01d56e14ddc139347 Mon Sep 17 00:00:00 2001 From: simonge Date: Fri, 26 Jan 2024 00:14:23 +0000 Subject: [PATCH 09/53] Fixed memory and sumw2 issues --- benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 38 ++++++++++----------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h index 1944c098..0553d916 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -2,9 +2,10 @@ // 
#include "functors.h" - #include + #include "ROOT/RDF/RInterface.hxx" #include #include + #include "ROOT/RVec.hxx" // Define alias using RNode = ROOT::RDF::RNode; @@ -49,7 +50,7 @@ std::pair,std::map> createAcc // .Define("primTheta",[](std::vector part){return part[4].getMomentum().getTheta()},"{MCParticles}"); std::map filters; - filters["Raw"] = "true"; + filters["All"] = "true"; filters["Any"] = "TaggerTrackerHits.size() > 0"; filters["Mod1"] = "moduleID[moduleID==1].size()>0"; filters["Mod2"] = "moduleID[moduleID==2].size()>0"; @@ -95,29 +96,28 @@ std::pair,std::map> createAcc int counts = *filterNode.Count(); filterCounts.push_back(counts); - if (filter.first == "Raw") { + if (filter.first == "All") { rawCount = counts; } } - // Create a dataframe with one entry for each filter + // Number of filters int Nfilters = filterCounts.size(); - ROOT::RDataFrame df(Nfilters); - - // Create a vector of counts and fractions - RVecD fractions = filterCounts/rawCount; - - // Define the columns "count" and "fraction" in the dataframe - auto dfWithCounts = df.Define("count", [&filterCounts](ULong64_t entry) { return filterCounts[entry]; }, {"rdfentry_"}) - .Define("fraction", [&fractions](ULong64_t entry) { return fractions[entry]; }, {"rdfentry_"}); + RVecI entryInt = ROOT::VecOps::Range(Nfilters); - // Create the histograms - auto hTotalCounts = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;Filter;Count", Nfilters, 0, static_cast(Nfilters)}, "rdfentry_","count"); - auto hFractions = dfWithCounts.Histo1D({"hFractions", "Fractions;Filter;Fraction", Nfilters, 0, static_cast(Nfilters)}, "rdfentry_","fraction"); - - // Add the histograms to the map - hHists1D["TotalCounts"] = hTotalCounts; - hHists1D["Fractions"] = hFractions; + // Fraction of events passing each filter + RVecD fractions = filterCounts/static_cast(rawCount); + + ROOT::RDataFrame df(Nfilters); + auto dfWithCounts = df.Define("entry", [entryInt](ULong64_t entry) { return (double)entryInt[(int)entry]; }, 
{"rdfentry_"}) + .Define("count", [filterCounts](ULong64_t entry) { return (double)filterCounts[(int)entry]; }, {"rdfentry_"}) + .Define("fraction", [fractions](ULong64_t entry) { return (double)fractions[(int)entry]; }, {"rdfentry_"}); + + // Use the new column in the histograms + hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;Filter;Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); + hHists1D["Fractions"] = dfWithCounts.Histo1D({"hFractions", "Fractions;Filter;Fraction", Nfilters, 0, static_cast(Nfilters)}, "entry","fraction"); + hHists1D["TotalCounts"]->Sumw2(false); + hHists1D["Fractions"]->Sumw2(false); return {hHists1D,hHists2D}; } \ No newline at end of file From 72496d2971e4090999227d26d304ef10e608e18c Mon Sep 17 00:00:00 2001 From: simonge Date: Fri, 26 Jan 2024 17:58:26 +0000 Subject: [PATCH 10/53] Added reconstruction and fixed some bugs --- .../{LOWQ2Benchmarks.C => LOWQ2Benchmarks.h} | 43 ++++----- benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 30 ++++-- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 57 ++++++++--- .../LOWQ2/analysis/LOWQ2reconstruction.h | 96 +++++++++++++++++++ benchmarks/LOWQ2/analysis/RunLOWQ2.C | 26 +++++ 5 files changed, 203 insertions(+), 49 deletions(-) rename benchmarks/LOWQ2/analysis/{LOWQ2Benchmarks.C => LOWQ2Benchmarks.h} (78%) create mode 100644 benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h create mode 100644 benchmarks/LOWQ2/analysis/RunLOWQ2.C diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h similarity index 78% rename from benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C rename to benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index c4b55232..d8ca11b1 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.C +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -6,8 +6,8 @@ #include "TFile.h" #include "LOWQ2hits.h" #include "LOWQ2acceptance.h" -// #include "LOWQ2rates.h" #include "LOWQ2clusters.h" +#include "LOWQ2reconstruction.h" // 
Define alias using RNode = ROOT::RDF::RNode; @@ -17,12 +17,11 @@ using H2ResultPtr = ROOT::RDF::RResultPtr; using RVecS = ROOT::VecOps::RVec; using RVecI = ROOT::VecOps::RVec; -// std::map hHists1D; -// std::map hHists2D; - std::map,std::map>> histMap; + + // Create dataframe from input file(s) RNode initialise( string fileName ){ @@ -86,10 +85,15 @@ void writePlots( TString outName ){ // Make projection of 2D pixel binned histogram auto nBins = hist->GetNcells(); TString pixelFluxName = TString(parts[i].c_str())+"Flux"; - TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,20,0,20); + //Get maximum bin content to set range + double maxBinContent = hist->GetMaximum(); + double logMax = log10(maxBinContent); + + TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,100,0,logMax); for(int i=0; iFill(hist->GetBinContent(i)); + pixelFlux->Fill(log10(hist->GetBinContent(i))); } + pixelFlux->GetXaxis()->SetTitle("log(N_{hits})"); hist->Write(); pixelFlux->Write(); @@ -112,9 +116,8 @@ void writePlots( TString outName ){ } -// Main method -void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0_small.edm4hep.root", - TString outName = "LOWQ2Benchmarks.root" ){ +void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root", + TString outName = "LOWQ2QRRates.root", dd4hep::Detector& detector=dd4hep::Detector::getInstance(), double eventRate=0.0 ){ auto node = initialise( inName ); @@ -123,9 +126,6 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0 std::string readoutName = "TaggerTrackerHits"; if(Any(colNames==readoutName)){ - std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; - dd4hep::Detector& detector = dd4hep::Detector::getInstance(); - detector.fromCompact(compactName); //----------------------------------------- // Hit detector IDs //----------------------------------------- @@ -138,11 +138,11 @@ void LOWQ2Benchmarks( string inName = 
"/scratch/EIC/ReconOut/recon_qr_10x100_ab0 //Create Plots if(Any(colNames==readoutName)){ - histMap["SimDistributions"] = createHitPlots(node); + histMap["SimDistributions"] = createHitPlots(node,eventRate); } - if(Any(colNames==readoutName) && Any(colNames=="MCParticles")){ + if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ histMap["AcceptanceDistributions"] = createAcceptancePlots(node); } @@ -150,17 +150,10 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/ReconOut/recon_qr_10x100_ab0 histMap["ClusterDistributions"] = createClusterPlots(node); } - // if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ - // histMap["ReconstructedDistributions"] = createReconstructionPlots(node); - // } - - //Postprocess plots - - // check histMap["SimDistributions"] exists - // if (histMap.count("SimDistributions") > 0) { - // histMap["RateDistributions"] = createRatePlots(histMap["SimDistributions"], 0.0001); - // } + if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + histMap["ReconstructedDistributions"] = createReconstructionPlots(node); + } writePlots( outName ); -} +} \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h index 0553d916..c6d3336d 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -1,4 +1,4 @@ - #pragma once +#pragma once // #include "functors.h" @@ -13,6 +13,7 @@ using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; using RVecD = ROOT::VecOps::RVec; +using RVecS = ROOT::VecOps::RVec; // Lazy execution methods std::pair,std::map> createAcceptancePlots( RNode d1 ){ @@ -20,7 +21,7 @@ std::pair,std::map> createAcc std::map hHists1D; std::map hHists2D; - int ePrimID = 2; + int ePrimID = 4; int pBeamID = 1; int eBeamID = 0; @@ -49,21 +50,30 @@ 
std::pair,std::map> createAcc .Define("W2","(pBeam+eBeam-primMom).M2()"); // .Define("primTheta",[](std::vector part){return part[4].getMomentum().getTheta()},"{MCParticles}"); + RVecS colNames = d1.GetColumnNames(); std::map filters; filters["All"] = "true"; - filters["Any"] = "TaggerTrackerHits.size() > 0"; - filters["Mod1"] = "moduleID[moduleID==1].size()>0"; - filters["Mod2"] = "moduleID[moduleID==2].size()>0"; filters["ThetaCut"] = "primTheta<11"; + if(Any(colNames=="TaggerTrackerHits")){ + filters["Any"] = "TaggerTrackerHits.size() > 0"; + filters["Mod1"] = "moduleID[moduleID==1].size()>0"; + filters["Mod2"] = "moduleID[moduleID==2].size()>0"; + } + + if(Any(colNames=="LowQ2Tracks")){ + filters["Reconstructed"] = "LowQ2Tracks.size()>0"; + } //Store the counts for each filter RVecI filterCounts; int rawCount = 0; + TString filtersName = ""; for (auto& filter : filters) { RNode filterNode = d1.Filter(filter.second); TString rawString = filter.first; + filtersName += rawString + "_"; TString energyHistName = "h" + rawString + "Energy"; TString thetaHistName = "h" + rawString + "Theta"; TString etaHistName = "h" + rawString + "Eta"; @@ -95,12 +105,14 @@ std::pair,std::map> createAcc int counts = *filterNode.Count(); - filterCounts.push_back(counts); + filterCounts.push_back(counts); if (filter.first == "All") { rawCount = counts; } } - + + filtersName = filtersName(0,filtersName.Length()-1); + // Number of filters int Nfilters = filterCounts.size(); RVecI entryInt = ROOT::VecOps::Range(Nfilters); @@ -114,8 +126,8 @@ std::pair,std::map> createAcc .Define("fraction", [fractions](ULong64_t entry) { return (double)fractions[(int)entry]; }, {"rdfentry_"}); // Use the new column in the histograms - hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;Filter;Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); - hHists1D["Fractions"] = dfWithCounts.Histo1D({"hFractions", "Fractions;Filter;Fraction", Nfilters, 0, static_cast(Nfilters)}, 
"entry","fraction"); + hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;"+filtersName+";Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); + hHists1D["Fractions"] = dfWithCounts.Histo1D({"hFractions", "Fractions;"+filtersName+";Fraction", Nfilters, 0, static_cast(Nfilters)}, "entry","fraction"); hHists1D["TotalCounts"]->Sumw2(false); hHists1D["Fractions"]->Sumw2(false); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index b1b27482..50867824 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -10,7 +10,7 @@ using H2ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createHitPlots( RNode d1){ +std::pair,std::map> createHitPlots( RNode d1, double eventRate ){ int xChip = 448; int yChip = 512; @@ -23,14 +23,21 @@ std::pair,std::map> createHit std::map hHists1D; std::map hHists2D; - + + int events = *d1.Count(); + double eventWeight = eventRate / events; d1 = d1.Define("boardID", [xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) - .Define("yChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}); + .Define("yChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}) + .Define("yHalfChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (0.5*yChip); },{"yID"}) + .Define("eventWeight", [eventWeight](){return eventWeight;},{}); - hHists1D["hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID"); + + hHists1D["hits/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID"); + hHists1D["rate/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID", "eventWeight"); + 
hHists1D["rate/hmoduleID"]->Sumw2(false); // Module Loop for(int i=1; i<=2; i++){ @@ -51,7 +58,9 @@ std::pair,std::map> createHit auto d2 = d1.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) .Define(layerName,"layerID[ModuleFilter]"); - hHists1D[moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName ); + hHists1D["hits/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName ); + hHists1D["rate/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName, "eventWeight"); + hHists1D["rate/"+moduleTag+"/"+layerHistName]->Sumw2(false); // Layer Loop for(int j=0; j<=3; j++){ @@ -68,6 +77,8 @@ std::pair,std::map> createHit TString xChipName = "xChipID"+moduleTag+layerTag; TString yChipName = "yChipID"+moduleTag+layerTag; + TString yHalfChipName = "yHalfChipID"+moduleTag+layerTag; + TString xyHistName = "h"+xName+yName; TString xColumnName = "xColumnID"+moduleTag+layerTag; @@ -79,39 +90,55 @@ std::pair,std::map> createHit TString sizeHistName = "h"+layerSizeName; auto d3 = d2.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) - .Define(xName,"xID[LayerFilter&&ModuleFilter]") - .Define(yName,"yID[LayerFilter&&ModuleFilter]") + .Define(xName,"xID[LayerFilter&&ModuleFilter]") + .Define(yName,"yID[LayerFilter&&ModuleFilter]") .Define(boardName,"boardID[LayerFilter&&ModuleFilter]") .Define(xChipName,"xChipID[LayerFilter&&ModuleFilter]") .Define(yChipName,"yChipID[LayerFilter&&ModuleFilter]") + .Define(yHalfChipName,"yHalfChipID[LayerFilter&&ModuleFilter]") .Define(xColumnName,"xColumnID[LayerFilter&&ModuleFilter]") .Define(layerSizeName,[](RVecI lay){return lay.size();},layerSizeInput); - hHists1D[moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName ); - hHists1D[moduleTag+"/"+layerTag+"/"+yHistName] = 
d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName ); + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName ); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName, "eventWeight"); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName]->Sumw2(false); + + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName ); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName, "eventWeight"); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName]->Sumw2(false); - hHists2D[moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName, "eventWeight"); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName]->Sumw2(false); - hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName, "eventWeight"); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+sizeHistName]->Sumw2(false); // Plot number of hits per boardID for 
each layer TString boardIDHistName = "hBoardID" +moduleTag + layerTag; - hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName); + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName, "eventWeight"); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName]->Sumw2(false); // Plot number of hits per chipID for each layer TString chipIDHistName = "hChipID" +moduleTag + layerTag; - hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName]->Sumw2(false); // Plot number of hits per chipID for each layer TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; - hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Chip", 3*xChip, 0, 3.0*xChip, 6, 0, 6}, xColumnName, yChipName); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, 
"eventWeight"); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName]->Sumw2(false); - } } + return {hHists1D,hHists2D}; } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h new file mode 100644 index 00000000..2fc53e48 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -0,0 +1,96 @@ +#pragma once + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; + +// Lazy execution methods +std::pair,std::map> createReconstructionPlots( RNode d1 ){ + + int ePrimID = 4; + int pBeamID = 1; + int eBeamID = 0; + + std::map hHists1D; + std::map hHists2D; + + d1 = d1.Define("eBeam", [eBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[eBeamID].momentum.x,h[eBeamID].momentum.y,h[eBeamID].momentum.z,h[eBeamID].mass);}, + {"MCParticles"}) + .Define("pBeam", [pBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[pBeamID].momentum.x,h[pBeamID].momentum.y,h[pBeamID].momentum.z,h[pBeamID].mass);}, + {"MCParticles"}) + .Define("primMom", [ePrimID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[ePrimID].momentum.x,h[ePrimID].momentum.y,h[ePrimID].momentum.z,h[ePrimID].mass);}, + {"MCParticles"}) + .Define("primE","primMom.E()") + .Define("primPx","primMom.Px()") + .Define("primPy","primMom.Py()") + .Define("primPz","primMom.Pz()") + .Define("primTheta","1000*(TMath::Pi()-primMom.Theta())") + .Define("primEta","primMom.Eta()") + .Define("scatteredV","eBeam-primMom") + .Define("Q2","-scatteredV.M2()") + .Define("logQ2","log10(Q2)") + .Define("x","Q2/(2*scatteredV.Dot(pBeam))") + .Define("logx","log10(x)") + .Define("primPhi","primMom.Phi()") + .Define("W2","(pBeam+eBeam-primMom).M2()") + .Define("reconTheta","(double)LowQ2TrackParameters[0].theta") + .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") + 
.Define("reconP","(double)(LowQ2TrackParameters[0].qOverP*LowQ2TrackParameters[0].charge)") + .Define("reconMom",[](double p, double theta, double phi){return ROOT::Math::PxPyPzMVector(p*sin(theta)*cos(phi),p*sin(theta)*sin(phi),p*cos(theta),0.000511);},{"reconP","reconTheta","reconPhi"}) + .Define("reconE","reconMom.E()") + .Define("reconScatteredV","eBeam-reconMom") + .Define("reconQ2","-reconScatteredV.M2()") + .Define("reconlogQ2","log10(reconQ2)") + .Define("reconX","reconQ2/(2*reconScatteredV.Dot(pBeam))") + .Define("reconlogx","log10(reconX)") + .Define("reconW2","(pBeam+eBeam-reconMom).M2()") + .Define("reconPx","(double)reconMom.Px()") + .Define("reconPy","(double)reconMom.Py()") + .Define("reconPz","(double)reconMom.Pz()") + .Define("thetaRes","reconTheta-primTheta") + .Define("phiRes","reconPhi-primPhi") + .Define("ERes","reconMom.E()-primE") + .Define("Q2Res","reconQ2-Q2") + .Define("XRes","reconX-x") + .Define("W2Res","reconW2-W2"); + + + + hHists2D["reconThetaVsPrimTheta"] = d1.Histo2D({"reconThetaVsPrimTheta","reconThetaVsPrimTheta;#theta_{prim} [mrad];#theta_{recon} [mrad]",100,0,100,100,0,100},"primTheta","reconTheta"); + + hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [rad];#phi_{recon} [rad]",100,-TMath::Pi(),TMath::Pi(),100,-TMath::Pi(),TMath::Pi()},"primPhi","reconPhi"); + + hHists2D["reconPxVsPrimPx"] = d1.Histo2D({"reconPxVsPrimPx","reconPxVsPrimPx;p_{x,prim} [GeV];p_{x,recon} [GeV]",100,-0.5,0.5,100,-0.5,0.5},"primPx","reconPx"); + hHists2D["reconPyVsPrimPy"] = d1.Histo2D({"reconPyVsPrimPy","reconPyVsPrimPy;p_{y,prim} [GeV];p_{y,recon} [GeV]",100,-0.5,0.5,100,-0.5,0.5},"primPy","reconPy"); + hHists2D["reconPzVsPrimPz"] = d1.Histo2D({"reconPzVsPrimPz","reconPzVsPrimPz;p_{z,prim} [GeV];p_{z,recon} [GeV]",100,0,0.5,100,0,0.5},"primPz","reconPz"); + hHists2D["reconEVsPrimE"] = d1.Histo2D({"reconEVsPrimE","reconEVsPrimE;E_{prim} [GeV];E_{recon} [GeV]",100,0,0.5,100,0,0.5},"primE","reconE"); + 
hHists2D["reconQ2VsPrimQ2"] = d1.Histo2D({"reconQ2VsPrimQ2","reconQ2VsPrimQ2;Q^{2}_{prim} [GeV^{2}];Q^{2}_{recon} [GeV^{2}]",100,0,0.5,100,0,0.5},"Q2","reconQ2"); + hHists2D["reconXVsPrimX"] = d1.Histo2D({"reconXVsPrimX","reconXVsPrimX;x_{prim};x_{recon}",100,0,0.5,100,0,0.5},"x","reconX"); + hHists2D["reconW2VsPrimW2"] = d1.Histo2D({"reconW2VsPrimW2","reconW2VsPrimW2;W^{2}_{prim} [GeV^{2}];W^{2}_{recon} [GeV^{2}]",100,0,0.5,100,0,0.5},"W2","reconW2"); + + hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",100,-50,50},"thetaRes"); + hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",100,-0.1,0.1},"phiRes"); + hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",100,-0.1,0.1},"ERes"); + hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;Q^{2}_{recon}-Q^{2}_{prim} [GeV^{2}]",100,-0.1,0.1},"Q2Res"); + hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;x_{recon}-x_{prim}",100,-0.1,0.1},"XRes"); + hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",100,-0.1,0.1},"W2Res"); + + hHists1D["thetaResVsE"] = d1.Histo1D({"thetaResVsE","thetaResVsE;E_{prim} [GeV];#theta_{recon}-#theta_{prim} [mrad]",100,0,0.5},"primE","thetaRes"); + hHists1D["phiResVsE"] = d1.Histo1D({"phiResVsE","phiResVsE;E_{prim} [GeV];#phi_{recon}-#phi_{prim} [rad]",100,0,0.5},"primE","phiRes"); + hHists1D["EResVsE"] = d1.Histo1D({"EResVsE","EResVsE;E_{prim} [GeV];E_{recon}-E_{prim} [GeV]",100,0,0.5},"primE","ERes"); + hHists1D["Q2ResVsE"] = d1.Histo1D({"Q2ResVsE","Q2ResVsE;E_{prim} [GeV];Q^{2}_{recon}-Q^{2}_{prim} [GeV^{2}]",100,0,0.5},"primE","Q2Res"); + + //Plot showing where the phi resolution is less than 30 degrees in terms of E and theta + //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad]",100,0,0.5,100,0,100},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); + + return {hHists1D,hHists2D}; + +} 
diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C new file mode 100644 index 00000000..301a25a2 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -0,0 +1,26 @@ +#include "LOWQ2Benchmarks.h" + +void RunLOWQ2(){ + + std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; + dd4hep::Detector& detector = dd4hep::Detector::getInstance(); + detector.fromCompact(compactName); + + double luminosity = 1e34; // [cm^-2 s^-1] + double eBeamEnergy = 18.0; // [GeV] + double pBeamEnergy = 275.0; // [GeV] + double eventCrossSectionQR = 0.0551; // [mb] + double eventCrossSectionBrems = 1.0; // [mb] + double bunchSpacing = 10.15*1e-9; // [s] + + double eventRateQR = luminosity * eventCrossSectionQR * 1e-27; // [Hz] + double eventRateBrems = luminosity * eventCrossSectionBrems * 1e-27; // [Hz] + double bunchRate = 1.0 / bunchSpacing; // [Hz] + + LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","LOWQ2QR.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","LOWQ2QROld.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","LOWQ2QRRates.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","LOWQ2BremsRates.root",detector,bunchRate); + + +} \ No newline at end of file From 58a034f6db4889bc9e1489d7ad89a5017bd60993 Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 29 Jan 2024 21:44:04 +0000 Subject: [PATCH 11/53] Formatting and range corrections --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 1 + benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 111 ++++++++++++------ benchmarks/LOWQ2/analysis/LOWQ2hits.h | 32 ++--- .../LOWQ2/analysis/LOWQ2reconstruction.h | 101 +++++++++++----- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 15 ++- 5 files changed, 172 insertions(+), 88 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h 
index d8ca11b1..91e26bb9 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -52,6 +52,7 @@ void writePlots( TString outName ){ for (size_t i = 0; i < parts.size(); ++i) { if (i == parts.size() - 1) { // This is the last part, write the histogram + hist->SetMinimum(0); hist->Write(parts[i].c_str()); } else { // This is not the last part, create or get the directory diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h index c6d3336d..0a9a8c1c 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -21,7 +21,7 @@ std::pair,std::map> createAcc std::map hHists1D; std::map hHists2D; - int ePrimID = 4; + int ePrimID = 2; int pBeamID = 1; int eBeamID = 0; @@ -48,7 +48,6 @@ std::pair,std::map> createAcc .Define("logx","log10(x)") .Define("primPhi","primMom.Phi()") .Define("W2","(pBeam+eBeam-primMom).M2()"); - // .Define("primTheta",[](std::vector part){return part[4].getMomentum().getTheta()},"{MCParticles}"); RVecS colNames = d1.GetColumnNames(); std::map filters; @@ -74,36 +73,80 @@ std::pair,std::map> createAcc RNode filterNode = d1.Filter(filter.second); TString rawString = filter.first; filtersName += rawString + "_"; - TString energyHistName = "h" + rawString + "Energy"; - TString thetaHistName = "h" + rawString + "Theta"; - TString etaHistName = "h" + rawString + "Eta"; - TString logQ2HistName = "h" + rawString + "logQ2"; - TString logxHistName = "h" + rawString + "logx"; - TString Q2HistName = "h" + rawString + "Q2"; - TString xHistName = "h" + rawString + "x"; - TString W2HistName = "h" + rawString + "W2"; - TString Q2xHistName = "h" + rawString + "Q2x"; - TString logQ2logxHistName = "h" + rawString + "logQ2logx"; - TString WlogQ2HistName = "h" + rawString + "WlogQ2"; - TString WlogxHistName = "h" + rawString + "Wlogx"; - TString primElogQ2HistName = "h" + rawString + "primElogQ2"; - - 
hHists1D[rawString+"/"+energyHistName] = filterNode.Histo1D({energyHistName, energyHistName + ";Energy [GeV]", 100, 0, 18}, "primE"); - hHists1D[rawString+"/"+thetaHistName] = filterNode.Histo1D({thetaHistName, thetaHistName + ";Theta [mrad]", 100, 0, 18}, "primTheta"); - hHists1D[rawString+"/"+etaHistName] = filterNode.Histo1D({etaHistName, etaHistName + ";Eta", 100, -15, 5}, "primEta"); - hHists1D[rawString+"/"+logQ2HistName] = filterNode.Histo1D({logQ2HistName, logQ2HistName + ";log(Q2) [GeV]", 100, -10, 4}, "logQ2"); - hHists1D[rawString+"/"+logxHistName] = filterNode.Histo1D({logxHistName, logxHistName + ";log(x) [GeV]", 100, -12, 0}, "logx"); - hHists1D[rawString+"/"+Q2HistName] = filterNode.Histo1D({Q2HistName, Q2HistName + ";Q2 [GeV]", 100, 0, 500}, "Q2"); - hHists1D[rawString+"/"+xHistName] = filterNode.Histo1D({xHistName, xHistName + ";x [GeV]", 100, 0, 1}, "x"); - hHists1D[rawString+"/"+W2HistName] = filterNode.Histo1D({W2HistName, W2HistName + ";W2 [GeV]", 100, 0, 500}, "W2"); - - hHists2D[rawString+"/"+Q2xHistName] = filterNode.Histo2D({Q2xHistName, Q2xHistName + ";Q2 [GeV];x [GeV]", 100, 0, 500, 100, 0, 1}, "Q2", "x"); - hHists2D[rawString+"/"+logQ2logxHistName] = filterNode.Histo2D({logQ2logxHistName, logQ2logxHistName + ";log(Q2) [GeV];log(x) [GeV]", 100, -10, 4, 100, -12, 0}, "logQ2", "logx"); - hHists2D[rawString+"/"+WlogQ2HistName] = filterNode.Histo2D({WlogQ2HistName, WlogQ2HistName + ";W2 [GeV];log(Q2) [GeV]", 100, 0, 500, 100, -10, 4}, "W2", "logQ2"); - hHists2D[rawString+"/"+WlogxHistName] = filterNode.Histo2D({WlogxHistName, WlogxHistName + ";W2 [GeV];log(x) [GeV]", 100, 0, 500, 100, -12, 0}, "W2", "logx"); - hHists2D[rawString+"/"+primElogQ2HistName] = filterNode.Histo2D({primElogQ2HistName, primElogQ2HistName + ";E [GeV];log(Q2) [GeV]", 100, 0, 18, 100, -10, 4}, "primE", "logQ2"); - + + + //Histogram names + TString energyHistName = "h" + rawString + "Energy"; + TString thetaHistName = "h" + rawString + "Theta"; + TString etaHistName = "h" 
+ rawString + "Eta"; + TString logQ2HistName = "h" + rawString + "logQ2"; + TString logxHistName = "h" + rawString + "logx"; + TString Q2HistName = "h" + rawString + "Q2"; + TString xHistName = "h" + rawString + "x"; + TString W2HistName = "h" + rawString + "W2"; + TString Q2xHistName = "h" + rawString + "Q2x"; + TString logQ2logxHistName = "h" + rawString + "logQ2logx"; + TString WlogQ2HistName = "h" + rawString + "WlogQ2"; + TString WlogxHistName = "h" + rawString + "Wlogx"; + TString primElogQ2HistName = "h" + rawString + "primElogQ2"; + TString primEthetaHistName = "h" + rawString + "primEtheta"; + + //Histogram Ranges + double energyMin = 0; + double energyMax = 18; + double thetaMin = 0; + double thetaMax = 12; + double etaMin = -15; + double etaMax = 5; + double logQ2Min = -8.5; + double logQ2Max = 0; + double logxMin = -12; + double logxMax = 0; + double Q2Min = 0; + double Q2Max = 500; + double xMin = 0; + double xMax = 1; + double W2Min = 0; + double W2Max = 500; + + int nBins1D = 400; + int nBins2D = 200; + + //Histogram axis names + TString energyAxisName = "Energy [GeV]"; + TString thetaAxisName = "Theta [mrad]"; + TString etaAxisName = "Eta"; + TString logQ2AxisName = "log(Q2) [GeV]"; + TString logxAxisName = "log(x) [GeV]"; + TString Q2AxisName = "Q2 [GeV]"; + TString xAxisName = "x [GeV]"; + TString W2AxisName = "W2 [GeV]"; + TString Q2xAxisName = Q2AxisName+";"+xAxisName; + TString logQ2logxAxisName = logQ2AxisName+";"+logxAxisName; + TString WlogQ2AxisName = W2AxisName+";"+logQ2AxisName; + TString WlogxAxisName = W2AxisName+";"+logxAxisName; + TString primElogQ2AxisName = energyAxisName+";"+logQ2AxisName; + TString primEthetaAxisName = energyAxisName+";"+thetaAxisName; + + + hHists1D[rawString+"/"+energyHistName] = filterNode.Histo1D({energyHistName, energyHistName + ";Energy [GeV]", nBins1D, energyMin, energyMax}, "primE"); + hHists1D[rawString+"/"+thetaHistName] = filterNode.Histo1D({thetaHistName, thetaHistName + ";Theta [mrad]", nBins1D, 
thetaMin, thetaMax}, "primTheta"); + hHists1D[rawString+"/"+etaHistName] = filterNode.Histo1D({etaHistName, etaHistName + ";Eta", nBins1D, etaMin, etaMax}, "primEta"); + hHists1D[rawString+"/"+logQ2HistName] = filterNode.Histo1D({logQ2HistName, logQ2HistName + ";log(Q2) [GeV]", nBins1D, logQ2Min, logQ2Max}, "logQ2"); + hHists1D[rawString+"/"+logxHistName] = filterNode.Histo1D({logxHistName, logxHistName + ";log(x) [GeV]", nBins1D, logxMin, logxMax}, "logx"); + hHists1D[rawString+"/"+Q2HistName] = filterNode.Histo1D({Q2HistName, Q2HistName + ";Q2 [GeV]", nBins1D, Q2Min, Q2Max}, "Q2"); + hHists1D[rawString+"/"+xHistName] = filterNode.Histo1D({xHistName, xHistName + ";x [GeV]", nBins1D, xMin, xMax}, "x"); + hHists1D[rawString+"/"+W2HistName] = filterNode.Histo1D({W2HistName, W2HistName + ";W2 [GeV]", nBins1D, W2Min, W2Max}, "W2"); + + hHists2D[rawString+"/"+Q2xHistName] = filterNode.Histo2D({Q2xHistName, Q2xHistName + ";Q2 [GeV];x [GeV]", nBins2D, Q2Min, Q2Max, nBins2D, xMin, xMax}, "Q2", "x"); + hHists2D[rawString+"/"+logQ2logxHistName] = filterNode.Histo2D({logQ2logxHistName, logQ2logxHistName + ";log(Q2) [GeV];log(x) [GeV]", nBins2D, logQ2Min, logQ2Max, nBins2D, logxMin, logxMax}, "logQ2", "logx"); + hHists2D[rawString+"/"+WlogQ2HistName] = filterNode.Histo2D({WlogQ2HistName, WlogQ2HistName + ";W2 [GeV];log(Q2) [GeV]", nBins2D, W2Min, W2Max, nBins2D, logQ2Min, logQ2Max}, "W2", "logQ2"); + hHists2D[rawString+"/"+WlogxHistName] = filterNode.Histo2D({WlogxHistName, WlogxHistName + ";W2 [GeV];log(x) [GeV]", nBins2D, W2Min, W2Max, nBins2D, logxMin, logxMax}, "W2", "logx"); + hHists2D[rawString+"/"+primElogQ2HistName] = filterNode.Histo2D({primElogQ2HistName, primElogQ2HistName + ";E [GeV];log(Q2) [GeV]", nBins2D, energyMin, energyMax, nBins2D, logQ2Min, logQ2Max}, "primE", "logQ2"); + hHists2D[rawString+"/"+primEthetaHistName] = filterNode.Histo2D({primEthetaHistName, primEthetaHistName + ";E [GeV];Theta [mrad]", nBins2D, energyMin, energyMax, nBins2D, thetaMin, 
thetaMax}, "primE", "primTheta"); + + int counts = *filterNode.Count(); filterCounts.push_back(counts); if (filter.first == "All") { @@ -114,7 +157,7 @@ std::pair,std::map> createAcc filtersName = filtersName(0,filtersName.Length()-1); // Number of filters - int Nfilters = filterCounts.size(); + int Nfilters = filterCounts.size(); RVecI entryInt = ROOT::VecOps::Range(Nfilters); // Fraction of events passing each filter @@ -122,8 +165,8 @@ std::pair,std::map> createAcc ROOT::RDataFrame df(Nfilters); auto dfWithCounts = df.Define("entry", [entryInt](ULong64_t entry) { return (double)entryInt[(int)entry]; }, {"rdfentry_"}) - .Define("count", [filterCounts](ULong64_t entry) { return (double)filterCounts[(int)entry]; }, {"rdfentry_"}) - .Define("fraction", [fractions](ULong64_t entry) { return (double)fractions[(int)entry]; }, {"rdfentry_"}); + .Define("count", [filterCounts](ULong64_t entry) { return (double)filterCounts[(int)entry]; }, {"rdfentry_"}) + .Define("fraction", [fractions](ULong64_t entry) { return (double)fractions[(int)entry]; }, {"rdfentry_"}); // Use the new column in the histograms hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;"+filtersName+";Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 50867824..9f5b9c4d 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -35,8 +35,8 @@ std::pair,std::map> createHit .Define("eventWeight", [eventWeight](){return eventWeight;},{}); - hHists1D["hits/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID"); - hHists1D["rate/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID", 2, 1, 3 }, "moduleID", "eventWeight"); + hHists1D["hits/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID;Counts", 2, 1, 3 }, "moduleID"); + hHists1D["rate/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module 
ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); hHists1D["rate/hmoduleID"]->Sumw2(false); // Module Loop @@ -58,8 +58,8 @@ std::pair,std::map> createHit auto d2 = d1.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) .Define(layerName,"layerID[ModuleFilter]"); - hHists1D["hits/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName ); - hHists1D["rate/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID", 4, 0, 4 }, layerName, "eventWeight"); + hHists1D["hits/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID;Counts", 4, 0, 4 }, layerName ); + hHists1D["rate/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); hHists1D["rate/"+moduleTag+"/"+layerHistName]->Sumw2(false); // Layer Loop @@ -101,16 +101,16 @@ std::pair,std::map> createHit - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName ); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column", xRange, xMin, xMax }, xName, "eventWeight"); + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column;Counts", xRange, xMin, xMax }, xName ); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column;Rate [Hz]", xRange, xMin, xMax }, xName, "eventWeight"); hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName]->Sumw2(false); - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName ); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column", yRange, yMin, yMax }, yName, "eventWeight"); + 
hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column;Counts", yRange, yMin, yMax }, yName ); + hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column;Rate [Hz]", yRange, yMin, yMax }, yName, "eventWeight"); hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName]->Sumw2(false); - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName, "eventWeight"); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Counts", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Rate [Hz]", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName, "eventWeight"); hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName]->Sumw2(false); hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); @@ -119,20 +119,20 @@ std::pair,std::map> createHit // Plot number of hits per boardID for each layer TString boardIDHistName = "hBoardID" +moduleTag + layerTag; - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID", 3, 0, 3}, boardName, "eventWeight"); + hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID;Counts", 3, 0, 3}, boardName); + 
hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName]->Sumw2(false); // Plot number of hits per chipID for each layer TString chipIDHistName = "hChipID" +moduleTag + layerTag; - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Counts", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName]->Sumw2(false); // Plot number of hits per chipID for each layer TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); + hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Counts", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName); + hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, 
xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName]->Sumw2(false); } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index 2fc53e48..b136978a 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -9,7 +9,7 @@ using RVecI = ROOT::VecOps::RVec; // Lazy execution methods std::pair,std::map> createReconstructionPlots( RNode d1 ){ - int ePrimID = 4; + int ePrimID = 2; int pBeamID = 1; int eBeamID = 0; @@ -32,18 +32,21 @@ std::pair,std::map> createRec .Define("primPx","primMom.Px()") .Define("primPy","primMom.Py()") .Define("primPz","primMom.Pz()") - .Define("primTheta","1000*(TMath::Pi()-primMom.Theta())") + .Define("primTheta","(TMath::Pi()-primMom.Theta())") + .Define("primTheta_mrad","1000*primTheta") .Define("primEta","primMom.Eta()") .Define("scatteredV","eBeam-primMom") .Define("Q2","-scatteredV.M2()") .Define("logQ2","log10(Q2)") .Define("x","Q2/(2*scatteredV.Dot(pBeam))") .Define("logx","log10(x)") - .Define("primPhi","primMom.Phi()") + .Define("primPhi","primMom.Phi() * TMath::RadToDeg()") .Define("W2","(pBeam+eBeam-primMom).M2()") - .Define("reconTheta","(double)LowQ2TrackParameters[0].theta") + .Define("reconTheta","(double)(LowQ2TrackParameters[0].theta)") + .Define("reconTheta_mrad","1000.0*reconTheta") .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") - .Define("reconP","(double)(LowQ2TrackParameters[0].qOverP*LowQ2TrackParameters[0].charge)") + .Define("reconPhi_deg","reconPhi * TMath::RadToDeg()") + .Define("reconP","(18./10.)*(double)(1/(LowQ2TrackParameters[0].qOverP*LowQ2TrackParameters[0].charge))") + .Define("reconMom",[](double p, double theta, double phi){return
ROOT::Math::PxPyPzMVector(p*sin(theta)*cos(phi),p*sin(theta)*sin(phi),p*cos(theta),0.000511);},{"reconP","reconTheta","reconPhi"}) .Define("reconE","reconMom.E()") .Define("reconScatteredV","eBeam-reconMom") @@ -55,42 +58,76 @@ std::pair,std::map> createRec .Define("reconPx","(double)reconMom.Px()") .Define("reconPy","(double)reconMom.Py()") .Define("reconPz","(double)reconMom.Pz()") - .Define("thetaRes","reconTheta-primTheta") + .Define("thetaRes","1000*(reconTheta-primTheta)") .Define("phiRes","reconPhi-primPhi") - .Define("ERes","reconMom.E()-primE") - .Define("Q2Res","reconQ2-Q2") + .Define("ERes","(reconMom.E()-primE)/primE") + .Define("Q2Res","reconlogQ2-logQ2") .Define("XRes","reconX-x") .Define("W2Res","reconW2-W2"); - + + + double thetaMin = 0; // mrad + double thetaMax = 100; // mrad + double phiMin = -180; // deg + double phiMax = 180; // deg + double pxMin = -0.2; // GeV + double pxMax = 0.2; // GeV + double pyMin = -0.2; // GeV + double pyMax = 0.2; // GeV + double pzMin = 0; // GeV + double pzMax = 18; // GeV + double eMin = 0; // GeV + double eMax = 18; // GeV + double logQ2Min = -8.5; // GeV^2 + double logQ2Max = 0; // GeV^2 + double logxMin = -11; + double logxMax = 0; + double w2Min = 0; // GeV^2 + double w2Max = 0.5; // GeV^2 + double thetaResMin = -50; // mrad + double thetaResMax = 50; // mrad + double phiResMin = -45; // deg + double phiResMax = 45; // deg + double eResMin = -0.1; // GeV + double eResMax = 0.1; // GeV + double q2ResMin = -0.01; // GeV^2 + double q2ResMax = 0.01; // GeV^2 + double xResMin = -0.1; + double xResMax = 0.1; + double w2ResMin = -0.1; // GeV^2 + double w2ResMax = 0.1; // GeV^2 - hHists2D["reconThetaVsPrimTheta"] = d1.Histo2D({"reconThetaVsPrimTheta","reconThetaVsPrimTheta;#theta_{prim} [mrad];#theta_{recon} [mrad]",100,0,100,100,0,100},"primTheta","reconTheta"); - - hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [rad];#phi_{recon} 
[rad]",100,-TMath::Pi(),TMath::Pi(),100,-TMath::Pi(),TMath::Pi()},"primPhi","reconPhi"); + int bins1D = 400; + int bins2D = 200; + + hHists2D["reconThetaVsPrimTheta"] = d1.Histo2D({"reconThetaVsPrimTheta","reconThetaVsPrimTheta;#theta_{prim} [mrad];#theta_{recon} [mrad]",bins2D,thetaMin,thetaMax,bins2D,thetaMin,thetaMax},"primTheta_mrad","reconTheta_mrad"); + hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi","reconPhi"); - hHists2D["reconPxVsPrimPx"] = d1.Histo2D({"reconPxVsPrimPx","reconPxVsPrimPx;p_{x,prim} [GeV];p_{x,recon} [GeV]",100,-0.5,0.5,100,-0.5,0.5},"primPx","reconPx"); - hHists2D["reconPyVsPrimPy"] = d1.Histo2D({"reconPyVsPrimPy","reconPyVsPrimPy;p_{y,prim} [GeV];p_{y,recon} [GeV]",100,-0.5,0.5,100,-0.5,0.5},"primPy","reconPy"); - hHists2D["reconPzVsPrimPz"] = d1.Histo2D({"reconPzVsPrimPz","reconPzVsPrimPz;p_{z,prim} [GeV];p_{z,recon} [GeV]",100,0,0.5,100,0,0.5},"primPz","reconPz"); - hHists2D["reconEVsPrimE"] = d1.Histo2D({"reconEVsPrimE","reconEVsPrimE;E_{prim} [GeV];E_{recon} [GeV]",100,0,0.5,100,0,0.5},"primE","reconE"); - hHists2D["reconQ2VsPrimQ2"] = d1.Histo2D({"reconQ2VsPrimQ2","reconQ2VsPrimQ2;Q^{2}_{prim} [GeV^{2}];Q^{2}_{recon} [GeV^{2}]",100,0,0.5,100,0,0.5},"Q2","reconQ2"); - hHists2D["reconXVsPrimX"] = d1.Histo2D({"reconXVsPrimX","reconXVsPrimX;x_{prim};x_{recon}",100,0,0.5,100,0,0.5},"x","reconX"); - hHists2D["reconW2VsPrimW2"] = d1.Histo2D({"reconW2VsPrimW2","reconW2VsPrimW2;W^{2}_{prim} [GeV^{2}];W^{2}_{recon} [GeV^{2}]",100,0,0.5,100,0,0.5},"W2","reconW2"); + hHists2D["reconPxVsPrimPx"] = d1.Histo2D({"reconPxVsPrimPx","reconPxVsPrimPx;p_{x,prim} [GeV];p_{x,recon} [GeV]",bins2D,pxMin,pxMax,bins2D,pxMin,pxMax},"primPx","reconPx"); + hHists2D["reconPyVsPrimPy"] = d1.Histo2D({"reconPyVsPrimPy","reconPyVsPrimPy;p_{y,prim} [GeV];p_{y,recon} [GeV]",bins2D,pyMin,pyMax,bins2D,pyMin,pyMax},"primPy","reconPy"); + 
hHists2D["reconPzVsPrimPz"] = d1.Histo2D({"reconPzVsPrimPz","reconPzVsPrimPz;p_{z,prim} [GeV];p_{z,recon} [GeV]",bins2D,pzMin,pzMax,bins2D,pzMin,pzMax},"primPz","reconPz"); + hHists2D["reconEVsPrimE"] = d1.Histo2D({"reconEVsPrimE","reconEVsPrimE;E_{prim} [GeV];E_{recon} [GeV]",bins2D,eMin,eMax,bins2D,eMin,eMax},"primE","reconE"); + hHists2D["reconQ2VsPrimQ2"] = d1.Histo2D({"reconQ2VsPrimQ2","reconQ2VsPrimQ2;log(Q^{2}_{prim}) [GeV^{2}];log(Q^{2}_{recon}) [GeV^{2}]",bins2D,logQ2Min,logQ2Max,bins2D,logQ2Min,logQ2Max},"logQ2","reconlogQ2"); + hHists2D["reconXVsPrimX"] = d1.Histo2D({"reconXVsPrimX","reconXVsPrimX;log(x_{prim});log(x_{recon})",bins2D,logxMin,logxMax,bins2D,logxMin,logxMax},"logx","reconlogx"); + hHists2D["reconW2VsPrimW2"] = d1.Histo2D({"reconW2VsPrimW2","reconW2VsPrimW2;W^{2}_{prim} [GeV^{2}];W^{2}_{recon} [GeV^{2}]",bins2D,w2Min,w2Max,bins2D,w2Min,w2Max},"W2","reconW2"); - hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",100,-50,50},"thetaRes"); - hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",100,-0.1,0.1},"phiRes"); - hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",100,-0.1,0.1},"ERes"); - hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;Q^{2}_{recon}-Q^{2}_{prim} [GeV^{2}]",100,-0.1,0.1},"Q2Res"); - hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;x_{recon}-x_{prim}",100,-0.1,0.1},"XRes"); - hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",100,-0.1,0.1},"W2Res"); + hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",bins1D,thetaResMin,thetaResMax},"thetaRes"); + hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); + hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",bins1D,eResMin,eResMax},"ERes"); + hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) 
[GeV^{2}]",100,q2ResMin,q2ResMax},"Q2Res"); + hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;log(x_{recon})-log(x_{prim})",100,xResMin,xResMax},"XRes"); + hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",100,w2ResMin,w2ResMax},"W2Res"); - hHists1D["thetaResVsE"] = d1.Histo1D({"thetaResVsE","thetaResVsE;E_{prim} [GeV];#theta_{recon}-#theta_{prim} [mrad]",100,0,0.5},"primE","thetaRes"); - hHists1D["phiResVsE"] = d1.Histo1D({"phiResVsE","phiResVsE;E_{prim} [GeV];#phi_{recon}-#phi_{prim} [rad]",100,0,0.5},"primE","phiRes"); - hHists1D["EResVsE"] = d1.Histo1D({"EResVsE","EResVsE;E_{prim} [GeV];E_{recon}-E_{prim} [GeV]",100,0,0.5},"primE","ERes"); - hHists1D["Q2ResVsE"] = d1.Histo1D({"Q2ResVsE","Q2ResVsE;E_{prim} [GeV];Q^{2}_{recon}-Q^{2}_{prim} [GeV^{2}]",100,0,0.5},"primE","Q2Res"); + hHists2D["thetaResVsE"] = d1.Histo2D({"thetaResVsE", "thetaResVsE;#theta_{recon}-#theta_{prim} [mrad];E_{prim} [GeV]", 100, thetaResMin, thetaResMax, 100, eMin, eMax}, "thetaRes", "primE"); + hHists2D["phiResVsE"] = d1.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", 100, phiResMin, phiResMax, 100, eMin, eMax}, "phiRes", "primE"); + hHists2D["EResVsE"] = d1.Histo2D({"EResVsE", "EResVsE;E_{recon}-E_{prim} [GeV];E_{prim} [GeV]", 100, eResMin, eResMax, 100, eMin, eMax}, "ERes", "primE"); + hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", 100, q2ResMin, q2ResMax, 100, eMin, eMax}, "Q2Res", "primE"); //Plot showing where the phi resolution is less than 30 degrees in terms of E and theta - //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad]",100,0,0.5,100,0,100},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); + //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} 
[mrad]",100,eMin,eMax,100,thetaMin,thetaMax},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); - return {hHists1D,hHists2D}; - + return {hHists1D, hHists2D}; + } + diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index 301a25a2..95d8efbe 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -1,5 +1,6 @@ #include "LOWQ2Benchmarks.h" + void RunLOWQ2(){ std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; @@ -10,17 +11,19 @@ void RunLOWQ2(){ double eBeamEnergy = 18.0; // [GeV] double pBeamEnergy = 275.0; // [GeV] double eventCrossSectionQR = 0.0551; // [mb] - double eventCrossSectionBrems = 1.0; // [mb] + double eventCrossSectionBrems = 171.3; // [mb] double bunchSpacing = 10.15*1e-9; // [s] double eventRateQR = luminosity * eventCrossSectionQR * 1e-27; // [Hz] double eventRateBrems = luminosity * eventCrossSectionBrems * 1e-27; // [Hz] double bunchRate = 1.0 / bunchSpacing; // [Hz] - LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","LOWQ2QR.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","LOWQ2QROld.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","LOWQ2QRRates.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","LOWQ2BremsRates.root",detector,bunchRate); - + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","LOWQ2QR2.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","LOWQ2QROld.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","LOWQ2QRRates.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","LOWQ2BremsRates.root",detector,bunchRate); + // 
LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","LOWQ2BremsRates2.root",detector,eventRateBrems); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","LOWQ2QRRates3.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsRates3.root",detector,eventRateBrems); } \ No newline at end of file From 77e327d19c70fd0171b1ab50dfc0adfbf50401af Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 29 Jan 2024 21:44:37 +0000 Subject: [PATCH 12/53] Added postprocessing script to create and format histograms into canvasses --- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 260 +++++++++++++++++++ 1 file changed, 260 insertions(+) create mode 100644 benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C new file mode 100644 index 00000000..65004979 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -0,0 +1,260 @@ +#include +#include +#include +#include + +void SetStyle() { + // Set global plot format variables + gStyle->SetOptStat(0); + gStyle->SetPadLeftMargin(0.15); + gStyle->SetPadRightMargin(0.18); + gStyle->SetPadBottomMargin(0.12); + gStyle->SetTitleSize(0.04, "XYZ"); + + gStyle->SetTitleOffset(4.0, "Z"); + // gStyle->SetTitleOffset(1.0, "Y"); + +} + +//---------------------------------------------------------------------- +TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHistName, TString Tag="Quasi-Real") { + + // Read in the plots from the input file + TH1* ReconPlot = (TH1*)inputDir->Get(ReconHistName); + TH1* AllPlot = (TH1*)inputDir->Get(AllHistName); + + // Check plots exist + if (!ReconPlot || !AllPlot) { + std::cout << "Error: plots "<< ReconHistName <<" and/or "<< AllHistName <<" not found in input file" << std::endl; + return nullptr; + } + + //Divide the reconstructed plot by the all plot + ReconPlot->Divide(AllPlot); + + return ReconPlot; + +} + +TH2* 
RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real") { + + TString histName = "rate/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxIDmodule"+std::to_string(Module)+"layer"+std::to_string(Layer)+"yIDmodule"+std::to_string(Module)+"layer"+std::to_string(Layer); + + // Read in the plots from the input file + TH2* RatePlot = (TH2*)inputDir->Get(histName); + // Check plots exist + if (!RatePlot) { + cout << "Error: plot "<< histName <<" not found in input file" << endl; + return nullptr; + } + + // Format the plot + int rebin = 32; + RatePlot->Rebin2D(rebin,rebin); + RatePlot->Scale(1.0/(rebin*rebin)); + TString title = "Tagger module "+std::to_string(Module)+", layer "+std::to_string(Layer)+" - Mean "+Tag+" rate per "+std::to_string(rebin)+"x"+std::to_string(rebin)+" pixel group"; + // TString title = "Tagger module 2, layer 0 rate, integrated over "+std::to_string(rebin)+"x"+std::to_string(rebin)+" pixel group"; + RatePlot->SetTitle(title); + RatePlot->GetXaxis()->SetTitle("x pixel"); + RatePlot->GetYaxis()->SetTitle("y pixel"); + RatePlot->GetZaxis()->SetTitle("Average Rate [Hz/55 #mum pixel]"); + + RatePlot->SetStats(0); + + return RatePlot; + +} + + +void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + //E-Q2 acceptance plots + TString ReconEQ2Name = "Reconstructed/hReconstructedprimElogQ2"; + TString AllEQ2Name = "All/hAllprimElogQ2"; + + TH1* AcceptEQ2 = AcceptancePlot(inputDir,ReconEQ2Name,AllEQ2Name); + + TCanvas* canvasEQ2 = new TCanvas("AcceptanceEQ2Canvas", "AcceptanceEQ2Canvas", 1600, 1200); + + // Draw the plots on the canvas + canvasEQ2->cd(); + + TString title = "Acceptance as a function of scattered electron energy and reaction log_{10}(Q^{2})"; + AcceptEQ2->SetTitle(title); + AcceptEQ2->GetXaxis()->SetTitle("E_{e'} [GeV]"); + AcceptEQ2->GetYaxis()->SetTitle("log_{10}(Q^{2}) [GeV^{2}]"); + AcceptEQ2->GetZaxis()->SetTitle("Acceptance"); + + 
AcceptEQ2->SetStats(0); + AcceptEQ2->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvasEQ2); + + // Clean up + delete canvasEQ2; + + //E-Theta acceptance plots + TString ReconEThetaName = "Reconstructed/hReconstructedprimEtheta"; + TString AllEThetaName = "All/hAllprimEtheta"; + + TH1* AcceptETheta = AcceptancePlot(inputDir,ReconEThetaName,AllEThetaName); + + TCanvas* canvasETheta = new TCanvas("AcceptanceEThetaCanvas", "AcceptanceEThetaCanvas", 1600, 1200); + + // Draw the plots on the canvas + canvasETheta->cd(); + + title = "Acceptance as a function of scattered electron energy and theta"; + AcceptETheta->SetTitle(title); + AcceptETheta->GetXaxis()->SetTitle("E_{e'} [GeV]"); + AcceptETheta->GetYaxis()->SetTitle("Theta_{e'} [mrad]"); + AcceptETheta->GetZaxis()->SetTitle("Acceptance"); + + AcceptETheta->SetStats(0); + AcceptETheta->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvasETheta); + + // Clean up + delete canvasETheta; + +} + +//---------------------------------------------------------------------- + +void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + TString histName = "rate/module2/layer0/hxIDmodule2layer0yIDmodule2layer0"; + + TCanvas* canvas = new TCanvas("RateCanvas", "RateCanvas", 3200, 1200); + canvas->Divide(2,1); + + // Read in the plots from the input file + TH2* RatePlot1_0 = RatePlot(inputDir,1,0,Tag); + TH2* RatePlot2_0 = RatePlot(inputDir,2,0,Tag); + + // Draw the plots on the canvas + canvas->cd(1); + gPad->SetLogz(); + RatePlot1_0->Draw("colz"); + + canvas->cd(2); + gPad->SetLogz(); + RatePlot2_0->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvas); + + // Clean up + delete canvas; + +} + +//---------------------------------------------------------------------- + +void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + TString Q2HistName = 
"reconQ2VsPrimQ2"; + TString EHistName = "reconEVsPrimE"; + + TCanvas* canvas = new TCanvas("ReconCanvas", "ReconCanvas", 2400, 1200); + + // Read in the plots from the input file + TH1* Q2plot = (TH1*)inputDir->Get(Q2HistName); + TH1* Eplot = (TH1*)inputDir->Get(EHistName); + + // Check plots exist + if (!Q2plot || !Eplot) { + cout << "Error: plots "<< Q2HistName <<" and/or "<< EHistName <<" not found in input file" << endl; + return; + } + + // Draw the plots on the canvas + canvas->Divide(2,1); + canvas->cd(1); + gPad->SetLogz(); + + Q2plot->SetTitle("Reconstructed Q^{2} vs. primary Q^{2}"); + Q2plot->GetXaxis()->SetTitle("log_{10}(Q^{2}_{prim}) [GeV^{2}]"); + Q2plot->GetYaxis()->SetTitle("log_{10}(Q^{2}_{recon}) [GeV^{2}]"); + Q2plot->GetZaxis()->SetTitle("Counts"); + + Q2plot->SetStats(0); + Q2plot->Draw("colz"); + + canvas->cd(2); + gPad->SetLogz(); + + Eplot->SetTitle("Reconstructed electron energy vs. primary electron energy"); + Eplot->GetXaxis()->SetTitle("E_{prim} [GeV]"); + Eplot->GetYaxis()->SetTitle("E_{recon} [GeV]"); + Eplot->GetZaxis()->SetTitle("Counts"); + + Eplot->SetStats(0); + Eplot->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvas); + + // Clean up + delete canvas; + +} + +//---------------------------------------------------------------------- +// This function is called by the benchmarking script +//---------------------------------------------------------------------- +void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plots.root", TString Tag="Quasi-Real") { + + SetStyle(); + + // Open the input file + TFile* inputFile = TFile::Open(inName); + if (!inputFile) { + cout << "Error opening input file:" << inName << endl; + return; + } + + // Check the directory LOWQ2 exists and cd into it + TDirectory* dir = inputFile->GetDirectory("LOWQ2"); + if (!dir) { + cout << "Error: directory LOWQ2 not found in input file" << endl; + return; + } + + // Open the output file + TFile* 
outputFile = TFile::Open(outName, "RECREATE"); + + // Format the plots + + //Check if AcceptanceDistributions directory exists + TDirectory* dirA = dir->GetDirectory("AcceptanceDistributions"); + if (dirA) { + FormatAcceptancePlots(dirA, outputFile,Tag); + } + + //Check if SimDistributions directory exists + TDirectory* dirS = dir->GetDirectory("SimDistributions"); + if (dirS) { + FormatRatePlots(dirS, outputFile,Tag); + } + + //Check if ReconstructedDistributions directory exists + TDirectory* dirR = dir->GetDirectory("ReconstructedDistributions"); + if (dirR) { + FormatReconstructionPlots(dirR, outputFile,Tag); + } + + inputFile->Close(); + outputFile->Close(); + +} + +//---------------------------------------------------------------------- + +void PostprocessLOWQ2() { + Postprocess("LOWQ2QRRates3.root", "LOWQ2QRPlots.root", "Quasi-Real"); + Postprocess("LOWQ2BremsRates3.root", "LOWQ2BremsPlots.root", "Bremsstrahlung"); +} \ No newline at end of file From 0bc51e4e9e7531cd99ad045bcdef2dc645fefd5d Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 29 Jan 2024 22:03:32 +0000 Subject: [PATCH 13/53] Fixed typo in dataframe string --- .../LOWQ2/analysis/LOWQ2reconstruction.h | 25 ++++++++++++------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index b136978a..604a5290 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -43,7 +43,7 @@ std::pair,std::map> createRec .Define("primPhi","primMom.Phi() * TMath::RadToDeg()") .Define("W2","(pBeam+eBeam-primMom).M2()") .Define("reconTheta","(double)(LowQ2TrackParameters[0].theta)") - .Define("reconTheta_mrad","1000.0*reconTheta)") + .Define("reconTheta_mrad","1000.0*reconTheta") .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") .Define("reconPhi_deg","reconPhi * TMath::RadToDeg()") 
.Define("reconP","(18./10.)*(double)(1/(LowQ2TrackParameters[0].qOverP*LowQ2TrackParameters[0].charge))") @@ -58,7 +58,7 @@ std::pair,std::map> createRec .Define("reconPx","(double)reconMom.Px()") .Define("reconPy","(double)reconMom.Py()") .Define("reconPz","(double)reconMom.Pz()") - .Define("thetaRes","1000*(reconTheta-primTheta)") + .Define("thetaRes","1000.0*(reconTheta-primTheta)") .Define("phiRes","reconPhi-primPhi") .Define("ERes","(reconMom.E()-primE)/primE") .Define("Q2Res","reconlogQ2-logQ2") @@ -115,14 +115,21 @@ std::pair,std::map> createRec hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",bins1D,thetaResMin,thetaResMax},"thetaRes_mrad"); hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",bins1D,eResMin,eResMax},"ERes"); - hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}]",100,q2ResMin,q2ResMax},"Q2Res"); - hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;log(x_{recon})-log(x_{prim})",100,xResMin,xResMax},"XRes"); - hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",100,w2ResMin,w2ResMax},"W2Res"); + hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}]",bins1D,q2ResMin,q2ResMax},"Q2Res"); + hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;log(x_{recon})-log(x_{prim})",bins1D,xResMin,xResMax},"XRes"); + hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",bins1D,w2ResMin,w2ResMax},"W2Res"); - hHists2D["thetaResVsE"] = d1.Histo2D({"thetaResVsE", "thetaResVsE;#theta_{recon}-#theta_{prim} [mrad];E_{prim} [GeV]", 100, thetaResMin, thetaResMax, 100, eMin, eMax}, "thetaRes_mrad", "primE"); - hHists2D["phiResVsE"] = d1.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", 100, phiResMin, phiResMax, 100, eMin, eMax}, 
"phiRes", "primE"); - hHists2D["EResVsE"] = d1.Histo2D({"EResVsE", "EResVsE;E_{recon}-E_{prim} [GeV];E_{prim} [GeV]", 100, eResMin, eResMax, 100, eMin, eMax}, "ERes", "primE"); - hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", 100, q2ResMin, q2ResMax, 100, eMin, eMax}, "Q2Res", "primE"); + hHists2D["thetaResVsE"] = d1.Histo2D({"thetaResVsE", "thetaResVsE;#theta_{recon}-#theta_{prim} [mrad];E_{prim} [GeV]", bins2D, thetaResMin, thetaResMax, bins2D, eMin, eMax}, "thetaRes_mrad", "primE"); + hHists2D["phiResVsE"] = d1.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); + hHists2D["EResVsE"] = d1.Histo2D({"EResVsE", "EResVsE;E_{recon}-E_{prim} [GeV];E_{prim} [GeV]", bins2D, eResMin, eResMax, bins2D, eMin, eMax}, "ERes", "primE"); + hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", bins2D, q2ResMin, q2ResMax, bins2D, eMin, eMax}, "Q2Res", "primE"); + + hHists2D["phiResVsTheta"] = d1.Histo2D({"phiResVsTheta", "phiResVsTheta;#phi_{recon}-#phi_{prim} [rad];#theta_{prim} [mrad]", bins2D, phiResMin, phiResMax, bins2D, thetaMin, thetaMax}, "phiRes", "primTheta_mrad"); + + auto d2 = d1.Filter("reconTheta_mrad<2"); + + hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); + hHists2D["thetacut/reconPhiVsPrimPhi"] = d2.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi","reconPhi"); //Plot showing where the phi resolution is less than 30 degrees in terms of E and theta //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} 
[mrad]",100,eMin,eMax,100,thetaMin,thetaMax},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); From 7c2adb0fc99e104f5c5c03233047d6208948e5ce Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 31 Jan 2024 13:40:49 +0000 Subject: [PATCH 14/53] Fixed some bugs, added some new resolution plots and sped things up --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 201 +++++++++++++++--- benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 14 +- benchmarks/LOWQ2/analysis/LOWQ2clusters.h | 6 +- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 73 +++---- .../LOWQ2/analysis/LOWQ2reconstruction.h | 47 ++-- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 10 +- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 10 +- benchmarks/LOWQ2/analysis/functors.h | 4 +- 8 files changed, 255 insertions(+), 110 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index 91e26bb9..3838a542 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -13,16 +13,18 @@ using RNode = ROOT::RDF::RNode; using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; using RVecS = ROOT::VecOps::RVec; using RVecI = ROOT::VecOps::RVec; -std::map,std::map>> histMap; - +std::map,std::map,std::map>> histMap; +//--------------------------------------------------------------------------------------------- // Create dataframe from input file(s) +//--------------------------------------------------------------------------------------------- RNode initialise( string fileName ){ ROOT::RDataFrame d0("events",fileName); @@ -30,25 +32,101 @@ RNode initialise( string fileName ){ } +//--------------------------------------------------------------------------------------------- +// Create histograms showing the variation of flux in various system components 
+//--------------------------------------------------------------------------------------------- +void WriteFluxPlots(H2ResultPtr hist, std::string parts){ + // Make projection of 2D pixel binned histogram + auto nBins = hist->GetNcells(); + TString pixelFluxName = TString(parts)+"Flux"; + TString logPixelFluxName = TString(parts)+"LogFlux"; + + //Get maximum bin content to set range + double maxBinContent = hist->GetMaximum(); + double logMax = log10(maxBinContent); + + //Create pixel flux histogram + TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,100,0,maxBinContent); + + //Create log pixel flux histogram + TH1D* logPixelFlux = new TH1D(logPixelFluxName,logPixelFluxName,100,0,logMax); + for(int i=0; iFill(hist->GetBinContent(i)); + logPixelFlux->Fill(log10(hist->GetBinContent(i))); + } + pixelFlux->GetXaxis()->SetTitle("N_{hits}"); + logPixelFlux->GetXaxis()->SetTitle("log(N_{hits})"); + pixelFlux->GetYaxis()->SetTitle("N_{pixels}"); + logPixelFlux->GetYaxis()->SetTitle("N_{pixels}"); + + pixelFlux->Write(); + logPixelFlux->Write(); + +} + +//--------------------------------------------------------------------------------------------- +//Create histograms showing the variation of resolution accross the kinematic phase space +//--------------------------------------------------------------------------------------------- +void WriteResolutionPlots(H2ResultPtr hist, std::string parts){ + + hist->FitSlicesX(nullptr,0,-1,5); + TH1D* h1 = (TH1D*)gDirectory->Get(TString(hist->GetName())+"_2"); + h1->GetXaxis()->SetTitle(hist->GetXaxis()->GetTitle()); + h1->GetYaxis()->SetTitle(TString(hist->GetYaxis()->GetTitle())+"Resolution"); + + h1->Write(); + +} + + +//--------------------------------------------------------------------------------------------- // Format and write plots +//--------------------------------------------------------------------------------------------- void writePlots( TString outName ){ TFile* rootfile = new TFile(outName,"RECREATE"); - auto 
benchmarkdir = rootfile->mkdir("LOWQ2"); + auto benchmarkDir = rootfile->mkdir("LOWQ2"); + //Loop over 1D histograms saving for(auto &[bmkey,hists]: histMap){ - auto histsdir = benchmarkdir->mkdir(bmkey)->cd(); + //Split mapped distributions name into parts + std::stringstream ss(bmkey.Data()); + std::string part; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } - for(auto &[key,hist]: hists.first){ - TDirectory* currentDir = gDirectory; + TDirectory* dir = benchmarkDir; + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // This is the last part, write the histogram + dir->cd(); + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + + TDirectory* currentDir = gDirectory; + + for(auto &[key,hist]: get<0>(hists)){ + + //Split histogram name into parts std::stringstream ss(key.Data()); std::string part; - TDirectory* dir = currentDir; std::vector parts; while (std::getline(ss, part, '/')) { parts.push_back(part); } + + TDirectory* dir = currentDir; for (size_t i = 0; i < parts.size(); ++i) { if (i == parts.size() - 1) { // This is the last part, write the histogram @@ -68,7 +146,8 @@ void writePlots( TString outName ){ // hist->Write(key); } - for(auto &[key,hist]: hists.second){ + //Loop over 2D histograms saving and calculating fluxes as needed + for(auto &[key,hist]: get<1>(hists)){ TDirectory* currentDir = gDirectory; std::stringstream ss(key.Data()); @@ -79,25 +158,56 @@ void writePlots( TString outName ){ parts.push_back(part); } for (size_t i = 0; i < parts.size(); ++i) { - if (i == parts.size() - 1) { - // This is the last part, write the histogram - // hist->Write(parts[i].c_str()); - - // Make projection of 2D pixel binned histogram - auto nBins = hist->GetNcells(); - TString pixelFluxName = 
TString(parts[i].c_str())+"Flux"; - //Get maximum bin content to set range - double maxBinContent = hist->GetMaximum(); - double logMax = log10(maxBinContent); - - TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,100,0,logMax); - for(int i=0; iFill(log10(hist->GetBinContent(i))); + if (i == parts.size() - 1) { + // If histogram name contains rate or hits then calculate flux + if(parts[i].find("Rate") != std::string::npos || parts[i].find("Hits") != std::string::npos){ + WriteFluxPlots(hist,parts[i].c_str()); + } + + // If histogram name contains Res then create a profile + if(parts[i].find("Res") != std::string::npos){ + WriteResolutionPlots(hist,parts[i].c_str()); } - pixelFlux->GetXaxis()->SetTitle("log(N_{hits})"); - + + hist->Sumw2(false); hist->Write(); - pixelFlux->Write(); + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); + } + + //Loop over 3D histograms saving + for(auto &[key,hist]: get<2>(hists)){ + TDirectory* currentDir = gDirectory; + std::stringstream ss(key.Data()); + std::string part; + TDirectory* dir = currentDir; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // This is the last part, write the histogram + hist->Write(parts[i].c_str()); + + //Fit histogram z slices and save 2d histogram + hist->FitSlicesZ(nullptr,0,-1,5); + TH2D* h2 = (TH2D*)gDirectory->Get(TString(hist->GetName())+"_2"); + h2->GetXaxis()->SetTitle(hist->GetXaxis()->GetTitle()); + h2->GetYaxis()->SetTitle(hist->GetYaxis()->GetTitle()); + h2->GetZaxis()->SetTitle(TString(hist->GetZaxis()->GetTitle())+"Resolution"); + h2->SetTitle(hist->GetTitle()); + h2->Write(); + } else { // This is not the last part, create or get the directory if 
(!dir->GetDirectory(parts[i].c_str())) { @@ -109,18 +219,27 @@ void writePlots( TString outName ){ } } currentDir->cd(); - + // hist->Write(key); } + + benchmarkDir->cd(); + } rootfile->Close(); } +//--------------------------------------------------------------------------------------------- +// Create the benchmark plots +//--------------------------------------------------------------------------------------------- void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root", TString outName = "LOWQ2QRRates.root", dd4hep::Detector& detector=dd4hep::Detector::getInstance(), double eventRate=0.0 ){ auto node = initialise( inName ); + + int events = *node.Count(); + double eventWeight = eventRate / events; RVecS colNames = node.GetColumnNames(); @@ -131,16 +250,34 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep* // Hit detector IDs //----------------------------------------- auto ids = detector.readout(readoutName).idSpec().fields(); + + node = node.Define("hitParticleIndex","_TaggerTrackerHits_MCParticle.index") + .Define("generatorStat","MCParticles.generatorStatus") + .Define("primary",[](RVecI index, RVecI status){ + RVecI result(index.size()); + for (size_t i = 0; i < index.size(); ++i) { + result[i] = status[index[i]] == 1; + } + return result; + },{"hitParticleIndex","generatorStat"}); + + std::string filterName = "TaggerTrackerHitsFilter"; + auto allnode = node.Define(filterName,"TaggerTrackerHits"); + auto primarynode = node.Define(filterName,"TaggerTrackerHits[primary==1]"); + auto secondarynode = node.Define(filterName,"TaggerTrackerHits[primary==0]"); + for(auto &[key,id]: ids){ TString colName = key+"ID"; - node = node.Define(colName,getSubID(key,detector),{readoutName}); + node = node.Define(colName,getSubID(key,detector),{readoutName}); + primarynode = primarynode.Define(colName,getSubID(key,detector),{filterName}); + secondarynode = 
secondarynode.Define(colName,getSubID(key,detector),{filterName}); } - } - //Create Plots - if(Any(colNames==readoutName)){ - histMap["SimDistributions"] = createHitPlots(node,eventRate); - + //Create Rate Plots + histMap["Rates/AllHits"] = createHitPlots(node,eventWeight); + histMap["Rates/PrimaryHits"] = createHitPlots(primarynode,eventWeight); + histMap["Rates/SecondaryHits"] = createHitPlots(secondarynode,eventWeight); + } if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h index 0a9a8c1c..c0063709 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -2,24 +2,26 @@ // #include "functors.h" - #include "ROOT/RDF/RInterface.hxx" - #include - #include - #include "ROOT/RVec.hxx" +#include "ROOT/RDF/RInterface.hxx" +#include +#include +#include "ROOT/RVec.hxx" // Define alias using RNode = ROOT::RDF::RNode; using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; using RVecD = ROOT::VecOps::RVec; using RVecS = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createAcceptancePlots( RNode d1 ){ +std::tuple,std::map,std::map> createAcceptancePlots( RNode d1 ){ std::map hHists1D; std::map hHists2D; + std::map hHists3D; int ePrimID = 2; int pBeamID = 1; @@ -174,5 +176,5 @@ std::pair,std::map> createAcc hHists1D["TotalCounts"]->Sumw2(false); hHists1D["Fractions"]->Sumw2(false); - return {hHists1D,hHists2D}; + return {hHists1D,hHists2D,hHists3D}; } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2clusters.h b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h index a7a90cdf..2022cc40 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2clusters.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h @@ -2,18 +2,20 @@ using RNode = 
ROOT::RDF::RNode; using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createClusterPlots( RNode d1 ){ +std::tuple,std::map,std::map> createClusterPlots( RNode d1 ){ std::map hHists1D; std::map hHists2D; + std::map hHists3D; // auto d2 = d1.Define("ClusterSize",[](RVecI clu){return clu.size();},{"TaggerTrackerClusterPositions_0.index"}); auto d2 = d1.Define("ClusterSize","TaggerTrackerClusterPositions.rawHits_end-TaggerTrackerClusterPositions.rawHits_begin"); hHists1D["hClusterSize"] = d2.Histo1D({"hClusterSize","hClusterSize",100,0,100}, "ClusterSize"); - return {hHists1D,hHists2D}; + return {hHists1D,hHists2D,hHists3D}; } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 9f5b9c4d..9bb03f6a 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -1,16 +1,17 @@ #pragma once #include "functors.h" -#include "DD4hep/Detector.h" // Define alias using RNode = ROOT::RDF::RNode; using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; +using RVecD = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createHitPlots( RNode d1, double eventRate ){ +std::tuple,std::map,std::map> createHitPlots( RNode d2, double eventWeight ){ int xChip = 448; int yChip = 512; @@ -23,22 +24,19 @@ std::pair,std::map> createHit std::map hHists1D; std::map hHists2D; - - int events = *d1.Count(); - double eventWeight = eventRate / events; - - d1 = d1.Define("boardID", [xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) - .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) - .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) - .Define("yChipID", [yChip](RVecI 
yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}) - .Define("yHalfChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (0.5*yChip); },{"yID"}) + std::map hHists3D; + + d2 = d2.Define("boardID", [xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) + .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) + .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) + .Define("yChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}) + .Define("yHalfChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (0.5*yChip); },{"yID"}) .Define("eventWeight", [eventWeight](){return eventWeight;},{}); - hHists1D["hits/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID;Counts", 2, 1, 3 }, "moduleID"); - hHists1D["rate/hmoduleID"] = d1.Histo1D({"hmoduleID", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); - hHists1D["rate/hmoduleID"]->Sumw2(false); - + // Plot number of hits per moduleID + hHists1D["rate/hmoduleID"] = d2.Histo1D({"hmoduleID", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); + // Module Loop for(int i=1; i<=2; i++){ @@ -55,12 +53,11 @@ std::pair,std::map> createHit TString layerHistName = "h"+layerName; - auto d2 = d1.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) + auto d3 = d2.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) .Define(layerName,"layerID[ModuleFilter]"); - hHists1D["hits/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID;Counts", 4, 0, 4 }, layerName ); - hHists1D["rate/"+moduleTag+"/"+layerHistName] = d2.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); - hHists1D["rate/"+moduleTag+"/"+layerHistName]->Sumw2(false); + + hHists1D["rate/"+moduleTag+"/"+layerHistName] = d3.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); // Layer 
Loop for(int j=0; j<=3; j++){ @@ -89,7 +86,7 @@ std::pair,std::map> createHit TString layerSizeName = "HitsPerEvent"+moduleTag+layerTag; TString sizeHistName = "h"+layerSizeName; - auto d3 = d2.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) + auto d4 = d3.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) .Define(xName,"xID[LayerFilter&&ModuleFilter]") .Define(yName,"yID[LayerFilter&&ModuleFilter]") .Define(boardName,"boardID[LayerFilter&&ModuleFilter]") @@ -101,44 +98,30 @@ std::pair,std::map> createHit - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column;Counts", xRange, xMin, xMax }, xName ); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName] = d3.Histo1D({xHistName, xHistName+";x pixel column;Rate [Hz]", xRange, xMin, xMax }, xName, "eventWeight"); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+xHistName]->Sumw2(false); - - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column;Counts", yRange, yMin, yMax }, yName ); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName] = d3.Histo1D({yHistName, yHistName+";y pixel column;Rate [Hz]", yRange, yMin, yMax }, yName, "eventWeight"); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+yHistName]->Sumw2(false); + hHists1D[moduleTag+"/"+layerTag+"/"+xHistName] = d4.Histo1D({xHistName, xHistName+";x pixel column;Rate [Hz]", xRange, xMin, xMax }, xName, "eventWeight"); + + hHists1D[moduleTag+"/"+layerTag+"/"+yHistName] = d4.Histo1D({yHistName, yHistName+";y pixel column;Rate [Hz]", yRange, yMin, yMax }, yName, "eventWeight"); - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Counts", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName ); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName] = d3.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Rate [Hz]", xRange, xMin, xMax, yRange, yMin, yMax}, xName, 
yName, "eventWeight"); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xyHistName]->Sumw2(false); + hHists2D[moduleTag+"/"+layerTag+"/"+xyHistName] = d4.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Rate [Hz]", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName, "eventWeight"); + + hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d4.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+sizeHistName] = d3.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName, "eventWeight"); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+sizeHistName]->Sumw2(false); - // Plot number of hits per boardID for each layer TString boardIDHistName = "hBoardID" +moduleTag + layerTag; - hHists1D["hits/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID;Counts", 3, 0, 3}, boardName); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName] = d3.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); - hHists1D["rate/"+moduleTag+"/"+layerTag+"/"+boardIDHistName]->Sumw2(false); + hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d4.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); // Plot number of hits per chipID for each layer TString chipIDHistName = "hChipID" +moduleTag + layerTag; - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Counts", 6, 0, 6, 6, 0, 6}, xChipName, yChipName); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName] = d3.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); - 
hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+chipIDHistName]->Sumw2(false); - + hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d4.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); + // Plot number of hits per chipID for each layer TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; - hHists2D["hits/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Counts", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d3.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); - hHists2D["rate/"+moduleTag+"/"+layerTag+"/"+xColumnIDHistName]->Sumw2(false); + hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d4.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); } } - return {hHists1D,hHists2D}; + return {hHists1D,hHists2D,hHists3D}; } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index 604a5290..6112231b 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -4,10 +4,11 @@ using RNode = ROOT::RDF::RNode; using H1ResultPtr = ROOT::RDF::RResultPtr; using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; using RVecI = ROOT::VecOps::RVec; // Lazy execution methods -std::pair,std::map> createReconstructionPlots( RNode d1 ){ +std::tuple,std::map,std::map> createReconstructionPlots( RNode d1 ){ int ePrimID = 2; int pBeamID = 1; @@ -15,6 +16,7 @@ std::pair,std::map> createRec std::map hHists1D; std::map hHists2D; + std::map hHists3D; d1 = d1.Define("eBeam", [eBeamID] 
(std::vector h) @@ -40,9 +42,10 @@ std::pair,std::map> createRec .Define("logQ2","log10(Q2)") .Define("x","Q2/(2*scatteredV.Dot(pBeam))") .Define("logx","log10(x)") - .Define("primPhi","primMom.Phi() * TMath::RadToDeg()") + .Define("primPhi","primMom.Phi()") + .Define("primPhi_deg","primPhi * TMath::RadToDeg()") .Define("W2","(pBeam+eBeam-primMom).M2()") - .Define("reconTheta","(double)(LowQ2TrackParameters[0].theta)") + .Define("reconTheta","(double)(TMath::Pi()-LowQ2TrackParameters[0].theta)") .Define("reconTheta_mrad","1000.0*reconTheta") .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") .Define("reconPhi_deg","reconPhi * TMath::RadToDeg()") @@ -58,25 +61,26 @@ std::pair,std::map> createRec .Define("reconPx","(double)reconMom.Px()") .Define("reconPy","(double)reconMom.Py()") .Define("reconPz","(double)reconMom.Pz()") - .Define("thetaRes","1000.0*(reconTheta-primTheta)") - .Define("phiRes","reconPhi-primPhi") + .Define("thetaRes","(reconTheta-primTheta)") + .Define("thetaRes_mrad","1000.0*thetaRes") + .Define("phiRes","reconPhi_deg-primPhi_deg") .Define("ERes","(reconMom.E()-primE)/primE") - .Define("Q2Res","reconlogQ2-logQ2") + .Define("logQ2Res","reconlogQ2-logQ2") .Define("XRes","reconX-x") .Define("W2Res","reconW2-W2"); double thetaMin = 0; // mrad - double thetaMax = 100; // mrad + double thetaMax = 12; // mrad double phiMin = -180; // deg double phiMax = 180; // deg double pxMin = -0.2; // GeV double pxMax = 0.2; // GeV double pyMin = -0.2; // GeV double pyMax = 0.2; // GeV - double pzMin = 0; // GeV - double pzMax = 18; // GeV + double pzMin = -18; // GeV + double pzMax = 0; // GeV double eMin = 0; // GeV double eMax = 18; // GeV double logQ2Min = -8.5; // GeV^2 @@ -85,8 +89,8 @@ std::pair,std::map> createRec double logxMax = 0; double w2Min = 0; // GeV^2 double w2Max = 0.5; // GeV^2 - double thetaResMin = -50; // mrad - double thetaResMax = 50; // mrad + double thetaResMin = -5; // mrad + double thetaResMax = 5; // mrad double phiResMin = -45; // 
deg double phiResMax = 45; // deg double eResMin = -0.1; // GeV @@ -98,11 +102,13 @@ std::pair,std::map> createRec double w2ResMin = -0.1; // GeV^2 double w2ResMax = 0.1; // GeV^2 - int bins1D = 400; - int bins2D = 200; + int bins1D = 400; + int bins2D = 200; + int bins3D = 50; + int bins3DRes = 100; hHists2D["reconThetaVsPrimTheta"] = d1.Histo2D({"reconThetaVsPrimTheta","reconThetaVsPrimTheta;#theta_{prim} [mrad];#theta_{recon} [mrad]",bins2D,thetaMin,thetaMax,bins2D,thetaMin,thetaMax},"primTheta_mrad","reconTheta_mrad"); - hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi","reconPhi"); + hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi_deg","reconPhi_deg"); hHists2D["reconPxVsPrimPx"] = d1.Histo2D({"reconPxVsPrimPx","reconPxVsPrimPx;p_{x,prim} [GeV];p_{x,recon} [GeV]",bins2D,pxMin,pxMax,bins2D,pxMin,pxMax},"primPx","reconPx"); hHists2D["reconPyVsPrimPy"] = d1.Histo2D({"reconPyVsPrimPy","reconPyVsPrimPy;p_{y,prim} [GeV];p_{y,recon} [GeV]",bins2D,pyMin,pyMax,bins2D,pyMin,pyMax},"primPy","reconPy"); @@ -115,26 +121,31 @@ std::pair,std::map> createRec hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",bins1D,thetaResMin,thetaResMax},"thetaRes_mrad"); hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",bins1D,eResMin,eResMax},"ERes"); - hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}]",bins1D,q2ResMin,q2ResMax},"Q2Res"); + hHists1D["Q2Res"] = d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}]",bins1D,q2ResMin,q2ResMax},"logQ2Res"); hHists1D["XRes"] = 
d1.Histo1D({"XRes","XRes;log(x_{recon})-log(x_{prim})",bins1D,xResMin,xResMax},"XRes"); hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",bins1D,w2ResMin,w2ResMax},"W2Res"); hHists2D["thetaResVsE"] = d1.Histo2D({"thetaResVsE", "thetaResVsE;#theta_{recon}-#theta_{prim} [mrad];E_{prim} [GeV]", bins2D, thetaResMin, thetaResMax, bins2D, eMin, eMax}, "thetaRes_mrad", "primE"); hHists2D["phiResVsE"] = d1.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); hHists2D["EResVsE"] = d1.Histo2D({"EResVsE", "EResVsE;E_{recon}-E_{prim} [GeV];E_{prim} [GeV]", bins2D, eResMin, eResMax, bins2D, eMin, eMax}, "ERes", "primE"); - hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", bins2D, q2ResMin, q2ResMax, bins2D, eMin, eMax}, "Q2Res", "primE"); + hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", bins2D, q2ResMin, q2ResMax, bins2D, eMin, eMax}, "logQ2Res", "primE"); hHists2D["phiResVsTheta"] = d1.Histo2D({"phiResVsTheta", "phiResVsTheta;#phi_{recon}-#phi_{prim} [rad];#theta_{prim} [mrad]", bins2D, phiResMin, phiResMax, bins2D, thetaMin, thetaMax}, "phiRes", "primTheta_mrad"); + //3D histograms to extract the resolution as a function of E and theta + hHists3D["thetaResVsETheta"] = d1.Histo3D({"thetaResVsETheta","thetaResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];#theta_{recon}-#theta_{prim} [mrad]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,thetaResMin,thetaResMax},"primE","primTheta_mrad","thetaRes_mrad"); + hHists3D["phiResVsETheta"] = d1.Histo3D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];#phi_{recon}-#phi_{prim} [rad]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,phiResMin,phiResMax},"primE","primTheta_mrad","phiRes"); + hHists3D["EResVsETheta"] = 
d1.Histo3D({"EResVsETheta","EResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];E_{recon}-E_{prim} [GeV]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,eResMin,eResMax},"primE","primTheta_mrad","ERes"); + auto d2 = d1.Filter("reconTheta_mrad<2"); hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); - hHists2D["thetacut/reconPhiVsPrimPhi"] = d2.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi","reconPhi"); + hHists2D["thetacut/reconPhiVsPrimPhi"] = d2.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi_deg","reconPhi_deg"); //Plot showing where the phi resolution is less than 30 degrees in terms of E and theta //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad]",100,eMin,eMax,100,thetaMin,thetaMax},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); - return {hHists1D, hHists2D}; + return {hHists1D, hHists2D, hHists3D}; } diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index 65004979..ddc1bdc6 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -2,6 +2,8 @@ #include #include #include +#include +#include void SetStyle() { // Set global plot format variables @@ -44,7 +46,7 @@ TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Re TH2* RatePlot = (TH2*)inputDir->Get(histName); // Check plots exist if (!RatePlot) { - cout << "Error: plot "<< histName <<" not found in input file" << endl; + std::cout << "Error: plot "<< histName <<" not found in input file" << std::endl; return nullptr; } @@ -167,7 +169,7 @@ void 
FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString // Check plots exist if (!Q2plot || !Eplot) { - cout << "Error: plots "<< Q2HistName <<" and/or "<< EHistName <<" not found in input file" << endl; + std::cout << "Error: plots "<< Q2HistName <<" and/or "<< EHistName <<" not found in input file" << std::endl; return; } @@ -213,14 +215,14 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot // Open the input file TFile* inputFile = TFile::Open(inName); if (!inputFile) { - cout << "Error opening input file:" << inName << endl; + std::cout << "Error opening input file:" << inName << std::endl; return; } // Check the directory LOWQ2 exists and cd into it TDirectory* dir = inputFile->GetDirectory("LOWQ2"); if (!dir) { - cout << "Error: directory LOWQ2 not found in input file" << endl; + std::cout << "Error: directory LOWQ2 not found in input file" << std::endl; return; } diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index 95d8efbe..533627b3 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -3,6 +3,9 @@ void RunLOWQ2(){ + //Set implicit multi-threading + ROOT::EnableImplicitMT(); + std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; dd4hep::Detector& detector = dd4hep::Detector::getInstance(); detector.fromCompact(compactName); @@ -23,7 +26,10 @@ void RunLOWQ2(){ // LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","LOWQ2QRRates.root",detector,eventRateQR); // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","LOWQ2BremsRates.root",detector,bunchRate); // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","LOWQ2BremsRates2.root",detector,eventRateBrems); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","LOWQ2QRRates3.root",detector,eventRateQR); - 
LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsRates3.root",detector,eventRateBrems); + + LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","LOWQ2QRRecon2.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsRecon2.root",detector,eventRateBrems); + + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsHits.root",detector,eventRateBrems); } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/functors.h b/benchmarks/LOWQ2/analysis/functors.h index 59964f76..e978ff89 100644 --- a/benchmarks/LOWQ2/analysis/functors.h +++ b/benchmarks/LOWQ2/analysis/functors.h @@ -4,13 +4,15 @@ #include "edm4hep/SimTrackerHit.h" #include "edm4hep/SimCalorimeterHit.h" +using RVecHits = ROOT::VecOps::RVec; + //----------------------------------------------------------------------------------------- // Grab Component functor //----------------------------------------------------------------------------------------- struct getSubID{ getSubID(std::string cname, dd4hep::Detector& det, std::string rname = "TaggerTrackerHits") : componentName(cname), detector(det), readoutName(rname){} - ROOT::VecOps::RVec operator()(const std::vector& evt) { + ROOT::VecOps::RVec operator()(const RVecHits& evt) { auto decoder = detector.readout(readoutName).idSpec().decoder(); auto indexID = decoder->index(componentName); ROOT::VecOps::RVec result; From 80933e9efb583ced7810a1e091d082acabfb7058 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 31 Jan 2024 13:45:49 +0000 Subject: [PATCH 15/53] Added .gitignore and moved plots into directory --- benchmarks/LOWQ2/analysis/.gitignore | 1 + benchmarks/LOWQ2/analysis/RunLOWQ2.C | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 benchmarks/LOWQ2/analysis/.gitignore diff --git a/benchmarks/LOWQ2/analysis/.gitignore b/benchmarks/LOWQ2/analysis/.gitignore new file mode 100644 index 00000000..56fbe15d --- /dev/null +++ 
b/benchmarks/LOWQ2/analysis/.gitignore @@ -0,0 +1 @@ +plots* \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index 533627b3..be3c20d2 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -21,15 +21,15 @@ void RunLOWQ2(){ double eventRateBrems = luminosity * eventCrossSectionBrems * 1e-27; // [Hz] double bunchRate = 1.0 / bunchSpacing; // [Hz] - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","LOWQ2QR2.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","LOWQ2QROld.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","LOWQ2QRRates.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","LOWQ2BremsRates.root",detector,bunchRate); - // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","LOWQ2BremsRates2.root",detector,eventRateBrems); + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","plots/LOWQ2QR2.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","plots/LOWQ2QROld.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","plots/LOWQ2QRRates.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","plots/LOWQ2BremsRates.root",detector,bunchRate); + // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","plots/LOWQ2BremsRates2.root",detector,eventRateBrems); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","LOWQ2QRRecon2.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsRecon2.root",detector,eventRateBrems); + 
LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","LOWQ2BremsHits.root",detector,eventRateBrems); + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsHits.root",detector,eventRateBrems); } \ No newline at end of file From 48d7d1af61c39fab2205ca568679392d8c0b1aa1 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 31 Jan 2024 13:50:51 +0000 Subject: [PATCH 16/53] Added calibrations and fieldmaps to .gitignore --- benchmarks/LOWQ2/analysis/.gitignore | 4 +++- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/.gitignore b/benchmarks/LOWQ2/analysis/.gitignore index 56fbe15d..a8ce0618 100644 --- a/benchmarks/LOWQ2/analysis/.gitignore +++ b/benchmarks/LOWQ2/analysis/.gitignore @@ -1 +1,3 @@ -plots* \ No newline at end of file +plots* +calibrations* +fieldmaps* diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index ddc1bdc6..cad8d169 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -257,6 +257,6 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot //---------------------------------------------------------------------- void PostprocessLOWQ2() { - Postprocess("LOWQ2QRRates3.root", "LOWQ2QRPlots.root", "Quasi-Real"); - Postprocess("LOWQ2BremsRates3.root", "LOWQ2BremsPlots.root", "Bremsstrahlung"); + Postprocess("LOWQ2QRRates3.root", "plots/LOWQ2QRPlots.root", "Quasi-Real"); + Postprocess("LOWQ2BremsRates3.root", "plots/LOWQ2BremsPlots.root", "Bremsstrahlung"); } \ No newline at end of file From c8f49b843381449063ca8951cb177d923827f312 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 31 Jan 
2024 20:35:36 +0000 Subject: [PATCH 17/53] Removed excess text from histogram titles and cleaned up integrated acceptance plot --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 43 ++-- benchmarks/LOWQ2/analysis/LOWQ2acceptance.h | 74 ++++--- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 22 +- .../LOWQ2/analysis/LOWQ2reconstruction.h | 10 +- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 202 ++++++++++++++---- 5 files changed, 238 insertions(+), 113 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index 3838a542..2adaef78 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -100,18 +100,13 @@ void writePlots( TString outName ){ TDirectory* dir = benchmarkDir; for (size_t i = 0; i < parts.size(); ++i) { - if (i == parts.size() - 1) { - // This is the last part, write the histogram - dir->cd(); + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); } else { - // This is not the last part, create or get the directory - if (!dir->GetDirectory(parts[i].c_str())) { - dir = dir->mkdir(parts[i].c_str()); - } else { - dir = dir->GetDirectory(parts[i].c_str()); - } - dir->cd(); + dir = dir->GetDirectory(parts[i].c_str()); } + dir->cd(); } TDirectory* currentDir = gDirectory; @@ -119,25 +114,25 @@ void writePlots( TString outName ){ for(auto &[key,hist]: get<0>(hists)){ //Split histogram name into parts - std::stringstream ss(key.Data()); - std::string part; - std::vector parts; - while (std::getline(ss, part, '/')) { - parts.push_back(part); + std::stringstream ss2(key.Data()); + std::string part2; + std::vector parts2; + while (std::getline(ss2, part2, '/')) { + parts2.push_back(part2); } TDirectory* dir = currentDir; - for (size_t i = 0; i < parts.size(); ++i) { - if (i == parts.size() - 1) { + for (size_t i = 0; i < parts2.size(); ++i) { + if (i == parts2.size() - 1) { 
// This is the last part, write the histogram hist->SetMinimum(0); - hist->Write(parts[i].c_str()); + hist->Write(parts2[i].c_str()); } else { // This is not the last part, create or get the directory - if (!dir->GetDirectory(parts[i].c_str())) { - dir = dir->mkdir(parts[i].c_str()); + if (!dir->GetDirectory(parts2[i].c_str())) { + dir = dir->mkdir(parts2[i].c_str()); } else { - dir = dir->GetDirectory(parts[i].c_str()); + dir = dir->GetDirectory(parts2[i].c_str()); } dir->cd(); } @@ -281,15 +276,15 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep* } if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ - histMap["AcceptanceDistributions"] = createAcceptancePlots(node); + histMap["Acceptance"] = createAcceptancePlots(node); } if(Any(colNames=="TaggerTrackerClusterPositions")){ - histMap["ClusterDistributions"] = createClusterPlots(node); + histMap["Clusters"] = createClusterPlots(node); } if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ - histMap["ReconstructedDistributions"] = createReconstructionPlots(node); + histMap["Reconstruction"] = createReconstructionPlots(node); } writePlots( outName ); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h index c0063709..d7d1037b 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -52,47 +52,38 @@ std::tuple,std::map,std::map< .Define("W2","(pBeam+eBeam-primMom).M2()"); RVecS colNames = d1.GetColumnNames(); - std::map filters; - filters["All"] = "true"; - filters["ThetaCut"] = "primTheta<11"; + std::vector> filters; + filters.push_back(std::make_pair("Generated",d1)); + filters.push_back(std::make_pair("Theta<10mrad",d1.Filter("primTheta<10"))); if(Any(colNames=="TaggerTrackerHits")){ - filters["Any"] = "TaggerTrackerHits.size() > 0"; - filters["Mod1"] = "moduleID[moduleID==1].size()>0"; - 
filters["Mod2"] = "moduleID[moduleID==2].size()>0"; + filters.push_back(std::make_pair("Module1-Hit",d1.Filter("moduleID[moduleID==1].size()>0"))); + filters.push_back(std::make_pair("Module2-Hit",d1.Filter("moduleID[moduleID==2].size()>0"))); + filters.push_back(std::make_pair("Any-Hit",d1.Filter("TaggerTrackerHits.size() > 0"))); } if(Any(colNames=="LowQ2Tracks")){ - filters["Reconstructed"] = "LowQ2Tracks.size()>0"; + filters.push_back(std::make_pair("Reconstructed-Track",d1.Filter("LowQ2Tracks.size()>0"))); } - //Store the counts for each filter - RVecI filterCounts; - int rawCount = 0; - TString filtersName = ""; - for (auto& filter : filters) + for (auto& [rawString,filterNode] : filters) { - RNode filterNode = d1.Filter(filter.second); - TString rawString = filter.first; - filtersName += rawString + "_"; - - //Histogram names - TString energyHistName = "h" + rawString + "Energy"; - TString thetaHistName = "h" + rawString + "Theta"; - TString etaHistName = "h" + rawString + "Eta"; - TString logQ2HistName = "h" + rawString + "logQ2"; - TString logxHistName = "h" + rawString + "logx"; - TString Q2HistName = "h" + rawString + "Q2"; - TString xHistName = "h" + rawString + "x"; - TString W2HistName = "h" + rawString + "W2"; - TString Q2xHistName = "h" + rawString + "Q2x"; - TString logQ2logxHistName = "h" + rawString + "logQ2logx"; - TString WlogQ2HistName = "h" + rawString + "WlogQ2"; - TString WlogxHistName = "h" + rawString + "Wlogx"; - TString primElogQ2HistName = "h" + rawString + "primElogQ2"; - TString primEthetaHistName = "h" + rawString + "primEtheta"; + TString energyHistName = "hEnergy"; + TString thetaHistName = "hTheta"; + TString etaHistName = "hEta"; + TString logQ2HistName = "hlogQ2"; + TString logxHistName = "hlogx"; + TString Q2HistName = "hQ2"; + TString xHistName = "hx"; + TString W2HistName = "hW2"; + TString Q2xHistName = "hQ2x"; + TString logQ2logxHistName = "hlogQ2logx"; + TString WlogQ2HistName = "hWlogQ2"; + TString WlogxHistName = 
"hWlogx"; + TString primElogQ2HistName = "hprimElogQ2"; + TString primEthetaHistName = "hprimEtheta"; //Histogram Ranges double energyMin = 0; @@ -148,10 +139,25 @@ std::tuple,std::map,std::map< hHists2D[rawString+"/"+primElogQ2HistName] = filterNode.Histo2D({primElogQ2HistName, primElogQ2HistName + ";E [GeV];log(Q2) [GeV]", nBins2D, energyMin, energyMax, nBins2D, logQ2Min, logQ2Max}, "primE", "logQ2"); hHists2D[rawString+"/"+primEthetaHistName] = filterNode.Histo2D({primEthetaHistName, primEthetaHistName + ";E [GeV];Theta [mrad]", nBins2D, energyMin, energyMax, nBins2D, thetaMin, thetaMax}, "primE", "primTheta"); + } + + //---------------------------------------------------------------------------------------------------- + // Creates integrated acceptance plots for various filters + //---------------------------------------------------------------------------------------------------- + + //Store the counts for each filter + RVecI filterCounts; + int rawCount = 0; + TString filtersName = ""; + //Calculate the counts for each filter + for (auto& [rawString,filterNode] : filters) + { + filtersName += rawString + "_"; int counts = *filterNode.Count(); + std::cout << rawString << " " << counts << std::endl; filterCounts.push_back(counts); - if (filter.first == "All") { + if (rawString == "Generated") { rawCount = counts; } } @@ -172,9 +178,9 @@ std::tuple,std::map,std::map< // Use the new column in the histograms hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;"+filtersName+";Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); - hHists1D["Fractions"] = dfWithCounts.Histo1D({"hFractions", "Fractions;"+filtersName+";Fraction", Nfilters, 0, static_cast(Nfilters)}, "entry","fraction"); + hHists1D["IntegratedAcceptance"] = dfWithCounts.Histo1D({"hFractions", "Fractions;"+filtersName+";Fraction", Nfilters, 0, static_cast(Nfilters)}, "entry","fraction"); hHists1D["TotalCounts"]->Sumw2(false); - hHists1D["Fractions"]->Sumw2(false); + 
hHists1D["IntegratedAcceptance"]->Sumw2(false); return {hHists1D,hHists2D,hHists3D}; } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 9bb03f6a..39d20ce2 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -65,25 +65,25 @@ std::tuple,std::map,std::map< TString layerNum = std::to_string(j); TString layerTag = "layer"+layerNum; - TString xName = "xID"+moduleTag+layerTag; + TString xName = "xPixel"; TString xHistName = "h"+xName; - TString yName = "yID"+moduleTag+layerTag; + TString yName = "yPixel"; TString yHistName = "h"+yName; - TString xChipName = "xChipID"+moduleTag+layerTag; - TString yChipName = "yChipID"+moduleTag+layerTag; + TString xChipName = "xChip"; + TString yChipName = "yChip"; - TString yHalfChipName = "yHalfChipID"+moduleTag+layerTag; + TString yHalfChipName = "yHalfChip"; TString xyHistName = "h"+xName+yName; - TString xColumnName = "xColumnID"+moduleTag+layerTag; + TString xColumnName = "xColumn"; - TString boardName = "boardID"+moduleTag+layerTag; + TString boardName = "board"; std::vector layerSizeInput = {xName.Data()}; - TString layerSizeName = "HitsPerEvent"+moduleTag+layerTag; + TString layerSizeName = "HitsPerEvent"; TString sizeHistName = "h"+layerSizeName; auto d4 = d3.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) @@ -107,15 +107,15 @@ std::tuple,std::map,std::map< hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d4.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); // Plot number of hits per boardID for each layer - TString boardIDHistName = "hBoardID" +moduleTag + layerTag; + TString boardIDHistName = "hBoardID"; hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d4.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); // Plot number of hits per chipID for each layer - TString chipIDHistName = "hChipID" 
+moduleTag + layerTag; + TString chipIDHistName = "hChipID"; hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d4.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); // Plot number of hits per chipID for each layer - TString xColumnIDHistName = "hxColumnID" +moduleTag + layerTag; + TString xColumnIDHistName = "hxColumnID"; hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d4.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index 6112231b..2db1d505 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -91,8 +91,8 @@ std::tuple,std::map,std::map< double w2Max = 0.5; // GeV^2 double thetaResMin = -5; // mrad double thetaResMax = 5; // mrad - double phiResMin = -45; // deg - double phiResMax = 45; // deg + double phiResMin = -100; // deg + double phiResMax = 100; // deg double eResMin = -0.1; // GeV double eResMax = 0.1; // GeV double q2ResMin = -0.01; // GeV^2 @@ -139,12 +139,10 @@ std::tuple,std::map,std::map< auto d2 = d1.Filter("reconTheta_mrad<2"); - hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); + hHists1D["thetacut/phiRes"] = d2.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); + hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); hHists2D["thetacut/reconPhiVsPrimPhi"] = d2.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} 
[deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi_deg","reconPhi_deg"); - //Plot showing where the phi resolution is less than 30 degrees in terms of E and theta - //hHists2D["phiResVsETheta"] = d1.Histo2D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad]",100,eMin,eMax,100,thetaMin,thetaMax},"primE","primTheta",[](double phiRes){return fabs(phiRes)<0.5;},{"phiRes-primPhi"}); - return {hHists1D, hHists2D, hHists3D}; } diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index cad8d169..f2224c91 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -5,6 +5,9 @@ #include #include +//---------------------------------------------------------------------- +// Set global plot format variables +//---------------------------------------------------------------------- void SetStyle() { // Set global plot format variables gStyle->SetOptStat(0); @@ -15,9 +18,10 @@ void SetStyle() { gStyle->SetTitleOffset(4.0, "Z"); // gStyle->SetTitleOffset(1.0, "Y"); - } +//---------------------------------------------------------------------- +// Create acceptance plots //---------------------------------------------------------------------- TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHistName, TString Tag="Quasi-Real") { @@ -38,9 +42,12 @@ TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHist } +//---------------------------------------------------------------------- +// Create rate plots +//---------------------------------------------------------------------- TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real") { - TString histName = "rate/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxIDmodule"+std::to_string(Module)+"layer"+std::to_string(Layer)+"yIDmodule"+std::to_string(Module)+"layer"+std::to_string(Layer); + TString histName = 
"AllHits/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixel"; // Read in the plots from the input file TH2* RatePlot = (TH2*)inputDir->Get(histName); @@ -67,12 +74,16 @@ TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Re } - +//---------------------------------------------------------------------- +// Create formatted acceptance plots on a canvas +//---------------------------------------------------------------------- void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { - //E-Q2 acceptance plots - TString ReconEQ2Name = "Reconstructed/hReconstructedprimElogQ2"; - TString AllEQ2Name = "All/hAllprimElogQ2"; + //---------------------------------------------------------------------- + // E-Q2 acceptance plot + //---------------------------------------------------------------------- + TString ReconEQ2Name = "Reconstructed-Track/hprimElogQ2"; + TString AllEQ2Name = "Generated/hprimElogQ2"; TH1* AcceptEQ2 = AcceptancePlot(inputDir,ReconEQ2Name,AllEQ2Name); @@ -96,9 +107,11 @@ void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag= // Clean up delete canvasEQ2; - //E-Theta acceptance plots - TString ReconEThetaName = "Reconstructed/hReconstructedprimEtheta"; - TString AllEThetaName = "All/hAllprimEtheta"; + //---------------------------------------------------------------------- + // E-Theta acceptance plot + //---------------------------------------------------------------------- + TString ReconEThetaName = "Reconstructed-Track/hprimEtheta"; + TString AllEThetaName = "Generated/hprimEtheta"; TH1* AcceptETheta = AcceptancePlot(inputDir,ReconEThetaName,AllEThetaName); @@ -121,14 +134,77 @@ void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag= // Clean up delete canvasETheta; + + //---------------------------------------------------------------------- + // Integrated acceptance plot + 
//---------------------------------------------------------------------- + TString ReconIntegratedName = "IntegratedAcceptance"; + + // Read in the plots from the input file + TH1* IntegratedAcceptancePlot = (TH1*)inputDir->Get(ReconIntegratedName); + // Check plots exist + if (!IntegratedAcceptancePlot) { + std::cout << "Error: plot "<< ReconIntegratedName <<" not found in input file" << std::endl; + return; + } + + TCanvas* canvasIntegratedAcceptance = new TCanvas("IntegratedAcceptance", "IntegratedAcceptance", 1600, 1200); + + //Get xAxis title + TString xAxisTitle = IntegratedAcceptancePlot->GetXaxis()->GetTitle(); + //Break up xAxis title into words by "_" + TObjArray* xAxisTitleWords = xAxisTitle.Tokenize("_"); + //SetBinLabel for each bin + for (int i=1; i<=xAxisTitleWords->GetEntries(); i++) { + IntegratedAcceptancePlot->GetXaxis()->SetBinLabel(i, xAxisTitleWords->At(i-1)->GetName()); + } + + // Format the plot + IntegratedAcceptancePlot->SetTitle("Integrated acceptance"); + IntegratedAcceptancePlot->GetXaxis()->SetTitle(""); + IntegratedAcceptancePlot->GetYaxis()->SetTitle("Acceptance"); + + IntegratedAcceptancePlot->SetStats(0); + IntegratedAcceptancePlot->Draw(); + + + // Get the number of bins in the histogram + int nBins = IntegratedAcceptancePlot->GetNbinsX(); + + // Create a TText object to draw the text + TText t; + t.SetTextAlign(22); // Center the text + t.SetTextSize(0.02); // Set the text size + + // Loop over the bins + for (int i = 1; i <= nBins; ++i) { + // Get the bin content + double binContent = IntegratedAcceptancePlot->GetBinContent(i); + + // Get the bin center + double binCenter = IntegratedAcceptancePlot->GetBinCenter(i); + + // Draw the bin content at the bin center + t.DrawText(binCenter, binContent+0.02, Form("%.3f", binContent)); + } + + // Update the canvas to show the changes + gPad->Update(); + + // Save the canvas to output file + outputFile->WriteTObject(canvasIntegratedAcceptance); + + // Clean up + delete 
canvasIntegratedAcceptance; } //---------------------------------------------------------------------- - +// Create formatted rate plots on a canvas +//---------------------------------------------------------------------- void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { - TString histName = "rate/module2/layer0/hxIDmodule2layer0yIDmodule2layer0"; + TString histName = "AllHits/module2/layer0/hxPixelyPixel"; TCanvas* canvas = new TCanvas("RateCanvas", "RateCanvas", 3200, 1200); canvas->Divide(2,1); @@ -155,47 +231,96 @@ void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi } //---------------------------------------------------------------------- - +// Create formatted reconstruction plots on a canvas +//---------------------------------------------------------------------- void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { - TString Q2HistName = "reconQ2VsPrimQ2"; - TString EHistName = "reconEVsPrimE"; + TString EHistName = "reconEVsPrimE"; + TString ThetaHistName = "reconThetaVsPrimTheta"; + TString PhiHistName = "thetacut/reconPhiVsPrimPhi"; + + TString EResName = "ERes"; + TString ThetaResName = "thetaRes"; + TString PhiResName = "thetacut/phiRes"; TCanvas* canvas = new TCanvas("ReconCanvas", "ReconCanvas", 2400, 1200); // Read in the plots from the input file - TH1* Q2plot = (TH1*)inputDir->Get(Q2HistName); - TH1* Eplot = (TH1*)inputDir->Get(EHistName); + TH1* EPlot = (TH1*)inputDir->Get(EHistName); + TH1* ThetaPlot = (TH1*)inputDir->Get(ThetaHistName); + TH1* PhiPlot = (TH1*)inputDir->Get(PhiHistName); + TH1* EResPlot = (TH1*)inputDir->Get(EResName); + TH1* ThetaResPlot = (TH1*)inputDir->Get(ThetaResName); + TH1* PhiResPlot = (TH1*)inputDir->Get(PhiResName); + // Check plots exist - if (!Q2plot || !Eplot) { - std::cout << "Error: plots "<< Q2HistName <<" and/or "<< EHistName <<" not found in input file" << std::endl; + if (!ThetaPlot || !EPlot 
|| !PhiPlot || !ThetaResPlot || !EResPlot || !PhiResPlot) { + std::cout << "Error: plots "<< ThetaHistName <<", "<< EHistName <<", "<< PhiHistName <<", "<< ThetaResName <<", "<< EResName <<", "<< PhiResName <<" not found in input file" << std::endl; return; } // Draw the plots on the canvas - canvas->Divide(2,1); + canvas->Divide(3,2); canvas->cd(1); gPad->SetLogz(); - Q2plot->SetTitle("Reconstructed Q^{2} vs. primary Q^{2}"); - Q2plot->GetXaxis()->SetTitle("log_{10}(Q^{2}_{prim}) [GeV^{2}]"); - Q2plot->GetYaxis()->SetTitle("log_{10}(Q^{2}_{recon}) [GeV^{2}]"); - Q2plot->GetZaxis()->SetTitle("Counts"); + EPlot->SetTitle("Reconstructed electron energy vs. primary electron energy"); + EPlot->GetXaxis()->SetTitle("E_{prim} [GeV]"); + EPlot->GetYaxis()->SetTitle("E_{recon} [GeV]"); + EPlot->GetZaxis()->SetTitle("Counts"); - Q2plot->SetStats(0); - Q2plot->Draw("colz"); + EPlot->SetStats(0); + EPlot->Draw("colz"); canvas->cd(2); gPad->SetLogz(); - Eplot->SetTitle("Reconstructed electron energy vs. primary electron energy"); - Eplot->GetXaxis()->SetTitle("E_{prim} [GeV]"); - Eplot->GetYaxis()->SetTitle("E_{recon} [GeV]"); - Eplot->GetZaxis()->SetTitle("Counts"); + ThetaPlot->SetTitle("Reconstructed Theta vs. primary Theta"); + ThetaPlot->GetXaxis()->SetTitle("Theta_{prim} [mrad]"); + ThetaPlot->GetYaxis()->SetTitle("Theta_{recon} [mrad]"); + ThetaPlot->GetZaxis()->SetTitle("Counts"); + + ThetaPlot->SetStats(0); + ThetaPlot->Draw("colz"); + + canvas->cd(3); + gPad->SetLogz(); + + PhiPlot->SetTitle("Reconstructed Phi vs. 
primary Phi"); + PhiPlot->GetXaxis()->SetTitle("Phi_{prim} [deg]"); + PhiPlot->GetYaxis()->SetTitle("Phi_{recon} [deg]"); + PhiPlot->GetZaxis()->SetTitle("Counts"); + + PhiPlot->SetStats(0); + PhiPlot->Draw("colz"); + + canvas->cd(4); + + EResPlot->SetTitle("Reconstructed electron energy resolution"); + EResPlot->GetXaxis()->SetTitle("E_{recon} - E_{prim} / E_{prim}"); + EResPlot->GetYaxis()->SetTitle("Counts"); + + EResPlot->SetStats(0); + EResPlot->Draw(); + + canvas->cd(5); - Eplot->SetStats(0); - Eplot->Draw("colz"); + ThetaResPlot->SetTitle("Reconstructed Theta resolution"); + ThetaResPlot->GetXaxis()->SetTitle("Theta_{recon} - Theta_{prim} [mrad]"); + ThetaResPlot->GetYaxis()->SetTitle("Counts"); + + ThetaResPlot->SetStats(0); + ThetaResPlot->Draw(); + + canvas->cd(6); + + PhiResPlot->SetTitle("Reconstructed Phi resolution"); + PhiResPlot->GetXaxis()->SetTitle("Phi_{recon} - Phi_{prim} [deg]"); + PhiResPlot->GetYaxis()->SetTitle("Counts"); + + PhiResPlot->SetStats(0); + PhiResPlot->Draw(); // Save the canvas to output file outputFile->WriteTObject(canvas); @@ -206,7 +331,7 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString } //---------------------------------------------------------------------- -// This function is called by the benchmarking script +// This function is called by the benchmarking script maybe //---------------------------------------------------------------------- void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plots.root", TString Tag="Quasi-Real") { @@ -232,19 +357,19 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot // Format the plots //Check if AcceptanceDistributions directory exists - TDirectory* dirA = dir->GetDirectory("AcceptanceDistributions"); + TDirectory* dirA = dir->GetDirectory("Acceptance"); if (dirA) { FormatAcceptancePlots(dirA, outputFile,Tag); } //Check if SimDistributions directory exists - TDirectory* dirS = 
dir->GetDirectory("SimDistributions"); + TDirectory* dirS = dir->GetDirectory("Rates"); if (dirS) { FormatRatePlots(dirS, outputFile,Tag); } //Check if ReconstructedDistributions directory exists - TDirectory* dirR = dir->GetDirectory("ReconstructedDistributions"); + TDirectory* dirR = dir->GetDirectory("Reconstruction"); if (dirR) { FormatReconstructionPlots(dirR, outputFile,Tag); } @@ -255,8 +380,9 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot } //---------------------------------------------------------------------- - +// Main function to create canvases +//---------------------------------------------------------------------- void PostprocessLOWQ2() { - Postprocess("LOWQ2QRRates3.root", "plots/LOWQ2QRPlots.root", "Quasi-Real"); - Postprocess("LOWQ2BremsRates3.root", "plots/LOWQ2BremsPlots.root", "Bremsstrahlung"); + Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); + Postprocess("plots/LOWQ2BremsRecon2.root", "plots/LOWQ2Brems_FormattedPlots.root", "Bremsstrahlung"); } \ No newline at end of file From eacf3c85c39fcaa54f35053ea60241426525c523 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 1 Feb 2024 14:43:32 +0000 Subject: [PATCH 18/53] Improved formatted historgams and fixed some inconsistencies --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 18 +- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 14 +- .../LOWQ2/analysis/LOWQ2reconstruction.h | 9 +- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 155 ++++++++++++++++-- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 6 +- 5 files changed, 162 insertions(+), 40 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index 2adaef78..c681bda2 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -275,17 +275,17 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep* } - if((Any(colNames==readoutName) || 
Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ - histMap["Acceptance"] = createAcceptancePlots(node); - } + // if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ + // histMap["Acceptance"] = createAcceptancePlots(node); + // } - if(Any(colNames=="TaggerTrackerClusterPositions")){ - histMap["Clusters"] = createClusterPlots(node); - } + // if(Any(colNames=="TaggerTrackerClusterPositions")){ + // histMap["Clusters"] = createClusterPlots(node); + // } - if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ - histMap["Reconstruction"] = createReconstructionPlots(node); - } + // if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + // histMap["Reconstruction"] = createReconstructionPlots(node); + // } writePlots( outName ); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 39d20ce2..26b3b027 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -15,11 +15,13 @@ std::tuple,std::map,std::map< int xChip = 448; int yChip = 512; - std::map xPixelMin = {{1,-xChip*3},{2,-xChip*3}}; - std::map xPixelMax = {{1, xChip*3},{2, xChip*3}}; + int xChips = 6; + int yChips = 6; + std::map xPixelMin = {{1,-xChip*xChips/2},{2,-xChip*xChips/2}}; + std::map xPixelMax = {{1, xChip*xChips/2},{2, xChip*xChips/2}}; - std::map yPixelMin = {{1,-yChip*3},{2,-yChip*3}}; - std::map yPixelMax = {{1, yChip*3},{2, yChip*3}}; + std::map yPixelMin = {{1,-yChip*yChips/2},{2,-yChip*yChips/2}}; + std::map yPixelMax = {{1, yChip*yChips/2},{2, yChip*yChips/2}}; std::map hHists1D; @@ -35,7 +37,7 @@ std::tuple,std::map,std::map< // Plot number of hits per moduleID - hHists1D["rate/hmoduleID"] = d2.Histo1D({"hmoduleID", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); + hHists1D["hmoduleID"] = d2.Histo1D({"hmoduleID", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, 
"moduleID", "eventWeight"); // Module Loop for(int i=1; i<=2; i++){ @@ -57,7 +59,7 @@ std::tuple,std::map,std::map< .Define(layerName,"layerID[ModuleFilter]"); - hHists1D["rate/"+moduleTag+"/"+layerHistName] = d3.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); + hHists1D[moduleTag+"/"+layerHistName] = d3.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); // Layer Loop for(int j=0; j<=3; j++){ diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index 2db1d505..444b6546 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -18,7 +18,8 @@ std::tuple,std::map,std::map< std::map hHists2D; std::map hHists3D; - d1 = d1.Define("eBeam", [eBeamID] + d1 = d1.Filter("LowQ2TrackParameters.size()>0") + .Define("eBeam", [eBeamID] (std::vector h) {return ROOT::Math::PxPyPzMVector (h[eBeamID].momentum.x,h[eBeamID].momentum.y,h[eBeamID].momentum.z,h[eBeamID].mass);}, {"MCParticles"}) @@ -91,8 +92,8 @@ std::tuple,std::map,std::map< double w2Max = 0.5; // GeV^2 double thetaResMin = -5; // mrad double thetaResMax = 5; // mrad - double phiResMin = -100; // deg - double phiResMax = 100; // deg + double phiResMin = -180; // deg + double phiResMax = 180; // deg double eResMin = -0.1; // GeV double eResMax = 0.1; // GeV double q2ResMin = -0.01; // GeV^2 @@ -137,7 +138,7 @@ std::tuple,std::map,std::map< hHists3D["phiResVsETheta"] = d1.Histo3D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];#phi_{recon}-#phi_{prim} [rad]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,phiResMin,phiResMax},"primE","primTheta_mrad","phiRes"); hHists3D["EResVsETheta"] = d1.Histo3D({"EResVsETheta","EResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];E_{recon}-E_{prim} 
[GeV]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,eResMin,eResMax},"primE","primTheta_mrad","ERes"); - auto d2 = d1.Filter("reconTheta_mrad<2"); + auto d2 = d1.Filter("reconTheta_mrad>1"); hHists1D["thetacut/phiRes"] = d2.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index f2224c91..a099bd38 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -4,6 +4,7 @@ #include #include #include +#include //---------------------------------------------------------------------- // Set global plot format variables @@ -18,6 +19,13 @@ void SetStyle() { gStyle->SetTitleOffset(4.0, "Z"); // gStyle->SetTitleOffset(1.0, "Y"); + + // // Set global plot format variables + // int font = 42; // + // gStyle->SetTextFont(font); // Set the font for text + // gStyle->SetLabelFont(font, "XYZ"); // Set the font for axis labels + // gStyle->SetTitleFont(font, "XYZ"); // Set the font for titles + } //---------------------------------------------------------------------- @@ -68,6 +76,7 @@ TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Re RatePlot->GetYaxis()->SetTitle("y pixel"); RatePlot->GetZaxis()->SetTitle("Average Rate [Hz/55 #mum pixel]"); + RatePlot->SetStats(0); return RatePlot; @@ -123,7 +132,7 @@ void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag= title = "Acceptance as a function of scattered electron energy and theta"; AcceptETheta->SetTitle(title); AcceptETheta->GetXaxis()->SetTitle("E_{e'} [GeV]"); - AcceptETheta->GetYaxis()->SetTitle("Theta_{e'} [mrad]"); + AcceptETheta->GetYaxis()->SetTitle("#theta_{e'} [mrad]"); 
AcceptETheta->GetZaxis()->SetTitle("Acceptance"); AcceptETheta->SetStats(0); @@ -185,7 +194,7 @@ void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag= double binCenter = IntegratedAcceptancePlot->GetBinCenter(i); // Draw the bin content at the bin center - t.DrawText(binCenter, binContent+0.02, Form("%.3f", binContent)); + t.DrawText(binCenter, binContent+0.02, Form("%.1f %s", binContent*100,"%")); } // Update the canvas to show the changes @@ -228,6 +237,75 @@ void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi // Clean up delete canvas; + TCanvas* canvas2 = new TCanvas("RateCanvasOverlay", "RateCanvasOverlay", 3200, 1200); + canvas2->Divide(2,1); + + // Draw the plots on the canvas + canvas2->cd(1); + gPad->SetLogz(); + RatePlot1_0->Draw("colz"); + + //Add a grid ontop of the histogram showing 448x512 pixel chip boundaries + double xMin = RatePlot1_0->GetXaxis()->GetXmin(); + double xMax = RatePlot1_0->GetXaxis()->GetXmax(); + double yMin = RatePlot1_0->GetYaxis()->GetXmin()+512/4; + double yMax = RatePlot1_0->GetYaxis()->GetXmax()+512/4; + std::vector verticalLineWidths = {3,1,3,1,3,1,3}; + std::vector horizontalLineWidths = {3,1,2,1,2,1,2,1,2,1,2,1,3}; + std::vector horizontalLineStyles = {1,7,1,7,1,7,1,7,1,7,1,7,1}; + std::vector verticalLineColors = {kRed,kBlue,kRed,kBlue,kRed,kBlue,kRed}; + std::vector horizontalLineColors = {kRed,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kRed}; + //Vertical lines + for (int i=0; i<=6; i++) { + TLine* line = new TLine(xMin+i*448, yMin, xMin+i*448, yMax); + line->SetLineColor(verticalLineColors[i]); + line->SetLineWidth(verticalLineWidths[i]); + line->Draw(); + } + //Horizontal lines + for (int i=0; i<=12; i++) { + TLine* line = new TLine(xMin, yMin+i*256, xMax, yMin+i*256); + line->SetLineColor(horizontalLineColors[i]); + line->SetLineWidth(horizontalLineWidths[i]); + line->SetLineStyle(horizontalLineStyles[i]); + line->Draw(); + } + + 
gPad->Update(); + + canvas2->cd(2); + gPad->SetLogz(); + RatePlot2_0->Draw("colz"); + + //Add a grid on top of the histogram showing 448x512 pixel chip boundaries + xMin = RatePlot2_0->GetXaxis()->GetXmin(); + xMax = RatePlot2_0->GetXaxis()->GetXmax(); + yMin = RatePlot2_0->GetYaxis()->GetXmin()+512/4; + yMax = RatePlot2_0->GetYaxis()->GetXmax()+512/4; + //Vertical lines + for (int i = 0; i <= 6; i++) { + TLine* line = new TLine(xMin+i*448, yMin, xMin+i*448, yMax); + line->SetLineColor(verticalLineColors[i]); + line->SetLineWidth(verticalLineWidths[i]); + line->Draw(); + } + //Horizontal lines + for (int i = 0; i <= 12; i++) { + TLine* line = new TLine(xMin, yMin+i*256, xMax, yMin+i*256); + line->SetLineColor(horizontalLineColors[i]); + line->SetLineWidth(horizontalLineWidths[i]); + line->SetLineStyle(horizontalLineStyles[i]); + line->Draw(); + } + + gPad->Update(); + + // Save the canvas to output file + outputFile->WriteTObject(canvas); + + // Clean up + delete canvas; + } //---------------------------------------------------------------------- @@ -246,9 +324,9 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString TCanvas* canvas = new TCanvas("ReconCanvas", "ReconCanvas", 2400, 1200); // Read in the plots from the input file - TH1* EPlot = (TH1*)inputDir->Get(EHistName); - TH1* ThetaPlot = (TH1*)inputDir->Get(ThetaHistName); - TH1* PhiPlot = (TH1*)inputDir->Get(PhiHistName); + TH2* EPlot = (TH2*)inputDir->Get(EHistName); + TH2* ThetaPlot = (TH2*)inputDir->Get(ThetaHistName); + TH2* PhiPlot = (TH2*)inputDir->Get(PhiHistName); TH1* EResPlot = (TH1*)inputDir->Get(EResName); TH1* ThetaResPlot = (TH1*)inputDir->Get(ThetaResName); @@ -265,10 +343,11 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString canvas->cd(1); gPad->SetLogz(); - EPlot->SetTitle("Reconstructed electron energy vs. primary electron energy"); + EPlot->SetTitle("Reconstructed electron energy vs. 
Primary electron energy"); EPlot->GetXaxis()->SetTitle("E_{prim} [GeV]"); EPlot->GetYaxis()->SetTitle("E_{recon} [GeV]"); EPlot->GetZaxis()->SetTitle("Counts"); + EPlot->Rebin2D(2,2); EPlot->SetStats(0); EPlot->Draw("colz"); @@ -276,10 +355,11 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString canvas->cd(2); gPad->SetLogz(); - ThetaPlot->SetTitle("Reconstructed Theta vs. primary Theta"); - ThetaPlot->GetXaxis()->SetTitle("Theta_{prim} [mrad]"); - ThetaPlot->GetYaxis()->SetTitle("Theta_{recon} [mrad]"); + ThetaPlot->SetTitle("Reconstructed #theta vs. Primary #theta"); + ThetaPlot->GetXaxis()->SetTitle("#theta_{prim} [mrad]"); + ThetaPlot->GetYaxis()->SetTitle("#theta_{recon} [mrad]"); ThetaPlot->GetZaxis()->SetTitle("Counts"); + ThetaPlot->Rebin2D(2,2); ThetaPlot->SetStats(0); ThetaPlot->Draw("colz"); @@ -287,41 +367,80 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString canvas->cd(3); gPad->SetLogz(); - PhiPlot->SetTitle("Reconstructed Phi vs. primary Phi"); - PhiPlot->GetXaxis()->SetTitle("Phi_{prim} [deg]"); - PhiPlot->GetYaxis()->SetTitle("Phi_{recon} [deg]"); + PhiPlot->SetTitle("Reconstructed #varphi vs. 
Primary #varphi (#theta>1mrad)"); + PhiPlot->GetXaxis()->SetTitle("#phi_{prim} [deg]"); + PhiPlot->GetYaxis()->SetTitle("#phi_{recon} [deg]"); PhiPlot->GetZaxis()->SetTitle("Counts"); + PhiPlot->Rebin2D(2,2); PhiPlot->SetStats(0); PhiPlot->Draw("colz"); canvas->cd(4); - EResPlot->SetTitle("Reconstructed electron energy resolution"); - EResPlot->GetXaxis()->SetTitle("E_{recon} - E_{prim} / E_{prim}"); + EResPlot->SetTitle("Electron energy resolution"); + EResPlot->GetXaxis()->SetTitle("(E_{recon} - E_{prim}) / E_{prim}"); EResPlot->GetYaxis()->SetTitle("Counts"); EResPlot->SetStats(0); EResPlot->Draw(); + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + int maxBin = EResPlot->GetMaximumBin(); + int fitMin = maxBin-10; + int fitMax = maxBin+10; + EResPlot->Fit("gaus", "Q", "", EResPlot->GetBinCenter(fitMin), EResPlot->GetBinCenter(fitMax)); + double EResStdDev = EResPlot->GetFunction("gaus")->GetParameter(2); + TLatex* latex = new TLatex(); + latex->SetTextSize(0.03); + latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{E} = %.2f %s", EResStdDev*100, "%")); + + // Remove fitted Gaussian from histogram + EResPlot->GetListOfFunctions()->Remove(EResPlot->GetFunction("gaus")); + canvas->cd(5); - ThetaResPlot->SetTitle("Reconstructed Theta resolution"); - ThetaResPlot->GetXaxis()->SetTitle("Theta_{recon} - Theta_{prim} [mrad]"); + ThetaResPlot->SetTitle("Theta resolution"); + ThetaResPlot->GetXaxis()->SetTitle("#theta_{recon} - #theta_{prim} [mrad]"); ThetaResPlot->GetYaxis()->SetTitle("Counts"); ThetaResPlot->SetStats(0); ThetaResPlot->Draw(); + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + maxBin = ThetaResPlot->GetMaximumBin(); + fitMin = maxBin-10; + fitMax = maxBin+10; + ThetaResPlot->Fit("gaus", "Q", "", ThetaResPlot->GetBinCenter(fitMin), ThetaResPlot->GetBinCenter(fitMax)); + double ThetaResStdDev = ThetaResPlot->GetFunction("gaus")->GetParameter(2); + 
latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{#theta} = %.2f mrad", ThetaResStdDev)); + + // Remove fitted Gaussian from histogram + ThetaResPlot->GetListOfFunctions()->Remove(ThetaResPlot->GetFunction("gaus")); + canvas->cd(6); - PhiResPlot->SetTitle("Reconstructed Phi resolution"); - PhiResPlot->GetXaxis()->SetTitle("Phi_{recon} - Phi_{prim} [deg]"); + PhiResPlot->SetTitle("Phi resolution (#theta>1mrad)"); + PhiResPlot->GetXaxis()->SetTitle("#phi_{recon} - #phi_{prim} [deg]"); PhiResPlot->GetYaxis()->SetTitle("Counts"); PhiResPlot->SetStats(0); PhiResPlot->Draw(); + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + maxBin = PhiResPlot->GetMaximumBin(); + fitMin = maxBin-10; + fitMax = maxBin+10; + PhiResPlot->Fit("gaus", "Q", "", PhiResPlot->GetBinCenter(fitMin), PhiResPlot->GetBinCenter(fitMax)); + double PhiResStdDev = PhiResPlot->GetFunction("gaus")->GetParameter(2); + latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{#phi} = %.1f deg", PhiResStdDev)); + + // Remove fitted Gaussian from histogram + PhiResPlot->GetListOfFunctions()->Remove(PhiResPlot->GetFunction("gaus")); + // Save the canvas to output file outputFile->WriteTObject(canvas); diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index be3c20d2..ea392d46 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -27,9 +27,9 @@ void RunLOWQ2(){ // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","plots/LOWQ2BremsRates.root",detector,bunchRate); // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","plots/LOWQ2BremsRates2.root",detector,eventRateBrems); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); + // 
LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); + // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsHits.root",detector,eventRateBrems); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsHits.root",detector,eventRateBrems); } \ No newline at end of file From 1608444134380c2afe4db4e5b52179292e305542 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 1 Feb 2024 16:03:47 +0000 Subject: [PATCH 19/53] Added primary-secondary hits example plot --- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 43 ++++++++++++++++++-- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index a099bd38..8ede89a5 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -4,7 +4,8 @@ #include #include #include -#include +#include +#include //---------------------------------------------------------------------- // Set global plot format variables @@ -53,9 +54,9 @@ TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHist //---------------------------------------------------------------------- // Create rate plots //---------------------------------------------------------------------- -TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real") { +TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real", TString inTag="AllHits") { - TString histName = "AllHits/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixel"; + TString histName = inTag+"/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixel"; // Read in the plots from the input file TH2* RatePlot = (TH2*)inputDir->Get(histName); @@ -213,7 +214,6 @@ 
void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag= //---------------------------------------------------------------------- void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { - TString histName = "AllHits/module2/layer0/hxPixelyPixel"; TCanvas* canvas = new TCanvas("RateCanvas", "RateCanvas", 3200, 1200); canvas->Divide(2,1); @@ -306,6 +306,41 @@ void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi // Clean up delete canvas; + // Canvas showing primary and secondary hits + // Todo: Neaten up + TCanvas* canvas3 = new TCanvas("PrimarySecondary-RateCanvas", "PrimarySecondary-RateCanvas", 2400, 2400); + canvas3->Divide(2,2); + + TH2* RatePlotPrimary1_0 = RatePlot(inputDir,1,0,Tag,"PrimaryHits"); + TH2* RatePlotPrimary2_0 = RatePlot(inputDir,2,0,Tag,"PrimaryHits"); + + TH2* RatePlotSecondary1_0 = RatePlot(inputDir,1,0,Tag,"SecondaryHits"); + TH2* RatePlotSecondary2_0 = RatePlot(inputDir,2,0,Tag,"SecondaryHits"); + + // Draw the plots on the canvas + canvas3->cd(1); + gPad->SetLogz(); + RatePlotPrimary1_0->Draw("colz"); + + canvas3->cd(2); + gPad->SetLogz(); + RatePlotPrimary2_0->Draw("colz"); + + canvas3->cd(3); + gPad->SetLogz(); + RatePlotSecondary1_0->Draw("colz"); + + canvas3->cd(4); + gPad->SetLogz(); + RatePlotSecondary2_0->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvas3); + + // Clean up + delete canvas3; + + } //---------------------------------------------------------------------- From 057570ba4412ab9ff3f43d1b3b65f4e35c48d46f Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 30 Apr 2024 16:14:56 +0100 Subject: [PATCH 20/53] Added Low-Q2 benchmarks to CI, does nothing currently --- .gitlab-ci.yml | 1 + benchmarks/LOWQ2/config.yml | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 benchmarks/LOWQ2/config.yml diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8911954c..1347d6ea 100644 --- a/.gitlab-ci.yml +++ 
b/.gitlab-ci.yml @@ -155,6 +155,7 @@ include: - local: 'benchmarks/pid/config.yml' - local: 'benchmarks/timing/config.yml' - local: 'benchmarks/b0_tracker/config.yml' + - local: 'benchmarks/LOWQ2/config.yml' - local: 'benchmarks/others/config.yml' deploy_results: diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml new file mode 100644 index 00000000..c3a7876c --- /dev/null +++ b/benchmarks/LOWQ2/config.yml @@ -0,0 +1,5 @@ +training: + # Training configuration goes here + +analysis: + # Analysis configuration goes here From e13ff349dce42ead5d1bcca1f654e453f3e0f18d Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 6 May 2024 10:04:06 +0100 Subject: [PATCH 21/53] Some changes --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 18 +++++++------- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 16 ++++++------- .../LOWQ2/analysis/LOWQ2reconstruction.h | 2 +- benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 6 ++--- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 24 +++++++++---------- .../LOWQ2/training/TaggerRegressionEICrecon.C | 23 +++++++++--------- 6 files changed, 44 insertions(+), 45 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index c681bda2..2adaef78 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -275,17 +275,17 @@ void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep* } - // if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ - // histMap["Acceptance"] = createAcceptancePlots(node); - // } + if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ + histMap["Acceptance"] = createAcceptancePlots(node); + } - // if(Any(colNames=="TaggerTrackerClusterPositions")){ - // histMap["Clusters"] = createClusterPlots(node); - // } + if(Any(colNames=="TaggerTrackerClusterPositions")){ + 
histMap["Clusters"] = createClusterPlots(node); + } - // if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ - // histMap["Reconstruction"] = createReconstructionPlots(node); - // } + if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + histMap["Reconstruction"] = createReconstructionPlots(node); + } writePlots( outName ); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 26b3b027..709f8c1d 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -37,7 +37,7 @@ std::tuple,std::map,std::map< // Plot number of hits per moduleID - hHists1D["hmoduleID"] = d2.Histo1D({"hmoduleID", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); + hHists1D["hmoduleIDRate"] = d2.Histo1D({"hmoduleIDRate", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); // Module Loop for(int i=1; i<=2; i++){ @@ -52,7 +52,7 @@ std::tuple,std::map,std::map< TString modNum = std::to_string(i); TString moduleTag = "module"+modNum; TString layerName = "layerID"+moduleTag; - TString layerHistName = "h"+layerName; + TString layerHistName = "h"+layerName+"Rate"; auto d3 = d2.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) @@ -68,17 +68,17 @@ std::tuple,std::map,std::map< TString layerTag = "layer"+layerNum; TString xName = "xPixel"; - TString xHistName = "h"+xName; + TString xHistName = "h"+xName+"Rate"; TString yName = "yPixel"; - TString yHistName = "h"+yName; + TString yHistName = "h"+yName+"Rate"; TString xChipName = "xChip"; TString yChipName = "yChip"; TString yHalfChipName = "yHalfChip"; - TString xyHistName = "h"+xName+yName; + TString xyHistName = "h"+xName+yName+"Rate"; TString xColumnName = "xColumn"; @@ -109,15 +109,15 @@ std::tuple,std::map,std::map< hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d4.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); // Plot 
number of hits per boardID for each layer - TString boardIDHistName = "hBoardID"; + TString boardIDHistName = "hBoardIDRate"; hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d4.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); // Plot number of hits per chipID for each layer - TString chipIDHistName = "hChipID"; + TString chipIDHistName = "hChipIDRate"; hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d4.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); // Plot number of hits per chipID for each layer - TString xColumnIDHistName = "hxColumnID"; + TString xColumnIDHistName = "hxColumnIDRate"; hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d4.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); } diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h index 444b6546..01b0dfa2 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -50,7 +50,7 @@ std::tuple,std::map,std::map< .Define("reconTheta_mrad","1000.0*reconTheta") .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") .Define("reconPhi_deg","reconPhi * TMath::RadToDeg()") - .Define("reconP","(18./10.)*(double)(1/(LowQ2TrackParameters[0].qOverP*LowQ2TrackParameters[0].charge))") + .Define("reconP","(18./10.)*(double)(-1/(LowQ2TrackParameters[0].qOverP))") .Define("reconMom",[](double p, double theta, double phi){return ROOT::Math::PxPyPzMVector(p*sin(theta)*cos(phi),p*sin(theta)*sin(phi),p*cos(theta),0.000511);},{"reconP","reconTheta","reconPhi"}) .Define("reconE","reconMom.E()") .Define("reconScatteredV","eBeam-reconMom") diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index 
8ede89a5..63882d4d 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -56,7 +56,7 @@ TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHist //---------------------------------------------------------------------- TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real", TString inTag="AllHits") { - TString histName = inTag+"/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixel"; + TString histName = inTag+"/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixelRate"; // Read in the plots from the input file TH2* RatePlot = (TH2*)inputDir->Get(histName); @@ -537,6 +537,6 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot // Main function to create canvases //---------------------------------------------------------------------- void PostprocessLOWQ2() { - Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); - Postprocess("plots/LOWQ2BremsRecon2.root", "plots/LOWQ2Brems_FormattedPlots.root", "Bremsstrahlung"); +// Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); + Postprocess("plots/LOWQ2BremsRecon3.root", "plots/LOWQ2Brems_FormattedPlots3.root", "Bremsstrahlung"); } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index ea392d46..3f5bf3c4 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -1,12 +1,16 @@ #include "LOWQ2Benchmarks.h" +#include - -void RunLOWQ2(){ +void RunLOWQ2(std::string inputFileName="Brems_input.root", std::string outputFileName="plots/LOWQ2BremsRecon3.root", + double eventCrossSection=0, std::string compactName="/opt/detector/epic-nightly/share/epic/epic.xml") { //Set implicit multi-threading ROOT::EnableImplicitMT(); + + // Output script running conditions + 
std::cout << "Running LOWQ2 benchmarks with the following parameters:" << std::endl; + std::cout << " - xml file: " << compactName << std::endl; - std::string compactName = "/home/simong/EIC/epic/epic_18x275.xml"; dd4hep::Detector& detector = dd4hep::Detector::getInstance(); detector.fromCompact(compactName); @@ -21,15 +25,11 @@ void RunLOWQ2(){ double eventRateBrems = luminosity * eventCrossSectionBrems * 1e-27; // [Hz] double bunchRate = 1.0 / bunchSpacing; // [Hz] - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/tempEventsQR2.root","plots/LOWQ2QR2.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab0_recon.edm4hep.root","plots/LOWQ2QROld.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root","plots/LOWQ2QRRates.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_18x275_new.edm4hep*.root","plots/LOWQ2BremsRates.root",detector,bunchRate); - // LOWQ2Benchmarks("/scratch/EIC/G4out/brems_10x100_ab/brems_10x100_ab_0.edm4hep.root","plots/LOWQ2BremsRates2.root",detector,eventRateBrems); - - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); - // LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); + double eventRate = luminosity * eventCrossSection * 1e-27; // [Hz] - LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsHits.root",detector,eventRateBrems); + //LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); + LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon3.root",detector,bunchRate); + //LOWQ2Benchmarks(inputFileName,outputFileName,detector,eventRate); } \ No newline at end of file diff --git 
a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C index 819627cc..9d02190a 100644 --- a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -25,9 +25,13 @@ using namespace TMVA; // At least one track reconstructed by EIC algorithms in the LOWQ2 tagger is needed. void TaggerRegressionEICrecon( - TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", - TString outDataName = "/scratch/EIC/Results/ML-Out/trainedData.root", - TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" + TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", + TString outDataName = "/scratch/EIC/LowQ2Model/trainedData.root", + TString dataFolderName = "/scratch/EIC/LowQ2Model/", + TString mcBeamEnergy = "18", + TString typeName = "LowQ2MomentumRegression", + TString methodName = "DNN", + TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" ) { @@ -45,16 +49,13 @@ void TaggerRegressionEICrecon( TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); // Create the factory object. Later you can choose the methods - TString typeName = "LowQ2Reconstruction"; TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); - TString dataFolderName = "dataset"; + ; TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); - - TString methodName = "DNN"; - + // Input TrackParameters variables from EICrecon - TString collectionName = "LowQ2Tracks[0]"; dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); @@ -65,16 +66,14 @@ void TaggerRegressionEICrecon( // Regression target particle 3-momentum, normalised to beam energy. 
// Takes second particle, in the test data this is the scattered electron // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. - TString mcParticleName = "MCParticles[2]"; - TString mcBeamEnergy = "18"; + TString mcParticleName = "ScatteredElectron[0]"; dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; - // Register the regression tree - + // Register the regression tree TChain* regChain = new TChain("events"); regChain->Add(inDataNames); //regChain->SetEntries(8000); // Set smaller sample for tests From 067f01c85692915328da6096b9eac808b4be2456 Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 6 May 2024 10:09:36 +0100 Subject: [PATCH 22/53] Added basic training components --- benchmarks/LOWQ2/config.yml | 5 + benchmarks/LOWQ2/training/Snakefile | 57 +++++++ .../LOWQ2/training/TaggerRegressionEICrecon.C | 152 ++++++++++++++++++ 3 files changed, 214 insertions(+) create mode 100644 benchmarks/LOWQ2/config.yml create mode 100644 benchmarks/LOWQ2/training/Snakefile create mode 100644 benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml new file mode 100644 index 00000000..c3a7876c --- /dev/null +++ b/benchmarks/LOWQ2/config.yml @@ -0,0 +1,5 @@ +training: + # Training configuration goes here + +analysis: + # Analysis configuration goes here diff --git a/benchmarks/LOWQ2/training/Snakefile b/benchmarks/LOWQ2/training/Snakefile new file mode 100644 index 00000000..7788e862 --- /dev/null +++ b/benchmarks/LOWQ2/training/Snakefile @@ -0,0 +1,57 @@ +# Snakemake file for training a new neural network for LOW-Q2 tagger electron momentum reconstruction +from itertools import product + +import os +import shutil 
+from snakemake.remote.S3 import RemoteProvider as S3RemoteProvider + +S3 = S3RemoteProvider( + endpoint_url="https://eics3.sdcc.bnl.gov:9000", + access_key_id=os.environ["S3_ACCESS_KEY"], + secret_access_key=os.environ["S3_SECRET_KEY"], +) + + +SIM_DIRECTORY = "/scratch/EIC/G4out/S3/" +RECON_DIRECTORY = "/scratch/EIC/ReconOut/S3/" +MODEL_DIRECTORY = "/scratch/EIC/LowQ2Model/" +REMOTE_DIRECTORY = "eictest/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" +XML_FILE = "/home/simong/EIC/epic/epic_18x275.xml" + +rule download_input: + input: + S3.remote(REMOTE_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION), + output: + EVENTS_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION, + run: + shutil.move(input[0], output[0]) + +rule run_reconstruction: + input: + SIM_DIRECTORY+FILE_BASE+"{index}"+SIM_EXTENSION, + "/home/simong/EIC/EICrecon/bin/eicrecon", + params: + XML=XML_FILE, + collections="LowQ2Tracks,ScatteredElectron", + output: + RECON_DIRECTORY+FILE_BASE+"{index}_reco.{tag}"+SIM_EXTENSION, + shell: """ +/home/simong/EIC/EICrecon/bin/eicrecon {input[0]} -Pjana:nevents=400 -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE=18 +""" + +rule low_q2_train_network: + params: + beam_energy="18", + type_name="LowQ2MomentumRegression", + method_name="DNN" + input: + train_data=RECON_DIRECTORY+FILE_BASE+"{index}_reco.{tag}"+SIM_EXTENSION + output: + root_output=MODEL_DIRECTORY+trainedData.root", + model_output=MODEL_DIRECTORY+"weights/" + shell: + """ + root -l -b -q 'TaggerRegressionEICrecon.C+("{input.train_data}", "{output.root_output}", MODEL_DIRECTORY, "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' + """ + diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C new file mode 100644 index 00000000..9d02190a --- 
/dev/null +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -0,0 +1,152 @@ +#include +#include +#include +#include + +#include "TChain.h" +#include "TFile.h" +#include "TTree.h" +#include "TString.h" +#include "TObjString.h" +#include "TSystem.h" +#include "TROOT.h" + +#include "TMVA/MethodDNN.h" +#include "TMVA/Reader.h" +#include "TMVA/Tools.h" +#include "TMVA/Factory.h" +#include "TMVA/DataLoader.h" +#include "TMVA/TMVARegGui.h" + +using namespace TMVA; + +// The training currently requires files which only contained a single DIS scattered electron to have been simulated e.g. generated using GETaLM +// The scattered electron must be the 3rd particle in the file after the two beam particles +// At least one track reconstructed by EIC algorithms in the LOWQ2 tagger is needed. + +void TaggerRegressionEICrecon( + TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", + TString outDataName = "/scratch/EIC/LowQ2Model/trainedData.root", + TString dataFolderName = "/scratch/EIC/LowQ2Model/", + TString mcBeamEnergy = "18", + TString typeName = "LowQ2MomentumRegression", + TString methodName = "DNN", + TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" + ) +{ + + Bool_t loadWeights = 0; + + //--------------------------------------------------------------- + // This loads the library + TMVA::Tools::Instance(); + + ROOT::EnableImplicitMT(8); + + // -------------------------------------------------------------------------------------------------- + // Here the preparation phase begins + // Create a new root output file + TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); + + // Create the factory object. 
Later you can choose the methods + + TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, + "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); + + ; + TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); + + // Input TrackParameters variables from EICrecon - + TString collectionName = "LowQ2Tracks[0]"; + dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); + dataloader->AddVariable( collectionName+".loc.b", "fit_position_z", "units", 'F' ); + dataloader->AddVariable( "sin("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_x", "units", 'F' ); + dataloader->AddVariable( "cos("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_y", "units", 'F' ); + + // Regression target particle 3-momentum, normalised to beam energy. + // Takes second particle, in the test data this is the scattered electron + // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. 
+ TString mcParticleName = "ScatteredElectron[0]"; + dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); + + std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; + + // Register the regression tree + TChain* regChain = new TChain("events"); + regChain->Add(inDataNames); + //regChain->SetEntries(8000); // Set smaller sample for tests + + // global event weights per tree (see below for setting event-wise weights) + Double_t regWeight = 1.0; + + // You can add an arbitrary number of regression trees + dataloader->AddRegressionTree( regChain, regWeight ); + + // This would set individual event weights (the variables defined in the + // expression need to exist in the original TTree) + // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); // If MC event weights are kept use these + // Apply additional cuts on the data + TCut mycut = "@LowQ2Tracks.size()==1"; // Make sure there's one reconstructed track in event + + dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:SplitSeed=1:NormMode=NumEvents:!V"); + + // TODO - Optimise layout and training more + TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); + + TString trainingStrategyString("TrainingStrategy="); + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + + TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); + + // Use GPU if possible on the machine + TString architectureString("Architecture=GPU"); + + // Transformation of data prior to training layers - decorrelate and normalise whole dataset + TString transformString("VarTransform=D,N"); + + nnOptions.Append(":"); + 
nnOptions.Append(architectureString); + nnOptions.Append(":"); + nnOptions.Append(transformString); + nnOptions.Append(":"); + nnOptions.Append(layoutString); + nnOptions.Append(":"); + nnOptions.Append(trainingStrategyString); + + TMVA::MethodDNN* method = (MethodDNN*)factory->BookMethod(dataloader, TMVA::Types::kDL, methodName, nnOptions); // NN + + // If loading previous model for further training + if(loadWeights){ + TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" ); + reader->BookMVA( methodName, inWeightName ); + TMVA::MethodDNN* kl = dynamic_cast(reader->FindMVA(methodName)); + method = kl; + } + + + // -------------------------------------------------------------------------------------------------- + // Now you can tell the factory to train, test, and evaluate the MVAs + + // Train MVAs using the set of training events + factory->TrainAllMethods(); + + // Evaluate all MVAs using the set of test events + factory->TestAllMethods(); + + // Evaluate and compare performance of all configured MVAs + factory->EvaluateAllMethods(); + + // -------------------------------------------------------------- + + // Save the output + outputFile->Close(); + + std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl; + std::cout << "==> TMVARegression is done!" 
<< std::endl; + + delete factory; + delete dataloader; + +} From c319d8398aec70cdafb3f54c1fea7704ac1138bd Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 7 May 2024 19:19:04 +0100 Subject: [PATCH 23/53] Updated to work locally at least --- benchmarks/LOWQ2/config.yml | 6 +- benchmarks/LOWQ2/training/Snakefile | 108 +++++++++++++++--- .../LOWQ2/training/TaggerRegressionEICrecon.C | 19 +-- 3 files changed, 103 insertions(+), 30 deletions(-) diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index c3a7876c..3382ffa0 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -1,5 +1,5 @@ +# Run Snakemake for the training training: - # Training configuration goes here + + -analysis: - # Analysis configuration goes here diff --git a/benchmarks/LOWQ2/training/Snakefile b/benchmarks/LOWQ2/training/Snakefile index 7788e862..13e7a3ca 100644 --- a/benchmarks/LOWQ2/training/Snakefile +++ b/benchmarks/LOWQ2/training/Snakefile @@ -11,47 +11,117 @@ S3 = S3RemoteProvider( secret_access_key=os.environ["S3_SECRET_KEY"], ) +EVENTS_DIRECTORY = "/scratch/EIC/Events/S3in/" +SIM_DIRECTORY = "/scratch/EIC/G4out/S3processed/" +RECO_DIRECTORY = "/scratch/EIC/ReconOut/S3in/" +RECO_DIRECTORY_P = "/scratch/EIC/ReconOut/S3processed/" +MODEL_DIRECTORY = "/scratch/EIC/LowQ2Model/" -SIM_DIRECTORY = "/scratch/EIC/G4out/S3/" -RECON_DIRECTORY = "/scratch/EIC/ReconOut/S3/" -MODEL_DIRECTORY = "/scratch/EIC/LowQ2Model/" -REMOTE_DIRECTORY = "eictest/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" -FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" -XML_FILE = "/home/simong/EIC/epic/epic_18x275.xml" +REMOTE_EVENTS_DIRECTORY = "EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +REMOTE_RECO_DIRECTORY = "EPIC/RECO/24.02.1/epic_craterlake/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +S3_DIR = "eictest/" +XROOTD_SERVER = "root://dtn-eic.jlab.org/" +XROOTD_DIR = "/work/eic2/" -rule download_input: +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = 
".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" + +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" +XML_FILE = "/home/simong/EIC/epic/install/share/epic/epic_craterlake_10x100.xml" +XML_FILE_TAGGER = "/home/simong/EIC/epic/install/share/epic/epic_ip6.xml" +XML_FILE_EDIT = "/home/simong/EIC/epic/epic_edit.xml" +BEAM_ENERGY = "10" + +rule download_event_input: input: - S3.remote(REMOTE_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION), + S3.remote(REMOTE_EVENTS_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION), output: EVENTS_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION, run: shutil.move(input[0], output[0]) + +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return True + except subprocess.CalledProcessError: + return False + +rule run_simulation_tagger: + params: + XML=XML_FILE_EDIT, + input=lambda wildcards: XROOTD_SERVER+XROOTD_DIR+REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION if remote_file_exists(XROOTD_SERVER,XROOTD_DIR+REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION) else None, + output: + SIM_DIRECTORY+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + shell: """ +npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 100000 \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ +""" + +rule download_recon: + input: + S3.remote(S3_DIR+REMOTE_RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex}"+RECO_EXTENSION), + output: + RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex}"+RECO_EXTENSION, + run: + shutil.move(input[0], output[0]) + rule run_reconstruction: + params: + XML=XML_FILE, + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", input: - SIM_DIRECTORY+FILE_BASE+"{index}"+SIM_EXTENSION, - "/home/simong/EIC/EICrecon/bin/eicrecon", + 
expand( + SIM_DIRECTORY+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + index=range(1,10), + ), + output: + RECO_DIRECTORY_P+FILE_BASE+".fab"+RECO_EXTENSION, + shell: """ +/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} +""" + +rule run_reconstruction_remote: params: XML=XML_FILE, - collections="LowQ2Tracks,ScatteredElectron", + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", + stringValue=RECO_DIRECTORY+FILE_BASE+"*.ab.*"+RECO_EXTENSION, + input: + expand( + RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex:04d}"+RECO_EXTENSION, + subindex=range(1,4), + runindex=1 + ), output: - RECON_DIRECTORY+FILE_BASE+"{index}_reco.{tag}"+SIM_EXTENSION, + RECO_DIRECTORY_P+FILE_BASE+".ab"+RECO_EXTENSION, shell: """ -/home/simong/EIC/EICrecon/bin/eicrecon {input[0]} -Pjana:nevents=400 -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE=18 +/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} """ rule low_q2_train_network: params: - beam_energy="18", + beam_energy=BEAM_ENERGY, type_name="LowQ2MomentumRegression", - method_name="DNN" + method_name="DNN", + model_dir="LowQ2Model" input: - train_data=RECON_DIRECTORY+FILE_BASE+"{index}_reco.{tag}"+SIM_EXTENSION + train_data=RECO_DIRECTORY_P+FILE_BASE+".fab"+RECO_EXTENSION, output: - root_output=MODEL_DIRECTORY+trainedData.root", - model_output=MODEL_DIRECTORY+"weights/" + root_output=MODEL_DIRECTORY+"trainedData.root", shell: """ - root -l -b -q 'TaggerRegressionEICrecon.C+("{input.train_data}", 
"{output.root_output}", MODEL_DIRECTORY, "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' + root -l -b -q 'TaggerRegressionEICrecon.C++("{input.train_data}", "{output.root_output}", "{params.model_dir}", "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' """ diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C index 9d02190a..fc2226f2 100644 --- a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -27,7 +27,7 @@ using namespace TMVA; void TaggerRegressionEICrecon( TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", TString outDataName = "/scratch/EIC/LowQ2Model/trainedData.root", - TString dataFolderName = "/scratch/EIC/LowQ2Model/", + TString dataFolderName = "LowQ2Model", TString mcBeamEnergy = "18", TString typeName = "LowQ2MomentumRegression", TString methodName = "DNN", @@ -57,7 +57,7 @@ void TaggerRegressionEICrecon( TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); // Input TrackParameters variables from EICrecon - - TString collectionName = "LowQ2Tracks[0]"; + TString collectionName = "TaggerTrackerProjectedTracks[0]"; dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); dataloader->AddVariable( collectionName+".loc.b", "fit_position_z", "units", 'F' ); dataloader->AddVariable( "sin("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_x", "units", 'F' ); @@ -66,7 +66,8 @@ void TaggerRegressionEICrecon( // Regression target particle 3-momentum, normalised to beam energy. // Takes second particle, in the test data this is the scattered electron // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. 
- TString mcParticleName = "ScatteredElectron[0]"; + TString mcParticleName = "MCParticles[MCScatteredElectrons_objIdx[0].index]"; + //TString mcParticleName = "MCParticles[0]"; dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); @@ -88,7 +89,7 @@ void TaggerRegressionEICrecon( // expression need to exist in the original TTree) // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); // If MC event weights are kept use these // Apply additional cuts on the data - TCut mycut = "@LowQ2Tracks.size()==1"; // Make sure there's one reconstructed track in event + TCut mycut = "@TaggerTrackerProjectedTracks.size()==1"; // Make sure there's one reconstructed track in event dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:SplitSeed=1:NormMode=NumEvents:!V"); @@ -96,12 +97,14 @@ void TaggerRegressionEICrecon( TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); TString trainingStrategyString("TrainingStrategy="); - trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + //trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=10,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); // Use GPU if possible on the machine - TString architectureString("Architecture=GPU"); + //TString architectureString("Architecture=GPU"); + TString architectureString("Architecture=CPU"); // Transformation of data prior to training layers - decorrelate and 
normalise whole dataset TString transformString("VarTransform=D,N"); @@ -146,7 +149,7 @@ void TaggerRegressionEICrecon( std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl; std::cout << "==> TMVARegression is done!" << std::endl; - delete factory; - delete dataloader; + // delete factory; + // delete dataloader; } From 2d58b4f9620034787bfe45b2253b64aef10c9725 Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 7 May 2024 23:16:45 +0100 Subject: [PATCH 24/53] Works locally and has settings to test on github --- benchmarks/LOWQ2/config.yml | 9 +- benchmarks/LOWQ2/training/Snakefile | 94 +++---------- .../LOWQ2/training/TaggerRegressionEICrecon.C | 6 +- benchmarks/LOWQ2/training/epic_edit.xml | 133 ++++++++++++++++++ benchmarks/LOWQ2/training/local_config.yml | 3 + 5 files changed, 167 insertions(+), 78 deletions(-) create mode 100644 benchmarks/LOWQ2/training/epic_edit.xml create mode 100644 benchmarks/LOWQ2/training/local_config.yml diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index 3382ffa0..fde59721 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -1,5 +1,12 @@ # Run Snakemake for the training -training: +train:LOWQ2: + extends: .det_benchmark + stage: benchmarks + script: + - snakemake --snakefile training/Snakefile --configfile config.yml +SIM_DIRECTORY: "LowQ2_G4out/" +RECO_DIRECTORY: "LowQ2_ReconOut/" +MODEL_DIRECTORY: "LowQ2_Model/" diff --git a/benchmarks/LOWQ2/training/Snakefile b/benchmarks/LOWQ2/training/Snakefile index 13e7a3ca..df9b9545 100644 --- a/benchmarks/LOWQ2/training/Snakefile +++ b/benchmarks/LOWQ2/training/Snakefile @@ -3,58 +3,32 @@ from itertools import product import os import shutil -from snakemake.remote.S3 import RemoteProvider as S3RemoteProvider -S3 = S3RemoteProvider( - endpoint_url="https://eics3.sdcc.bnl.gov:9000", - access_key_id=os.environ["S3_ACCESS_KEY"], - secret_access_key=os.environ["S3_SECRET_KEY"], -) +configfile: "local_config.yml" 
-EVENTS_DIRECTORY = "/scratch/EIC/Events/S3in/" -SIM_DIRECTORY = "/scratch/EIC/G4out/S3processed/" -RECO_DIRECTORY = "/scratch/EIC/ReconOut/S3in/" -RECO_DIRECTORY_P = "/scratch/EIC/ReconOut/S3processed/" -MODEL_DIRECTORY = "/scratch/EIC/LowQ2Model/" - -REMOTE_EVENTS_DIRECTORY = "EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" -REMOTE_RECO_DIRECTORY = "EPIC/RECO/24.02.1/epic_craterlake/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" -S3_DIR = "eictest/" -XROOTD_SERVER = "root://dtn-eic.jlab.org/" -XROOTD_DIR = "/work/eic2/" - -EVENT_EXTENSION = ".ab.hepmc3.tree.root" -SIM_EXTENSION = ".edm4hep.root" -RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = ".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" -FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" -XML_FILE = "/home/simong/EIC/epic/install/share/epic/epic_craterlake_10x100.xml" -XML_FILE_TAGGER = "/home/simong/EIC/epic/install/share/epic/epic_ip6.xml" -XML_FILE_EDIT = "/home/simong/EIC/epic/epic_edit.xml" -BEAM_ENERGY = "10" - -rule download_event_input: - input: - S3.remote(REMOTE_EVENTS_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION), - output: - EVENTS_DIRECTORY+FILE_BASE+"{index}"+EVENT_EXTENSION, - run: - shutil.move(input[0], output[0]) +REMOTE_EVENTS_DIRECTORY = "root://dtn-eic.jlab.org//work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" +XML_FILE = "epic_edit.xml" +BEAM_ENERGY = "10" def remote_file_exists(server,url): try: subprocess.check_output(['xrdfs', server, 'stat', url]) - return True + return url except subprocess.CalledProcessError: - return False - + return None +''' rule run_simulation_tagger: params: - XML=XML_FILE_EDIT, - input=lambda wildcards: XROOTD_SERVER+XROOTD_DIR+REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION if 
remote_file_exists(XROOTD_SERVER,XROOTD_DIR+REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION) else None, + XML=XML_FILE, + input=remote_file_exists(REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), output: - SIM_DIRECTORY+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, shell: """ npsim \ --inputFiles {params.input} \ @@ -67,15 +41,7 @@ npsim \ --field.eps_max 1e-04 \ --physics.rangecut 50 \ """ - -rule download_recon: - input: - S3.remote(S3_DIR+REMOTE_RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex}"+RECO_EXTENSION), - output: - RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex}"+RECO_EXTENSION, - run: - shutil.move(input[0], output[0]) - +''' rule run_reconstruction: params: XML=XML_FILE, @@ -83,31 +49,13 @@ rule run_reconstruction: collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", input: expand( - SIM_DIRECTORY+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, - index=range(1,10), - ), - output: - RECO_DIRECTORY_P+FILE_BASE+".fab"+RECO_EXTENSION, - shell: """ -/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} -""" - -rule run_reconstruction_remote: - params: - XML=XML_FILE, - beam_energy=BEAM_ENERGY, - collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", - stringValue=RECO_DIRECTORY+FILE_BASE+"*.ab.*"+RECO_EXTENSION, - input: - expand( - RECO_DIRECTORY+FILE_BASE+"{runindex}"+".ab."+"{subindex:04d}"+RECO_EXTENSION, - subindex=range(1,4), - runindex=1 + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + index=range(1,4), ), output: - RECO_DIRECTORY_P+FILE_BASE+".ab"+RECO_EXTENSION, + config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, shell: """ 
-/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output[0]} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} +/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} """ rule low_q2_train_network: @@ -117,9 +65,9 @@ rule low_q2_train_network: method_name="DNN", model_dir="LowQ2Model" input: - train_data=RECO_DIRECTORY_P+FILE_BASE+".fab"+RECO_EXTENSION, + train_data=config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, output: - root_output=MODEL_DIRECTORY+"trainedData.root", + root_output=config["MODEL_DIRECTORY"]+"trainedData.root", shell: """ root -l -b -q 'TaggerRegressionEICrecon.C++("{input.train_data}", "{output.root_output}", "{params.model_dir}", "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C index fc2226f2..0e333dc6 100644 --- a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C +++ b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C @@ -97,14 +97,12 @@ void TaggerRegressionEICrecon( TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); TString trainingStrategyString("TrainingStrategy="); - //trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; - trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=10,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; TString 
nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); // Use GPU if possible on the machine - //TString architectureString("Architecture=GPU"); - TString architectureString("Architecture=CPU"); + TString architectureString("Architecture=GPU"); // Transformation of data prior to training layers - decorrelate and normalise whole dataset TString transformString("VarTransform=D,N"); diff --git a/benchmarks/LOWQ2/training/epic_edit.xml b/benchmarks/LOWQ2/training/epic_edit.xml new file mode 100644 index 00000000..79cfb1e3 --- /dev/null +++ b/benchmarks/LOWQ2/training/epic_edit.xml @@ -0,0 +1,133 @@ + + + + + + + + + + + + + + # EPIC Detector + - https://github.com/eic/epic + - https://github.com/eic/ip6 + + + + + EPIC + + + + + + + + ## Main Constant Definitions + + The ip6 (or other ip) defines should be included first. + These files have only a define tags. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ## World Volume + + The world is a simple box, but could be a union of multiple regions. + + + + + + + + + ## Detector Subsystems + + ### IP Subsystems + + The interaction point subsystems are included before the central detector subsystems. + This is becuase the IP subsystems, for example the beampipe, will define paramters + which are subsquently used in the central detector construction -- e.g. the vertex tracker + uses the beampipe OD to help define its placement. + + The IP subsystems include the Far forward and backward regions. The list of subsystem includes: + - Interaction region beampipe + - B0 tracker + - Off-momentum tracker + - Far forward roman pots + - Zero Degree Calorimeter + - Beam line magnets. + - and more... 
+ + + + + ## Main magnet and its field + + + + + + ## Central tracking detectors + + + + + + ## Central beam pipe + + + + + ## Far backward detectors + + + diff --git a/benchmarks/LOWQ2/training/local_config.yml b/benchmarks/LOWQ2/training/local_config.yml new file mode 100644 index 00000000..781f2bbf --- /dev/null +++ b/benchmarks/LOWQ2/training/local_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "/scratch/EIC/G4out/S3processed/" +RECO_DIRECTORY: "/scratch/EIC/ReconOut/S3processed/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" From 7b158fa97ddd151252f4b40ae9df4276970c21a9 Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 7 May 2024 23:30:15 +0100 Subject: [PATCH 25/53] Removed full local eicrecon path --- benchmarks/LOWQ2/config.yml | 1 - benchmarks/LOWQ2/training/Snakefile | 2 +- benchmarks/LOWQ2/training/local_config.yml | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index fde59721..27666351 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -9,4 +9,3 @@ train:LOWQ2: SIM_DIRECTORY: "LowQ2_G4out/" RECO_DIRECTORY: "LowQ2_ReconOut/" MODEL_DIRECTORY: "LowQ2_Model/" - diff --git a/benchmarks/LOWQ2/training/Snakefile b/benchmarks/LOWQ2/training/Snakefile index df9b9545..5d2c7288 100644 --- a/benchmarks/LOWQ2/training/Snakefile +++ b/benchmarks/LOWQ2/training/Snakefile @@ -55,7 +55,7 @@ rule run_reconstruction: output: config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, shell: """ -/home/simong/EIC/EICrecon/bin/eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} +eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} """ rule low_q2_train_network: diff --git 
a/benchmarks/LOWQ2/training/local_config.yml b/benchmarks/LOWQ2/training/local_config.yml index 781f2bbf..0b26b11c 100644 --- a/benchmarks/LOWQ2/training/local_config.yml +++ b/benchmarks/LOWQ2/training/local_config.yml @@ -1,3 +1,3 @@ SIM_DIRECTORY: "/scratch/EIC/G4out/S3processed/" RECO_DIRECTORY: "/scratch/EIC/ReconOut/S3processed/" -MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file From cbce8fdca21106bdc968855f92173c2d6f7d847c Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Wed, 8 May 2024 16:22:19 -0400 Subject: [PATCH 26/53] Empty commit From 861c47b8d6b2c11de0e3dea73628353b92d05834 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 11:28:16 +0100 Subject: [PATCH 27/53] Updated to remove training from branch --- benchmarks/LOWQ2/analysis/Snakefile | 21 +++ .../LOWQ2/training/TaggerRegressionEICrecon.C | 152 ------------------ 2 files changed, 21 insertions(+), 152 deletions(-) create mode 100644 benchmarks/LOWQ2/analysis/Snakefile delete mode 100644 benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile new file mode 100644 index 00000000..cee7e3d7 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -0,0 +1,21 @@ +rule run_benchmarks: + input: + "/scratch/EIC/ReconOut/{tag}_new.root" + params: + crossSection = 0.0551, + output: + "plots/LOWQ2{tag}_Recon.root" + shell: + """ + root -l -b -q 'RunLOWQ2.C++("{input}", "{output}", {params.crossSection})' + """ + +rule run_plots: + input: + "plots/LOWQ2{tag}_Recon.root" + output: + "plots/LOWQ2{tag}_FormattedPlots2.root" + shell: + """ + root -l -b -q 'PostprocessLOWQ2.C("{input}", "{output}","{wildcards.tag}")' + """ diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C deleted file mode 100644 index 9d02190a..00000000 --- a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C 
+++ /dev/null @@ -1,152 +0,0 @@ -#include -#include -#include -#include - -#include "TChain.h" -#include "TFile.h" -#include "TTree.h" -#include "TString.h" -#include "TObjString.h" -#include "TSystem.h" -#include "TROOT.h" - -#include "TMVA/MethodDNN.h" -#include "TMVA/Reader.h" -#include "TMVA/Tools.h" -#include "TMVA/Factory.h" -#include "TMVA/DataLoader.h" -#include "TMVA/TMVARegGui.h" - -using namespace TMVA; - -// The training currently requires files which only contained a single DIS scattered electron to have been simulated e.g. generated using GETaLM -// The scattered electron must be the 3rd particle in the file after the two beam particles -// At least one track reconstructed by EIC algorithms in the LOWQ2 tagger is needed. - -void TaggerRegressionEICrecon( - TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", - TString outDataName = "/scratch/EIC/LowQ2Model/trainedData.root", - TString dataFolderName = "/scratch/EIC/LowQ2Model/", - TString mcBeamEnergy = "18", - TString typeName = "LowQ2MomentumRegression", - TString methodName = "DNN", - TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" - ) -{ - - Bool_t loadWeights = 0; - - //--------------------------------------------------------------- - // This loads the library - TMVA::Tools::Instance(); - - ROOT::EnableImplicitMT(8); - - // -------------------------------------------------------------------------------------------------- - // Here the preparation phase begins - // Create a new root output file - TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); - - // Create the factory object. 
Later you can choose the methods - - TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, - "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); - - ; - TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); - - // Input TrackParameters variables from EICrecon - - TString collectionName = "LowQ2Tracks[0]"; - dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); - dataloader->AddVariable( collectionName+".loc.b", "fit_position_z", "units", 'F' ); - dataloader->AddVariable( "sin("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_x", "units", 'F' ); - dataloader->AddVariable( "cos("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_y", "units", 'F' ); - - // Regression target particle 3-momentum, normalised to beam energy. - // Takes second particle, in the test data this is the scattered electron - // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. 
- TString mcParticleName = "ScatteredElectron[0]"; - dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); - dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); - dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); - - std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; - - // Register the regression tree - TChain* regChain = new TChain("events"); - regChain->Add(inDataNames); - //regChain->SetEntries(8000); // Set smaller sample for tests - - // global event weights per tree (see below for setting event-wise weights) - Double_t regWeight = 1.0; - - // You can add an arbitrary number of regression trees - dataloader->AddRegressionTree( regChain, regWeight ); - - // This would set individual event weights (the variables defined in the - // expression need to exist in the original TTree) - // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); // If MC event weights are kept use these - // Apply additional cuts on the data - TCut mycut = "@LowQ2Tracks.size()==1"; // Make sure there's one reconstructed track in event - - dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:SplitSeed=1:NormMode=NumEvents:!V"); - - // TODO - Optimise layout and training more - TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); - - TString trainingStrategyString("TrainingStrategy="); - trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; - - TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); - - // Use GPU if possible on the machine - TString architectureString("Architecture=GPU"); - - // Transformation of data prior to training layers - decorrelate and normalise whole dataset - TString transformString("VarTransform=D,N"); - - nnOptions.Append(":"); - 
nnOptions.Append(architectureString); - nnOptions.Append(":"); - nnOptions.Append(transformString); - nnOptions.Append(":"); - nnOptions.Append(layoutString); - nnOptions.Append(":"); - nnOptions.Append(trainingStrategyString); - - TMVA::MethodDNN* method = (MethodDNN*)factory->BookMethod(dataloader, TMVA::Types::kDL, methodName, nnOptions); // NN - - // If loading previous model for further training - if(loadWeights){ - TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" ); - reader->BookMVA( methodName, inWeightName ); - TMVA::MethodDNN* kl = dynamic_cast(reader->FindMVA(methodName)); - method = kl; - } - - - // -------------------------------------------------------------------------------------------------- - // Now you can tell the factory to train, test, and evaluate the MVAs - - // Train MVAs using the set of training events - factory->TrainAllMethods(); - - // Evaluate all MVAs using the set of test events - factory->TestAllMethods(); - - // Evaluate and compare performance of all configured MVAs - factory->EvaluateAllMethods(); - - // -------------------------------------------------------------- - - // Save the output - outputFile->Close(); - - std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl; - std::cout << "==> TMVARegression is done!" 
<< std::endl; - - delete factory; - delete dataloader; - -} From 29374e00855359589c1329192ecfcb5113baa3d1 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 11:32:45 +0100 Subject: [PATCH 28/53] removed some local paths and generalised --- benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h | 5 +-- benchmarks/LOWQ2/analysis/LOWQ2hits.h | 4 +-- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 37 ++++++++++++--------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h index 2adaef78..13ac9f4d 100755 --- a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -20,8 +20,6 @@ using RVecI = ROOT::VecOps::RVec; std::map,std::map,std::map>> histMap; - - //--------------------------------------------------------------------------------------------- // Create dataframe from input file(s) //--------------------------------------------------------------------------------------------- @@ -228,8 +226,7 @@ void writePlots( TString outName ){ //--------------------------------------------------------------------------------------------- // Create the benchmark plots //--------------------------------------------------------------------------------------------- -void LOWQ2Benchmarks( string inName = "/scratch/EIC/G4out/qr_18x275_new.edm4hep*.root", - TString outName = "LOWQ2QRRates.root", dd4hep::Detector& detector=dd4hep::Detector::getInstance(), double eventRate=0.0 ){ +void LOWQ2Benchmarks( string inName, TString outName, dd4hep::Detector& detector, double eventRate ){ auto node = initialise( inName ); diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h index 709f8c1d..ac3a26df 100644 --- a/benchmarks/LOWQ2/analysis/LOWQ2hits.h +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -97,9 +97,8 @@ std::tuple,std::map,std::map< .Define(yHalfChipName,"yHalfChipID[LayerFilter&&ModuleFilter]") 
.Define(xColumnName,"xColumnID[LayerFilter&&ModuleFilter]") .Define(layerSizeName,[](RVecI lay){return lay.size();},layerSizeInput); + - - hHists1D[moduleTag+"/"+layerTag+"/"+xHistName] = d4.Histo1D({xHistName, xHistName+";x pixel column;Rate [Hz]", xRange, xMin, xMax }, xName, "eventWeight"); hHists1D[moduleTag+"/"+layerTag+"/"+yHistName] = d4.Histo1D({yHistName, yHistName+";y pixel column;Rate [Hz]", yRange, yMin, yMax }, yName, "eventWeight"); @@ -123,7 +122,6 @@ std::tuple,std::map,std::map< } } - return {hHists1D,hHists2D,hHists3D}; } diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index 3f5bf3c4..b5c3c422 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -1,35 +1,40 @@ #include "LOWQ2Benchmarks.h" #include -void RunLOWQ2(std::string inputFileName="Brems_input.root", std::string outputFileName="plots/LOWQ2BremsRecon3.root", - double eventCrossSection=0, std::string compactName="/opt/detector/epic-nightly/share/epic/epic.xml") { +void RunLOWQ2( std::string inputFileName = "Brems_input.root", + std::string outputFileName = "plots/LOWQ2QRRecon3.root", + std::string compactName = "/opt/detector/epic-nightly/share/epic/epic.xml", + bool inputIsTimeBased = false, // true if the event sample is time-based, false if it is event-based + double timeWindow = 10.15*1e-9, //[s] + double eventCrossSection = 0.0551, // [mb] + double luminosity = 1e34, // [cm^-2 s^-1] + ) { //Set implicit multi-threading ROOT::EnableImplicitMT(); // Output script running conditions std::cout << "Running LOWQ2 benchmarks with the following parameters:" << std::endl; + std::cout << " - input file: " << inputFileName << std::endl; + std::cout << " - output file: " << outputFileName << std::endl; std::cout << " - xml file: " << compactName << std::endl; + std::cout << " - input is time-based: " << inputIsTimeBased << std::endl; + std::cout << " - time window: " << timeWindow << " s" << std::endl; + std::cout << 
" - event cross section: " << eventCrossSection << " mb" << std::endl; + std::cout << " - luminosity: " << luminosity << " cm^-2 s^-1" << std::endl; dd4hep::Detector& detector = dd4hep::Detector::getInstance(); detector.fromCompact(compactName); - double luminosity = 1e34; // [cm^-2 s^-1] - double eBeamEnergy = 18.0; // [GeV] - double pBeamEnergy = 275.0; // [GeV] - double eventCrossSectionQR = 0.0551; // [mb] - double eventCrossSectionBrems = 171.3; // [mb] - double bunchSpacing = 10.15*1e-9; // [s] - - double eventRateQR = luminosity * eventCrossSectionQR * 1e-27; // [Hz] - double eventRateBrems = luminosity * eventCrossSectionBrems * 1e-27; // [Hz] - double bunchRate = 1.0 / bunchSpacing; // [Hz] + // eventCrossSectionBrems = 171.3; [mb] + // eventCrossSectionQR = 0.0551; [mb] double eventRate = luminosity * eventCrossSection * 1e-27; // [Hz] - //LOWQ2Benchmarks("/scratch/EIC/ReconOut/QR_new.root","plots/LOWQ2QRRecon2.root",detector,eventRateQR); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon2.root",detector,eventRateBrems); - LOWQ2Benchmarks("/scratch/EIC/ReconOut/Brems_new.root","plots/LOWQ2BremsRecon3.root",detector,bunchRate); - //LOWQ2Benchmarks(inputFileName,outputFileName,detector,eventRate); + if(inputIsTimeBased){ + eventRate = 1.0 / timeWindow; // [Hz] + } + + LOWQ2Benchmarks(inputFileName,outputFileName,detector,eventRate); } \ No newline at end of file From e3475b1c608293bcb2eafe5829ca86e761738972 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 16:39:50 +0100 Subject: [PATCH 29/53] Changes to for remote running --- benchmarks/LOWQ2/analysis/.gitignore | 2 + benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C | 21 ++---- benchmarks/LOWQ2/analysis/RunLOWQ2.C | 2 +- benchmarks/LOWQ2/analysis/Snakefile | 79 ++++++++++++++++++-- benchmarks/LOWQ2/config.yml | 7 +- 5 files changed, 84 insertions(+), 27 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/.gitignore b/benchmarks/LOWQ2/analysis/.gitignore index 
a8ce0618..c96503c7 100644 --- a/benchmarks/LOWQ2/analysis/.gitignore +++ b/benchmarks/LOWQ2/analysis/.gitignore @@ -1,3 +1,5 @@ plots* calibrations* fieldmaps* +gdml* +.snakemake* \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C index 63882d4d..3bc5b380 100644 --- a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -17,15 +17,7 @@ void SetStyle() { gStyle->SetPadRightMargin(0.18); gStyle->SetPadBottomMargin(0.12); gStyle->SetTitleSize(0.04, "XYZ"); - gStyle->SetTitleOffset(4.0, "Z"); - // gStyle->SetTitleOffset(1.0, "Y"); - - // // Set global plot format variables - // int font = 42; // - // gStyle->SetTextFont(font); // Set the font for text - // gStyle->SetLabelFont(font, "XYZ"); // Set the font for axis labels - // gStyle->SetTitleFont(font, "XYZ"); // Set the font for titles } @@ -71,7 +63,6 @@ TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Re RatePlot->Rebin2D(rebin,rebin); RatePlot->Scale(1.0/(rebin*rebin)); TString title = "Tagger module "+std::to_string(Module)+", layer "+std::to_string(Layer)+" - Mean "+Tag+" rate per "+std::to_string(rebin)+"x"+std::to_string(rebin)+" pixel group"; - // TString title = "Tagger module 2, layer 0 rate, integrated over "+std::to_string(rebin)+"x"+std::to_string(rebin)+" pixel group"; RatePlot->SetTitle(title); RatePlot->GetXaxis()->SetTitle("x pixel"); RatePlot->GetYaxis()->SetTitle("y pixel"); @@ -485,9 +476,9 @@ void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString } //---------------------------------------------------------------------- -// This function is called by the benchmarking script maybe +// This function is called by the benchmarking script //---------------------------------------------------------------------- -void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plots.root", TString 
Tag="Quasi-Real") { +void PostprocessLOWQ2(TString inName, TString outName, TString Tag) { SetStyle(); @@ -536,7 +527,7 @@ void Postprocess(TString inName="LOWQ2QRRates3.root", TString outName="LOWQ2Plot //---------------------------------------------------------------------- // Main function to create canvases //---------------------------------------------------------------------- -void PostprocessLOWQ2() { -// Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); - Postprocess("plots/LOWQ2BremsRecon3.root", "plots/LOWQ2Brems_FormattedPlots3.root", "Bremsstrahlung"); -} \ No newline at end of file +// void PostprocessLOWQ2() { +// // Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); +// Postprocess("plots/LOWQ2BremsRecon3.root", "plots/LOWQ2Brems_FormattedPlots3.root", "Bremsstrahlung"); +// } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C index b5c3c422..e73e9849 100644 --- a/benchmarks/LOWQ2/analysis/RunLOWQ2.C +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -7,7 +7,7 @@ void RunLOWQ2( std::string inputFileName = "Brems_input.root", bool inputIsTimeBased = false, // true if the event sample is time-based, false if it is event-based double timeWindow = 10.15*1e-9, //[s] double eventCrossSection = 0.0551, // [mb] - double luminosity = 1e34, // [cm^-2 s^-1] + double luminosity = 1e34 // [cm^-2 s^-1] ) { //Set implicit multi-threading diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile index cee7e3d7..ed1c1b28 100644 --- a/benchmarks/LOWQ2/analysis/Snakefile +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -1,21 +1,88 @@ +# Possible tags for local/remote running +tag_params = { + "QR": {"timeBased": 0, "timeWindow": 0, "eventCrossSection": 0.0551, "tagName": "Quasi-Real"}, + "Brem": {"timeBased": 1, "timeWindow": 10.15*1e-9, "eventCrossSection": 0, "tagName": "Bremsstrahlung"}, + 
"pythia6": {"timeBased": 0, "timeWindow": 0, "eventCrossSection": 0.054, "tagName": "Deep Inelastic Scattering"}, + # Add more mappings here if needed +} + +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = ".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" + +REMOTE_EVENTS_DIRECTORY = "root://dtn-eic.jlab.org//work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" + +BEAM_ENERGY = "10" +XML_FILE = "/opt/detector/epic-main/share/epic/epic.xml", + +# Function to check if the remote file exists +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return url + except subprocess.CalledProcessError: + return None + +rule run_simulation_remote: + params: + XML=XML_FILE, + input=remote_file_exists(REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), + output: + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + shell: """ +npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 100 \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ +""" + +rule run_reconstruction: + params: + XML=XML_FILE, + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", + input: + expand( + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + index=range(1,4), + ), + output: + config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, + shell: """ +eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} +""" + rule run_benchmarks: input: "/scratch/EIC/ReconOut/{tag}_new.root" params: - crossSection = 0.0551, + xmlName=XMLFILE, + timeBased = lambda wildcards: 
tag_params[wildcards.tag]["timeBased"], + timeWindow = lambda wildcards: tag_params[wildcards.tag]["timeWindow"], + eventCrossSection = lambda wildcards: tag_params[wildcards.tag]["eventCrossSection"] + output: - "plots/LOWQ2{tag}_Recon.root" + "{dir}/results/LOWQ2/LOWQ2{tag}_Plots.root" shell: """ - root -l -b -q 'RunLOWQ2.C++("{input}", "{output}", {params.crossSection})' + root -l -b -q 'RunLOWQ2.C++("{input}", "{output}", "{params.xmlName}", {params.timeBased}, {params.timeWindow}, {params.eventCrossSection})' """ rule run_plots: input: - "plots/LOWQ2{tag}_Recon.root" + "{dir}/results/LOWQ2/LOWQ2{tag}_Plots.root" + params: + tagName = lambda wildcards: tag_params[wildcards.tag]["tagName"], output: - "plots/LOWQ2{tag}_FormattedPlots2.root" + "{dir}/results/LOWQ2/LOWQ2{tag}_FormattedPlots.root" shell: """ - root -l -b -q 'PostprocessLOWQ2.C("{input}", "{output}","{wildcards.tag}")' + root -l -b -q 'PostprocessLOWQ2.C("{input}", "{output}","{params.tagName}")' """ diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index c3a7876c..cf41f5a3 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -1,5 +1,2 @@ -training: - # Training configuration goes here - -analysis: - # Analysis configuration goes here +include: + - local: 'analysis/config.yml' From 5b972e4c3fc99853abd95e7ff6a2fc142d36bc9d Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 16:48:41 +0100 Subject: [PATCH 30/53] Removed other tests while investigating CI --- .gitlab-ci.yml | 24 ++++++++++++------------ benchmarks/LOWQ2/analysis/Snakefile | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a6c80b27..01eff517 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -130,18 +130,18 @@ get_data: include: # - local: 'benchmarks/backgrounds/config.yml' - - local: 'benchmarks/ecal_gaps/config.yml' - - local: 'benchmarks/tracking_detectors/config.yml' - - local: 'benchmarks/barrel_ecal/config.yml' - - 
local: 'benchmarks/barrel_hcal/config.yml' - - local: 'benchmarks/zdc/config.yml' - - local: 'benchmarks/material_maps/config.yml' - - local: 'benchmarks/material_scan/config.yml' - - local: 'benchmarks/pid/config.yml' - - local: 'benchmarks/timing/config.yml' - - local: 'benchmarks/b0_tracker/config.yml' +# - local: 'benchmarks/ecal_gaps/config.yml' +# - local: 'benchmarks/tracking_detectors/config.yml' +# - local: 'benchmarks/barrel_ecal/config.yml' +# - local: 'benchmarks/barrel_hcal/config.yml' +# - local: 'benchmarks/zdc/config.yml' +# - local: 'benchmarks/material_maps/config.yml' +# - local: 'benchmarks/material_scan/config.yml' +# - local: 'benchmarks/pid/config.yml' +# - local: 'benchmarks/timing/config.yml' +# - local: 'benchmarks/b0_tracker/config.yml' - local: 'benchmarks/LOWQ2/config.yml' - +''' deploy_results: stage: deploy needs: @@ -222,4 +222,4 @@ pages: paths: - public - +''' \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile index ed1c1b28..ffdc6cfe 100644 --- a/benchmarks/LOWQ2/analysis/Snakefile +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -61,7 +61,7 @@ eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collecti rule run_benchmarks: input: - "/scratch/EIC/ReconOut/{tag}_new.root" + config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION params: xmlName=XMLFILE, timeBased = lambda wildcards: tag_params[wildcards.tag]["timeBased"], From bf1ab67823d5826c3a8f2a47716db95b1f82d3a0 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 16:49:24 +0100 Subject: [PATCH 31/53] Included analysis config.yml --- benchmarks/LOWQ2/analysis/config.yml | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 benchmarks/LOWQ2/analysis/config.yml diff --git a/benchmarks/LOWQ2/analysis/config.yml b/benchmarks/LOWQ2/analysis/config.yml new file mode 100644 index 00000000..4946c2d5 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/config.yml @@ -0,0 +1,9 @@ 
+bench:LOWQ2_events: + extends: .det_benchmark + stage: benchmarks + script: + - snakemake --cores 8 $DETECTOR_CONFIG/results/LOWQ2/LOWQ2_FormattedPlots.edm4hep.root --configfile config.yml + +SIM_DIRECTORY: "sim_output/LOWQ2" +RECO_DIRECTORY: "reconstruction/LOWQ2" +RESULTS_DIRECTORY: "results/LOWQ2" \ No newline at end of file From 27d9a432074d0693fc77e3f18e36192530cf58a7 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 17:07:38 +0100 Subject: [PATCH 32/53] Some more simplification --- benchmarks/LOWQ2/analysis/Snakefile | 6 +++--- benchmarks/LOWQ2/analysis/config.yml | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile index ffdc6cfe..7c2b1a30 100644 --- a/benchmarks/LOWQ2/analysis/Snakefile +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -69,7 +69,7 @@ rule run_benchmarks: eventCrossSection = lambda wildcards: tag_params[wildcards.tag]["eventCrossSection"] output: - "{dir}/results/LOWQ2/LOWQ2{tag}_Plots.root" + "{dir}/LOWQ2{tag}_Plots.root" shell: """ root -l -b -q 'RunLOWQ2.C++("{input}", "{output}", "{params.xmlName}", {params.timeBased}, {params.timeWindow}, {params.eventCrossSection})' @@ -77,11 +77,11 @@ rule run_benchmarks: rule run_plots: input: - "{dir}/results/LOWQ2/LOWQ2{tag}_Plots.root" + "{dir}/LOWQ2{tag}_Plots.root" params: tagName = lambda wildcards: tag_params[wildcards.tag]["tagName"], output: - "{dir}/results/LOWQ2/LOWQ2{tag}_FormattedPlots.root" + "{dir}/LOWQ2{tag}_FormattedPlots.root" shell: """ root -l -b -q 'PostprocessLOWQ2.C("{input}", "{output}","{params.tagName}")' diff --git a/benchmarks/LOWQ2/analysis/config.yml b/benchmarks/LOWQ2/analysis/config.yml index 4946c2d5..5edfbaf3 100644 --- a/benchmarks/LOWQ2/analysis/config.yml +++ b/benchmarks/LOWQ2/analysis/config.yml @@ -1,9 +1,9 @@ +SIM_DIRECTORY: ${LOCAL_DATA_PATH}/"sim_output" +RECO_DIRECTORY: ${LOCAL_DATA_PATH}/"results" +RESULTS_DIRECTORY: ${LOCAL_DATA_PATH}/"results" + 
bench:LOWQ2_events: extends: .det_benchmark stage: benchmarks script: - - snakemake --cores 8 $DETECTOR_CONFIG/results/LOWQ2/LOWQ2_FormattedPlots.edm4hep.root --configfile config.yml - -SIM_DIRECTORY: "sim_output/LOWQ2" -RECO_DIRECTORY: "reconstruction/LOWQ2" -RESULTS_DIRECTORY: "results/LOWQ2" \ No newline at end of file + - snakemake --cores 8 ${RESULTS_DIRECTORY}/LOWQ2_FormattedPlots.edm4hep.root --configfile config.yml From efd92c9bdda12182418af01ca49e90fcdfc5237c Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 17:10:57 +0100 Subject: [PATCH 33/53] Remove other CI checks temporarily --- .gitlab-ci.yml | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 97f28ace..f3700055 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -8,6 +8,7 @@ variables: GITHUB_REPOSITORY: '' workflow: + name: '$PIPELINE_NAME' rules: - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - if: '$CI_PIPELINE_SOURCE == "web"' @@ -116,7 +117,7 @@ get_data: - source .local/bin/env.sh - ls -lrtha - ln -s "${LOCAL_DATA_PATH}/sim_output" sim_output - - ln -s "${LOCAL_DATA_PATH}/datasets/data" data + - ln -s "${LOCAL_DATA_PATH}/datasets/data" data # snakemake support - mkdir "${DETECTOR_CONFIG}" - ln -s "${LOCAL_DATA_PATH}/sim_output" "${DETECTOR_CONFIG}/sim_output" @@ -129,17 +130,18 @@ get_data: include: # - local: 'benchmarks/backgrounds/config.yml' - - local: 'benchmarks/ecal_gaps/config.yml' - - local: 'benchmarks/tracking_detectors/config.yml' - - local: 'benchmarks/barrel_ecal/config.yml' - - local: 'benchmarks/barrel_hcal/config.yml' - - local: 'benchmarks/zdc/config.yml' - - local: 'benchmarks/material_maps/config.yml' - - local: 'benchmarks/material_scan/config.yml' - - local: 'benchmarks/pid/config.yml' - - local: 'benchmarks/timing/config.yml' - - local: 'benchmarks/b0_tracker/config.yml' - +# - local: 'benchmarks/ecal_gaps/config.yml' +# - local: 'benchmarks/tracking_detectors/config.yml' 
+# - local: 'benchmarks/barrel_ecal/config.yml' +# - local: 'benchmarks/barrel_hcal/config.yml' +# - local: 'benchmarks/zdc/config.yml' +# - local: 'benchmarks/material_maps/config.yml' +# - local: 'benchmarks/material_scan/config.yml' +# - local: 'benchmarks/pid/config.yml' +# - local: 'benchmarks/timing/config.yml' +# - local: 'benchmarks/b0_tracker/config.yml' + - local: 'benchmarks/LOWQ2/config.yml' +''' deploy_results: stage: deploy needs: @@ -220,4 +222,4 @@ pages: paths: - public - +''' \ No newline at end of file From 669ff1bab5f01cea0a6a88c6b8ee88e266afea17 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 17:12:48 +0100 Subject: [PATCH 34/53] Added analysis gitignore --- benchmarks/LOWQ2/analysis/.gitignore | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 benchmarks/LOWQ2/analysis/.gitignore diff --git a/benchmarks/LOWQ2/analysis/.gitignore b/benchmarks/LOWQ2/analysis/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file From 285dfa0a66a58ad9804cebdcf978e96567e53c44 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 17:17:02 +0100 Subject: [PATCH 35/53] Split and updated config.yml --- benchmarks/LOWQ2/config.yml | 13 ++----------- benchmarks/LOWQ2/training/config.yml | 11 +++++++++++ 2 files changed, 13 insertions(+), 11 deletions(-) create mode 100644 benchmarks/LOWQ2/training/config.yml diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index 27666351..f32514e8 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -1,11 +1,2 @@ -# Run Snakemake for the training -train:LOWQ2: - extends: .det_benchmark - stage: benchmarks - script: - - snakemake --snakefile training/Snakefile --configfile config.yml - - -SIM_DIRECTORY: "LowQ2_G4out/" -RECO_DIRECTORY: "LowQ2_ReconOut/" -MODEL_DIRECTORY: "LowQ2_Model/" +include: + - local: 
'training/config.yml' diff --git a/benchmarks/LOWQ2/training/config.yml b/benchmarks/LOWQ2/training/config.yml new file mode 100644 index 00000000..b5758357 --- /dev/null +++ b/benchmarks/LOWQ2/training/config.yml @@ -0,0 +1,11 @@ +SIM_DIRECTORY: "LowQ2_G4out/" +RECO_DIRECTORY: "LowQ2_ReconOut/" +MODEL_DIRECTORY: "LowQ2_Model/" + +# Run Snakemake for the training +train:LOWQ2: + extends: .det_benchmark + stage: benchmarks + script: + - snakemake --cores 8 ${MODEL_DIRECTORY}"trainedData.root" --configfile config.yml + From 3ee84edb2218edf0ca231e5c6c7559a7ac26673c Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 16 May 2024 17:17:46 +0100 Subject: [PATCH 36/53] Updated stage --- benchmarks/LOWQ2/training/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/LOWQ2/training/config.yml b/benchmarks/LOWQ2/training/config.yml index b5758357..45f7f318 100644 --- a/benchmarks/LOWQ2/training/config.yml +++ b/benchmarks/LOWQ2/training/config.yml @@ -5,7 +5,7 @@ MODEL_DIRECTORY: "LowQ2_Model/" # Run Snakemake for the training train:LOWQ2: extends: .det_benchmark - stage: benchmarks + stage: calibrate script: - snakemake --cores 8 ${MODEL_DIRECTORY}"trainedData.root" --configfile config.yml From dbd39e2d83affbc7f000d705835c346d0c13a451 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 2024 10:47:04 -0400 Subject: [PATCH 37/53] .gitlab-ci.yml: fix syntax --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 33fb5e30..cdc8b458 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -141,7 +141,7 @@ include: # - local: 'benchmarks/timing/config.yml' # - local: 'benchmarks/b0_tracker/config.yml' - local: 'benchmarks/LOWQ2/config.yml' -''' + deploy_results: stage: deploy needs: @@ -224,4 +224,4 @@ pages: paths: - public -''' \ No newline at end of file +''' From 9183d9310e1a737224988eb5d9f7b6970d903b9c Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 
2024 10:48:45 -0400 Subject: [PATCH 38/53] .gitlab-ci.yml: fix syntax --- .gitlab-ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index cdc8b458..ff77a447 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -223,5 +223,3 @@ pages: artifacts: paths: - public - -''' From dd29b98b3937225e94c32c7beaedfb060322c5d0 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 2024 10:51:18 -0400 Subject: [PATCH 39/53] .gitlab-ci.yml: full include path --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ff77a447..dbc1ff88 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -140,7 +140,7 @@ include: # - local: 'benchmarks/pid/config.yml' # - local: 'benchmarks/timing/config.yml' # - local: 'benchmarks/b0_tracker/config.yml' - - local: 'benchmarks/LOWQ2/config.yml' + - local: 'benchmarks/LOWQ2/analysis/config.yml' deploy_results: stage: deploy From 79ed678418e0e4efdad4fda32615ee5de67012f7 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 2024 10:52:12 -0400 Subject: [PATCH 40/53] config.yml: this is not a snakemake config --- benchmarks/LOWQ2/analysis/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/config.yml b/benchmarks/LOWQ2/analysis/config.yml index 5edfbaf3..83291c0e 100644 --- a/benchmarks/LOWQ2/analysis/config.yml +++ b/benchmarks/LOWQ2/analysis/config.yml @@ -1,6 +1,6 @@ -SIM_DIRECTORY: ${LOCAL_DATA_PATH}/"sim_output" -RECO_DIRECTORY: ${LOCAL_DATA_PATH}/"results" -RESULTS_DIRECTORY: ${LOCAL_DATA_PATH}/"results" +#SIM_DIRECTORY: ${LOCAL_DATA_PATH}/"sim_output" +#RECO_DIRECTORY: ${LOCAL_DATA_PATH}/"results" +#RESULTS_DIRECTORY: ${LOCAL_DATA_PATH}/"results" bench:LOWQ2_events: extends: .det_benchmark From fe37ac878894f006ed4d6d28223b020e2e045b8b Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 2024 10:53:06 -0400 Subject: [PATCH 41/53] .gitlab-ci.yml: enable zdc 
back --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index dbc1ff88..fdb39e41 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -134,7 +134,7 @@ include: # - local: 'benchmarks/tracking_detectors/config.yml' # - local: 'benchmarks/barrel_ecal/config.yml' # - local: 'benchmarks/barrel_hcal/config.yml' -# - local: 'benchmarks/zdc/config.yml' + - local: 'benchmarks/zdc/config.yml' # - local: 'benchmarks/material_maps/config.yml' # - local: 'benchmarks/material_scan/config.yml' # - local: 'benchmarks/pid/config.yml' From 21bbf74c3b68edc63f36a4550d693202334e0779 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 17 May 2024 10:54:37 -0400 Subject: [PATCH 42/53] .gitlab-ci.yml: just comment needs instead --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index fdb39e41..ddafd052 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -134,7 +134,7 @@ include: # - local: 'benchmarks/tracking_detectors/config.yml' # - local: 'benchmarks/barrel_ecal/config.yml' # - local: 'benchmarks/barrel_hcal/config.yml' - - local: 'benchmarks/zdc/config.yml' +# - local: 'benchmarks/zdc/config.yml' # - local: 'benchmarks/material_maps/config.yml' # - local: 'benchmarks/material_scan/config.yml' # - local: 'benchmarks/pid/config.yml' @@ -145,7 +145,7 @@ include: deploy_results: stage: deploy needs: - - ["collect_results:zdc","collect_results:barrel_ecal","collect_results:barrel_hcal","collect_results:material_scan"] +# - ["collect_results:zdc","collect_results:barrel_ecal","collect_results:barrel_hcal","collect_results:material_scan"] script: - echo "deploy results!" 
- find results -print | sort | tee summary.txt From cf8c5a4281bc1b7c7a7f3ef0b881fdfba1f6ce3e Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 21 May 2024 14:18:28 +0100 Subject: [PATCH 43/53] Added gitignores --- benchmarks/LOWQ2/signal_training/.gitignore | 5 +++++ benchmarks/LOWQ2/training/.gitignore | 5 +++++ 2 files changed, 10 insertions(+) create mode 100644 benchmarks/LOWQ2/signal_training/.gitignore create mode 100644 benchmarks/LOWQ2/training/.gitignore diff --git a/benchmarks/LOWQ2/signal_training/.gitignore b/benchmarks/LOWQ2/signal_training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/signal_training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file diff --git a/benchmarks/LOWQ2/training/.gitignore b/benchmarks/LOWQ2/training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file From d6dc119432343555772ef25cc0872c08c6868061 Mon Sep 17 00:00:00 2001 From: simonge Date: Tue, 21 May 2024 20:11:32 +0100 Subject: [PATCH 44/53] Updated to (probably) work when the remote flie isn't available or not --- benchmarks/LOWQ2/config.yml | 2 +- .../.gitignore | 0 .../LOWQ2/reconstruction_training/Snakefile | 110 ++++++++++++++++++ .../TaggerRegressionEICrecon.C | 0 .../config.yml | 6 +- .../epic_edit.xml | 0 .../reconstruction_training/local_config.yml | 3 + benchmarks/LOWQ2/training/Snakefile | 75 ------------ benchmarks/LOWQ2/training/local_config.yml | 3 - 9 files changed, 115 insertions(+), 84 deletions(-) rename benchmarks/LOWQ2/{training => reconstruction_training}/.gitignore (100%) create mode 100644 benchmarks/LOWQ2/reconstruction_training/Snakefile rename benchmarks/LOWQ2/{training => reconstruction_training}/TaggerRegressionEICrecon.C (100%) rename benchmarks/LOWQ2/{training => 
reconstruction_training}/config.yml (57%) rename benchmarks/LOWQ2/{training => reconstruction_training}/epic_edit.xml (100%) create mode 100644 benchmarks/LOWQ2/reconstruction_training/local_config.yml delete mode 100644 benchmarks/LOWQ2/training/Snakefile delete mode 100644 benchmarks/LOWQ2/training/local_config.yml diff --git a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml index f32514e8..da088db8 100644 --- a/benchmarks/LOWQ2/config.yml +++ b/benchmarks/LOWQ2/config.yml @@ -1,2 +1,2 @@ include: - - local: 'training/config.yml' + - local: 'reconstruction_training/config.yml' diff --git a/benchmarks/LOWQ2/training/.gitignore b/benchmarks/LOWQ2/reconstruction_training/.gitignore similarity index 100% rename from benchmarks/LOWQ2/training/.gitignore rename to benchmarks/LOWQ2/reconstruction_training/.gitignore diff --git a/benchmarks/LOWQ2/reconstruction_training/Snakefile b/benchmarks/LOWQ2/reconstruction_training/Snakefile new file mode 100644 index 00000000..7fea9478 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/Snakefile @@ -0,0 +1,110 @@ +# Snakemake file for training a new neural network for LOW-Q2 tagger electron momentum reconstruction +from itertools import product + +import os +import shutil +from snakemake.remote.S3 import RemoteProvider as S3RemoteProvider + +configfile: "local_config.yml" + +S3 = S3RemoteProvider( + endpoint_url="https://eics3.sdcc.bnl.gov:9000", + access_key_id=os.environ["S3_ACCESS_KEY"], + secret_access_key=os.environ["S3_SECRET_KEY"], +) + +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = ".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" + +REMOTE_EVENTS_SERVER = "root://dtn-eic.jlab.org/" +REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" + +S3_RECON_DIRECTORY = "eictest/EPIC/RECO/24.05.0/epic_craterlake/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" + +XML_FILE = "epic_edit.xml" 
+BEAM_ENERGY = "10" + +################################################################### +# Find and download the input files directly from the S3 bucket +################################################################### +rule download_recon_input: + input: + S3.remote(S3_RECON_DIRECTORY+FILE_BASE+"{run_index}.ab.{file_index}"+RECO_EXTENSION), + output: + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{run_index}.ab.{file_index}"+RECO_EXTENSION, + run: + shutil.move(input[0], output[0]) + +################################################################### +# Generate the input files for the training from the event files +################################################################### +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return url + except subprocess.CalledProcessError: + return None + +rule run_simulation_tagger: + params: + XML=XML_FILE, + input=lambda wildcards: remote_file_exists(REMOTE_EVENTS_SERVER,REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.fileindex+EVENT_EXTENSION), + output: + config["SIM_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+SIM_EXTENSION, + shell: """ + npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 1000 \ + --skipNEvents 1000*{subindex} \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ + """ + +rule generate_recon_input: + params: + XML=XML_FILE, + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", + input: + config["SIM_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+SIM_EXTENSION, + output: + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+RECO_EXTENSION, + shell: """ + eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} 
-PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} + """ + +################################################################### +# Try to download the input files from the S3 bucket before generating them +################################################################### +ruleorder: download_recon_input > generate_recon_input + +################################################################### +# Train the network to reconstruct the electron momentum +################################################################### +rule low_q2_train_network: + params: + beam_energy=BEAM_ENERGY, + type_name="LowQ2MomentumRegression", + method_name="DNN", + model_dir="LowQ2Model", + input_files=config["RECO_IN_DIRECTORY"]+FILE_BASE+"*.ab.000[1234]"+RECO_EXTENSION, + input: + train_data=expand( + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+RECO_EXTENSION, + fileindex=range(1,20), + subindex=range(1,4), + ), + output: + root_output=config["MODEL_DIRECTORY"]+"trainedData.root", + shell: + """ + root -l -b -q 'TaggerRegressionEICrecon.C++("{params.input_files}", "{output.root_output}", "{params.model_dir}", "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' + """ + diff --git a/benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/reconstruction_training/TaggerRegressionEICrecon.C similarity index 100% rename from benchmarks/LOWQ2/training/TaggerRegressionEICrecon.C rename to benchmarks/LOWQ2/reconstruction_training/TaggerRegressionEICrecon.C diff --git a/benchmarks/LOWQ2/training/config.yml b/benchmarks/LOWQ2/reconstruction_training/config.yml similarity index 57% rename from benchmarks/LOWQ2/training/config.yml rename to benchmarks/LOWQ2/reconstruction_training/config.yml index 45f7f318..a03f2028 100644 --- a/benchmarks/LOWQ2/training/config.yml +++ b/benchmarks/LOWQ2/reconstruction_training/config.yml @@ -1,11 +1,7 @@ -SIM_DIRECTORY: "LowQ2_G4out/" -RECO_DIRECTORY: "LowQ2_ReconOut/" -MODEL_DIRECTORY: 
"LowQ2_Model/" - # Run Snakemake for the training train:LOWQ2: extends: .det_benchmark stage: calibrate script: - - snakemake --cores 8 ${MODEL_DIRECTORY}"trainedData.root" --configfile config.yml + - snakemake --cores 8 ${MODEL_DIRECTORY}"trainedData.root" --configfile remote_config.yml diff --git a/benchmarks/LOWQ2/training/epic_edit.xml b/benchmarks/LOWQ2/reconstruction_training/epic_edit.xml similarity index 100% rename from benchmarks/LOWQ2/training/epic_edit.xml rename to benchmarks/LOWQ2/reconstruction_training/epic_edit.xml diff --git a/benchmarks/LOWQ2/reconstruction_training/local_config.yml b/benchmarks/LOWQ2/reconstruction_training/local_config.yml new file mode 100644 index 00000000..437975b8 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/local_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "/scratch/EIC/SimOut/S3in/" +RECO_IN_DIRECTORY: "/scratch/EIC/ReconOut/S3in/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file diff --git a/benchmarks/LOWQ2/training/Snakefile b/benchmarks/LOWQ2/training/Snakefile deleted file mode 100644 index 5d2c7288..00000000 --- a/benchmarks/LOWQ2/training/Snakefile +++ /dev/null @@ -1,75 +0,0 @@ -# Snakemake file for training a new neural network for LOW-Q2 tagger electron momentum reconstruction -from itertools import product - -import os -import shutil - -configfile: "local_config.yml" - -EVENT_EXTENSION = ".ab.hepmc3.tree.root" -SIM_EXTENSION = ".edm4hep.root" -RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" - -REMOTE_EVENTS_DIRECTORY = "root://dtn-eic.jlab.org//work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" -FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" - -XML_FILE = "epic_edit.xml" -BEAM_ENERGY = "10" - -def remote_file_exists(server,url): - try: - subprocess.check_output(['xrdfs', server, 'stat', url]) - return url - except subprocess.CalledProcessError: - return None -''' -rule run_simulation_tagger: - params: - XML=XML_FILE, - 
input=remote_file_exists(REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), - output: - config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, - shell: """ -npsim \ - --inputFiles {params.input} \ - --outputFile {output[0]} \ - --compactFile {params.XML} \ - --runType run \ - --numberOfEvents 100000 \ - --physics.list FTFP_BERT \ - --field.eps_min 5e-06 \ - --field.eps_max 1e-04 \ - --physics.rangecut 50 \ -""" -''' -rule run_reconstruction: - params: - XML=XML_FILE, - beam_energy=BEAM_ENERGY, - collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", - input: - expand( - config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, - index=range(1,4), - ), - output: - config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, - shell: """ -eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} -""" - -rule low_q2_train_network: - params: - beam_energy=BEAM_ENERGY, - type_name="LowQ2MomentumRegression", - method_name="DNN", - model_dir="LowQ2Model" - input: - train_data=config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, - output: - root_output=config["MODEL_DIRECTORY"]+"trainedData.root", - shell: - """ - root -l -b -q 'TaggerRegressionEICrecon.C++("{input.train_data}", "{output.root_output}", "{params.model_dir}", "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' - """ - diff --git a/benchmarks/LOWQ2/training/local_config.yml b/benchmarks/LOWQ2/training/local_config.yml deleted file mode 100644 index 0b26b11c..00000000 --- a/benchmarks/LOWQ2/training/local_config.yml +++ /dev/null @@ -1,3 +0,0 @@ -SIM_DIRECTORY: "/scratch/EIC/G4out/S3processed/" -RECO_DIRECTORY: "/scratch/EIC/ReconOut/S3processed/" -MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file From a27c89f218578551ab3990c9ddaf6477fc9c86ee Mon 
Sep 17 00:00:00 2001 From: simonge Date: Wed, 22 May 2024 09:12:58 +0100 Subject: [PATCH 45/53] edited while epic-main isn't named right --- benchmarks/LOWQ2/analysis/Snakefile | 45 ++++++++++++++++------------- benchmarks/LOWQ2/local_config.yml | 3 ++ 2 files changed, 28 insertions(+), 20 deletions(-) create mode 100644 benchmarks/LOWQ2/local_config.yml diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile index 7c2b1a30..be66a777 100644 --- a/benchmarks/LOWQ2/analysis/Snakefile +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -6,15 +6,20 @@ tag_params = { # Add more mappings here if needed } +configfile: "../local_config.yml" + EVENT_EXTENSION = ".ab.hepmc3.tree.root" SIM_EXTENSION = ".edm4hep.root" RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" -REMOTE_EVENTS_DIRECTORY = "root://dtn-eic.jlab.org//work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" + +REMOTE_EVENTS_SERVER = "root://dtn-eic.jlab.org/" +REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" + FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" BEAM_ENERGY = "10" -XML_FILE = "/opt/detector/epic-main/share/epic/epic.xml", +XML_FILE = "/opt/detector/epic-24.05.0/share/epic/epic.xml", # Function to check if the remote file exists def remote_file_exists(server,url): @@ -27,21 +32,21 @@ def remote_file_exists(server,url): rule run_simulation_remote: params: XML=XML_FILE, - input=remote_file_exists(REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), + input=lambda wildcards: remote_file_exists(REMOTE_EVENTS_SERVER,REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), output: config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, - shell: """ -npsim \ - --inputFiles {params.input} \ - --outputFile {output[0]} \ - --compactFile {params.XML} \ - --runType run \ - --numberOfEvents 100 \ - --physics.list FTFP_BERT \ - --field.eps_min 5e-06 \ - --field.eps_max 1e-04 \ - 
--physics.rangecut 50 \ -""" + shell: """ + npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 100 \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ + """ rule run_reconstruction: params: @@ -54,14 +59,14 @@ rule run_reconstruction: index=range(1,4), ), output: - config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, - shell: """ -eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} -""" + config["RECO_IN_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, + shell: """ + eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} + """ rule run_benchmarks: input: - config["RECO_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION + config["RECO_IN_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION params: xmlName=XMLFILE, timeBased = lambda wildcards: tag_params[wildcards.tag]["timeBased"], diff --git a/benchmarks/LOWQ2/local_config.yml b/benchmarks/LOWQ2/local_config.yml new file mode 100644 index 00000000..437975b8 --- /dev/null +++ b/benchmarks/LOWQ2/local_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "/scratch/EIC/SimOut/S3in/" +RECO_IN_DIRECTORY: "/scratch/EIC/ReconOut/S3in/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file From 117685eaaaadd6e04cb1986c9f0056636985ac31 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 22 May 2024 09:17:10 +0100 Subject: [PATCH 46/53] removed config.yml file in LOWQ2 as analysis called directly --- benchmarks/LOWQ2/config.yml | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 benchmarks/LOWQ2/config.yml diff --git a/benchmarks/LOWQ2/config.yml 
b/benchmarks/LOWQ2/config.yml deleted file mode 100644 index cf41f5a3..00000000 --- a/benchmarks/LOWQ2/config.yml +++ /dev/null @@ -1,2 +0,0 @@ -include: - - local: 'analysis/config.yml' From 6e6794e77a7e2b8ab258ee667d4fae65a48f850a Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 22 May 2024 09:26:11 +0100 Subject: [PATCH 47/53] Changed config path --- benchmarks/LOWQ2/analysis/config.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/benchmarks/LOWQ2/analysis/config.yml b/benchmarks/LOWQ2/analysis/config.yml index 83291c0e..4d45650a 100644 --- a/benchmarks/LOWQ2/analysis/config.yml +++ b/benchmarks/LOWQ2/analysis/config.yml @@ -1,9 +1,5 @@ -#SIM_DIRECTORY: ${LOCAL_DATA_PATH}/"sim_output" -#RECO_DIRECTORY: ${LOCAL_DATA_PATH}/"results" -#RESULTS_DIRECTORY: ${LOCAL_DATA_PATH}/"results" - bench:LOWQ2_events: extends: .det_benchmark stage: benchmarks script: - - snakemake --cores 8 ${RESULTS_DIRECTORY}/LOWQ2_FormattedPlots.edm4hep.root --configfile config.yml + - snakemake --cores 8 ${RESULTS_DIRECTORY}/LOWQ2_FormattedPlots.edm4hep.root --configfile ../remote_config.yml From cc5e54c3066b8479be3a4ae0b48753d4fd6f9d55 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 22 May 2024 09:27:23 +0100 Subject: [PATCH 48/53] Added gitignores for training directories --- benchmarks/LOWQ2/reconstruction_training/.gitignore | 5 +++++ benchmarks/LOWQ2/signal_training/.gitignore | 5 +++++ 2 files changed, 10 insertions(+) create mode 100644 benchmarks/LOWQ2/reconstruction_training/.gitignore create mode 100644 benchmarks/LOWQ2/signal_training/.gitignore diff --git a/benchmarks/LOWQ2/reconstruction_training/.gitignore b/benchmarks/LOWQ2/reconstruction_training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file diff --git a/benchmarks/LOWQ2/signal_training/.gitignore 
b/benchmarks/LOWQ2/signal_training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/signal_training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file From ebe03d39590f31f89fb7124d3973d117fc5e8b50 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 23 May 2024 21:50:16 +0100 Subject: [PATCH 49/53] Added remote config from other branch, getting confused about what is where --- benchmarks/LOWQ2/remote_config.yml | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 benchmarks/LOWQ2/remote_config.yml diff --git a/benchmarks/LOWQ2/remote_config.yml b/benchmarks/LOWQ2/remote_config.yml new file mode 100644 index 00000000..b793dbac --- /dev/null +++ b/benchmarks/LOWQ2/remote_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "LowQ2_Sim/" +RECO_IN_DIRECTORY: "LowQ2_ReconIn/" +MODEL_DIRECTORY: "LowQ2_Model/" \ No newline at end of file From 550cca51062414aa6e5ea2533b841b39f0bc1022 Mon Sep 17 00:00:00 2001 From: simonge Date: Wed, 29 May 2024 21:36:07 +0100 Subject: [PATCH 50/53] Add training config to gitlab yml --- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8f8c9b2b..855ce817 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -140,7 +140,8 @@ include: # - local: 'benchmarks/pid/config.yml' # - local: 'benchmarks/timing/config.yml' # - local: 'benchmarks/b0_tracker/config.yml' - - local: 'benchmarks/LOWQ2/analysis/config.yml' +# - local: 'benchmarks/LOWQ2/analysis/config.yml' + - local: 'benchmarks/LOWQ2/reconstruction_training/config.yml' deploy_results: stage: deploy From 2ddd9645030c389a8413c67609d5dccdde8c7523 Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 8 Aug 2024 15:11:51 +0100 Subject: [PATCH 51/53] Added equivilent pytorch model compatable with current eic_shell --- .../reconstruction_training/ProcessData.py | 40 +++++++++++++ .../RegressionModel.py | 56 +++++++++++++++++++ 
.../TaggerRegression.py | 41 ++++++++++++++ 3 files changed, 137 insertions(+) create mode 100644 benchmarks/LOWQ2/reconstruction_training/ProcessData.py create mode 100644 benchmarks/LOWQ2/reconstruction_training/RegressionModel.py create mode 100644 benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py diff --git a/benchmarks/LOWQ2/reconstruction_training/ProcessData.py b/benchmarks/LOWQ2/reconstruction_training/ProcessData.py new file mode 100644 index 00000000..166f49c4 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/ProcessData.py @@ -0,0 +1,40 @@ +import uproot +import awkward as ak +import numpy as np +def create_arrays(dataFiles,beamEnergy): + + # List of branches to load + branches = ["TaggerTrackerProjectedTracks","MCParticles","MCScatteredElectrons_objIdx"] + + # Load data from concatenated list of files + # file = uproot.open(dataFiles) + # tree = file['events'] + # data = tree.arrays(branches, library="ak") + data = uproot.concatenate([f"{file}:events" for file in dataFiles], branches, library="ak") + + # Filter events with at least one track + tracks = data["TaggerTrackerProjectedTracks"] + num_tracks = ak.num(tracks["TaggerTrackerProjectedTracks.pdg"]) + filtered_data = data[num_tracks == 1] + + # Filter tracks for intput data + filtered_tracks = filtered_data["TaggerTrackerProjectedTracks"] + ia = filtered_tracks["TaggerTrackerProjectedTracks.loc.a"] + ib = filtered_tracks["TaggerTrackerProjectedTracks.loc.b"] + ipx = np.sin(filtered_tracks["TaggerTrackerProjectedTracks.theta"]) * np.cos(filtered_tracks["TaggerTrackerProjectedTracks.phi"]) + ipy = np.sin(filtered_tracks["TaggerTrackerProjectedTracks.theta"]) * np.sin(filtered_tracks["TaggerTrackerProjectedTracks.phi"]) + + # Filter particle array to select scattered electron for target data + electron_idx = filtered_data["MCScatteredElectrons_objIdx"]["MCScatteredElectrons_objIdx.index"][:,0] + filtered_particles = filtered_data["MCParticles"][np.arange(len(electron_idx)), 
electron_idx] + + # Normalize the target variables + tpx = filtered_particles["MCParticles.momentum.x"]/beamEnergy + tpy = filtered_particles["MCParticles.momentum.y"]/beamEnergy + tpz = filtered_particles["MCParticles.momentum.z"]/beamEnergy + + input_data = ak.concatenate([ia,ib,ipx,ipy], axis=1) + target_data = ak.concatenate([tpx[:,None], tpy[:,None], tpz[:,None]], axis=1) + + return input_data, target_data + diff --git a/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py new file mode 100644 index 00000000..9453acf7 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py @@ -0,0 +1,56 @@ +import torch +import torch.nn as nn +import torch.optim as optim + +class RegressionModel(nn.Module): + def __init__(self): + super(RegressionModel, self).__init__() + self.fc1 = nn.Linear(4, 1024) + self.fc2 = nn.Linear(1024, 128) + self.fc3 = nn.Linear(128, 64) + self.fc4 = nn.Linear(64, 32) + self.fc5 = nn.Linear(32, 3) + + def forward(self, x): + x = torch.relu(self.fc1(x)) + x = torch.relu(self.fc2(x)) + x = torch.relu(self.fc3(x)) + x = torch.relu(self.fc4(x)) + x = self.fc5(x) + return x + +def makeModel(): + # Create the model + model = RegressionModel() + # Define the optimizer + optimizer = optim.Adam(model.parameters(), lr=0.001) + + # Define the loss function + criterion = nn.MSELoss() + + return model, optimizer, criterion + +def trainModel(epochs, input_data, target_data, val_input, val_target): + model, optimizer, criterion = makeModel() + for epoch in range(epochs): + model.train() + # Zero the parameter gradients + optimizer.zero_grad() + + # Forward pass + output = model(input_data) + loss = criterion(output, target_data) + + # Backward pass + loss.backward() + optimizer.step() + + # Validation step + model.eval() + with torch.no_grad(): + val_outputs = model(val_input) + val_loss = criterion(val_outputs, val_target) + + print(f"Epoch [{epoch+1}/{epochs}], Loss: 
{loss.item()}, Val Loss: {val_loss.item()}") + + return model \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py new file mode 100644 index 00000000..d378028c --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py @@ -0,0 +1,41 @@ +# import edm4hep +import torch +import argparse +from ProcessData import create_arrays + +from RegressionModel import makeModel, trainModel + +# Parse arguments +parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') +parser.add_argument('dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('beamEnergy', type=float, help='Electron beam energy') +args = parser.parse_args() +dataFiles = args.dataFiles +beamEnergy = args.beamEnergy + +input_data, target_data = create_arrays(dataFiles, beamEnergy) +print(input_data) +print(target_data) + +torch_input_data = torch.tensor(input_data) +torch_target_data = torch.tensor(target_data) + +# Split data into training and validation sets +validation_fraction = 0.1 +split_index = int(len(torch_input_data) * (1 - validation_fraction)) +val_input_data = torch_input_data[split_index:] +val_target_data = torch_target_data[split_index:] +torch_input_data = torch_input_data[:split_index] +torch_target_data = torch_target_data[:split_index] + +epochs = 1000 +model = trainModel(epochs, torch_input_data, torch_target_data, val_input_data, val_target_data) + +# Save the trained model to ONNX format +dummy_input = torch_input_data[0].unsqueeze(0) # Create a dummy input for the model +onnx_file_path = "regression_model.onnx" +torch.onnx.export(model, dummy_input, onnx_file_path, + input_names=['input'], output_names=['output'], + dynamic_axes={'input': {0: 'batch_size'}, 'output': {0: 'batch_size'}}) + +print(f"Model has been saved to {onnx_file_path}") \ No newline at end of file From 
f37cd3191e8eb3b57103bea9e92dec8e06d5513a Mon Sep 17 00:00:00 2001 From: simonge Date: Thu, 8 Aug 2024 16:29:17 +0100 Subject: [PATCH 52/53] Added normalization and test script --- .../RegressionModel.py | 9 ++++ .../reconstruction_training/TestModel.py | 54 +++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 benchmarks/LOWQ2/reconstruction_training/TestModel.py diff --git a/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py index 9453acf7..62a2a48b 100644 --- a/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py +++ b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py @@ -10,14 +10,22 @@ def __init__(self): self.fc3 = nn.Linear(128, 64) self.fc4 = nn.Linear(64, 32) self.fc5 = nn.Linear(32, 3) + self.mean = torch.tensor([0.0, 0.0, 0.0, 0.0]) + self.std = torch.tensor([1.0, 1.0, 1.0, 1.0]) def forward(self, x): + x = (x-self.mean)/self.std x = torch.relu(self.fc1(x)) x = torch.relu(self.fc2(x)) x = torch.relu(self.fc3(x)) x = torch.relu(self.fc4(x)) x = self.fc5(x) return x + + def adapt(self, input_data): + self.mean = input_data.mean(axis=0) + self.std = input_data.std(axis=0) + def makeModel(): # Create the model @@ -32,6 +40,7 @@ def makeModel(): def trainModel(epochs, input_data, target_data, val_input, val_target): model, optimizer, criterion = makeModel() + model.adapt(input_data) for epoch in range(epochs): model.train() # Zero the parameter gradients diff --git a/benchmarks/LOWQ2/reconstruction_training/TestModel.py b/benchmarks/LOWQ2/reconstruction_training/TestModel.py new file mode 100644 index 00000000..712c6f2d --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/TestModel.py @@ -0,0 +1,54 @@ +import onnxruntime as ort +import argparse +import numpy as np +from ProcessData import create_arrays +import matplotlib.pyplot as plt + +# Parse arguments +parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') 
+parser.add_argument('dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('beamEnergy', type=float, help='Electron beam energy') +args = parser.parse_args() +dataFiles = args.dataFiles +beamEnergy = args.beamEnergy + +input_data, target_data = create_arrays(dataFiles, beamEnergy) + +target_data = np.array(target_data) + +# Load the ONNX model +onnx_file_path = "regression_model.onnx" +session = ort.InferenceSession(onnx_file_path) + +# Run the model on the input data +input_name = session.get_inputs()[0].name +output_name = session.get_outputs()[0].name +input_data = np.array(input_data,dtype=np.float32) +output = session.run([output_name], {input_name: input_data}) +output = np.array(output[0])#*beamEnergy +print(output) + +diff = (target_data - output)/target_data +diffrange = [[-2,2],[-2,2],[-2,2]] +datarange = [[-0.02,0.02],[-0.02,0.02],[-1,0]] + + +# Creates histograms to compare the target and output data +fig, axs = plt.subplots(3, 3, figsize=(6, 12)) +for i in range(3): + # 2D histograms showing trends in the data + axs[0,i].hist2d(target_data[:,i], output[:,i], bins=100, range=[datarange[i],datarange[i]], cmap="viridis", label="Output vs Target") + axs[0,i].set_xlabel(f"Variable {i} Target") + axs[0,i].set_ylabel(f"Variable {i} Output") + + axs[1,i].hist(diff[:,i], bins=100, alpha=0.5, range=diffrange[i], label="Difference") + axs[1,i].set_xlabel(f"Variable {i} Difference") + axs[1,i].set_ylabel("Counts") + + axs[2,i].hist2d(target_data[:,i], diff[:,i], bins=100, range=[datarange[i],diffrange[i]], cmap="viridis", label="Difference vs Target") + axs[2,i].set_xlabel(f"Variable {i} Target") + axs[2,i].set_ylabel(f"Variable {i} Difference") + + +plt.show() +plt.savefig("output_vs_target.png") From 32aaaef87877ad1afcb77bc77cca60eef73105e6 Mon Sep 17 00:00:00 2001 From: simonge Date: Mon, 16 Sep 2024 13:12:53 +0100 Subject: [PATCH 53/53] Updates --- .../LOWQ2/reconstruction_training/Snakefile | 63 +++++++++++-- 
.../TaggerRegression.py | 14 +-- .../reconstruction_training/TestModel.py | 89 ++++++++++++++++--- 3 files changed, 141 insertions(+), 25 deletions(-) diff --git a/benchmarks/LOWQ2/reconstruction_training/Snakefile b/benchmarks/LOWQ2/reconstruction_training/Snakefile index 7fea9478..80b921a0 100644 --- a/benchmarks/LOWQ2/reconstruction_training/Snakefile +++ b/benchmarks/LOWQ2/reconstruction_training/Snakefile @@ -18,13 +18,64 @@ SIM_EXTENSION = ".edm4hep.root" RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" REMOTE_EVENTS_SERVER = "root://dtn-eic.jlab.org/" -REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/18x275/q2_0to1/" +REMOTE_RECO_DIRECTORY = "/work/eic2/EPIC/RECO/24.07.0/epic_craterlake/SIDIS/pythia6-eic/1.0.0/18x275/q2_0to1/" S3_RECON_DIRECTORY = "eictest/EPIC/RECO/24.05.0/epic_craterlake/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" -FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" +FILE_BASE = "pythia_ep_noradcor_18x275_q2_0.000000001_1.0_run" XML_FILE = "epic_edit.xml" -BEAM_ENERGY = "10" +BEAM_ENERGY = "18" + +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return url + except subprocess.CalledProcessError: + return None + +def check_files_exist(file_indices): + existing_files = [] + for fileindex in file_indices: + file_path = REMOTE_EVENTS_DIRECTORY + FILE_BASE + fileindex + EVENT_EXTENSION + if remote_file_exists(REMOTE_EVENTS_SERVER, file_path): + existing_files.append(file_path) + return existing_files + +################################################################### +# Run training on remote XRootD server data +################################################################### +rule run_training: + params: + input = expand( + REMOTE_EVENTS_SERVER + REMOTE_RECO_DIRECTORY + FILE_BASE + "{i}.ab.{j}" + RECO_EXTENSION, + i=range(1, 11), + j=range(1050, 1152) + ), + output: 
+ "regression_model{tag}.onnx", + run: + shell("python TaggerRegression.py --dataFiles {params.input} --beamEnergy 18 --outModelFile {output}") + + +################################################################### +# Test training on remote XRootD server data +################################################################### +rule run_testing: + params: + input = expand( + REMOTE_EVENTS_SERVER + REMOTE_RECO_DIRECTORY + FILE_BASE + "{i}.ab.{j}" + RECO_EXTENSION, + i=range(11, 31), + j=range(1050, 1054) + ), + input: + "regression_model{tag}.onnx", + output: + direct = "output_vs_target{tag}.png", + transformed = "output_vs_target_transformed{tag}.png", + run: + shell("python TestModel.py --modelFile {input} --dataFiles {params.input} --beamEnergy 18 --outGraphFile {output.direct} --outGraphFile2 {output.transformed}") + ################################################################### # Find and download the input files directly from the S3 bucket @@ -40,12 +91,6 @@ rule download_recon_input: ################################################################### # Generate the input files for the training from the event files ################################################################### -def remote_file_exists(server,url): - try: - subprocess.check_output(['xrdfs', server, 'stat', url]) - return url - except subprocess.CalledProcessError: - return None rule run_simulation_tagger: params: diff --git a/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py index d378028c..659eddde 100644 --- a/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py +++ b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py @@ -7,11 +7,13 @@ # Parse arguments parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') -parser.add_argument('dataFiles', type=str, nargs='+', help='Path to the data files') -parser.add_argument('beamEnergy', type=float, help='Electron 
beam energy') +parser.add_argument('--dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('--beamEnergy', type=float, help='Electron beam energy') +parser.add_argument('--outModelFile', type=str, default="regression_model.onnx", help='Output file for the trained model') args = parser.parse_args() dataFiles = args.dataFiles beamEnergy = args.beamEnergy +outModelFile = args.outModelFile input_data, target_data = create_arrays(dataFiles, beamEnergy) print(input_data) @@ -28,14 +30,14 @@ torch_input_data = torch_input_data[:split_index] torch_target_data = torch_target_data[:split_index] -epochs = 1000 +epochs = 5000 model = trainModel(epochs, torch_input_data, torch_target_data, val_input_data, val_target_data) # Save the trained model to ONNX format dummy_input = torch_input_data[0].unsqueeze(0) # Create a dummy input for the model -onnx_file_path = "regression_model.onnx" -torch.onnx.export(model, dummy_input, onnx_file_path, + +torch.onnx.export(model, dummy_input, outModelFile, input_names=['input'], output_names=['output'], dynamic_axes={'input': {0: 'batch_size'}, 'output': {0: 'batch_size'}}) -print(f"Model has been saved to {onnx_file_path}") \ No newline at end of file +print(f"Model has been saved to {outModelFile}") \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/TestModel.py b/benchmarks/LOWQ2/reconstruction_training/TestModel.py index 712c6f2d..b31b3f2f 100644 --- a/benchmarks/LOWQ2/reconstruction_training/TestModel.py +++ b/benchmarks/LOWQ2/reconstruction_training/TestModel.py @@ -6,19 +6,24 @@ # Parse arguments parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') -parser.add_argument('dataFiles', type=str, nargs='+', help='Path to the data files') -parser.add_argument('beamEnergy', type=float, help='Electron beam energy') +parser.add_argument('--modelFile', type=str, default="regression_model.onnx", help='Path to the ONNX model file') 
+parser.add_argument('--dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('--beamEnergy', type=float, help='Electron beam energy') +parser.add_argument('--outGraphFile', type=str, default="output_vs_target.png", help='Output file for the graph') +parser.add_argument('--outGraphFile2', type=str, default="output_vs_target2.png", help='Output file for the graph') args = parser.parse_args() -dataFiles = args.dataFiles -beamEnergy = args.beamEnergy +modelFile = args.modelFile +dataFiles = args.dataFiles +beamEnergy = args.beamEnergy +outGraphFile = args.outGraphFile +outGraphFile2 = args.outGraphFile2 input_data, target_data = create_arrays(dataFiles, beamEnergy) target_data = np.array(target_data) # Load the ONNX model -onnx_file_path = "regression_model.onnx" -session = ort.InferenceSession(onnx_file_path) +session = ort.InferenceSession(modelFile) # Run the model on the input data input_name = session.get_inputs()[0].name @@ -26,15 +31,29 @@ input_data = np.array(input_data,dtype=np.float32) output = session.run([output_name], {input_name: input_data}) output = np.array(output[0])#*beamEnergy -print(output) +#print(output) + +out_theta = np.arctan2(np.sqrt(output[:,0]**2 + output[:,1]**2),output[:,2]) +out_phi = np.arctan2(output[:,1],output[:,0]) +out_mag = np.sqrt(output[:,0]**2 + output[:,1]**2 + output[:,2]**2) +in_theta = np.arctan2(np.sqrt(target_data[:,0]**2 + target_data[:,1]**2),target_data[:,2]) +in_phi = np.arctan2(target_data[:,1],target_data[:,0]) +in_mag = np.sqrt(target_data[:,0]**2 + target_data[:,1]**2 + target_data[:,2]**2) + + +thetadiff = out_theta - in_theta +phidiff = out_phi - in_phi +# Move phidiff to within -pi/2 and pi/2 +phidiff = (phidiff + np.pi) % (2 * np.pi) - np.pi +magdiff = out_mag - in_mag diff = (target_data - output)/target_data -diffrange = [[-2,2],[-2,2],[-2,2]] +diffrange = [[-5,5],[-5,5],[-2,2]] datarange = [[-0.02,0.02],[-0.02,0.02],[-1,0]] # Creates histograms to compare the target and 
output data -fig, axs = plt.subplots(3, 3, figsize=(6, 12)) +fig, axs = plt.subplots(3, 3, figsize=(12, 12)) for i in range(3): # 2D histograms showing trends in the data axs[0,i].hist2d(target_data[:,i], output[:,i], bins=100, range=[datarange[i],datarange[i]], cmap="viridis", label="Output vs Target") @@ -49,6 +68,56 @@ axs[2,i].set_xlabel(f"Variable {i} Target") axs[2,i].set_ylabel(f"Variable {i} Difference") +plt.show() +plt.savefig(outGraphFile) + +# Creates histograms to compare theta, phi and mag target and output data +fig2, axs2 = plt.subplots(3, 3, figsize=(12, 12)) + +thetarange = [np.pi-0.01,np.pi] +phirange = [-np.pi,np.pi] +magrange = [0,1] + +thetadiffrange = [-0.02,0.02] +phidiffrange = [-np.pi,np.pi] +magdiffrange = [-0.2,0.2] + +# 2D histograms showing trends in the data +axs2[0,0].hist2d(out_theta, in_theta, bins=100, range=[thetarange,thetarange], cmap="viridis", label="Output vs Target") +axs2[0,0].set_xlabel("Theta Target") +axs2[0,0].set_ylabel("Theta Output") + +axs2[0,1].hist2d(out_phi, in_phi, bins=100, range=[phirange,phirange], cmap="viridis", label="Output vs Target") +axs2[0,1].set_xlabel("Phi Target") +axs2[0,1].set_ylabel("Phi Output") + +axs2[0,2].hist2d(out_mag, in_mag, bins=100, range=[magrange,magrange], cmap="viridis", label="Output vs Target") +axs2[0,2].set_xlabel("Mag Target") +axs2[0,2].set_ylabel("Mag Output") + +axs2[1,0].hist(thetadiff, bins=100, alpha=0.5, range=thetadiffrange, label="Difference") +axs2[1,0].set_xlabel("Theta Difference") +axs2[1,0].set_ylabel("Counts") + +axs2[1,1].hist(phidiff, bins=100, alpha=0.5, range=phidiffrange, label="Difference") +axs2[1,1].set_xlabel("Phi Difference") +axs2[1,1].set_ylabel("Counts") + +axs2[1,2].hist(magdiff, bins=100, alpha=0.5, range=magdiffrange, label="Difference") +axs2[1,2].set_xlabel("Mag Difference") +axs2[1,2].set_ylabel("Counts") + +axs2[2,0].hist2d(in_theta, thetadiff, bins=100, range=[thetarange,thetadiffrange], cmap="viridis", label="Difference vs Target") 
+axs2[2,0].set_xlabel("Theta Target") +axs2[2,0].set_ylabel("Theta Difference") + +axs2[2,1].hist2d(in_phi, phidiff, bins=100, range=[phirange,phidiffrange], cmap="viridis", label="Difference vs Target") +axs2[2,1].set_xlabel("Phi Target") +axs2[2,1].set_ylabel("Phi Difference") + +axs2[2,2].hist2d(in_mag, magdiff, bins=100, range=[magrange,magdiffrange], cmap="viridis", label="Difference vs Target") +axs2[2,2].set_xlabel("Mag Target") +axs2[2,2].set_ylabel("Mag Difference") plt.show() -plt.savefig("output_vs_target.png") +plt.savefig(outGraphFile2) \ No newline at end of file