diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f5f7b7c5..9e5edd6e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -112,7 +112,7 @@ get_data: - source /opt/detector/epic-main/setup.sh - ls -lrtha - ln -s "${LOCAL_DATA_PATH}/sim_output" sim_output - - ln -s "${LOCAL_DATA_PATH}/datasets/data" data + - ln -s "${LOCAL_DATA_PATH}/datasets/data" data # snakemake support - mkdir "${DETECTOR_CONFIG}" - ln -s "${LOCAL_DATA_PATH}/sim_output" "${DETECTOR_CONFIG}/sim_output" @@ -125,33 +125,35 @@ get_data: - runner_system_failure include: - - local: 'benchmarks/backgrounds/config.yml' - - local: 'benchmarks/backwards_ecal/config.yml' - - local: 'benchmarks/calo_pid/config.yml' - - local: 'benchmarks/ecal_gaps/config.yml' - - local: 'benchmarks/tracking_detectors/config.yml' - - local: 'benchmarks/tracking_performances/config.yml' - - local: 'benchmarks/tracking_performances_dis/config.yml' - - local: 'benchmarks/barrel_ecal/config.yml' - - local: 'benchmarks/barrel_hcal/config.yml' - - local: 'benchmarks/lfhcal/config.yml' - - local: 'benchmarks/zdc/config.yml' - - local: 'benchmarks/zdc_lyso/config.yml' - - local: 'benchmarks/zdc_neutron/config.yml' - - local: 'benchmarks/zdc_photon/config.yml' - - local: 'benchmarks/zdc_pi0/config.yml' - - local: 'benchmarks/material_scan/config.yml' - - local: 'benchmarks/pid/config.yml' - - local: 'benchmarks/timing/config.yml' - - local: 'benchmarks/b0_tracker/config.yml' - - local: 'benchmarks/insert_muon/config.yml' - - local: 'benchmarks/insert_tau/config.yml' - - local: 'benchmarks/zdc_sigma/config.yml' - - local: 'benchmarks/zdc_lambda/config.yml' - - local: 'benchmarks/insert_neutron/config.yml' - - local: 'benchmarks/femc_electron/config.yml' - - local: 'benchmarks/femc_photon/config.yml' - - local: 'benchmarks/femc_pi0/config.yml' +# - local: 'benchmarks/backgrounds/config.yml' +# - local: 'benchmarks/backwards_ecal/config.yml' +# - local: 'benchmarks/calo_pid/config.yml' +# - local: 'benchmarks/ecal_gaps/config.yml' +# - 
local: 'benchmarks/tracking_detectors/config.yml' +# - local: 'benchmarks/tracking_performances/config.yml' +# - local: 'benchmarks/tracking_performances_dis/config.yml' +# - local: 'benchmarks/barrel_ecal/config.yml' +# - local: 'benchmarks/barrel_hcal/config.yml' +# - local: 'benchmarks/lfhcal/config.yml' +# - local: 'benchmarks/zdc/config.yml' +# - local: 'benchmarks/zdc_lyso/config.yml' +# - local: 'benchmarks/zdc_neutron/config.yml' +# - local: 'benchmarks/zdc_photon/config.yml' +# - local: 'benchmarks/zdc_pi0/config.yml' +# - local: 'benchmarks/material_scan/config.yml' +# - local: 'benchmarks/pid/config.yml' +# - local: 'benchmarks/timing/config.yml' +# - local: 'benchmarks/b0_tracker/config.yml' +# - local: 'benchmarks/insert_muon/config.yml' +# - local: 'benchmarks/insert_tau/config.yml' +# - local: 'benchmarks/zdc_sigma/config.yml' +# - local: 'benchmarks/zdc_lambda/config.yml' +# - local: 'benchmarks/insert_neutron/config.yml' +# - local: 'benchmarks/femc_electron/config.yml' +# - local: 'benchmarks/femc_photon/config.yml' +# - local: 'benchmarks/femc_pi0/config.yml' +# - local: 'benchmarks/LOWQ2/analysis/config.yml' + - local: 'benchmarks/LOWQ2/reconstruction_training/config.yml' deploy_results: allow_failure: true stage: deploy diff --git a/benchmarks/LOWQ2/analysis/.gitignore b/benchmarks/LOWQ2/analysis/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h new file mode 100755 index 00000000..13ac9f4d --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2Benchmarks.h @@ -0,0 +1,289 @@ +#include "TString.h" +#include "TCanvas.h" +#include "ROOT/RDataFrame.hxx" +#include +#include "TH1.h" +#include "TFile.h" +#include "LOWQ2hits.h" +#include "LOWQ2acceptance.h" +#include "LOWQ2clusters.h" +#include 
"LOWQ2reconstruction.h" + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; + +using RVecS = ROOT::VecOps::RVec; +using RVecI = ROOT::VecOps::RVec; + +std::map,std::map,std::map>> histMap; + +//--------------------------------------------------------------------------------------------- +// Create dataframe from input file(s) +//--------------------------------------------------------------------------------------------- +RNode initialise( string fileName ){ + + ROOT::RDataFrame d0("events",fileName); + return d0; + +} + +//--------------------------------------------------------------------------------------------- +// Create histograms showing the variation of flux in various system components +//--------------------------------------------------------------------------------------------- +void WriteFluxPlots(H2ResultPtr hist, std::string parts){ + // Make projection of 2D pixel binned histogram + auto nBins = hist->GetNcells(); + TString pixelFluxName = TString(parts)+"Flux"; + TString logPixelFluxName = TString(parts)+"LogFlux"; + + //Get maximum bin content to set range + double maxBinContent = hist->GetMaximum(); + double logMax = log10(maxBinContent); + + //Create pixel flux histogram + TH1D* pixelFlux = new TH1D(pixelFluxName,pixelFluxName,100,0,maxBinContent); + + //Create log pixel flux histogram + TH1D* logPixelFlux = new TH1D(logPixelFluxName,logPixelFluxName,100,0,logMax); + for(int i=0; iFill(hist->GetBinContent(i)); + logPixelFlux->Fill(log10(hist->GetBinContent(i))); + } + pixelFlux->GetXaxis()->SetTitle("N_{hits}"); + logPixelFlux->GetXaxis()->SetTitle("log(N_{hits})"); + pixelFlux->GetYaxis()->SetTitle("N_{pixels}"); + logPixelFlux->GetYaxis()->SetTitle("N_{pixels}"); + + pixelFlux->Write(); + logPixelFlux->Write(); + +} + +//--------------------------------------------------------------------------------------------- 
+//Create histograms showing the variation of resolution accross the kinematic phase space +//--------------------------------------------------------------------------------------------- +void WriteResolutionPlots(H2ResultPtr hist, std::string parts){ + + hist->FitSlicesX(nullptr,0,-1,5); + TH1D* h1 = (TH1D*)gDirectory->Get(TString(hist->GetName())+"_2"); + h1->GetXaxis()->SetTitle(hist->GetXaxis()->GetTitle()); + h1->GetYaxis()->SetTitle(TString(hist->GetYaxis()->GetTitle())+"Resolution"); + + h1->Write(); + +} + + +//--------------------------------------------------------------------------------------------- +// Format and write plots +//--------------------------------------------------------------------------------------------- +void writePlots( TString outName ){ + + TFile* rootfile = new TFile(outName,"RECREATE"); + auto benchmarkDir = rootfile->mkdir("LOWQ2"); + + //Loop over 1D histograms saving + for(auto &[bmkey,hists]: histMap){ + + //Split mapped distributions name into parts + std::stringstream ss(bmkey.Data()); + std::string part; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } + + TDirectory* dir = benchmarkDir; + for (size_t i = 0; i < parts.size(); ++i) { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + + TDirectory* currentDir = gDirectory; + + for(auto &[key,hist]: get<0>(hists)){ + + //Split histogram name into parts + std::stringstream ss2(key.Data()); + std::string part2; + std::vector parts2; + while (std::getline(ss2, part2, '/')) { + parts2.push_back(part2); + } + + TDirectory* dir = currentDir; + for (size_t i = 0; i < parts2.size(); ++i) { + if (i == parts2.size() - 1) { + // This is the last part, write the histogram + hist->SetMinimum(0); + hist->Write(parts2[i].c_str()); + } else { + // This is not the last part, create 
or get the directory + if (!dir->GetDirectory(parts2[i].c_str())) { + dir = dir->mkdir(parts2[i].c_str()); + } else { + dir = dir->GetDirectory(parts2[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); + // hist->Write(key); + } + + //Loop over 2D histograms saving and calculating fluxes as needed + for(auto &[key,hist]: get<1>(hists)){ + + TDirectory* currentDir = gDirectory; + std::stringstream ss(key.Data()); + std::string part; + TDirectory* dir = currentDir; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // If histogram name contains rate or hits then calculate flux + if(parts[i].find("Rate") != std::string::npos || parts[i].find("Hits") != std::string::npos){ + WriteFluxPlots(hist,parts[i].c_str()); + } + + // If histogram name contains Res then create a profile + if(parts[i].find("Res") != std::string::npos){ + WriteResolutionPlots(hist,parts[i].c_str()); + } + + hist->Sumw2(false); + hist->Write(); + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); + } + + //Loop over 3D histograms saving + for(auto &[key,hist]: get<2>(hists)){ + TDirectory* currentDir = gDirectory; + std::stringstream ss(key.Data()); + std::string part; + TDirectory* dir = currentDir; + std::vector parts; + while (std::getline(ss, part, '/')) { + parts.push_back(part); + } + for (size_t i = 0; i < parts.size(); ++i) { + if (i == parts.size() - 1) { + // This is the last part, write the histogram + hist->Write(parts[i].c_str()); + + //Fit histogram z slices and save 2d histogram + hist->FitSlicesZ(nullptr,0,-1,5); + TH2D* h2 = (TH2D*)gDirectory->Get(TString(hist->GetName())+"_2"); + h2->GetXaxis()->SetTitle(hist->GetXaxis()->GetTitle()); + 
h2->GetYaxis()->SetTitle(hist->GetYaxis()->GetTitle()); + h2->GetZaxis()->SetTitle(TString(hist->GetZaxis()->GetTitle())+"Resolution"); + h2->SetTitle(hist->GetTitle()); + h2->Write(); + + } else { + // This is not the last part, create or get the directory + if (!dir->GetDirectory(parts[i].c_str())) { + dir = dir->mkdir(parts[i].c_str()); + } else { + dir = dir->GetDirectory(parts[i].c_str()); + } + dir->cd(); + } + } + currentDir->cd(); + // hist->Write(key); + } + + benchmarkDir->cd(); + + } + + rootfile->Close(); + +} + +//--------------------------------------------------------------------------------------------- +// Create the benchmark plots +//--------------------------------------------------------------------------------------------- +void LOWQ2Benchmarks( string inName, TString outName, dd4hep::Detector& detector, double eventRate ){ + + auto node = initialise( inName ); + + int events = *node.Count(); + double eventWeight = eventRate / events; + + RVecS colNames = node.GetColumnNames(); + + std::string readoutName = "TaggerTrackerHits"; + + if(Any(colNames==readoutName)){ + //----------------------------------------- + // Hit detector IDs + //----------------------------------------- + auto ids = detector.readout(readoutName).idSpec().fields(); + + node = node.Define("hitParticleIndex","_TaggerTrackerHits_MCParticle.index") + .Define("generatorStat","MCParticles.generatorStatus") + .Define("primary",[](RVecI index, RVecI status){ + RVecI result(index.size()); + for (size_t i = 0; i < index.size(); ++i) { + result[i] = status[index[i]] == 1; + } + return result; + },{"hitParticleIndex","generatorStat"}); + + std::string filterName = "TaggerTrackerHitsFilter"; + auto allnode = node.Define(filterName,"TaggerTrackerHits"); + auto primarynode = node.Define(filterName,"TaggerTrackerHits[primary==1]"); + auto secondarynode = node.Define(filterName,"TaggerTrackerHits[primary==0]"); + + for(auto &[key,id]: ids){ + TString colName = key+"ID"; + node = 
node.Define(colName,getSubID(key,detector),{readoutName}); + primarynode = primarynode.Define(colName,getSubID(key,detector),{filterName}); + secondarynode = secondarynode.Define(colName,getSubID(key,detector),{filterName}); + } + + //Create Rate Plots + histMap["Rates/AllHits"] = createHitPlots(node,eventWeight); + histMap["Rates/PrimaryHits"] = createHitPlots(primarynode,eventWeight); + histMap["Rates/SecondaryHits"] = createHitPlots(secondarynode,eventWeight); + + } + + if((Any(colNames==readoutName) || Any(colNames=="InclusiveKinematicsElectron")) && Any(colNames=="MCParticles")){ + histMap["Acceptance"] = createAcceptancePlots(node); + } + + if(Any(colNames=="TaggerTrackerClusterPositions")){ + histMap["Clusters"] = createClusterPlots(node); + } + + if(Any(colNames=="LowQ2TrackParameters") && Any(colNames=="MCParticles")){ + histMap["Reconstruction"] = createReconstructionPlots(node); + } + + writePlots( outName ); + +} \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h new file mode 100644 index 00000000..d7d1037b --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2acceptance.h @@ -0,0 +1,186 @@ +#pragma once + +// #include "functors.h" + +#include "ROOT/RDF/RInterface.hxx" +#include +#include +#include "ROOT/RVec.hxx" + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; +using RVecD = ROOT::VecOps::RVec; +using RVecS = ROOT::VecOps::RVec; + +// Lazy execution methods +std::tuple,std::map,std::map> createAcceptancePlots( RNode d1 ){ + + std::map hHists1D; + std::map hHists2D; + std::map hHists3D; + + int ePrimID = 2; + int pBeamID = 1; + int eBeamID = 0; + + // d1 = d1.Define("primMom","MCParticles[2].momentum.z"); + d1 = d1.Define("eBeam", [eBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector 
(h[eBeamID].momentum.x,h[eBeamID].momentum.y,h[eBeamID].momentum.z,h[eBeamID].mass);}, + {"MCParticles"}) + .Define("pBeam", [pBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[pBeamID].momentum.x,h[pBeamID].momentum.y,h[pBeamID].momentum.z,h[pBeamID].mass);}, + {"MCParticles"}) + .Define("primMom", [ePrimID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[ePrimID].momentum.x,h[ePrimID].momentum.y,h[ePrimID].momentum.z,h[ePrimID].mass);}, + {"MCParticles"}) + .Define("primE","primMom.E()") + .Define("primTheta","1000*(TMath::Pi()-primMom.Theta())") + .Define("primEta","primMom.Eta()") + .Define("scatteredV","eBeam-primMom") + .Define("Q2","-scatteredV.M2()") + .Define("logQ2","log10(Q2)") + .Define("x","Q2/(2*scatteredV.Dot(pBeam))") + .Define("logx","log10(x)") + .Define("primPhi","primMom.Phi()") + .Define("W2","(pBeam+eBeam-primMom).M2()"); + + RVecS colNames = d1.GetColumnNames(); + std::vector> filters; + filters.push_back(std::make_pair("Generated",d1)); + filters.push_back(std::make_pair("Theta<10mrad",d1.Filter("primTheta<10"))); + if(Any(colNames=="TaggerTrackerHits")){ + filters.push_back(std::make_pair("Module1-Hit",d1.Filter("moduleID[moduleID==1].size()>0"))); + filters.push_back(std::make_pair("Module2-Hit",d1.Filter("moduleID[moduleID==2].size()>0"))); + filters.push_back(std::make_pair("Any-Hit",d1.Filter("TaggerTrackerHits.size() > 0"))); + } + + if(Any(colNames=="LowQ2Tracks")){ + filters.push_back(std::make_pair("Reconstructed-Track",d1.Filter("LowQ2Tracks.size()>0"))); + } + + + for (auto& [rawString,filterNode] : filters) + { + + //Histogram names + TString energyHistName = "hEnergy"; + TString thetaHistName = "hTheta"; + TString etaHistName = "hEta"; + TString logQ2HistName = "hlogQ2"; + TString logxHistName = "hlogx"; + TString Q2HistName = "hQ2"; + TString xHistName = "hx"; + TString W2HistName = "hW2"; + TString Q2xHistName = "hQ2x"; + TString logQ2logxHistName = "hlogQ2logx"; + TString WlogQ2HistName = "hWlogQ2"; + 
TString WlogxHistName = "hWlogx"; + TString primElogQ2HistName = "hprimElogQ2"; + TString primEthetaHistName = "hprimEtheta"; + + //Histogram Ranges + double energyMin = 0; + double energyMax = 18; + double thetaMin = 0; + double thetaMax = 12; + double etaMin = -15; + double etaMax = 5; + double logQ2Min = -8.5; + double logQ2Max = 0; + double logxMin = -12; + double logxMax = 0; + double Q2Min = 0; + double Q2Max = 500; + double xMin = 0; + double xMax = 1; + double W2Min = 0; + double W2Max = 500; + + int nBins1D = 400; + int nBins2D = 200; + + //Histogram axis names + TString energyAxisName = "Energy [GeV]"; + TString thetaAxisName = "Theta [mrad]"; + TString etaAxisName = "Eta"; + TString logQ2AxisName = "log(Q2) [GeV]"; + TString logxAxisName = "log(x) [GeV]"; + TString Q2AxisName = "Q2 [GeV]"; + TString xAxisName = "x [GeV]"; + TString W2AxisName = "W2 [GeV]"; + TString Q2xAxisName = Q2AxisName+";"+xAxisName; + TString logQ2logxAxisName = logQ2AxisName+";"+logxAxisName; + TString WlogQ2AxisName = W2AxisName+";"+logQ2AxisName; + TString WlogxAxisName = W2AxisName+";"+logxAxisName; + TString primElogQ2AxisName = energyAxisName+";"+logQ2AxisName; + TString primEthetaAxisName = energyAxisName+";"+thetaAxisName; + + + hHists1D[rawString+"/"+energyHistName] = filterNode.Histo1D({energyHistName, energyHistName + ";Energy [GeV]", nBins1D, energyMin, energyMax}, "primE"); + hHists1D[rawString+"/"+thetaHistName] = filterNode.Histo1D({thetaHistName, thetaHistName + ";Theta [mrad]", nBins1D, thetaMin, thetaMax}, "primTheta"); + hHists1D[rawString+"/"+etaHistName] = filterNode.Histo1D({etaHistName, etaHistName + ";Eta", nBins1D, etaMin, etaMax}, "primEta"); + hHists1D[rawString+"/"+logQ2HistName] = filterNode.Histo1D({logQ2HistName, logQ2HistName + ";log(Q2) [GeV]", nBins1D, logQ2Min, logQ2Max}, "logQ2"); + hHists1D[rawString+"/"+logxHistName] = filterNode.Histo1D({logxHistName, logxHistName + ";log(x) [GeV]", nBins1D, logxMin, logxMax}, "logx"); + 
hHists1D[rawString+"/"+Q2HistName] = filterNode.Histo1D({Q2HistName, Q2HistName + ";Q2 [GeV]", nBins1D, Q2Min, Q2Max}, "Q2"); + hHists1D[rawString+"/"+xHistName] = filterNode.Histo1D({xHistName, xHistName + ";x [GeV]", nBins1D, xMin, xMax}, "x"); + hHists1D[rawString+"/"+W2HistName] = filterNode.Histo1D({W2HistName, W2HistName + ";W2 [GeV]", nBins1D, W2Min, W2Max}, "W2"); + + hHists2D[rawString+"/"+Q2xHistName] = filterNode.Histo2D({Q2xHistName, Q2xHistName + ";Q2 [GeV];x [GeV]", nBins2D, Q2Min, Q2Max, nBins2D, xMin, xMax}, "Q2", "x"); + hHists2D[rawString+"/"+logQ2logxHistName] = filterNode.Histo2D({logQ2logxHistName, logQ2logxHistName + ";log(Q2) [GeV];log(x) [GeV]", nBins2D, logQ2Min, logQ2Max, nBins2D, logxMin, logxMax}, "logQ2", "logx"); + hHists2D[rawString+"/"+WlogQ2HistName] = filterNode.Histo2D({WlogQ2HistName, WlogQ2HistName + ";W2 [GeV];log(Q2) [GeV]", nBins2D, W2Min, W2Max, nBins2D, logQ2Min, logQ2Max}, "W2", "logQ2"); + hHists2D[rawString+"/"+WlogxHistName] = filterNode.Histo2D({WlogxHistName, WlogxHistName + ";W2 [GeV];log(x) [GeV]", nBins2D, W2Min, W2Max, nBins2D, logxMin, logxMax}, "W2", "logx"); + hHists2D[rawString+"/"+primElogQ2HistName] = filterNode.Histo2D({primElogQ2HistName, primElogQ2HistName + ";E [GeV];log(Q2) [GeV]", nBins2D, energyMin, energyMax, nBins2D, logQ2Min, logQ2Max}, "primE", "logQ2"); + hHists2D[rawString+"/"+primEthetaHistName] = filterNode.Histo2D({primEthetaHistName, primEthetaHistName + ";E [GeV];Theta [mrad]", nBins2D, energyMin, energyMax, nBins2D, thetaMin, thetaMax}, "primE", "primTheta"); + + } + + //---------------------------------------------------------------------------------------------------- + // Creates integrated acceptance plots for various filters + //---------------------------------------------------------------------------------------------------- + + //Store the counts for each filter + RVecI filterCounts; + int rawCount = 0; + TString filtersName = ""; + + //Calculate the counts for each filter + for 
(auto& [rawString,filterNode] : filters) + { + filtersName += rawString + "_"; + int counts = *filterNode.Count(); + std::cout << rawString << " " << counts << std::endl; + filterCounts.push_back(counts); + if (rawString == "Generated") { + rawCount = counts; + } + } + + filtersName = filtersName(0,filtersName.Length()-1); + + // Number of filters + int Nfilters = filterCounts.size(); + RVecI entryInt = ROOT::VecOps::Range(Nfilters); + + // Fraction of events passing each filter + RVecD fractions = filterCounts/static_cast(rawCount); + + ROOT::RDataFrame df(Nfilters); + auto dfWithCounts = df.Define("entry", [entryInt](ULong64_t entry) { return (double)entryInt[(int)entry]; }, {"rdfentry_"}) + .Define("count", [filterCounts](ULong64_t entry) { return (double)filterCounts[(int)entry]; }, {"rdfentry_"}) + .Define("fraction", [fractions](ULong64_t entry) { return (double)fractions[(int)entry]; }, {"rdfentry_"}); + + // Use the new column in the histograms + hHists1D["TotalCounts"] = dfWithCounts.Histo1D({"hTotalCounts", "Total Counts;"+filtersName+";Count", Nfilters, 0, static_cast(Nfilters)}, "entry","count"); + hHists1D["IntegratedAcceptance"] = dfWithCounts.Histo1D({"hFractions", "Fractions;"+filtersName+";Fraction", Nfilters, 0, static_cast(Nfilters)}, "entry","fraction"); + hHists1D["TotalCounts"]->Sumw2(false); + hHists1D["IntegratedAcceptance"]->Sumw2(false); + + return {hHists1D,hHists2D,hHists3D}; +} \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/LOWQ2clusters.h b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h new file mode 100644 index 00000000..2022cc40 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2clusters.h @@ -0,0 +1,21 @@ +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; + +// Lazy execution methods +std::tuple,std::map,std::map> createClusterPlots( RNode d1 ){ + + 
std::map hHists1D; + std::map hHists2D; + std::map hHists3D; + + // auto d2 = d1.Define("ClusterSize",[](RVecI clu){return clu.size();},{"TaggerTrackerClusterPositions_0.index"}); + auto d2 = d1.Define("ClusterSize","TaggerTrackerClusterPositions.rawHits_end-TaggerTrackerClusterPositions.rawHits_begin"); + hHists1D["hClusterSize"] = d2.Histo1D({"hClusterSize","hClusterSize",100,0,100}, "ClusterSize"); + + return {hHists1D,hHists2D,hHists3D}; + +} diff --git a/benchmarks/LOWQ2/analysis/LOWQ2hits.h b/benchmarks/LOWQ2/analysis/LOWQ2hits.h new file mode 100644 index 00000000..ac3a26df --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2hits.h @@ -0,0 +1,127 @@ +#pragma once + +#include "functors.h" + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; +using RVecD = ROOT::VecOps::RVec; + +// Lazy execution methods +std::tuple,std::map,std::map> createHitPlots( RNode d2, double eventWeight ){ + + int xChip = 448; + int yChip = 512; + int xChips = 6; + int yChips = 6; + std::map xPixelMin = {{1,-xChip*xChips/2},{2,-xChip*xChips/2}}; + std::map xPixelMax = {{1, xChip*xChips/2},{2, xChip*xChips/2}}; + + std::map yPixelMin = {{1,-yChip*yChips/2},{2,-yChip*yChips/2}}; + std::map yPixelMax = {{1, yChip*yChips/2},{2, yChip*yChips/2}}; + + + std::map hHists1D; + std::map hHists2D; + std::map hHists3D; + + d2 = d2.Define("boardID", [xChip](RVecI xID){ return (xID + 3*xChip) / (2 * xChip); },{"xID"}) + .Define("xChipID", [xChip](RVecI xID){ return (xID + 3*xChip) / (xChip); },{"xID"}) + .Define("xColumnID", [xChip](RVecI xID){ return (xID + 3*xChip) / 2; },{"xID"}) + .Define("yChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (yChip); },{"yID"}) + .Define("yHalfChipID", [yChip](RVecI yID){ return (yID + 2.75*yChip) / (0.5*yChip); },{"yID"}) + .Define("eventWeight", [eventWeight](){return eventWeight;},{}); + + + 
// Plot number of hits per moduleID + hHists1D["hmoduleIDRate"] = d2.Histo1D({"hmoduleIDRate", "hmoduleID;Module ID;Rate [Hz]", 2, 1, 3 }, "moduleID", "eventWeight"); + + // Module Loop + for(int i=1; i<=2; i++){ + + double xMin = xPixelMin[i]; + double xMax = xPixelMax[i]; + double yMin = yPixelMin[i]; + double yMax = yPixelMax[i]; + int xRange = xMax-xMin; + int yRange = yMax-yMin; + + TString modNum = std::to_string(i); + TString moduleTag = "module"+modNum; + TString layerName = "layerID"+moduleTag; + TString layerHistName = "h"+layerName+"Rate"; + + + auto d3 = d2.Define("ModuleFilter",[i](RVecI modID){return modID==i;},{"moduleID"}) + .Define(layerName,"layerID[ModuleFilter]"); + + + hHists1D[moduleTag+"/"+layerHistName] = d3.Histo1D({layerHistName, layerHistName+";Layer ID;Rate [Hz]", 4, 0, 4 }, layerName, "eventWeight"); + + // Layer Loop + for(int j=0; j<=3; j++){ + + TString layerNum = std::to_string(j); + TString layerTag = "layer"+layerNum; + + TString xName = "xPixel"; + TString xHistName = "h"+xName+"Rate"; + + TString yName = "yPixel"; + TString yHistName = "h"+yName+"Rate"; + + TString xChipName = "xChip"; + TString yChipName = "yChip"; + + TString yHalfChipName = "yHalfChip"; + + TString xyHistName = "h"+xName+yName+"Rate"; + + TString xColumnName = "xColumn"; + + TString boardName = "board"; + + std::vector layerSizeInput = {xName.Data()}; + TString layerSizeName = "HitsPerEvent"; + TString sizeHistName = "h"+layerSizeName; + + auto d4 = d3.Define("LayerFilter",[j](RVecI layID){return layID==j;},{"layerID"}) + .Define(xName,"xID[LayerFilter&&ModuleFilter]") + .Define(yName,"yID[LayerFilter&&ModuleFilter]") + .Define(boardName,"boardID[LayerFilter&&ModuleFilter]") + .Define(xChipName,"xChipID[LayerFilter&&ModuleFilter]") + .Define(yChipName,"yChipID[LayerFilter&&ModuleFilter]") + .Define(yHalfChipName,"yHalfChipID[LayerFilter&&ModuleFilter]") + .Define(xColumnName,"xColumnID[LayerFilter&&ModuleFilter]") + .Define(layerSizeName,[](RVecI lay){return 
lay.size();},layerSizeInput); + + + hHists1D[moduleTag+"/"+layerTag+"/"+xHistName] = d4.Histo1D({xHistName, xHistName+";x pixel column;Rate [Hz]", xRange, xMin, xMax }, xName, "eventWeight"); + + hHists1D[moduleTag+"/"+layerTag+"/"+yHistName] = d4.Histo1D({yHistName, yHistName+";y pixel column;Rate [Hz]", yRange, yMin, yMax }, yName, "eventWeight"); + + hHists2D[moduleTag+"/"+layerTag+"/"+xyHistName] = d4.Histo2D({xyHistName, xyHistName+";x pixel;y pixel;Rate [Hz]", xRange, xMin, xMax, yRange, yMin, yMax}, xName, yName, "eventWeight"); + + hHists1D[moduleTag+"/"+layerTag+"/"+sizeHistName] = d4.Histo1D({sizeHistName, sizeHistName+";hits per event", 100, 0, 100 }, layerSizeName ); + + // Plot number of hits per boardID for each layer + TString boardIDHistName = "hBoardIDRate"; + hHists1D[moduleTag+"/"+layerTag+"/"+boardIDHistName] = d4.Histo1D({boardIDHistName, boardIDHistName+";board ID;Rate [Hz]", 3, 0, 3}, boardName, "eventWeight"); + + // Plot number of hits per chipID for each layer + TString chipIDHistName = "hChipIDRate"; + hHists2D[moduleTag+"/"+layerTag+"/"+chipIDHistName] = d4.Histo2D({chipIDHistName, chipIDHistName+";x Chip;y Chip;Rate [Hz]", 6, 0, 6, 6, 0, 6}, xChipName, yChipName, "eventWeight"); + + // Plot number of hits per chipID for each layer + TString xColumnIDHistName = "hxColumnIDRate"; + hHists2D[moduleTag+"/"+layerTag+"/"+xColumnIDHistName] = d4.Histo2D({xColumnIDHistName, xColumnIDHistName+";x Column;y Half Chip;Rate [Hz]", 3*xChip, 0, 3.0*xChip, 12, 0, 12}, xColumnName, yHalfChipName, "eventWeight"); + + } + } + + return {hHists1D,hHists2D,hHists3D}; + +} diff --git a/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h new file mode 100644 index 00000000..01b0dfa2 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/LOWQ2reconstruction.h @@ -0,0 +1,150 @@ +#pragma once + +// Define alias +using RNode = ROOT::RDF::RNode; +using H1ResultPtr = ROOT::RDF::RResultPtr; +using H2ResultPtr = 
ROOT::RDF::RResultPtr; +using H3ResultPtr = ROOT::RDF::RResultPtr; +using RVecI = ROOT::VecOps::RVec; + +// Lazy execution methods +std::tuple,std::map,std::map> createReconstructionPlots( RNode d1 ){ + + int ePrimID = 2; + int pBeamID = 1; + int eBeamID = 0; + + std::map hHists1D; + std::map hHists2D; + std::map hHists3D; + + d1 = d1.Filter("LowQ2TrackParameters.size()>0") + .Define("eBeam", [eBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[eBeamID].momentum.x,h[eBeamID].momentum.y,h[eBeamID].momentum.z,h[eBeamID].mass);}, + {"MCParticles"}) + .Define("pBeam", [pBeamID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[pBeamID].momentum.x,h[pBeamID].momentum.y,h[pBeamID].momentum.z,h[pBeamID].mass);}, + {"MCParticles"}) + .Define("primMom", [ePrimID] + (std::vector h) + {return ROOT::Math::PxPyPzMVector (h[ePrimID].momentum.x,h[ePrimID].momentum.y,h[ePrimID].momentum.z,h[ePrimID].mass);}, + {"MCParticles"}) + .Define("primE","primMom.E()") + .Define("primPx","primMom.Px()") + .Define("primPy","primMom.Py()") + .Define("primPz","primMom.Pz()") + .Define("primTheta","(TMath::Pi()-primMom.Theta())") + .Define("primTheta_mrad","1000*primTheta") + .Define("primEta","primMom.Eta()") + .Define("scatteredV","eBeam-primMom") + .Define("Q2","-scatteredV.M2()") + .Define("logQ2","log10(Q2)") + .Define("x","Q2/(2*scatteredV.Dot(pBeam))") + .Define("logx","log10(x)") + .Define("primPhi","primMom.Phi()") + .Define("primPhi_deg","primPhi * TMath::RadToDeg()") + .Define("W2","(pBeam+eBeam-primMom).M2()") + .Define("reconTheta","(double)(TMath::Pi()-LowQ2TrackParameters[0].theta)") + .Define("reconTheta_mrad","1000.0*reconTheta") + .Define("reconPhi","(double)LowQ2TrackParameters[0].phi") + .Define("reconPhi_deg","reconPhi * TMath::RadToDeg()") + .Define("reconP","(18./10.)*(double)(-1/(LowQ2TrackParameters[0].qOverP))") + .Define("reconMom",[](double p, double theta, double phi){return 
ROOT::Math::PxPyPzMVector(p*sin(theta)*cos(phi),p*sin(theta)*sin(phi),p*cos(theta),0.000511);},{"reconP","reconTheta","reconPhi"}) + .Define("reconE","reconMom.E()") + .Define("reconScatteredV","eBeam-reconMom") + .Define("reconQ2","-reconScatteredV.M2()") + .Define("reconlogQ2","log10(reconQ2)") + .Define("reconX","reconQ2/(2*reconScatteredV.Dot(pBeam))") + .Define("reconlogx","log10(reconX)") + .Define("reconW2","(pBeam+eBeam-reconMom).M2()") + .Define("reconPx","(double)reconMom.Px()") + .Define("reconPy","(double)reconMom.Py()") + .Define("reconPz","(double)reconMom.Pz()") + .Define("thetaRes","(reconTheta-primTheta)") + .Define("thetaRes_mrad","1000.0*thetaRes") + .Define("phiRes","reconPhi_deg-primPhi_deg") + .Define("ERes","(reconMom.E()-primE)/primE") + .Define("logQ2Res","reconlogQ2-logQ2") + .Define("XRes","reconX-x") + .Define("W2Res","reconW2-W2"); + + + + double thetaMin = 0; // mrad + double thetaMax = 12; // mrad + double phiMin = -180; // deg + double phiMax = 180; // deg + double pxMin = -0.2; // GeV + double pxMax = 0.2; // GeV + double pyMin = -0.2; // GeV + double pyMax = 0.2; // GeV + double pzMin = -18; // GeV + double pzMax = 0; // GeV + double eMin = 0; // GeV + double eMax = 18; // GeV + double logQ2Min = -8.5; // GeV^2 + double logQ2Max = 0; // GeV^2 + double logxMin = -11; + double logxMax = 0; + double w2Min = 0; // GeV^2 + double w2Max = 0.5; // GeV^2 + double thetaResMin = -5; // mrad + double thetaResMax = 5; // mrad + double phiResMin = -180; // deg + double phiResMax = 180; // deg + double eResMin = -0.1; // GeV + double eResMax = 0.1; // GeV + double q2ResMin = -0.01; // GeV^2 + double q2ResMax = 0.01; // GeV^2 + double xResMin = -0.1; + double xResMax = 0.1; + double w2ResMin = -0.1; // GeV^2 + double w2ResMax = 0.1; // GeV^2 + + int bins1D = 400; + int bins2D = 200; + int bins3D = 50; + int bins3DRes = 100; + + hHists2D["reconThetaVsPrimTheta"] = d1.Histo2D({"reconThetaVsPrimTheta","reconThetaVsPrimTheta;#theta_{prim} 
[mrad];#theta_{recon} [mrad]",bins2D,thetaMin,thetaMax,bins2D,thetaMin,thetaMax},"primTheta_mrad","reconTheta_mrad"); + hHists2D["reconPhiVsPrimPhi"] = d1.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi_deg","reconPhi_deg"); + + hHists2D["reconPxVsPrimPx"] = d1.Histo2D({"reconPxVsPrimPx","reconPxVsPrimPx;p_{x,prim} [GeV];p_{x,recon} [GeV]",bins2D,pxMin,pxMax,bins2D,pxMin,pxMax},"primPx","reconPx"); + hHists2D["reconPyVsPrimPy"] = d1.Histo2D({"reconPyVsPrimPy","reconPyVsPrimPy;p_{y,prim} [GeV];p_{y,recon} [GeV]",bins2D,pyMin,pyMax,bins2D,pyMin,pyMax},"primPy","reconPy"); + hHists2D["reconPzVsPrimPz"] = d1.Histo2D({"reconPzVsPrimPz","reconPzVsPrimPz;p_{z,prim} [GeV];p_{z,recon} [GeV]",bins2D,pzMin,pzMax,bins2D,pzMin,pzMax},"primPz","reconPz"); + hHists2D["reconEVsPrimE"] = d1.Histo2D({"reconEVsPrimE","reconEVsPrimE;E_{prim} [GeV];E_{recon} [GeV]",bins2D,eMin,eMax,bins2D,eMin,eMax},"primE","reconE"); + hHists2D["reconQ2VsPrimQ2"] = d1.Histo2D({"reconQ2VsPrimQ2","reconQ2VsPrimQ2;log(Q^{2}_{prim}) [GeV^{2}];log(Q^{2}_{recon}) [GeV^{2}]",bins2D,logQ2Min,logQ2Max,bins2D,logQ2Min,logQ2Max},"logQ2","reconlogQ2"); + hHists2D["reconXVsPrimX"] = d1.Histo2D({"reconXVsPrimX","reconXVsPrimX;log(x_{prim});log(x_{recon})",bins2D,logxMin,logxMax,bins2D,logxMin,logxMax},"logx","reconlogx"); + hHists2D["reconW2VsPrimW2"] = d1.Histo2D({"reconW2VsPrimW2","reconW2VsPrimW2;W^{2}_{prim} [GeV^{2}];W^{2}_{recon} [GeV^{2}]",bins2D,w2Min,w2Max,bins2D,w2Min,w2Max},"W2","reconW2"); + + hHists1D["thetaRes"] = d1.Histo1D({"thetaRes","thetaRes;#theta_{recon}-#theta_{prim} [mrad]",bins1D,thetaResMin,thetaResMax},"thetaRes_mrad"); + hHists1D["phiRes"] = d1.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); + hHists1D["ERes"] = d1.Histo1D({"ERes","ERes;E_{recon}-E_{prim} [GeV]",bins1D,eResMin,eResMax},"ERes"); + hHists1D["Q2Res"] = 
d1.Histo1D({"Q2Res","Q2Res;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}]",bins1D,q2ResMin,q2ResMax},"logQ2Res"); + hHists1D["XRes"] = d1.Histo1D({"XRes","XRes;log(x_{recon})-log(x_{prim})",bins1D,xResMin,xResMax},"XRes"); + hHists1D["W2Res"] = d1.Histo1D({"W2Res","W2Res;W^{2}_{recon}-W^{2}_{prim} [GeV^{2}]",bins1D,w2ResMin,w2ResMax},"W2Res"); + + hHists2D["thetaResVsE"] = d1.Histo2D({"thetaResVsE", "thetaResVsE;#theta_{recon}-#theta_{prim} [mrad];E_{prim} [GeV]", bins2D, thetaResMin, thetaResMax, bins2D, eMin, eMax}, "thetaRes_mrad", "primE"); + hHists2D["phiResVsE"] = d1.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); + hHists2D["EResVsE"] = d1.Histo2D({"EResVsE", "EResVsE;E_{recon}-E_{prim} [GeV];E_{prim} [GeV]", bins2D, eResMin, eResMax, bins2D, eMin, eMax}, "ERes", "primE"); + hHists2D["Q2ResVsE"] = d1.Histo2D({"Q2ResVsE", "Q2ResVsE;log(Q^{2}_{recon})-log(Q^{2}_{prim}) [GeV^{2}];E_{prim} [GeV]", bins2D, q2ResMin, q2ResMax, bins2D, eMin, eMax}, "logQ2Res", "primE"); + + hHists2D["phiResVsTheta"] = d1.Histo2D({"phiResVsTheta", "phiResVsTheta;#phi_{recon}-#phi_{prim} [rad];#theta_{prim} [mrad]", bins2D, phiResMin, phiResMax, bins2D, thetaMin, thetaMax}, "phiRes", "primTheta_mrad"); + + //3D histograms to extract the resolution as a function of E and theta + hHists3D["thetaResVsETheta"] = d1.Histo3D({"thetaResVsETheta","thetaResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];#theta_{recon}-#theta_{prim} [mrad]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,thetaResMin,thetaResMax},"primE","primTheta_mrad","thetaRes_mrad"); + hHists3D["phiResVsETheta"] = d1.Histo3D({"phiResVsETheta","phiResVsETheta;E_{prim} [GeV];#theta_{prim} [mrad];#phi_{recon}-#phi_{prim} [rad]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,phiResMin,phiResMax},"primE","primTheta_mrad","phiRes"); + hHists3D["EResVsETheta"] = d1.Histo3D({"EResVsETheta","EResVsETheta;E_{prim} 
[GeV];#theta_{prim} [mrad];E_{recon}-E_{prim} [GeV]",bins3D,eMin,eMax,bins3D,thetaMin,thetaMax,bins3DRes,eResMin,eResMax},"primE","primTheta_mrad","ERes"); + + auto d2 = d1.Filter("reconTheta_mrad>1"); + + hHists1D["thetacut/phiRes"] = d2.Histo1D({"phiRes","phiRes;#phi_{recon}-#phi_{prim} [rad]",bins1D,phiResMin,phiResMax},"phiRes"); + hHists2D["thetacut/phiResVsE"] = d2.Histo2D({"phiResVsE", "phiResVsE;#phi_{recon}-#phi_{prim} [rad];E_{prim} [GeV]", bins2D, phiResMin, phiResMax, bins2D, eMin, eMax}, "phiRes", "primE"); + hHists2D["thetacut/reconPhiVsPrimPhi"] = d2.Histo2D({"reconPhiVsPrimPhi","reconPhiVsPrimPhi;#phi_{prim} [deg];#phi_{recon} [deg]",bins2D,phiMin,phiMax,bins2D,phiMin,phiMax},"primPhi_deg","reconPhi_deg"); + + return {hHists1D, hHists2D, hHists3D}; + +} + diff --git a/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C new file mode 100644 index 00000000..3bc5b380 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/PostprocessLOWQ2.C @@ -0,0 +1,533 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +//---------------------------------------------------------------------- +// Set global plot format variables +//---------------------------------------------------------------------- +void SetStyle() { + // Set global plot format variables + gStyle->SetOptStat(0); + gStyle->SetPadLeftMargin(0.15); + gStyle->SetPadRightMargin(0.18); + gStyle->SetPadBottomMargin(0.12); + gStyle->SetTitleSize(0.04, "XYZ"); + gStyle->SetTitleOffset(4.0, "Z"); + +} + +//---------------------------------------------------------------------- +// Create acceptance plots +//---------------------------------------------------------------------- +TH1* AcceptancePlot(TDirectory* inputDir, TString ReconHistName, TString AllHistName, TString Tag="Quasi-Real") { + + // Read in the plots from the input file + TH1* ReconPlot = (TH1*)inputDir->Get(ReconHistName); + TH1* AllPlot = (TH1*)inputDir->Get(AllHistName); + + // 
Check plots exist + if (!ReconPlot || !AllPlot) { + std::cout << "Error: plots "<< ReconHistName <<" and/or "<< AllHistName <<" not found in input file" << std::endl; + return nullptr; + } + + //Divide the reconstructed plot by the all plot + ReconPlot->Divide(AllPlot); + + return ReconPlot; + +} + +//---------------------------------------------------------------------- +// Create rate plots +//---------------------------------------------------------------------- +TH2* RatePlot(TDirectory* inputDir, int Module, int Layer, TString Tag="Quasi-Real", TString inTag="AllHits") { + + TString histName = inTag+"/module"+std::to_string(Module)+"/layer"+std::to_string(Layer)+"/hxPixelyPixelRate"; + + // Read in the plots from the input file + TH2* RatePlot = (TH2*)inputDir->Get(histName); + // Check plots exist + if (!RatePlot) { + std::cout << "Error: plot "<< histName <<" not found in input file" << std::endl; + return nullptr; + } + + // Format the plot + int rebin = 32; + RatePlot->Rebin2D(rebin,rebin); + RatePlot->Scale(1.0/(rebin*rebin)); + TString title = "Tagger module "+std::to_string(Module)+", layer "+std::to_string(Layer)+" - Mean "+Tag+" rate per "+std::to_string(rebin)+"x"+std::to_string(rebin)+" pixel group"; + RatePlot->SetTitle(title); + RatePlot->GetXaxis()->SetTitle("x pixel"); + RatePlot->GetYaxis()->SetTitle("y pixel"); + RatePlot->GetZaxis()->SetTitle("Average Rate [Hz/55 #mum pixel]"); + + + RatePlot->SetStats(0); + + return RatePlot; + +} + +//---------------------------------------------------------------------- +// Create formatted acceptance plots on a canvas +//---------------------------------------------------------------------- +void FormatAcceptancePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + //---------------------------------------------------------------------- + // E-Q2 acceptance plot + //---------------------------------------------------------------------- + TString ReconEQ2Name = 
"Reconstructed-Track/hprimElogQ2"; + TString AllEQ2Name = "Generated/hprimElogQ2"; + + TH1* AcceptEQ2 = AcceptancePlot(inputDir,ReconEQ2Name,AllEQ2Name); + + TCanvas* canvasEQ2 = new TCanvas("AcceptanceEQ2Canvas", "AcceptanceEQ2Canvas", 1600, 1200); + + // Draw the plots on the canvas + canvasEQ2->cd(); + + TString title = "Acceptance as a function of scattered electron energy and reaction log_{10}(Q^{2})"; + AcceptEQ2->SetTitle(title); + AcceptEQ2->GetXaxis()->SetTitle("E_{e'} [GeV]"); + AcceptEQ2->GetYaxis()->SetTitle("log_{10}(Q^{2}) [GeV^{2}]"); + AcceptEQ2->GetZaxis()->SetTitle("Acceptance"); + + AcceptEQ2->SetStats(0); + AcceptEQ2->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvasEQ2); + + // Clean up + delete canvasEQ2; + + //---------------------------------------------------------------------- + // E-Theta acceptance plot + //---------------------------------------------------------------------- + TString ReconEThetaName = "Reconstructed-Track/hprimEtheta"; + TString AllEThetaName = "Generated/hprimEtheta"; + + TH1* AcceptETheta = AcceptancePlot(inputDir,ReconEThetaName,AllEThetaName); + + TCanvas* canvasETheta = new TCanvas("AcceptanceEThetaCanvas", "AcceptanceEThetaCanvas", 1600, 1200); + + // Draw the plots on the canvas + canvasETheta->cd(); + + title = "Acceptance as a function of scattered electron energy and theta"; + AcceptETheta->SetTitle(title); + AcceptETheta->GetXaxis()->SetTitle("E_{e'} [GeV]"); + AcceptETheta->GetYaxis()->SetTitle("#theta_{e'} [mrad]"); + AcceptETheta->GetZaxis()->SetTitle("Acceptance"); + + AcceptETheta->SetStats(0); + AcceptETheta->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvasETheta); + + // Clean up + delete canvasETheta; + + //---------------------------------------------------------------------- + // Integrated acceptance plot + //---------------------------------------------------------------------- + TString ReconIntegratedName = 
"IntegratedAcceptance"; + + // Read in the plots from the input file + TH1* IntegratedAcceptancePlot = (TH1*)inputDir->Get(ReconIntegratedName); + // Check plots exist + if (!IntegratedAcceptancePlot) { + std::cout << "Error: plot "<< ReconIntegratedName <<" not found in input file" << std::endl; + return; + } + + TCanvas* canvasIntegratedAcceptance = new TCanvas("IntegratedAcceptance", "IntegratedAcceptance", 1600, 1200); + + //Get xAxis title + TString xAxisTitle = IntegratedAcceptancePlot->GetXaxis()->GetTitle(); + //Break up xAxis title into words by "_" + TObjArray* xAxisTitleWords = xAxisTitle.Tokenize("_"); + //SetBinLabel for each bin + for (int i=1; i<=xAxisTitleWords->GetEntries(); i++) { + IntegratedAcceptancePlot->GetXaxis()->SetBinLabel(i, xAxisTitleWords->At(i-1)->GetName()); + } + + // Format the plot + IntegratedAcceptancePlot->SetTitle("Integrated acceptance"); + IntegratedAcceptancePlot->GetXaxis()->SetTitle(""); + IntegratedAcceptancePlot->GetYaxis()->SetTitle("Acceptance"); + + IntegratedAcceptancePlot->SetStats(0); + IntegratedAcceptancePlot->Draw(); + + + // Get the number of bins in the histogram + int nBins = IntegratedAcceptancePlot->GetNbinsX(); + + // Create a TText object to draw the text + TText t; + t.SetTextAlign(22); // Center the text + t.SetTextSize(0.02); // Set the text size + + // Loop over the bins + for (int i = 1; i <= nBins; ++i) { + // Get the bin content + double binContent = IntegratedAcceptancePlot->GetBinContent(i); + + // Get the bin center + double binCenter = IntegratedAcceptancePlot->GetBinCenter(i); + + // Draw the bin content at the bin center + t.DrawText(binCenter, binContent+0.02, Form("%.1f %s", binContent*100,"%")); + } + + // Update the canvas to show the changes + gPad->Update(); + + // Save the canvas to output file + outputFile->WriteTObject(canvasIntegratedAcceptance); + + // Clean up + delete canvasIntegratedAcceptance; + +} + +//---------------------------------------------------------------------- +// 
Create formatted rate plots on a canvas +//---------------------------------------------------------------------- +void FormatRatePlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + + TCanvas* canvas = new TCanvas("RateCanvas", "RateCanvas", 3200, 1200); + canvas->Divide(2,1); + + // Read in the plots from the input file + TH2* RatePlot1_0 = RatePlot(inputDir,1,0,Tag); + TH2* RatePlot2_0 = RatePlot(inputDir,2,0,Tag); + + // Draw the plots on the canvas + canvas->cd(1); + gPad->SetLogz(); + RatePlot1_0->Draw("colz"); + + canvas->cd(2); + gPad->SetLogz(); + RatePlot2_0->Draw("colz"); + + // Save the canvas to output file + outputFile->WriteTObject(canvas); + + // Clean up + delete canvas; + + TCanvas* canvas2 = new TCanvas("RateCanvasOverlay", "RateCanvasOverlay", 3200, 1200); + canvas2->Divide(2,1); + + // Draw the plots on the canvas + canvas2->cd(1); + gPad->SetLogz(); + RatePlot1_0->Draw("colz"); + + //Add a grid ontop of the histogram showing 448x512 pixel chip boundaries + double xMin = RatePlot1_0->GetXaxis()->GetXmin(); + double xMax = RatePlot1_0->GetXaxis()->GetXmax(); + double yMin = RatePlot1_0->GetYaxis()->GetXmin()+512/4; + double yMax = RatePlot1_0->GetYaxis()->GetXmax()+512/4; + std::vector verticalLineWidths = {3,1,3,1,3,1,3}; + std::vector horizontalLineWidths = {3,1,2,1,2,1,2,1,2,1,2,1,3}; + std::vector horizontalLineStyles = {1,7,1,7,1,7,1,7,1,7,1,7,1}; + std::vector verticalLineColors = {kRed,kBlue,kRed,kBlue,kRed,kBlue,kRed}; + std::vector horizontalLineColors = {kRed,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kBlue,kRed}; + //Vertical lines + for (int i=0; i<=6; i++) { + TLine* line = new TLine(xMin+i*448, yMin, xMin+i*448, yMax); + line->SetLineColor(verticalLineColors[i]); + line->SetLineWidth(verticalLineWidths[i]); + line->Draw(); + } + //Horizontal lines + for (int i=0; i<=12; i++) { + TLine* line = new TLine(xMin, yMin+i*256, xMax, yMin+i*256); + line->SetLineColor(horizontalLineColors[i]); + 
line->SetLineWidth(horizontalLineWidths[i]); + line->SetLineStyle(horizontalLineStyles[i]); + line->Draw(); + } + + gPad->Update(); + + canvas2->cd(2); + gPad->SetLogz(); + RatePlot2_0->Draw("colz"); + + //Add a grid on top of the histogram showing 448x512 pixel chip boundaries + xMin = RatePlot2_0->GetXaxis()->GetXmin(); + xMax = RatePlot2_0->GetXaxis()->GetXmax(); + yMin = RatePlot2_0->GetYaxis()->GetXmin()+512/4; + yMax = RatePlot2_0->GetYaxis()->GetXmax()+512/4; + //Vertical lines + for (int i = 0; i <= 6; i++) { + TLine* line = new TLine(xMin+i*448, yMin, xMin+i*448, yMax); + line->SetLineColor(verticalLineColors[i]); + line->SetLineWidth(verticalLineWidths[i]); + line->Draw(); + } + //Horizontal lines + for (int i = 0; i <= 12; i++) { + TLine* line = new TLine(xMin, yMin+i*256, xMax, yMin+i*256); + line->SetLineColor(horizontalLineColors[i]); + line->SetLineWidth(horizontalLineWidths[i]); + line->SetLineStyle(horizontalLineStyles[i]); + line->Draw(); + } + + gPad->Update(); + + // Save the overlay canvas (canvas2) to the output file; the first canvas was already written and deleted above, so writing/deleting "canvas" here was a use-after-free + outputFile->WriteTObject(canvas2); + + // Clean up + delete canvas2; + + // Canvas showing primary and secondary hits + // Todo: Neaten up + TCanvas* canvas3 = new TCanvas("PrimarySecondary-RateCanvas", "PrimarySecondary-RateCanvas", 2400, 2400); + canvas3->Divide(2,2); + + TH2* RatePlotPrimary1_0 = RatePlot(inputDir,1,0,Tag,"PrimaryHits"); + TH2* RatePlotPrimary2_0 = RatePlot(inputDir,2,0,Tag,"PrimaryHits"); + + TH2* RatePlotSecondary1_0 = RatePlot(inputDir,1,0,Tag,"SecondaryHits"); + TH2* RatePlotSecondary2_0 = RatePlot(inputDir,2,0,Tag,"SecondaryHits"); + + // Draw the plots on the canvas + canvas3->cd(1); + gPad->SetLogz(); + RatePlotPrimary1_0->Draw("colz"); + + canvas3->cd(2); + gPad->SetLogz(); + RatePlotPrimary2_0->Draw("colz"); + + canvas3->cd(3); + gPad->SetLogz(); + RatePlotSecondary1_0->Draw("colz"); + + canvas3->cd(4); + gPad->SetLogz(); + RatePlotSecondary2_0->Draw("colz"); + + // Save the canvas to output file + 
outputFile->WriteTObject(canvas3); + + // Clean up + delete canvas3; + + +} + +//---------------------------------------------------------------------- +// Create formatted reconstruction plots on a canvas +//---------------------------------------------------------------------- +void FormatReconstructionPlots(TDirectory* inputDir, TFile* outputFile, TString Tag="Quasi-Real") { + + TString EHistName = "reconEVsPrimE"; + TString ThetaHistName = "reconThetaVsPrimTheta"; + TString PhiHistName = "thetacut/reconPhiVsPrimPhi"; + + TString EResName = "ERes"; + TString ThetaResName = "thetaRes"; + TString PhiResName = "thetacut/phiRes"; + + TCanvas* canvas = new TCanvas("ReconCanvas", "ReconCanvas", 2400, 1200); + + // Read in the plots from the input file + TH2* EPlot = (TH2*)inputDir->Get(EHistName); + TH2* ThetaPlot = (TH2*)inputDir->Get(ThetaHistName); + TH2* PhiPlot = (TH2*)inputDir->Get(PhiHistName); + + TH1* EResPlot = (TH1*)inputDir->Get(EResName); + TH1* ThetaResPlot = (TH1*)inputDir->Get(ThetaResName); + TH1* PhiResPlot = (TH1*)inputDir->Get(PhiResName); + + // Check plots exist + if (!ThetaPlot || !EPlot || !PhiPlot || !ThetaResPlot || !EResPlot || !PhiResPlot) { + std::cout << "Error: plots "<< ThetaHistName <<", "<< EHistName <<", "<< PhiHistName <<", "<< ThetaResName <<", "<< EResName <<", "<< PhiResName <<" not found in input file" << std::endl; + return; + } + + // Draw the plots on the canvas + canvas->Divide(3,2); + canvas->cd(1); + gPad->SetLogz(); + + EPlot->SetTitle("Reconstructed electron energy vs. Primary electron energy"); + EPlot->GetXaxis()->SetTitle("E_{prim} [GeV]"); + EPlot->GetYaxis()->SetTitle("E_{recon} [GeV]"); + EPlot->GetZaxis()->SetTitle("Counts"); + EPlot->Rebin2D(2,2); + + EPlot->SetStats(0); + EPlot->Draw("colz"); + + canvas->cd(2); + gPad->SetLogz(); + + ThetaPlot->SetTitle("Reconstructed #theta vs. 
Primary #theta"); + ThetaPlot->GetXaxis()->SetTitle("#theta_{prim} [mrad]"); + ThetaPlot->GetYaxis()->SetTitle("#theta_{recon} [mrad]"); + ThetaPlot->GetZaxis()->SetTitle("Counts"); + ThetaPlot->Rebin2D(2,2); + + ThetaPlot->SetStats(0); + ThetaPlot->Draw("colz"); + + canvas->cd(3); + gPad->SetLogz(); + + PhiPlot->SetTitle("Reconstructed #varphi vs. Primary #varphi (#theta>1mrad)"); + PhiPlot->GetXaxis()->SetTitle("#phi_{prim} [deg]"); + PhiPlot->GetYaxis()->SetTitle("#phi_{recon} [deg]"); + PhiPlot->GetZaxis()->SetTitle("Counts"); + PhiPlot->Rebin2D(2,2); + + PhiPlot->SetStats(0); + PhiPlot->Draw("colz"); + + canvas->cd(4); + + EResPlot->SetTitle("Electron energy resolution"); + EResPlot->GetXaxis()->SetTitle("(E_{recon} - E_{prim}) / E_{prim}"); + EResPlot->GetYaxis()->SetTitle("Counts"); + + EResPlot->SetStats(0); + EResPlot->Draw(); + + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + int maxBin = EResPlot->GetMaximumBin(); + int fitMin = maxBin-10; + int fitMax = maxBin+10; + EResPlot->Fit("gaus", "Q", "", EResPlot->GetBinCenter(fitMin), EResPlot->GetBinCenter(fitMax)); + double EResStdDev = EResPlot->GetFunction("gaus")->GetParameter(2); + TLatex* latex = new TLatex(); + latex->SetTextSize(0.03); + latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{E} = %.2f %s", EResStdDev*100, "%")); + + // Remove fitted Gaussian from histogram + EResPlot->GetListOfFunctions()->Remove(EResPlot->GetFunction("gaus")); + + canvas->cd(5); + + ThetaResPlot->SetTitle("Theta resolution"); + ThetaResPlot->GetXaxis()->SetTitle("#theta_{recon} - #theta_{prim} [mrad]"); + ThetaResPlot->GetYaxis()->SetTitle("Counts"); + + ThetaResPlot->SetStats(0); + ThetaResPlot->Draw(); + + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + maxBin = ThetaResPlot->GetMaximumBin(); + fitMin = maxBin-10; + fitMax = maxBin+10; + ThetaResPlot->Fit("gaus", "Q", "", ThetaResPlot->GetBinCenter(fitMin), 
ThetaResPlot->GetBinCenter(fitMax)); + double ThetaResStdDev = ThetaResPlot->GetFunction("gaus")->GetParameter(2); + latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{#theta} = %.2f mrad", ThetaResStdDev)); + + // Remove fitted Gaussian from histogram + ThetaResPlot->GetListOfFunctions()->Remove(ThetaResPlot->GetFunction("gaus")); + + canvas->cd(6); + + PhiResPlot->SetTitle("Phi resolution (#theta>1mrad)"); + PhiResPlot->GetXaxis()->SetTitle("#phi_{recon} - #phi_{prim} [deg]"); + PhiResPlot->GetYaxis()->SetTitle("Counts"); + + PhiResPlot->SetStats(0); + PhiResPlot->Draw(); + + // Write fitted Gaussian standard deviation in pad + //Fit Gaussian to histogram maximum bin +- 10 bins + maxBin = PhiResPlot->GetMaximumBin(); + fitMin = maxBin-10; + fitMax = maxBin+10; + PhiResPlot->Fit("gaus", "Q", "", PhiResPlot->GetBinCenter(fitMin), PhiResPlot->GetBinCenter(fitMax)); + double PhiResStdDev = PhiResPlot->GetFunction("gaus")->GetParameter(2); + latex->DrawLatexNDC(0.2, 0.8, Form("#sigma_{#phi} = %.1f deg", PhiResStdDev)); + + // Remove fitted Gaussian from histogram + PhiResPlot->GetListOfFunctions()->Remove(PhiResPlot->GetFunction("gaus")); + + // Save the canvas to output file + outputFile->WriteTObject(canvas); + + // Clean up + delete canvas; + +} + +//---------------------------------------------------------------------- +// This function is called by the benchmarking script +//---------------------------------------------------------------------- +void PostprocessLOWQ2(TString inName, TString outName, TString Tag) { + + SetStyle(); + + // Open the input file + TFile* inputFile = TFile::Open(inName); + if (!inputFile) { + std::cout << "Error opening input file:" << inName << std::endl; + return; + } + + // Check the directory LOWQ2 exists and cd into it + TDirectory* dir = inputFile->GetDirectory("LOWQ2"); + if (!dir) { + std::cout << "Error: directory LOWQ2 not found in input file" << std::endl; + return; + } + + // Open the output file + TFile* outputFile = 
TFile::Open(outName, "RECREATE"); + + // Format the plots + + //Check if AcceptanceDistributions directory exists + TDirectory* dirA = dir->GetDirectory("Acceptance"); + if (dirA) { + FormatAcceptancePlots(dirA, outputFile,Tag); + } + + //Check if SimDistributions directory exists + TDirectory* dirS = dir->GetDirectory("Rates"); + if (dirS) { + FormatRatePlots(dirS, outputFile,Tag); + } + + //Check if ReconstructedDistributions directory exists + TDirectory* dirR = dir->GetDirectory("Reconstruction"); + if (dirR) { + FormatReconstructionPlots(dirR, outputFile,Tag); + } + + inputFile->Close(); + outputFile->Close(); + +} + +//---------------------------------------------------------------------- +// Main function to create canvases +//---------------------------------------------------------------------- +// void PostprocessLOWQ2() { +// // Postprocess("plots/LOWQ2QRRecon2.root", "plots/LOWQ2QR_FormattedPlots.root", "Quasi-Real"); +// Postprocess("plots/LOWQ2BremsRecon3.root", "plots/LOWQ2Brems_FormattedPlots3.root", "Bremsstrahlung"); +// } \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/RunLOWQ2.C b/benchmarks/LOWQ2/analysis/RunLOWQ2.C new file mode 100644 index 00000000..e73e9849 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/RunLOWQ2.C @@ -0,0 +1,40 @@ +#include "LOWQ2Benchmarks.h" +#include + +void RunLOWQ2( std::string inputFileName = "Brems_input.root", + std::string outputFileName = "plots/LOWQ2QRRecon3.root", + std::string compactName = "/opt/detector/epic-nightly/share/epic/epic.xml", + bool inputIsTimeBased = false, // true if the event sample is time-based, false if it is event-based + double timeWindow = 10.15*1e-9, //[s] + double eventCrossSection = 0.0551, // [mb] + double luminosity = 1e34 // [cm^-2 s^-1] + ) { + + //Set implicit multi-threading + ROOT::EnableImplicitMT(); + + // Output script running conditions + std::cout << "Running LOWQ2 benchmarks with the following parameters:" << std::endl; + std::cout << " - input file: " << 
inputFileName << std::endl; + std::cout << " - output file: " << outputFileName << std::endl; + std::cout << " - xml file: " << compactName << std::endl; + std::cout << " - input is time-based: " << inputIsTimeBased << std::endl; + std::cout << " - time window: " << timeWindow << " s" << std::endl; + std::cout << " - event cross section: " << eventCrossSection << " mb" << std::endl; + std::cout << " - luminosity: " << luminosity << " cm^-2 s^-1" << std::endl; + + dd4hep::Detector& detector = dd4hep::Detector::getInstance(); + detector.fromCompact(compactName); + + // eventCrossSectionBrems = 171.3; [mb] + // eventCrossSectionQR = 0.0551; [mb] + + double eventRate = luminosity * eventCrossSection * 1e-27; // [Hz] + + if(inputIsTimeBased){ + eventRate = 1.0 / timeWindow; // [Hz] + } + + LOWQ2Benchmarks(inputFileName,outputFileName,detector,eventRate); + +} \ No newline at end of file diff --git a/benchmarks/LOWQ2/analysis/Snakefile b/benchmarks/LOWQ2/analysis/Snakefile new file mode 100644 index 00000000..be66a777 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/Snakefile @@ -0,0 +1,93 @@ +# Possible tags for local/remote running +tag_params = { + "QR": {"timeBased": 0, "timeWindow": 0, "eventCrossSection": 0.0551, "tagName": "Quasi-Real"}, + "Brem": {"timeBased": 1, "timeWindow": 10.15*1e-9, "eventCrossSection": 0, "tagName": "Bremsstrahlung"}, + "pythia6": {"timeBased": 0, "timeWindow": 0, "eventCrossSection": 0.054, "tagName": "Deep Inelastic Scattering"}, + # Add more mappings here if needed +} + +configfile: "../local_config.yml" + +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = ".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" + + +REMOTE_EVENTS_SERVER = "root://dtn-eic.jlab.org/" +REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" + +FILE_BASE = "pythia_ep_noradcor_10x100_q2_0.000000001_1.0_run" + +BEAM_ENERGY = "10" +XML_FILE = "/opt/detector/epic-24.05.0/share/epic/epic.xml", + +# Function to check 
if the remote file exists +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return url + except subprocess.CalledProcessError: + return None + +rule run_simulation_remote: + params: + XML=XML_FILE, + input=lambda wildcards: remote_file_exists(REMOTE_EVENTS_SERVER,REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.index+EVENT_EXTENSION), + output: + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + shell: """ + npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 100 \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ + """ + +rule run_reconstruction: + params: + XML=XML_FILE, + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", + input: + expand( + config["SIM_DIRECTORY"]+FILE_BASE+"{index}_tagger"+SIM_EXTENSION, + index=range(1,4), + ), + output: + config["RECO_IN_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION, + shell: """ + eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} + """ + +rule run_benchmarks: + input: + config["RECO_IN_DIRECTORY"]+FILE_BASE+".tagger_recon"+RECO_EXTENSION + params: + xmlName=XMLFILE, + timeBased = lambda wildcards: tag_params[wildcards.tag]["timeBased"], + timeWindow = lambda wildcards: tag_params[wildcards.tag]["timeWindow"], + eventCrossSection = lambda wildcards: tag_params[wildcards.tag]["eventCrossSection"] + + output: + "{dir}/LOWQ2{tag}_Plots.root" + shell: + """ + root -l -b -q 'RunLOWQ2.C++("{input}", "{output}", "{params.xmlName}", {params.timeBased}, {params.timeWindow}, {params.eventCrossSection})' + """ + +rule run_plots: + input: + "{dir}/LOWQ2{tag}_Plots.root" + params: + tagName = lambda wildcards: 
tag_params[wildcards.tag]["tagName"], + output: + "{dir}/LOWQ2{tag}_FormattedPlots.root" + shell: + """ + root -l -b -q 'PostprocessLOWQ2.C("{input}", "{output}","{params.tagName}")' + """ diff --git a/benchmarks/LOWQ2/analysis/config.yml b/benchmarks/LOWQ2/analysis/config.yml new file mode 100644 index 00000000..4d45650a --- /dev/null +++ b/benchmarks/LOWQ2/analysis/config.yml @@ -0,0 +1,5 @@ +bench:LOWQ2_events: + extends: .det_benchmark + stage: benchmarks + script: + - snakemake --cores 8 ${RESULTS_DIRECTORY}/LOWQ2_FormattedPlots.edm4hep.root --configfile ../remote_config.yml diff --git a/benchmarks/LOWQ2/analysis/functors.h b/benchmarks/LOWQ2/analysis/functors.h new file mode 100644 index 00000000..e978ff89 --- /dev/null +++ b/benchmarks/LOWQ2/analysis/functors.h @@ -0,0 +1,36 @@ +#include "DD4hep/Detector.h" +#include "DDRec/CellIDPositionConverter.h" +#include "edm4hep/MCParticleCollection.h" +#include "edm4hep/SimTrackerHit.h" +#include "edm4hep/SimCalorimeterHit.h" + +using RVecHits = ROOT::VecOps::RVec; + +//----------------------------------------------------------------------------------------- +// Grab Component functor +//----------------------------------------------------------------------------------------- +struct getSubID{ + getSubID(std::string cname, dd4hep::Detector& det, std::string rname = "TaggerTrackerHits") : componentName(cname), detector(det), readoutName(rname){} + + ROOT::VecOps::RVec operator()(const RVecHits& evt) { + auto decoder = detector.readout(readoutName).idSpec().decoder(); + auto indexID = decoder->index(componentName); + ROOT::VecOps::RVec result; + for(const auto& h: evt) { + result.push_back(decoder->get(h.cellID,indexID)); + } + return result; + }; + + void SetComponent(std::string cname){ + componentName = cname; + } + void SetReadout(std::string rname){ + readoutName = rname; + } + + private: + std::string componentName; + dd4hep::Detector& detector; + std::string readoutName; +}; diff --git 
a/benchmarks/LOWQ2/config.yml b/benchmarks/LOWQ2/config.yml new file mode 100644 index 00000000..da088db8 --- /dev/null +++ b/benchmarks/LOWQ2/config.yml @@ -0,0 +1,2 @@ +include: + - local: 'reconstruction_training/config.yml' diff --git a/benchmarks/LOWQ2/local_config.yml b/benchmarks/LOWQ2/local_config.yml new file mode 100644 index 00000000..437975b8 --- /dev/null +++ b/benchmarks/LOWQ2/local_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "/scratch/EIC/SimOut/S3in/" +RECO_IN_DIRECTORY: "/scratch/EIC/ReconOut/S3in/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/.gitignore b/benchmarks/LOWQ2/reconstruction_training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/ProcessData.py b/benchmarks/LOWQ2/reconstruction_training/ProcessData.py new file mode 100644 index 00000000..166f49c4 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/ProcessData.py @@ -0,0 +1,40 @@ +import uproot +import awkward as ak +import numpy as np +def create_arrays(dataFiles,beamEnergy): + + # List of branches to load + branches = ["TaggerTrackerProjectedTracks","MCParticles","MCScatteredElectrons_objIdx"] + + # Load data from concatenated list of files + # file = uproot.open(dataFiles) + # tree = file['events'] + # data = tree.arrays(branches, library="ak") + data = uproot.concatenate([f"{file}:events" for file in dataFiles], branches, library="ak") + + # Filter events with at least one track + tracks = data["TaggerTrackerProjectedTracks"] + num_tracks = ak.num(tracks["TaggerTrackerProjectedTracks.pdg"]) + filtered_data = data[num_tracks == 1] + + # Filter tracks for intput data + filtered_tracks = filtered_data["TaggerTrackerProjectedTracks"] + ia = 
filtered_tracks["TaggerTrackerProjectedTracks.loc.a"] + ib = filtered_tracks["TaggerTrackerProjectedTracks.loc.b"] + ipx = np.sin(filtered_tracks["TaggerTrackerProjectedTracks.theta"]) * np.cos(filtered_tracks["TaggerTrackerProjectedTracks.phi"]) + ipy = np.sin(filtered_tracks["TaggerTrackerProjectedTracks.theta"]) * np.sin(filtered_tracks["TaggerTrackerProjectedTracks.phi"]) + + # Filter particle array to select scattered electron for target data + electron_idx = filtered_data["MCScatteredElectrons_objIdx"]["MCScatteredElectrons_objIdx.index"][:,0] + filtered_particles = filtered_data["MCParticles"][np.arange(len(electron_idx)), electron_idx] + + # Normalize the target variables + tpx = filtered_particles["MCParticles.momentum.x"]/beamEnergy + tpy = filtered_particles["MCParticles.momentum.y"]/beamEnergy + tpz = filtered_particles["MCParticles.momentum.z"]/beamEnergy + + input_data = ak.concatenate([ia,ib,ipx,ipy], axis=1) + target_data = ak.concatenate([tpx[:,None], tpy[:,None], tpz[:,None]], axis=1) + + return input_data, target_data + diff --git a/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py new file mode 100644 index 00000000..62a2a48b --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/RegressionModel.py @@ -0,0 +1,65 @@ +import torch +import torch.nn as nn +import torch.optim as optim + +class RegressionModel(nn.Module): + def __init__(self): + super(RegressionModel, self).__init__() + self.fc1 = nn.Linear(4, 1024) + self.fc2 = nn.Linear(1024, 128) + self.fc3 = nn.Linear(128, 64) + self.fc4 = nn.Linear(64, 32) + self.fc5 = nn.Linear(32, 3) + self.mean = torch.tensor([0.0, 0.0, 0.0, 0.0]) + self.std = torch.tensor([1.0, 1.0, 1.0, 1.0]) + + def forward(self, x): + x = (x-self.mean)/self.std + x = torch.relu(self.fc1(x)) + x = torch.relu(self.fc2(x)) + x = torch.relu(self.fc3(x)) + x = torch.relu(self.fc4(x)) + x = self.fc5(x) + return x + + def adapt(self, input_data): + 
self.mean = input_data.mean(axis=0) + self.std = input_data.std(axis=0) + + +def makeModel(): + # Create the model + model = RegressionModel() + # Define the optimizer + optimizer = optim.Adam(model.parameters(), lr=0.001) + + # Define the loss function + criterion = nn.MSELoss() + + return model, optimizer, criterion + +def trainModel(epochs, input_data, target_data, val_input, val_target): + model, optimizer, criterion = makeModel() + model.adapt(input_data) + for epoch in range(epochs): + model.train() + # Zero the parameter gradients + optimizer.zero_grad() + + # Forward pass + output = model(input_data) + loss = criterion(output, target_data) + + # Backward pass + loss.backward() + optimizer.step() + + # Validation step + model.eval() + with torch.no_grad(): + val_outputs = model(val_input) + val_loss = criterion(val_outputs, val_target) + + print(f"Epoch [{epoch+1}/{epochs}], Loss: {loss.item()}, Val Loss: {val_loss.item()}") + + return model \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/Snakefile b/benchmarks/LOWQ2/reconstruction_training/Snakefile new file mode 100644 index 00000000..80b921a0 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/Snakefile @@ -0,0 +1,155 @@ +# Snakemake file for training a new neural network for LOW-Q2 tagger electron momentum reconstruction +from itertools import product + +import os +import shutil +from snakemake.remote.S3 import RemoteProvider as S3RemoteProvider + +configfile: "local_config.yml" + +S3 = S3RemoteProvider( + endpoint_url="https://eics3.sdcc.bnl.gov:9000", + access_key_id=os.environ["S3_ACCESS_KEY"], + secret_access_key=os.environ["S3_SECRET_KEY"], +) + +EVENT_EXTENSION = ".ab.hepmc3.tree.root" +SIM_EXTENSION = ".edm4hep.root" +RECO_EXTENSION = ".eicrecon.tree.edm4eic.root" + +REMOTE_EVENTS_SERVER = "root://dtn-eic.jlab.org/" +REMOTE_EVENTS_DIRECTORY = "/work/eic2/EPIC/EVGEN/SIDIS/pythia6-eic/1.0.0/18x275/q2_0to1/" +REMOTE_RECO_DIRECTORY = 
"/work/eic2/EPIC/RECO/24.07.0/epic_craterlake/SIDIS/pythia6-eic/1.0.0/18x275/q2_0to1/" + +S3_RECON_DIRECTORY = "eictest/EPIC/RECO/24.05.0/epic_craterlake/SIDIS/pythia6-eic/1.0.0/10x100/q2_0to1/" +FILE_BASE = "pythia_ep_noradcor_18x275_q2_0.000000001_1.0_run" + +XML_FILE = "epic_edit.xml" +BEAM_ENERGY = "18" + +def remote_file_exists(server,url): + try: + subprocess.check_output(['xrdfs', server, 'stat', url]) + return url + except subprocess.CalledProcessError: + return None + +def check_files_exist(file_indices): + existing_files = [] + for fileindex in file_indices: + file_path = REMOTE_EVENTS_DIRECTORY + FILE_BASE + fileindex + EVENT_EXTENSION + if remote_file_exists(REMOTE_EVENTS_SERVER, file_path): + existing_files.append(file_path) + return existing_files + +################################################################### +# Run training on remote XRootD server data +################################################################### +rule run_training: + params: + input = expand( + REMOTE_EVENTS_SERVER + REMOTE_RECO_DIRECTORY + FILE_BASE + "{i}.ab.{j}" + RECO_EXTENSION, + i=range(1, 11), + j=range(1050, 1152) + ), + output: + "regression_model{tag}.onnx", + run: + shell("python TaggerRegression.py --dataFiles {params.input} --beamEnergy 18 --outModelFile {output}") + + +################################################################### +# Test training on remote XRootD server data +################################################################### +rule run_testing: + params: + input = expand( + REMOTE_EVENTS_SERVER + REMOTE_RECO_DIRECTORY + FILE_BASE + "{i}.ab.{j}" + RECO_EXTENSION, + i=range(11, 31), + j=range(1050, 1054) + ), + input: + "regression_model{tag}.onnx", + output: + direct = "output_vs_target{tag}.png", + transformed = "output_vs_target_transformed{tag}.png", + run: + shell("python TestModel.py --modelFile {input} --dataFiles {params.input} --beamEnergy 18 --outGraphFile {output.direct} --outGraphFile2 {output.transformed}") + + 
+################################################################### +# Find and download the input files directly from the S3 bucket +################################################################### +rule download_recon_input: + input: + S3.remote(S3_RECON_DIRECTORY+FILE_BASE+"{run_index}.ab.{file_index}"+RECO_EXTENSION), + output: + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{run_index}.ab.{file_index}"+RECO_EXTENSION, + run: + shutil.move(input[0], output[0]) + +################################################################### +# Generate the input files for the training from the event files +################################################################### + +rule run_simulation_tagger: + params: + XML=XML_FILE, + input=lambda wildcards: remote_file_exists(REMOTE_EVENTS_SERVER,REMOTE_EVENTS_DIRECTORY+FILE_BASE+wildcards.fileindex+EVENT_EXTENSION), + output: + config["SIM_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+SIM_EXTENSION, + shell: """ + npsim \ + --inputFiles {params.input} \ + --outputFile {output[0]} \ + --compactFile {params.XML} \ + --runType run \ + --numberOfEvents 1000 \ + --skipNEvents 1000*{subindex} \ + --physics.list FTFP_BERT \ + --field.eps_min 5e-06 \ + --field.eps_max 1e-04 \ + --physics.rangecut 50 \ + """ + +rule generate_recon_input: + params: + XML=XML_FILE, + beam_energy=BEAM_ENERGY, + collections="TaggerTrackerProjectedTracks,MCScatteredElectrons,MCParticles,EventHeader", + input: + config["SIM_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+SIM_EXTENSION, + output: + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+RECO_EXTENSION, + shell: """ + eicrecon {input} -Pdd4hep:xml_files={params.XML} -Ppodio:output_include_collections={params.collections} -Ppodio:output_file={output} -PLOWQ2:LowQ2Trajectories:electron_beamE={params.beam_energy} + """ + +################################################################### +# Try to download the input files from the S3 bucket before generating them 
+################################################################### +ruleorder: download_recon_input > generate_recon_input + +################################################################### +# Train the network to reconstruct the electron momentum +################################################################### +rule low_q2_train_network: + params: + beam_energy=BEAM_ENERGY, + type_name="LowQ2MomentumRegression", + method_name="DNN", + model_dir="LowQ2Model", + input_files=config["RECO_IN_DIRECTORY"]+FILE_BASE+"*.ab.000[1234]"+RECO_EXTENSION, + input: + train_data=expand( + config["RECO_IN_DIRECTORY"]+FILE_BASE+"{fileindex}.ab.{subindex:04d}"+RECO_EXTENSION, + fileindex=range(1,20), + subindex=range(1,4), + ), + output: + root_output=config["MODEL_DIRECTORY"]+"trainedData.root", + shell: + """ + root -l -b -q 'TaggerRegressionEICrecon.C++("{params.input_files}", "{output.root_output}", "{params.model_dir}", "{params.beam_energy}", "{params.type_name}", "{params.method_name}")' + """ + diff --git a/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py new file mode 100644 index 00000000..659eddde --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/TaggerRegression.py @@ -0,0 +1,43 @@ +# import edm4hep +import torch +import argparse +from ProcessData import create_arrays + +from RegressionModel import makeModel, trainModel + +# Parse arguments +parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') +parser.add_argument('--dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('--beamEnergy', type=float, help='Electron beam energy') +parser.add_argument('--outModelFile', type=str, default="regression_model.onnx", help='Output file for the trained model') +args = parser.parse_args() +dataFiles = args.dataFiles +beamEnergy = args.beamEnergy +outModelFile = args.outModelFile + +input_data, target_data = 
create_arrays(dataFiles, beamEnergy) +print(input_data) +print(target_data) + +torch_input_data = torch.tensor(input_data) +torch_target_data = torch.tensor(target_data) + +# Split data into training and validation sets +validation_fraction = 0.1 +split_index = int(len(torch_input_data) * (1 - validation_fraction)) +val_input_data = torch_input_data[split_index:] +val_target_data = torch_target_data[split_index:] +torch_input_data = torch_input_data[:split_index] +torch_target_data = torch_target_data[:split_index] + +epochs = 5000 +model = trainModel(epochs, torch_input_data, torch_target_data, val_input_data, val_target_data) + +# Save the trained model to ONNX format +dummy_input = torch_input_data[0].unsqueeze(0) # Create a dummy input for the model + +torch.onnx.export(model, dummy_input, outModelFile, + input_names=['input'], output_names=['output'], + dynamic_axes={'input': {0: 'batch_size'}, 'output': {0: 'batch_size'}}) + +print(f"Model has been saved to {outModelFile}") \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/TaggerRegressionEICrecon.C b/benchmarks/LOWQ2/reconstruction_training/TaggerRegressionEICrecon.C new file mode 100644 index 00000000..0e333dc6 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/TaggerRegressionEICrecon.C @@ -0,0 +1,153 @@ +#include +#include +#include +#include + +#include "TChain.h" +#include "TFile.h" +#include "TTree.h" +#include "TString.h" +#include "TObjString.h" +#include "TSystem.h" +#include "TROOT.h" + +#include "TMVA/MethodDNN.h" +#include "TMVA/Reader.h" +#include "TMVA/Tools.h" +#include "TMVA/Factory.h" +#include "TMVA/DataLoader.h" +#include "TMVA/TMVARegGui.h" + +using namespace TMVA; + +// The training currently requires files which only contained a single DIS scattered electron to have been simulated e.g. 
generated using GETaLM +// The scattered electron must be the 3rd particle in the file after the two beam particles +// At least one track reconstructed by EIC algorithms in the LOWQ2 tagger is needed. + +void TaggerRegressionEICrecon( + TString inDataNames = "/scratch/EIC/ReconOut/qr_18x275_ab/qr_18x275_ab*_recon.edm4hep.root", + TString outDataName = "/scratch/EIC/LowQ2Model/trainedData.root", + TString dataFolderName = "LowQ2Model", + TString mcBeamEnergy = "18", + TString typeName = "LowQ2MomentumRegression", + TString methodName = "DNN", + TString inWeightName = "dataset/weights/LowQ2Reconstruction_DNN.weights.xml" + ) +{ + + Bool_t loadWeights = 0; + + //--------------------------------------------------------------- + // This loads the library + TMVA::Tools::Instance(); + + ROOT::EnableImplicitMT(8); + + // -------------------------------------------------------------------------------------------------- + // Here the preparation phase begins + // Create a new root output file + TFile* outputFile = TFile::Open( outDataName, "RECREATE" ); + + // Create the factory object. Later you can choose the methods + + TMVA::Factory *factory = new TMVA::Factory( typeName, outputFile, + "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" ); + + ; + TMVA::DataLoader *dataloader=new TMVA::DataLoader(dataFolderName); + + // Input TrackParameters variables from EICrecon - + TString collectionName = "TaggerTrackerProjectedTracks[0]"; + dataloader->AddVariable( collectionName+".loc.a", "fit_position_y", "units", 'F' ); + dataloader->AddVariable( collectionName+".loc.b", "fit_position_z", "units", 'F' ); + dataloader->AddVariable( "sin("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_x", "units", 'F' ); + dataloader->AddVariable( "cos("+collectionName+".phi)*sin("+collectionName+".theta)", "fit_vector_y", "units", 'F' ); + + // Regression target particle 3-momentum, normalised to beam energy. 
+ // Takes second particle, in the test data this is the scattered electron + // TODO add energy and array element information to be read directly from datafile - EMD4eic and EICrecon changes. + TString mcParticleName = "MCParticles[MCScatteredElectrons_objIdx[0].index]"; + //TString mcParticleName = "MCParticles[0]"; + dataloader->AddTarget( mcParticleName+".momentum.x/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.y/"+mcBeamEnergy ); + dataloader->AddTarget( mcParticleName+".momentum.z/"+mcBeamEnergy ); + + std::cout << "--- TMVARegression : Using input files: " << inDataNames << std::endl; + + // Register the regression tree + TChain* regChain = new TChain("events"); + regChain->Add(inDataNames); + //regChain->SetEntries(8000); // Set smaller sample for tests + + // global event weights per tree (see below for setting event-wise weights) + Double_t regWeight = 1.0; + + // You can add an arbitrary number of regression trees + dataloader->AddRegressionTree( regChain, regWeight ); + + // This would set individual event weights (the variables defined in the + // expression need to exist in the original TTree) + // dataloader->SetWeightExpression( "1/(eE)", "Regression" ); // If MC event weights are kept use these + // Apply additional cuts on the data + TCut mycut = "@TaggerTrackerProjectedTracks.size()==1"; // Make sure there's one reconstructed track in event + + dataloader->PrepareTrainingAndTestTree(mycut,"nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:SplitSeed=1:NormMode=NumEvents:!V"); + + // TODO - Optimise layout and training more + TString layoutString("Layout=TANH|1024,TANH|128,TANH|64,TANH|32,LINEAR"); + + TString trainingStrategyString("TrainingStrategy="); + trainingStrategyString +="LearningRate=1e-4,Momentum=0,MaxEpochs=2000,ConvergenceSteps=200,BatchSize=64,TestRepetitions=1,Regularization=None,Optimizer=ADAM"; + + TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:WeightInitialization=XAVIERUNIFORM:RandomSeed=1234"); 
+ + // Use GPU if possible on the machine + TString architectureString("Architecture=GPU"); + + // Transformation of data prior to training layers - decorrelate and normalise whole dataset + TString transformString("VarTransform=D,N"); + + nnOptions.Append(":"); + nnOptions.Append(architectureString); + nnOptions.Append(":"); + nnOptions.Append(transformString); + nnOptions.Append(":"); + nnOptions.Append(layoutString); + nnOptions.Append(":"); + nnOptions.Append(trainingStrategyString); + + TMVA::MethodDNN* method = (MethodDNN*)factory->BookMethod(dataloader, TMVA::Types::kDL, methodName, nnOptions); // NN + + // If loading previous model for further training + if(loadWeights){ + TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" ); + reader->BookMVA( methodName, inWeightName ); + TMVA::MethodDNN* kl = dynamic_cast(reader->FindMVA(methodName)); + method = kl; + } + + + // -------------------------------------------------------------------------------------------------- + // Now you can tell the factory to train, test, and evaluate the MVAs + + // Train MVAs using the set of training events + factory->TrainAllMethods(); + + // Evaluate all MVAs using the set of test events + factory->TestAllMethods(); + + // Evaluate and compare performance of all configured MVAs + factory->EvaluateAllMethods(); + + // -------------------------------------------------------------- + + // Save the output + outputFile->Close(); + + std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl; + std::cout << "==> TMVARegression is done!" 
<< std::endl; + + // delete factory; + // delete dataloader; + +} diff --git a/benchmarks/LOWQ2/reconstruction_training/TestModel.py b/benchmarks/LOWQ2/reconstruction_training/TestModel.py new file mode 100644 index 00000000..b31b3f2f --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/TestModel.py @@ -0,0 +1,123 @@ +import onnxruntime as ort +import argparse +import numpy as np +from ProcessData import create_arrays +import matplotlib.pyplot as plt + +# Parse arguments +parser = argparse.ArgumentParser(description='Train a regression model for the Tagger.') +parser.add_argument('--modelFile', type=str, default="regression_model.onnx", help='Path to the ONNX model file') +parser.add_argument('--dataFiles', type=str, nargs='+', help='Path to the data files') +parser.add_argument('--beamEnergy', type=float, help='Electron beam energy') +parser.add_argument('--outGraphFile', type=str, default="output_vs_target.png", help='Output file for the graph') +parser.add_argument('--outGraphFile2', type=str, default="output_vs_target2.png", help='Output file for the graph') +args = parser.parse_args() +modelFile = args.modelFile +dataFiles = args.dataFiles +beamEnergy = args.beamEnergy +outGraphFile = args.outGraphFile +outGraphFile2 = args.outGraphFile2 + +input_data, target_data = create_arrays(dataFiles, beamEnergy) + +target_data = np.array(target_data) + +# Load the ONNX model +session = ort.InferenceSession(modelFile) + +# Run the model on the input data +input_name = session.get_inputs()[0].name +output_name = session.get_outputs()[0].name +input_data = np.array(input_data,dtype=np.float32) +output = session.run([output_name], {input_name: input_data}) +output = np.array(output[0])#*beamEnergy +#print(output) + +out_theta = np.arctan2(np.sqrt(output[:,0]**2 + output[:,1]**2),output[:,2]) +out_phi = np.arctan2(output[:,1],output[:,0]) +out_mag = np.sqrt(output[:,0]**2 + output[:,1]**2 + output[:,2]**2) +in_theta = np.arctan2(np.sqrt(target_data[:,0]**2 + 
target_data[:,1]**2),target_data[:,2]) +in_phi = np.arctan2(target_data[:,1],target_data[:,0]) +in_mag = np.sqrt(target_data[:,0]**2 + target_data[:,1]**2 + target_data[:,2]**2) + + +thetadiff = out_theta - in_theta +phidiff = out_phi - in_phi +# Move phidiff to within -pi/2 and pi/2 +phidiff = (phidiff + np.pi) % (2 * np.pi) - np.pi +magdiff = out_mag - in_mag + +diff = (target_data - output)/target_data +diffrange = [[-5,5],[-5,5],[-2,2]] +datarange = [[-0.02,0.02],[-0.02,0.02],[-1,0]] + + +# Creates histograms to compare the target and output data +fig, axs = plt.subplots(3, 3, figsize=(12, 12)) +for i in range(3): + # 2D histograms showing trends in the data + axs[0,i].hist2d(target_data[:,i], output[:,i], bins=100, range=[datarange[i],datarange[i]], cmap="viridis", label="Output vs Target") + axs[0,i].set_xlabel(f"Variable {i} Target") + axs[0,i].set_ylabel(f"Variable {i} Output") + + axs[1,i].hist(diff[:,i], bins=100, alpha=0.5, range=diffrange[i], label="Difference") + axs[1,i].set_xlabel(f"Variable {i} Difference") + axs[1,i].set_ylabel("Counts") + + axs[2,i].hist2d(target_data[:,i], diff[:,i], bins=100, range=[datarange[i],diffrange[i]], cmap="viridis", label="Difference vs Target") + axs[2,i].set_xlabel(f"Variable {i} Target") + axs[2,i].set_ylabel(f"Variable {i} Difference") + +plt.show() +plt.savefig(outGraphFile) + +# Creates histograms to compare theta, phi and mag target and output data +fig2, axs2 = plt.subplots(3, 3, figsize=(12, 12)) + +thetarange = [np.pi-0.01,np.pi] +phirange = [-np.pi,np.pi] +magrange = [0,1] + +thetadiffrange = [-0.02,0.02] +phidiffrange = [-np.pi,np.pi] +magdiffrange = [-0.2,0.2] + +# 2D histograms showing trends in the data +axs2[0,0].hist2d(out_theta, in_theta, bins=100, range=[thetarange,thetarange], cmap="viridis", label="Output vs Target") +axs2[0,0].set_xlabel("Theta Target") +axs2[0,0].set_ylabel("Theta Output") + +axs2[0,1].hist2d(out_phi, in_phi, bins=100, range=[phirange,phirange], cmap="viridis", label="Output vs 
Target") +axs2[0,1].set_xlabel("Phi Target") +axs2[0,1].set_ylabel("Phi Output") + +axs2[0,2].hist2d(out_mag, in_mag, bins=100, range=[magrange,magrange], cmap="viridis", label="Output vs Target") +axs2[0,2].set_xlabel("Mag Target") +axs2[0,2].set_ylabel("Mag Output") + +axs2[1,0].hist(thetadiff, bins=100, alpha=0.5, range=thetadiffrange, label="Difference") +axs2[1,0].set_xlabel("Theta Difference") +axs2[1,0].set_ylabel("Counts") + +axs2[1,1].hist(phidiff, bins=100, alpha=0.5, range=phidiffrange, label="Difference") +axs2[1,1].set_xlabel("Phi Difference") +axs2[1,1].set_ylabel("Counts") + +axs2[1,2].hist(magdiff, bins=100, alpha=0.5, range=magdiffrange, label="Difference") +axs2[1,2].set_xlabel("Mag Difference") +axs2[1,2].set_ylabel("Counts") + +axs2[2,0].hist2d(in_theta, thetadiff, bins=100, range=[thetarange,thetadiffrange], cmap="viridis", label="Difference vs Target") +axs2[2,0].set_xlabel("Theta Target") +axs2[2,0].set_ylabel("Theta Difference") + +axs2[2,1].hist2d(in_phi, phidiff, bins=100, range=[phirange,phidiffrange], cmap="viridis", label="Difference vs Target") +axs2[2,1].set_xlabel("Phi Target") +axs2[2,1].set_ylabel("Phi Difference") + +axs2[2,2].hist2d(in_mag, magdiff, bins=100, range=[magrange,magdiffrange], cmap="viridis", label="Difference vs Target") +axs2[2,2].set_xlabel("Mag Target") +axs2[2,2].set_ylabel("Mag Difference") + +plt.show() +plt.savefig(outGraphFile2) \ No newline at end of file diff --git a/benchmarks/LOWQ2/reconstruction_training/config.yml b/benchmarks/LOWQ2/reconstruction_training/config.yml new file mode 100644 index 00000000..a03f2028 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/config.yml @@ -0,0 +1,7 @@ +# Run Snakemake for the training +train:LOWQ2: + extends: .det_benchmark + stage: calibrate + script: + - snakemake --cores 8 ${MODEL_DIRECTORY}"trainedData.root" --configfile remote_config.yml + diff --git a/benchmarks/LOWQ2/reconstruction_training/epic_edit.xml 
b/benchmarks/LOWQ2/reconstruction_training/epic_edit.xml new file mode 100644 index 00000000..79cfb1e3 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/epic_edit.xml @@ -0,0 +1,133 @@ + + + + + + + + + + + + + + # EPIC Detector + - https://github.com/eic/epic + - https://github.com/eic/ip6 + + + + + EPIC + + + + + + + + ## Main Constant Definitions + + The ip6 (or other ip) defines should be included first. + These files have only a define tags. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ## World Volume + + The world is a simple box, but could be a union of multiple regions. + + + + + + + + + ## Detector Subsystems + + ### IP Subsystems + + The interaction point subsystems are included before the central detector subsystems. + This is becuase the IP subsystems, for example the beampipe, will define paramters + which are subsquently used in the central detector construction -- e.g. the vertex tracker + uses the beampipe OD to help define its placement. + + The IP subsystems include the Far forward and backward regions. The list of subsystem includes: + - Interaction region beampipe + - B0 tracker + - Off-momentum tracker + - Far forward roman pots + - Zero Degree Calorimeter + - Beam line magnets. + - and more... 
+ + + + + ## Main magnet and its field + + + + + + ## Central tracking detectors + + + + + + ## Central beam pipe + + + + + ## Far backward detectors + + + diff --git a/benchmarks/LOWQ2/reconstruction_training/local_config.yml b/benchmarks/LOWQ2/reconstruction_training/local_config.yml new file mode 100644 index 00000000..437975b8 --- /dev/null +++ b/benchmarks/LOWQ2/reconstruction_training/local_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "/scratch/EIC/SimOut/S3in/" +RECO_IN_DIRECTORY: "/scratch/EIC/ReconOut/S3in/" +MODEL_DIRECTORY: "/scratch/EIC/LowQ2Model/" \ No newline at end of file diff --git a/benchmarks/LOWQ2/remote_config.yml b/benchmarks/LOWQ2/remote_config.yml new file mode 100644 index 00000000..b793dbac --- /dev/null +++ b/benchmarks/LOWQ2/remote_config.yml @@ -0,0 +1,3 @@ +SIM_DIRECTORY: "LowQ2_Sim/" +RECO_IN_DIRECTORY: "LowQ2_ReconIn/" +MODEL_DIRECTORY: "LowQ2_Model/" \ No newline at end of file diff --git a/benchmarks/LOWQ2/signal_training/.gitignore b/benchmarks/LOWQ2/signal_training/.gitignore new file mode 100644 index 00000000..c96503c7 --- /dev/null +++ b/benchmarks/LOWQ2/signal_training/.gitignore @@ -0,0 +1,5 @@ +plots* +calibrations* +fieldmaps* +gdml* +.snakemake* \ No newline at end of file