Merge branch 'master' into insert_tau
sebouh137 authored Oct 23, 2024
2 parents 693b08f + 33be27a commit aa20ee9
Showing 51 changed files with 2,312 additions and 245 deletions.
22 changes: 11 additions & 11 deletions .gitlab-ci.yml
@@ -3,9 +3,9 @@ image: ${BENCHMARKS_REGISTRY}/${BENCHMARKS_CONTAINER}:${BENCHMARKS_TAG}
variables:
DETECTOR: epic
DETECTOR_CONFIG: epic_craterlake
DETECTOR_REPOSITORYURL: 'https://github.com/eic/epic.git'
GITHUB_SHA: ''
GITHUB_REPOSITORY: ''
SNAKEMAKE_FLAGS: '--cache'

workflow:
name: '$PIPELINE_NAME'
@@ -89,21 +89,14 @@ common:setup:
echo "BENCHMARKS_CONTAINER: ${BENCHMARKS_CONTAINER}"
echo "BENCHMARKS_REGISTRY: ${BENCHMARKS_REGISTRY}"
- source setup/bin/env.sh && ./setup/bin/install_common.sh


common:detector:
stage: initialize
needs: ["common:setup"]
script:
- source .local/bin/env.sh && build_detector.sh
- mkdir_local_data_link sim_output
- mkdir -p results
- mkdir -p config
- print_env.sh

get_data:
stage: data_init
needs: ["common:detector"]
needs: ["common:setup"]
script:
- source .local/bin/env.sh
- ln -s "${LOCAL_DATA_PATH}/sim_output" sim_output
@@ -112,10 +105,11 @@ get_data:

.det_benchmark:
needs:
- ["get_data","common:detector"]
- ["get_data","common:setup"]
before_script:
- mc config host add S3 https://eics3.sdcc.bnl.gov:9000 ${S3_ACCESS_KEY} ${S3_SECRET_KEY}
- source .local/bin/env.sh
- source /opt/detector/epic-main/setup.sh
- ls -lrtha
- ln -s "${LOCAL_DATA_PATH}/sim_output" sim_output
- ln -s "${LOCAL_DATA_PATH}/datasets/data" data
@@ -139,6 +133,7 @@ include:
- local: 'benchmarks/tracking_performances_dis/config.yml'
- local: 'benchmarks/barrel_ecal/config.yml'
- local: 'benchmarks/barrel_hcal/config.yml'
- local: 'benchmarks/lfhcal/config.yml'
- local: 'benchmarks/zdc/config.yml'
- local: 'benchmarks/zdc_lyso/config.yml'
- local: 'benchmarks/zdc_photon/config.yml'
@@ -152,7 +147,9 @@ include:
- local: 'benchmarks/zdc_sigma/config.yml'
- local: 'benchmarks/zdc_lambda/config.yml'
- local: 'benchmarks/insert_neutron/config.yml'

- local: 'benchmarks/femc_electron/config.yml'
- local: 'benchmarks/femc_photon/config.yml'
- local: 'benchmarks/femc_pi0/config.yml'
deploy_results:
allow_failure: true
stage: deploy
@@ -176,6 +173,9 @@ deploy_results:
- "collect_results:insert_tau"
- "collect_results:zdc_photon"
- "collect_results:zdc_pi0"
- "collect_results:femc_electron"
- "collect_results:femc_photon"
- "collect_results:femc_pi0"
script:
- echo "deploy results!"
- find results -print | sort | tee summary.txt
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,7 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-added-large-files
args: ['--maxkb=128']
- id: check-yaml
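
The two hooks added here reject commits that stage files larger than 128 kB and validate YAML syntax. As a rough orientation for what the size limit means, here is a minimal Python sketch (the real hook only inspects files staged in git; scanning the whole working tree is a simplification for illustration):

import pathlib

LIMIT_KB = 128
for path in pathlib.Path(".").rglob("*"):
    if path.is_file() and ".git" not in path.parts:
        size_kb = path.stat().st_size / 1024
        if size_kb > LIMIT_KB:
            # these are the files the hook would block at commit time
            print(f"{path}: {size_kb:.0f} kB exceeds the {LIMIT_KB} kB limit")
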
9 changes: 9 additions & 0 deletions Snakefile
@@ -7,6 +7,9 @@ include: "benchmarks/ecal_gaps/Snakefile"
include: "benchmarks/material_scan/Snakefile"
include: "benchmarks/tracking_performances/Snakefile"
include: "benchmarks/tracking_performances_dis/Snakefile"
include: "benchmarks/lfhcal/Snakefile"
include: "benchmarks/insert_muon/Snakefile"
include: "benchmarks/zdc_lambda/Snakefile"
include: "benchmarks/zdc_lyso/Snakefile"
include: "benchmarks/insert_muon/Snakefile"
include: "benchmarks/zdc_lambda/Snakefile"
@@ -15,6 +18,9 @@ include: "benchmarks/zdc_pi0/Snakefile"
include: "benchmarks/zdc_sigma/Snakefile"
include: "benchmarks/insert_neutron/Snakefile"
include: "benchmarks/insert_tau/Snakefile"
include: "benchmarks/femc_electron/Snakefile"
include: "benchmarks/femc_photon/Snakefile"
include: "benchmarks/femc_pi0/Snakefile"

use_s3 = config["remote_provider"].lower() == "s3"
use_xrootd = config["remote_provider"].lower() == "xrootd"
@@ -32,6 +38,9 @@ def get_remote_path(path):
rule fetch_epic:
output:
filepath="EPIC/{PATH}"
params:
# wildcards are not included in the hash used for caching, so we need to add them as params
PATH=lambda wildcards: wildcards.PATH
cache: True
retries: 3
shell: """
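
The params block added to fetch_epic is the key change here: as the comment notes, wildcard values are not folded into the hash that Snakemake's --cache uses, so two jobs differing only in {PATH} could collide in the cache unless the wildcard is also passed as a parameter. A minimal sketch of the same pattern, with a hypothetical fetch_sample rule and a placeholder URL standing in for the real remote:

rule fetch_sample:
    output:
        filepath="datasets/{SAMPLE}.root"
    params:
        # expose the wildcard so it enters the provenance hash used by --cache
        SAMPLE=lambda wildcards: wildcards.SAMPLE,
    cache: True
    retries: 3
    shell: """
curl -fsSL "https://example.invalid/store/{params.SAMPLE}.root" --output "{output.filepath}"
"""
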
6 changes: 3 additions & 3 deletions benchmarks/backgrounds/config.yml
@@ -5,7 +5,7 @@ sim:backgrounds:
- mkdir -p $LOCAL_DATA_PATH/input
- ln -s $LOCAL_DATA_PATH/input input
- |
snakemake -cache --cores 2 \
snakemake $SNAKEMAKE_FLAGS --cores 2 \
sim_output/$DETECTOR_CONFIG/backgrounds/EPIC/EVGEN/BACKGROUNDS/BEAMGAS/electron/GETaLM1.0.0-1.0/10GeV/GETaLM1.0.0-1.0_ElectronBeamGas_10GeV_foam_emin10keV_run001.edm4hep.root \
sim_output/$DETECTOR_CONFIG/backgrounds/EPIC/EVGEN/DIS/NC/10x100/minQ2=1/pythia8NCDIS_10x100_minQ2=1_beamEffects_xAngle=-0.025_hiDiv_1.edm4hep.root \
sim_output/$DETECTOR_CONFIG/backgrounds/EPIC/EVGEN/BACKGROUNDS/BEAMGAS/proton/pythia8.306-1.0/100GeV/pythia8.306-1.0_ProtonBeamGas_100GeV_run001.edm4hep.root
@@ -19,7 +19,7 @@ bench:backgrounds_emcal_backwards:
- ln -s $LOCAL_DATA_PATH/input input
- export PYTHONUSERBASE=$LOCAL_DATA_PATH/deps
- pip install -r benchmarks/backgrounds/requirements.txt
- snakemake --cores 8 backgrounds_ecal_backwards
- snakemake $SNAKEMAKE_FLAGS --cores 8 backgrounds_ecal_backwards

collect_results:backgrounds:
extends: .det_benchmark
@@ -29,5 +29,5 @@ collect_results:backgrounds:
script:
- ls -lrht
- mv results{,_save}/ # move results directory out of the way to preserve it
- snakemake --cores 1 --delete-all-output backgrounds_ecal_backwards
- snakemake $SNAKEMAKE_FLAGS --cores 1 --delete-all-output backgrounds_ecal_backwards
- mv results{_save,}/
36 changes: 32 additions & 4 deletions benchmarks/backwards_ecal/Snakefile
@@ -4,11 +4,29 @@ def get_n_events(wildcards):
n_events = int(n_events // (energy ** 0.5))
return n_events

import functools
import json
import ctypes.util
import warnings
from snakemake.logging import logger

@functools.cache
def get_spack_package_hash(package_name):
try:
ver_info = json.loads(subprocess.check_output(["spack", "find", "--json", package_name]))
return ver_info[0]["package_hash"]
except FileNotFoundError as e:
logger.warning("Spack is not installed")
return ""
except subprocess.CalledProcessError as e:
print(e)
return ""

rule backwards_ecal_sim:
input:
steering_file=ancient("EPIC/EVGEN/SINGLE/{PARTICLE}/{ENERGY}/{PHASE_SPACE}/{PARTICLE}_{ENERGY}_{PHASE_SPACE}.steer"),
warmup="warmup/{DETECTOR_CONFIG}.edm4hep.root",
geometry_lib=os.environ["DETECTOR_PATH"] + "/../../lib/" + ctypes.util.find_library("epic"),
output:
"sim_output/backwards_ecal/{DETECTOR_CONFIG}/{PARTICLE}/{ENERGY}/{PHASE_SPACE}/{PARTICLE}_{ENERGY}_{PHASE_SPACE}.{INDEX}.edm4hep.root",
log:
@@ -19,19 +37,25 @@ rule backwards_ecal_sim:
PHASE_SPACE="(3to50|45to135|130to177)deg",
INDEX="\d{4}",
params:
N_EVENTS=get_n_events
N_EVENTS=get_n_events,
SEED=lambda wildcards: "1" + wildcards.INDEX,
DETECTOR_PATH=os.environ["DETECTOR_PATH"],
DETECTOR_CONFIG=lambda wildcards: wildcards.DETECTOR_CONFIG,
DD4HEP_HASH=get_spack_package_hash("dd4hep"),
NPSIM_HASH=get_spack_package_hash("npsim"),
cache: True
shell:
"""
set -m # monitor mode to prevent lingering processes
exec ddsim \
--runType batch \
--enableGun \
--steeringFile "{input.steering_file}" \
--random.seed 1{wildcards.INDEX} \
--random.seed {params.SEED} \
--filter.tracker edep0 \
-v WARNING \
--numberOfEvents {params.N_EVENTS} \
--compactFile $DETECTOR_PATH/{wildcards.DETECTOR_CONFIG}.xml \
--compactFile {params.DETECTOR_PATH}/{params.DETECTOR_CONFIG}.xml \
--outputFile {output}
"""

@@ -45,9 +69,13 @@ rule backwards_ecal_recon:
"sim_output/backwards_ecal/{DETECTOR_CONFIG}/{PARTICLE}/{ENERGY}/{PHASE_SPACE}/{PARTICLE}_{ENERGY}_{PHASE_SPACE}.{INDEX}.eicrecon.tree.edm4eic.root.log",
wildcard_constraints:
INDEX="\d{4}",
params:
DETECTOR_CONFIG=lambda wildcards: wildcards.DETECTOR_CONFIG,
EICRECON_HASH=get_spack_package_hash("eicrecon"),
cache: True
shell: """
set -m # monitor mode to prevent lingering processes
exec env DETECTOR_CONFIG={wildcards.DETECTOR_CONFIG} \
exec env DETECTOR_CONFIG={params.DETECTOR_CONFIG} \
eicrecon {input} -Ppodio:output_file={output} \
-Ppodio:output_collections=MCParticles,EcalEndcapNRecHits,EcalEndcapNClusters
"""
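
The DD4HEP_HASH, NPSIM_HASH, and EICRECON_HASH params serve the same cache-invalidation purpose as the wildcard params above: anything listed under params enters the job's hash, so upgrading dd4hep, npsim, or eicrecon in the container invalidates previously cached simulation and reconstruction outputs instead of serving stale results. A quick check of what get_spack_package_hash returns, assuming spack and these packages are visible on PATH as in the benchmark container (elsewhere the helper deliberately falls back to an empty string):

import json
import subprocess

for pkg in ["dd4hep", "npsim", "eicrecon"]:
    try:
        # same call the Snakefile's helper makes; the record layout mirrors its usage
        records = json.loads(subprocess.check_output(["spack", "find", "--json", pkg]))
        print(pkg, records[0].get("package_hash", "<no package_hash field>"))
    except (FileNotFoundError, subprocess.CalledProcessError, IndexError):
        print(pkg, "not resolvable via spack on this machine")
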
122 changes: 122 additions & 0 deletions benchmarks/backwards_ecal/backwards_ecal.org
@@ -113,6 +113,124 @@ for energy in energies:
))
#+end_src

** Energy resolution

#+begin_src jupyter-python
fig, axs = plt.subplots(2, 4, sharex=True, sharey=True, figsize=(15, 6))

fig.suptitle(PLOT_TITLE)

axs = np.ravel(np.array(axs))

sigmas_rel_FWHM_cb = {}
fractions_below = {}

for ix, energy in enumerate(energies):
for use_clusters in [False, True]:
energy_value = float(energy.replace("GeV", "").replace("MeV", "e-3"))
if use_clusters:
clf_label = "leading cluster"
else:
clf_label = "sum all hits"
def clf(events):
if use_clusters:
return ak.drop_none(ak.max(events["EcalEndcapNClusters.energy"], axis=-1)) / energy_value
else:
return ak.sum(events["EcalEndcapNRecHits.energy"], axis=-1) / energy_value
e_pred = clf(e_eval[energy])

plt.sca(axs[ix])
counts, bins, patches = plt.hist(e_pred, weights=np.full_like(e_pred, 1.0 / ak.num(e_pred, axis=0)), bins=np.linspace(0.01, 1.01, 101), label=rf"$e^-$ {clf_label}", hatch=None if use_clusters else r"xxx", alpha=0.8 if use_clusters else 1.)
plt.title(f"{energy}")

e_over_p = (bins[1:] + bins[:-1]) / 2
import scipy.stats
def f(x, n, beta, m, loc, scale):
return n * scipy.stats.crystalball.pdf(x, beta, m, loc, scale)
p0 = (np.sum(counts[10:]), 2., 3., 0.95, 0.05)

try:
import scipy.optimize
par, pcov = scipy.optimize.curve_fit(f, e_over_p[5:], counts[5:], p0=p0, maxfev=10000)
except RuntimeError:
par = None
plt.plot(e_over_p, f(e_over_p, *par), label=rf"Crystal Ball fit", color="tab:green" if use_clusters else "green", lw=0.8)

def summarize_fit(par):
_, _, _, loc_cb, scale_cb = par
# Calculate FWHM
y_max = np.max(f(np.linspace(0., 1., 100), *par))
f_prime = lambda x: f(x, *par) - y_max / 2
x_plus, = scipy.optimize.root(f_prime, loc_cb + scale_cb).x
x_minus, = scipy.optimize.root(f_prime, loc_cb - scale_cb).x
color = "cyan" if use_clusters else "orange"
plt.axvline(x_minus, ls="--", lw=0.75, color=patches[0].get_facecolor(), label=r"$\mu - $FWHM")
plt.axvline(x_plus, ls=":", lw=0.75, color=patches[0].get_facecolor(), label=r"$\mu + $FWHM")
fwhm = (x_plus - x_minus) / loc_cb
sigma_rel_FWHM_cb = fwhm / 2 / np.sqrt(2 * np.log(2))

cutoff_x = loc_cb - fwhm
fraction_below = np.sum(counts[e_over_p < cutoff_x]) / ak.num(e_pred, axis=0)

return sigma_rel_FWHM_cb, fraction_below

sigma_rel_FWHM_cb, fraction_below = summarize_fit(par)
sigmas_rel_FWHM_cb.setdefault(clf_label, {})[energy] = sigma_rel_FWHM_cb
fractions_below.setdefault(clf_label, {})[energy] = fraction_below

plt.legend()
plt.xlabel("$E/p$", loc="right")
plt.ylabel("Event yield", loc="top")

fig.savefig(output_dir / f"resolution_plots.pdf", bbox_inches="tight")
fig.savefig(output_dir / f"resolution_plots.png", bbox_inches="tight")
plt.show()
plt.close(fig)

plt.figure()
energy_values = np.array([float(energy.replace("GeV", "").replace("MeV", "e-3")) for energy in energies])

for clf_label, sigma_rel_FWHM_cb in sigmas_rel_FWHM_cb.items():
sigma_over_e = np.array([sigma_rel_FWHM_cb[energy] for energy in energies]) * 100 # convert to %

def f(energy, stochastic, constant):
return np.sqrt((stochastic / np.sqrt(energy)) ** 2 + constant ** 2)
cond = energy_values >= 0.5
try:
import scipy.optimize
par, pcov = scipy.optimize.curve_fit(f, energy_values[cond], sigma_over_e[cond], maxfev=10000)
except RuntimeError:
par = None
stochastic, constant = par

plt.plot(
energy_values,
sigma_over_e,
marker=".",
label=f"{clf_label}"
)
plt.plot(
energy_values[cond],
f(energy_values[cond], *par),
color="black",
ls="--",
lw=0.5,
label=f"{clf_label}, ${np.ceil(stochastic * 10) / 10:.1f}\% / \sqrt{{E}} \oplus {np.ceil(constant * 10) / 10:.1f}\%$",
)
plt.plot(
energy_values,
np.sqrt((1 / energy_values) ** 2 + (1 / np.sqrt(energy_values)) ** 2 + 1 ** 2),
color="black", label=r"YR requirement $1\% / E \oplus 2.5\% / \sqrt{E} \oplus 1\%$",
)
plt.title(INPUT_PATH_FORMAT)
plt.legend()
plt.xlabel("Energy, GeV", loc="right")
plt.ylabel(r"$\sigma_{E} / E$ derived from FWHM, %", loc="top")
plt.savefig(output_dir / f"resolution.pdf", bbox_inches="tight")
plt.savefig(output_dir / f"resolution.png", bbox_inches="tight")
plt.show()
#+end_src
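
Spelled out, the resolution extracted in the block above is not a raw RMS: the FWHM of the fitted Crystal Ball peak, normalised to the fitted peak position \(\mu\), is converted to a Gaussian-equivalent relative width, and its energy dependence is then fit with a stochastic-plus-constant form (energies in GeV):

\[
\left.\frac{\sigma_E}{E}\right|_{\mathrm{FWHM}} = \frac{\mathrm{FWHM}/\mu}{2\sqrt{2\ln 2}},
\qquad
\frac{\sigma_E}{E}(E) = \frac{S}{\sqrt{E}} \oplus C \equiv \sqrt{\left(\frac{S}{\sqrt{E}}\right)^2 + C^2}.
\]
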

** Pion rejection

#+begin_src jupyter-python
@@ -176,10 +294,13 @@ for ix, energy in enumerate(energies):
plt.ylabel("Pion rejection factor")

fig.savefig(output_dir / f"pred.pdf", bbox_inches="tight")
fig.savefig(output_dir / f"pred.png", bbox_inches="tight")
plt.close(fig)
fig_log.savefig(output_dir / f"pred_log.pdf", bbox_inches="tight")
fig_log.savefig(output_dir / f"pred_log.png", bbox_inches="tight")
fig_log.show()
fig_roc.savefig(output_dir / f"roc.pdf", bbox_inches="tight")
fig_roc.savefig(output_dir / f"roc.png", bbox_inches="tight")
fig_roc.show()

plt.figure()
Expand All @@ -196,5 +317,6 @@ plt.legend()
plt.xlabel("Energy, GeV")
plt.ylabel("Pion rejection at 95%")
plt.savefig(output_dir / f"pion_rej.pdf", bbox_inches="tight")
plt.savefig(output_dir / f"pion_rej.png", bbox_inches="tight")
plt.show()
#+end_src
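
For orientation: the rejection factor plotted in this block follows the usual convention (the defining lines sit in the collapsed part of the diff, so this is an assumption), namely the reciprocal of the pion efficiency evaluated at a fixed 95% electron identification efficiency,

\[
R_{\pi} = \left.\frac{1}{\varepsilon_{\pi^-}}\right|_{\varepsilon_{e^-} = 95\%}.
\]
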
6 changes: 3 additions & 3 deletions benchmarks/backwards_ecal/config.yml
@@ -16,7 +16,7 @@ sim:backwards_ecal:
]
script:
- |
snakemake --cache --cores 5 sim_output/backwards_ecal/${DETECTOR_CONFIG}/${PARTICLE}/${MOMENTUM}/130to177deg/flag
snakemake $SNAKEMAKE_FLAGS --cores 5 sim_output/backwards_ecal/${DETECTOR_CONFIG}/${PARTICLE}/${MOMENTUM}/130to177deg/flag
bench:backwards_ecal:
extends: .det_benchmark
@@ -26,7 +26,7 @@ bench:backwards_ecal:
script:
- export PYTHONUSERBASE=$LOCAL_DATA_PATH/deps
- pip install -r benchmarks/backwards_ecal/requirements.txt
- snakemake --cores 1 backwards_ecal
- snakemake $SNAKEMAKE_FLAGS --cores 1 backwards_ecal

collect_results:backwards_ecal:
extends: .det_benchmark
@@ -36,5 +36,5 @@ collect_results:backwards_ecal:
script:
- ls -lrht
- mv results{,_save}/ # move results directory out of the way to preserve it
- snakemake --cores 1 --delete-all-output backwards_ecal
- snakemake $SNAKEMAKE_FLAGS --cores 1 --delete-all-output backwards_ecal
- mv results{_save,}/