From a4f859c3f48682f75487ce8fca7d8530e3fae1fe Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Thu, 15 Feb 2024 21:27:38 +0100 Subject: [PATCH 01/53] Debug mode for topology generation (#1460) --- DATA/tools/epn/gen_topo.sh | 6 +++++- DATA/tools/epn/gen_topo_o2dpg.sh | 17 +++++++++++++++-- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/DATA/tools/epn/gen_topo.sh b/DATA/tools/epn/gen_topo.sh index 973dd64b5..297dddd2f 100755 --- a/DATA/tools/epn/gen_topo.sh +++ b/DATA/tools/epn/gen_topo.sh @@ -18,7 +18,11 @@ if [[ -z "$EPN2EOS_METAFILES_DIR" ]] && [[ "0$WORKFLOWMODE" != "0print" ]]; then export EPN2EOS_METAFILES_DIR=/data/epn2eos_tool/epn2eos # Directory for epn2eos meta data files fi if [[ $USER == "epn" ]]; then - [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + if [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" ]]; then + [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/staging_gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + else + [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=/scratch/services/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. + fi else [[ -z "$GEN_TOPO_WORKDIR" ]] && export GEN_TOPO_WORKDIR=$HOME/gen_topo # Working directory for checkout of O2DPG repository and for XML cache. If this directory is wiped, gen_topo will recreate all necessary content the next time it runs. The folder should be persistent to cache workflows. mkdir -p $HOME/gen_topo diff --git a/DATA/tools/epn/gen_topo_o2dpg.sh b/DATA/tools/epn/gen_topo_o2dpg.sh index 29346796d..3da6f701e 100755 --- a/DATA/tools/epn/gen_topo_o2dpg.sh +++ b/DATA/tools/epn/gen_topo_o2dpg.sh @@ -22,6 +22,15 @@ if [[ -z "$MULTIPLICITY_FACTOR_REST" ]]; then echo \$MULTIPLICITY_FACTOR_REST mi if [[ -z "$RECOSHMSIZE" ]]; then echo \$RECOSHMSIZE missing; exit 1; fi # SHM Size for reconstruction collections if [[ -z "$DDSHMSIZE" ]]; then echo \$DDSHMSIZE missing; exit 1; fi # SHM Size for DD +# In case of debug mode, overwrite some settings +if [[ "${DEBUG_TOPOLOGY_GENERATION:=0}" == "1" ]]; then + echo "Debugging mode enabled. Setting options accordingly" 1>&2 + RECO_NUM_NODES_OVERRIDE=1 # to avoid slurm query, specify number of nodes to fixed value + GEN_TOPO_MI100_NODES=1 # also for MI100 nodes + GEN_TOPO_OVERRIDE_TEMPDIR=$PWD # keep temporary files like QC jsons in local directory + EPN2EOS_METAFILES_DIR=/tmp # nothing is written here, just needs to be set to something +fi + # Check settings coming from the EPN if [[ -z "$FILEWORKDIR" ]]; then echo \$FILEWORKDIR missing; exit 1; fi if [[ -z "$INRAWCHANNAME" ]]; then echo \$INRAWCHANNAME missing; exit 1; fi @@ -103,6 +112,7 @@ while true; do break done + if [[ ! 
-z "$GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_CMD" ]] && [[ "0$WORKFLOWMODE" != "0print" ]]; then TMP_POST_CACHING_CMD="$GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_CMD $GEN_TOPO_ODC_EPN_TOPO_POST_CACHING_ARGS" TMP_POST_CACHING_NMIN=$(( $RECO_NUM_NODES_OVERRIDE > $RECO_MAX_FAIL_NODES_OVERRIDE ? $RECO_NUM_NODES_OVERRIDE - $RECO_MAX_FAIL_NODES_OVERRIDE : 0 )) @@ -126,6 +136,9 @@ if [[ ! -z "$ECS_ENVIRONMENT_ID" && -d "/var/log/topology/" && $USER == "epn" ]] fi cat $GEN_TOPO_WORKDIR/output.xml -echo Removing temporary output file $GEN_TOPO_WORKDIR/output.xml 1>&2 -rm $GEN_TOPO_WORKDIR/output.xml + +if [[ "$DEBUG_TOPOLOGY_GENERATION" == "0" ]]; then + echo Removing temporary output file $GEN_TOPO_WORKDIR/output.xml 1>&2 + rm $GEN_TOPO_WORKDIR/output.xml +fi rm -f $GEN_TOPO_LOCKFILE From bda1a376702e52ad891b2490aad7a52973f23024 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 16 Feb 2024 10:13:02 +0100 Subject: [PATCH 02/53] Revert "Update anchorMC.sh - added proc arg (#1462)" This reverts commit 843a10020b04fa8462ee73ecc371d74a97242f82. --- MC/run/ANCHOR/anchorMC.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 438a790b8..b5d0d80fb 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -191,7 +191,7 @@ ALICEO2_CCDB_LOCALCACHE=${ALICEO2_CCDB_LOCALCACHE:-$(pwd)/ccdb} baseargs="-tf ${NTIMEFRAMES} --split-id ${SPLITID} --prod-split ${PRODSPLIT} --cycle ${CYCLE} --run-number ${ALIEN_JDL_LPMRUNNUMBER}" # these arguments will be passed as well but only evetually be digested by o2dpg_sim_workflow.py which is called from o2dpg_sim_workflow_anchored.py -remainingargs="-gen pythia8 -proc inel -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" +remainingargs="-gen pythia8 -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" remainingargs="${remainingargs} -e ${ALIEN_JDL_SIMENGINE} -j ${NWORKERS}" remainingargs="${remainingargs} -productionTag ${ALIEN_JDL_LPMPRODUCTIONTAG:-alibi_anchorTest_tmp}" remainingargs="${remainingargs} --anchor-config config-json.json" From e13c468a0dc52e06396dd8e4ac43337ad3c59f4d Mon Sep 17 00:00:00 2001 From: shahoian Date: Fri, 16 Feb 2024 17:17:42 +0100 Subject: [PATCH 03/53] adjust matching chi2 cut to 100 and max c14 diff to 2.5 --- DATA/production/configurations/asyncReco/setenv_extra.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index d178ca5db..4f9992a67 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -293,8 +293,8 @@ elif [[ $ALIGNLEVEL == 1 ]]; then ERROB="100e-8" [[ -z $TPCITSTIMEERR ]] && TPCITSTIMEERR="0.2" [[ -z $ITS_CONFIG || "$ITS_CONFIG" != *"--tracking-mode"* ]] && export ITS_CONFIG+=" --tracking-mode async" - CUT_MATCH_CHI2=80 - export ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.XMatchingRef=60.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=1.5;tpcitsMatch.crudeNSigma2Cut[0]=64;tpcitsMatch.crudeNSigma2Cut[1]=64;tpcitsMatch.crudeNSigma2Cut[2]=64;tpcitsMatch.crudeNSigma2Cut[3]=64;tpcitsMatch.crudeNSigma2Cut[4]=64;" + CUT_MATCH_CHI2=100 + export 
ITSTPCMATCH="tpcitsMatch.safeMarginTimeCorrErr=2.;tpcitsMatch.XMatchingRef=60.;tpcitsMatch.cutMatchingChi2=$CUT_MATCH_CHI2;;tpcitsMatch.crudeAbsDiffCut[0]=6;tpcitsMatch.crudeAbsDiffCut[1]=6;tpcitsMatch.crudeAbsDiffCut[2]=0.3;tpcitsMatch.crudeAbsDiffCut[3]=0.3;tpcitsMatch.crudeAbsDiffCut[4]=2.5;tpcitsMatch.crudeNSigma2Cut[0]=64;tpcitsMatch.crudeNSigma2Cut[1]=64;tpcitsMatch.crudeNSigma2Cut[2]=64;tpcitsMatch.crudeNSigma2Cut[3]=64;tpcitsMatch.crudeNSigma2Cut[4]=64;" #-------------------------------------- TPC corrections ----------------------------------------------- # we need to provide to TPC From 598afb3d09044292eeecdbd95d67a93c384f10ca Mon Sep 17 00:00:00 2001 From: swenzel Date: Mon, 5 Feb 2024 16:46:31 +0100 Subject: [PATCH 04/53] Fix type error in pipeline runner --- MC/bin/o2_dpg_workflow_runner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MC/bin/o2_dpg_workflow_runner.py b/MC/bin/o2_dpg_workflow_runner.py index 44f56303a..b8cd05bc0 100755 --- a/MC/bin/o2_dpg_workflow_runner.py +++ b/MC/bin/o2_dpg_workflow_runner.py @@ -36,7 +36,7 @@ formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-f','--workflowfile', help='Input workflow file name', required=True) -parser.add_argument('-jmax','--maxjobs', help='Number of maximal parallel tasks.', default=100) +parser.add_argument('-jmax','--maxjobs', type=int, help='Number of maximal parallel tasks.', default=100) parser.add_argument('-k','--keep-going', action='store_true', help='Keep executing the pipeline as far possibe (not stopping on first failure)') parser.add_argument('--dry-run', action='store_true', help='Show what you would do.') parser.add_argument('--visualize-workflow', action='store_true', help='Saves a graph visualization of workflow.') @@ -1730,5 +1730,5 @@ def speedup_ROOT_Init(): exit(code) actionlogger.info("Running in cgroup") -executor=WorkflowExecutor(args.workflowfile,jmax=args.maxjobs,args=args) +executor=WorkflowExecutor(args.workflowfile,jmax=int(args.maxjobs),args=args) exit (executor.execute()) From e996e47d70442af1400dec323dbef7d1f6bf1aaf Mon Sep 17 00:00:00 2001 From: swenzel Date: Mon, 19 Feb 2024 09:18:36 +0100 Subject: [PATCH 05/53] pipeline_runner: Fix script creation following a recent change in class data layout introduced here: https://github.com/AliceO2Group/O2DPG/commit/ec4acee8f0d38616b6fa45809661213c9b938acc --- MC/bin/o2_dpg_workflow_runner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MC/bin/o2_dpg_workflow_runner.py b/MC/bin/o2_dpg_workflow_runner.py index b8cd05bc0..3c826476a 100755 --- a/MC/bin/o2_dpg_workflow_runner.py +++ b/MC/bin/o2_dpg_workflow_runner.py @@ -1488,8 +1488,8 @@ def produce_script(self, filename): # we record the global environment setting # in particular to capture global workflow initialization lines.append('#-- GLOBAL INIT SECTION FROM WORKFLOW --\n') - for e in self.globalenv: - lines.append('export ' + str(e) + '=' + str(self.globalenv[e]) + '\n') + for e in self.globalinit['env']: + lines.append('export ' + str(e) + '=' + str(self.globalinit['env'][e]) + '\n') lines.append('#-- TASKS FROM WORKFLOW --\n') for tid in taskorder: print ('Doing task ' + self.idtotask[tid]) From a883150b1806f5006add0b5523cabb25455df6ca Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 19 Feb 2024 10:15:06 +0100 Subject: [PATCH 06/53] [AnchorMC] Allow for any additional option for sim WF (#1463) Use as ALIEN_JDL_ANCHOR_SIM_OPTIONS="-ini -confKey "key=value;otherKey=otherValue" -trigger -productionTag myTag" 
Build the final remainingargs by prepending it. The last argument wins, hence in this case -productionTag would not be overwritten with this user choice Co-authored-by: Benedikt Volkel --- MC/bin/o2dpg_sim_workflow_anchored.py | 3 ++- MC/run/ANCHOR/anchorMC.sh | 10 +++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index a50a0532a..9b859e9f0 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -409,7 +409,7 @@ def main(): effTrigger = 28.0 # this is ZDC else: effTrigger = 0.759 - + # time needs to be converted to seconds ==> timestamp / 1000 rate = retrieve_MinBias_CTPScaler_Rate(ctp_scalers, timestamp/1000., effTrigger, grplhcif.getBunchFilling().getNBunches(), ColSystem) @@ -424,6 +424,7 @@ def main(): # we finally pass forward to the unanchored MC workflow creation # TODO: this needs to be done in a pythonic way clearly + # NOTE: forwardargs can - in principle - contain some of the arguments that are appended here. However, the last passed argument wins, so they would be overwritten. forwardargs += " -tf " + str(args.tf) + " --sor " + str(sor) + " --timestamp " + str(timestamp) + " --production-offset " + str(prod_offset) + " -run " + str(args.run_number) + " --run-anchored --first-orbit " \ + str(first_orbit) + " -field ccdb -bcPatternFile ccdb" + " --orbitsPerTF " + str(GLOparams["OrbitsPerTF"]) + " -col " + str(ColSystem) + " -eCM " + str(eCM) + ' --readoutDets ' + GLOparams['detList'] print ("forward args ", forwardargs) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index b5d0d80fb..b5c0422c3 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -76,10 +76,12 @@ done # Allow for both "ALIEN_JDL_LPM" as well as "KEY" -# the only two where there is a real default for +# the only four where there is a real default for export ALIEN_JDL_CPULIMIT=${ALIEN_JDL_CPULIMIT:-${CPULIMIT:-8}} export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} +# can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py +export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -191,10 +193,12 @@ ALICEO2_CCDB_LOCALCACHE=${ALICEO2_CCDB_LOCALCACHE:-$(pwd)/ccdb} baseargs="-tf ${NTIMEFRAMES} --split-id ${SPLITID} --prod-split ${PRODSPLIT} --cycle ${CYCLE} --run-number ${ALIEN_JDL_LPMRUNNUMBER}" # these arguments will be passed as well but only evetually be digested by o2dpg_sim_workflow.py which is called from o2dpg_sim_workflow_anchored.py -remainingargs="-gen pythia8 -seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" +remainingargs="-seed ${SEED} -ns ${NSIGEVENTS} --include-local-qc --pregenCollContext" remainingargs="${remainingargs} -e ${ALIEN_JDL_SIMENGINE} -j ${NWORKERS}" remainingargs="${remainingargs} -productionTag ${ALIEN_JDL_LPMPRODUCTIONTAG:-alibi_anchorTest_tmp}" -remainingargs="${remainingargs} --anchor-config config-json.json" +# prepend(!) ALIEN_JDL_ANCHOR_SIM_OPTIONS +# since the last passed argument wins, e.g. 
-productionTag cannot be overwritten by the user +remainingargs="${ALIEN_JDL_ANCHOR_SIM_OPTIONS} ${remainingargs} --anchor-config config-json.json" echo "baseargs: ${baseargs}" echo "remainingargs: ${remainingargs}" From f721817f954fa477b145175122188b42dede1cb4 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 19 Feb 2024 11:56:37 +0100 Subject: [PATCH 07/53] [SimCI] Do NOT run everything if anything changed in test directory (#1475) --- test/run_generator_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index bb256ba78..34efd5443 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -378,7 +378,7 @@ pushd ${REPO_DIR} > /dev/null # First check, if testing itself has changed. In that case this will add INI files # for which a test can be found -global_testing_changed=$(get_changed_files | grep -E ".C$|.sh$" | grep "^test/") +global_testing_changed=$(get_changed_files | grep -E "common/kine_tests/test_generic_kine.C|run_generator_tests.sh" | grep "^test/") [[ "${global_testing_changed}" != "" ]] && add_ini_files_from_all_tests # Then add the ini files that have changed as well. We need to do that so we get information From 26319e42c27e7f5c15a17d0b351ebbfc1f18d403 Mon Sep 17 00:00:00 2001 From: David Rohr Date: Mon, 19 Feb 2024 10:09:28 +0100 Subject: [PATCH 08/53] dpl-workflow: Change default FLP processing in sync mode to CTP only --- DATA/common/setenv.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index f7342e553..b4e5152eb 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -134,7 +134,7 @@ else # Defaults when running on the EPN if [[ -z "${SHMTHROW:-}" ]]; then export SHMTHROW=0; fi if [[ -z "${TIMEFRAME_SHM_LIMIT:-}" ]]; then export TIMEFRAME_SHM_LIMIT=$(( $SHMSIZE / 2 )); fi if [[ -z "${EDJSONS_DIR:-}" ]]; then export EDJSONS_DIR="/scratch/services/ed/jsons_${RUNTYPE}"; fi - if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="TOF,CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF + if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF if [[ -z "${GEN_TOPO_AUTOSCALE_PROCESSES:-}" ]]; then export GEN_TOPO_AUTOSCALE_PROCESSES=1; fi # On the EPN we should make sure to always use the node to the full extent fi # Some more options for running on the EPN From d3e61262e4ec8eaf2f4cedd077976f5d6f414704 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 10:16:39 +0100 Subject: [PATCH 09/53] [SimWF] Small fixes (#1476) * o2dpg_sim_workflow_anchored.py * remove unused function from CCDBAccessor due to undefined utility function that is used inside it * fix variable name * o2dpg_sim_workflow.py * fix dependency of cleanup task * o2dpg-workflow-tools.py * use return value of function correctly Co-authored-by: Benedikt Volkel --- MC/bin/o2dpg-workflow-tools.py | 7 ++++--- MC/bin/o2dpg_sim_workflow.py | 6 +++--- MC/bin/o2dpg_sim_workflow_anchored.py | 12 +----------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/MC/bin/o2dpg-workflow-tools.py b/MC/bin/o2dpg-workflow-tools.py index 78b82294e..09bd4b115 100755 --- a/MC/bin/o2dpg-workflow-tools.py +++ b/MC/bin/o2dpg-workflow-tools.py @@ -15,15 +15,16 @@ def extend(args): is kept """ # load 
workflows - workflow_orig = read_workflow(args.orig_wf) - workflow_extend = read_workflow(args.extend_wf) + workflow_orig, meta = read_workflow(args.orig_wf) + workflow_extend, _ = read_workflow(args.extend_wf) # extend workflow_orig.extend(workflow_extend) # dump in new file filename = args.output if args.output else args.orig_wf - dump_workflow(workflow_orig, filename) + # propagate meta information from original workflow that is extended + dump_workflow(workflow_orig, filename, meta) def create(args): diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index 5b236027e..d0812d42c 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -1039,7 +1039,7 @@ def getDigiTaskName(det): TOFRECOtask['cmd'] = '${O2_ROOT}/bin/o2-tof-reco-workflow --use-ccdb ' + getDPL_global_options() + putConfigValuesNew() + ('',' --disable-mc')[args.no_mc_labels] workflow['stages'].append(TOFRECOtask) - + toftpcmatchneeds = [TOFRECOtask['name'], TPCRECOtask['name'], ITSTPCMATCHtask['name'], TRDTRACKINGtask2['name']] toftracksrcdefault = anchorConfig.get('o2-tof-matcher-workflow-options', {}).get('track-sources', 'TPC,ITS-TPC,TPC-TRD,ITS-TPC-TRD') TOFTPCMATCHERtask = createTask(name='toftpcmatch_'+str(tf), needs=toftpcmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000') @@ -1407,10 +1407,10 @@ def addQCPerTF(taskName, needs, readerCommand, configFilePath, objectsFile=''): # taking away digits, clusters and other stuff as soon as possible. # TODO: cleanup by labels or task names if args.early_tf_cleanup == True: - TFcleanup = createTask(name='tfcleanup_'+str(tf), needs= [ AOD_merge_task['name'] ], tf=tf, cwd=timeframeworkdir, lab=["CLEANUP"], mem='0', cpu='1') + TFcleanup = createTask(name='tfcleanup_'+str(tf), needs= [ AODtask['name'] ], tf=tf, cwd=timeframeworkdir, lab=["CLEANUP"], mem='0', cpu='1') TFcleanup['cmd'] = 'rm *digi*.root;' TFcleanup['cmd'] += 'rm *cluster*.root' - workflow['stages'].append(TFcleanup); + workflow['stages'].append(TFcleanup) # AOD merging as one global final step aodmergerneeds = ['aod_' + str(tf) for tf in range(1, NTIMEFRAMES + 1)] diff --git a/MC/bin/o2dpg_sim_workflow_anchored.py b/MC/bin/o2dpg_sim_workflow_anchored.py index 9b859e9f0..9708e2bc3 100755 --- a/MC/bin/o2dpg_sim_workflow_anchored.py +++ b/MC/bin/o2dpg_sim_workflow_anchored.py @@ -50,16 +50,6 @@ def __init__(self, url): # we allow nullptr responsens and will treat it ourselves o2.ccdb.BasicCCDBManager.instance().setFatalWhenNull(False) - def list(self, path, dump_path=None): - ret = self.api.list(path, False, "application/json") - ret = json.loads(ret) - if ret and "objects" in ret: - ret = ret["objects"] - if ret and dump_path: - print(f"CCDB object information for path {path} stored in {dump_path}") - dump_json(ret, dump_path) - return ret - def fetch(self, path, obj_type, timestamp=None, meta_info=None): """ TODO We could use CcdbApi::snapshot at some point, needs revision @@ -95,7 +85,7 @@ def retrieve_sor_eor(ccdbreader, run_number): path_run_info = "RCT/Info/RunInformation" header = ccdbreader.fetch_header(path_run_info, run_number) if not header: - print(f"WARNING: Cannot find run information for run number {r}") + print(f"WARNING: Cannot find run information for run number {run_number}") return None # return this a dictionary return {"SOR": int(header["SOR"]), "EOR": int(header["EOR"])} From 2c5d4f3271f066dd0a7e0050b38ccf8ac855b460 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:29:53 +0100 Subject: [PATCH 10/53] Fix syntax checks 
for shell scripts (#1473) --- DATA/common/gen_topo_helper_functions.sh | 14 +++++++------- DATA/common/setenv_calib.sh | 2 +- .../2022/LHC22f/apass1/setenv_extra.sh | 1 + .../configurations/asyncReco/async_pass.sh | 4 ++-- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/DATA/common/gen_topo_helper_functions.sh b/DATA/common/gen_topo_helper_functions.sh index 00fc1f118..6efe2d7f8 100755 --- a/DATA/common/gen_topo_helper_functions.sh +++ b/DATA/common/gen_topo_helper_functions.sh @@ -104,37 +104,37 @@ _check_multiple() has_detectors() { - _check_multiple has_detector $@ + _check_multiple has_detector "$@" } has_detectors_qc() { - _check_multiple has_detector_qc $@ + _check_multiple has_detector_qc "$@" } has_detectors_calib() { - _check_multiple has_detector_calib $@ + _check_multiple has_detector_calib "$@" } has_detectors_reco() { - _check_multiple has_detector_reco $@ + _check_multiple has_detector_reco "$@" } has_detectors_ctf() { - _check_multiple has_detector_ctf $@ + _check_multiple has_detector_ctf "$@" } has_detectors_flp_processing() { - _check_multiple has_detector_flp_processing $@ + _check_multiple has_detector_flp_processing "$@" } workflow_has_parameters() { - _check_multiple workflow_has_parameter $@ + _check_multiple workflow_has_parameter "$@" } add_comma_separated() diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index a1d2e7692..0b44fe23c 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -14,7 +14,7 @@ SOURCE_GUARD_SETENV_CALIB=1 # define the conditions for each calibration if has_detector_calib ITS && has_detectors_reco ITS && has_detector_matching PRIMVTX && [[ ! -z "$VERTEXING_SOURCES" ]]; then CAN_DO_CALIB_PRIMVTX_MEANVTX=1; else CAN_DO_CALIB_PRIMVTX_MEANVTX=0; fi if has_detector_calib TOF && has_detector_reco TOF; then CAN_DO_CALIB_TOF_DIAGNOSTICS=1; CAN_DO_CALIB_TOF_INTEGRATEDCURR=1; else CAN_DO_CALIB_TOF_DIAGNOSTICS=0; CAN_DO_CALIB_TOF_INTEGRATEDCURR=0; fi -if has_detector_calib TOF && has_detector_reco TOF && (( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF )); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi +if has_detector_calib TOF && has_detector_reco TOF && ( ( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF ) ); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi if has_detector_calib TPC && has_detectors ITS TPC TOF TRD && has_detector_matching ITSTPCTRDTOF; then CAN_DO_CALIB_TPC_SCDCALIB=1; else CAN_DO_CALIB_TPC_SCDCALIB=0; fi if has_detector_calib TPC && has_processing_step TPC_DEDX; then CAN_DO_CALIB_TPC_TIMEGAIN=1; CAN_DO_CALIB_TPC_RESPADGAIN=1; else CAN_DO_CALIB_TPC_TIMEGAIN=0; CAN_DO_CALIB_TPC_RESPADGAIN=0; fi if has_detector_calib TPC && has_detectors ITS TPC && has_detector_matching ITSTPC; then CAN_DO_CALIB_TPC_VDRIFTTGL=1; else CAN_DO_CALIB_TPC_VDRIFTTGL=0; fi diff --git a/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh b/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh index 648e9a61b..e814f833a 100644 --- a/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh +++ b/DATA/production/configurations/2022/LHC22f/apass1/setenv_extra.sh @@ -72,6 +72,7 @@ fi CTP_BC_SHIFT=0 if [[ 
$ALIEN_JDL_LPMANCHORYEAR == "2022" ]]; then CTP_BC_SHIFT=-294 +fi if [[ $RUNNUMBER -ge 538923 ]] && [[ $RUNNUMBER -le 539700 ]]; then # 3 BC offset (future direction) in CTP data observed for LHC23zd - LHC23zs CTP_BC_SHIFT=-3 diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 05ed0dea2..9d1f49025 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -576,7 +576,7 @@ else echo "nCTFsFilesInspected_step1 = $nCTFsFilesInspected_step1, nCTFsFilesInspected_step2 = $nCTFsFilesInspected_step2" > validation_error.message echo "nCTFsFilesOK_step1 = $nCTFsFilesOK_step1, nCTFsFilesOK_step2 = $nCTFsFilesOK_step2" > validation_error.message echo "nCTFsProcessed_step1 = $nCTFsProcessed_step1, nCTFsProcessed_step2 = $nCTFsProcessed_step2" > validation_error.message - exit 1000 + exit 255 fi fi fi @@ -745,7 +745,7 @@ if [[ $ALIEN_JDL_AODOFF != 1 ]]; then CURRENT_POOL_SIZE=`jobs -r | wc -l` done < $JOB_LIST # collecting return codes of the merging processes - for i in ${!arr[@]}; do + for i in "${!arr[@]}"; do wait ${arr[$i]} exitcode=$? if [[ $exitcode -ne 0 ]]; then From 5452aba0aaaf8683defeb19a39e334bab8adcdce Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:30:16 +0100 Subject: [PATCH 11/53] TPC SCD calib send track data by default (#1474) --- DATA/common/setenv_calib.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index 0b44fe23c..bdb836509 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -36,8 +36,9 @@ if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC && has_detector_reco ITS && has # additional individual settings for calibration workflows if has_detector CTP; then export CALIB_TPC_SCDCALIB_CTP_INPUT="--enable-ctp"; else export CALIB_TPC_SCDCALIB_CTP_INPUT=""; fi if [[ ${DISABLE_TRD_PH:-} == 1 ]]; then CAN_DO_CALIB_TRD_T0=0; fi -# the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here -: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} + +: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} # the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here +: ${CALIB_TPC_SCDCALIB_SENDTRKDATA:=1} # by default, we want to write the track information in addition to unbinned residuals to allow finer filtering offline if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then From d24070e8be0a4e7180825f40402eee5f21e45e53 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 10:31:26 +0100 Subject: [PATCH 12/53] For synthetic runs we set IS_SIMULATED_DATA=1 (#1477) --- DATA/common/setenv.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index b4e5152eb..74ae5640e 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -104,6 +104,7 @@ if [[ -z "${RAWINPUTDIR:-}" ]]; then export RAWINPUTDIR=$FILEWORKDIR; fi # if [[ -z "${EPNSYNCMODE:-}" ]]; then export EPNSYNCMODE=0; fi # Is this workflow supposed to run on EPN for sync processing? Will enable InfoLogger / metrics / fetching QC JSONs from consul... 
if [[ -z "${BEAMTYPE:-}" ]]; then export BEAMTYPE=PbPb; fi # Beam type, must be PbPb, pp, pPb, cosmic, technical if [[ -z "${RUNTYPE:-}" ]]; then export RUNTYPE=Standalone; fi # Run Type, standalone for local tests, otherwise PHYSICS, COSMICS, TECHNICAL, SYNTHETIC +if [[ $RUNTYPE == "SYNTHETIC" ]]; then export IS_SIMULATED_DATA=1; fi # For SYNTHETIC runs we always process simulated data if [[ -z "${IS_SIMULATED_DATA:-}" ]]; then export IS_SIMULATED_DATA=1; fi # processing simulated data if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi # processing triggered data (TPC triggered instead of continuous) if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs From 7f48808ee5ee611abfbe0f01b363cf1954ca22c2 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 10:40:41 +0100 Subject: [PATCH 13/53] Add tool to fetch PR information based on assigned labels (#1478) * applicable to different repos * distinguishes between merged and other (simply closed or still open) PRs * dumps the output into a simple text file for further proessing Co-authored-by: Benedikt Volkel --- UTILS/o2dpg_make_github_pr_report.py | 141 +++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100755 UTILS/o2dpg_make_github_pr_report.py diff --git a/UTILS/o2dpg_make_github_pr_report.py b/UTILS/o2dpg_make_github_pr_report.py new file mode 100755 index 000000000..b6a4ac9c9 --- /dev/null +++ b/UTILS/o2dpg_make_github_pr_report.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python3 + +# Get list of PRs from provided repo that have a certain label assigned +# Can be used to figure out which PRs should be ported + +import sys +import argparse +import requests + + +def organise_prs(prs): + """ + Sort PRs by time merged, starting from old to recent + """ + # collect merged PRs + prs_merged = [] + # collect the time of merged PRs + merged_at = [] + # other PRs, open, closed and not merged + prs_other = [] + + for pr in prs: + if not pr['merged_at']: + # that has not been merged + prs_other.append(pr) + continue + # get the PR itself and the merged timestamp + prs_merged.append(pr) + merged_at.append(pr['merged_at']) + + # sort the merged PRs by their merged timestamp + prs_merged = [pr for _, pr in sorted(zip(merged_at, prs))] + + return prs_merged, prs_other + + +def get_prs(owner, repo, prod_label, pr_state, include_unmerged, per_page=50, start_page=1, pages=1): + """ + Get PRs according to some selection + """ + # GitHub API endpoint for listing closed pull requests with a specific label + merged_token = '&is:merged=true' if not include_unmerged else '' + prs_return = [] + + has_error = False + for page in range(start_page, pages + 1): + url = f'https://api.github.com/repos/{owner}/{repo}/pulls?state={pr_state}{merged_token}&page={page}&per_page={per_page}' + print(f'Fetch PRs accrodring to {url}') + + # Send GET request to GitHub API + response = requests.get(url) + + # Check if the request was successful (status code 200) + if response.status_code == 200: + # Parse JSON response + prs = response.json() + # PRs to return because we filter on a specific label + for pr in prs: + labels = pr['labels'] + accept = False + for label in labels: + if label['name'] == prod_label: + # only with the correct the label will be accepted + accept = True + break + if not accept: + continue + # we will end up here if accepted, so append + prs_return.append(pr) + + else: + print(f'Failed to retrieve data: {response.status_code} - {response.text}') + has_error = True + 
break + + if has_error: + return None, None + + # organise PRs into different lists (merged and others) + return organise_prs(prs_return) + + +def make_report(prs_merged, prs_other, outfile): + """ + Make a report + + simply dump into text file + """ + + with open(outfile, 'w') as f: + f.write('# FROM OLDEST TO RECENT\n') + # our common header + f.write('| Date of next tag | Requestor | Package | PR | Data or MC | Comment | JIRA (if it exists) | Accepted | In production | Validated by requestor |\n') + f.write('| ---------------- | ------------ | ------- | --------------------------------------------------------:|:--------------------------------------------- | ------------------- | ---------------- | ------------- |-------------| ------------------|\n') + + # first put the merged PRs + for pr in prs_merged: + mc_data = [] + + for label in pr['labels']: + if label['name'] in ('MC', 'DATA'): + # get assigned MC or DATA label if this PR has it + mc_data.append(label['name']) + + # if no specific MC or DATA label, assume valid for both + mc_data = ','.join(mc_data) if mc_data else 'MC,DATA' + # add the full line to the output file + f.write(f'| {args.date} | {pr["user"]["login"]} | {args.repo} | [PR]({pr["html_url"]}) | {mc_data} | {pr["title"]} | | | | |\n') + + # add all the other commits + f.write('OTHER PRs\n') + for pr in prs_other: + f.write(f'| {args.date} | {pr["user"]["login"]} | {args.repo} | [PR]({pr["html_url"]}) | | {pr["title"]} | | | | |\n') + + +if __name__ == '__main__': + # Parse command-line arguments + parser = argparse.ArgumentParser(description='Retrieve closed pull requests with a specific label from a GitHub repository') + parser.add_argument('--owner', help='GitHub repository owner', default='AliceO2Group') + parser.add_argument('--repo', required=True, help='GitHub repository name, e.g. 
O2DPG or AliceO2') + parser.add_argument('--prod-label', dest='prod_label', required=True, help='Production label to filter PRs') + parser.add_argument('--pr-state', dest='pr_state', default='closed', help='The state of the PR') + parser.add_argument('--include-unmerged', dest='include_unmerged', action='store_true', help='To fetch also unmerged PRs') + parser.add_argument('--output', default='o2dpg_pr_report.txt') + parser.add_argument('--date', help='The date tag to be put', required=True) + parser.add_argument('--per-page', dest='per_page', default=50, help='How many results per page') + parser.add_argument('--start-page', dest='start_page', type=int, default=1, help='Start on this page') + parser.add_argument('--pages', type=int, default=1, help='Number of pages') + + + args = parser.parse_args() + + # Retrieve closed pull requests with the specified label + prs_merged, prs_other = get_prs(args.owner, args.repo, args.prod_label, args.pr_state, args.include_unmerged, args.per_page, args.start_page, args.pages) + if prs_merged is None: + print('ERROR: There was a problem fetching the info.') + sys.exit(1) + + make_report(prs_merged, prs_other, args.output) + + sys.exit(0) From ed7cc8640f26e1098f5ac1e467b57806400ba59c Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 11:22:06 +0100 Subject: [PATCH 14/53] [Anchor] Add test for anchored MC (#1464) Runs a very simple anchored production, 2TFs, 50 pp events each Co-authored-by: Benedikt Volkel --- MC/run/ANCHOR/anchorMC.sh | 2 + .../tests/test_anchor_2023_apass2_PbPb.sh | 36 ++++++++++ .../tests/test_anchor_2023_apass2_pp.sh | 39 +++++++++++ test/common/utils/utils.sh | 2 +- test/run_workflow_tests.sh | 67 +++++++++++++++++-- 5 files changed, 139 insertions(+), 7 deletions(-) create mode 100755 MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh create mode 100755 MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index b5c0422c3..2bab586de 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -241,6 +241,8 @@ if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include # do QC tasks echo "Doing QC" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k + # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. + # That means, even if there is a failing QC task, the return code will be 0 MCRC=$? 
fi diff --git a/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh new file mode 100755 index 000000000..095908e4b --- /dev/null +++ b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_PbPb.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# +# An example steering script for anchored MC simulations, PbPb +# + +# example anchoring +# taken from https://its.cern.ch/jira/browse/O2-4586 +export ALIEN_JDL_LPMANCHORPASSNAME=apass2 +export ALIEN_JDL_MCANCHOR=apass2 +export ALIEN_JDL_COLLISIONSYSTEM=Pb-Pb +export ALIEN_JDL_CPULIMIT=8 +export ALIEN_JDL_LPMPASSNAME=apass2 +export ALIEN_JDL_LPMRUNNUMBER=544121 +export ALIEN_JDL_LPMPRODUCTIONTYPE=MC +export ALIEN_JDL_LPMINTERACTIONTYPE=PbPb +export ALIEN_JDL_LPMPRODUCTIONTAG=LHC24a1 +export ALIEN_JDL_LPMANCHORRUN=544121 +export ALIEN_JDL_LPMANCHORPRODUCTION=LHC23zzh +export ALIEN_JDL_LPMANCHORYEAR=2023 + +export NTIMEFRAMES=2 +export NSIGEVENTS=2 +export SPLITID=100 +export PRODSPLIT=153 +export CYCLE=0 + +# on the GRID, this is set, for our use case, we can mimic any job ID +export ALIEN_PROC_ID=2963436952 + +# run the central anchor steering script; this includes +# * derive timestamp +# * derive interaction rate +# * extract and prepare configurations (which detectors are contained in the run etc.) +# * run the simulation (and QC) +${O2DPG_ROOT}/MC/run/ANCHOR/anchorMC.sh diff --git a/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh new file mode 100755 index 000000000..e528b8a0a --- /dev/null +++ b/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# +# An example steering script for anchored MC simulations, pp +# + +# example anchoring +# taken from https://its.cern.ch/jira/browse/O2-4586 +export ALIEN_JDL_LPMANCHORPASSNAME=apass2 +export ALIEN_JDL_MCANCHOR=apass2 +export ALIEN_JDL_COLLISIONSYSTEM=p-p +export ALIEN_JDL_CPULIMIT=8 +export ALIEN_JDL_LPMPASSNAME=apass2 +export ALIEN_JDL_LPMRUNNUMBER=535069 +export ALIEN_JDL_LPMPRODUCTIONTYPE=MC +export ALIEN_JDL_LPMINTERACTIONTYPE=pp +export ALIEN_JDL_LPMPRODUCTIONTAG=LHC24a2 +export ALIEN_JDL_LPMANCHORRUN=535069 +export ALIEN_JDL_LPMANCHORPRODUCTION=LHC23f +export ALIEN_JDL_LPMANCHORYEAR=2023 + +export NTIMEFRAMES=2 +export NSIGEVENTS=50 +export SPLITID=100 +export PRODSPLIT=153 +export CYCLE=0 + +# on the GRID, this is set, for our use case, we can mimic any job ID +export ALIEN_PROC_ID=2963436952 + +# for pp and 50 events per TF, we launch only 4 workers. +export NWORKERS=4 + +# run the central anchor steering script; this includes +# * derive timestamp +# * derive interaction rate +# * extract and prepare configurations (which detectors are contained in the run etc.) 
+# * run the simulation (and QC) +${O2DPG_ROOT}/MC/run/ANCHOR/anchorMC.sh diff --git a/test/common/utils/utils.sh b/test/common/utils/utils.sh index e2cae2171..0c34c5395 100644 --- a/test/common/utils/utils.sh +++ b/test/common/utils/utils.sh @@ -78,7 +78,7 @@ make_wf_creation_script() print_error_logs() { local search_dir=${1} - local search_pattern="TASK-EXIT-CODE: ([1-9][0-9]*)|[Ss]egmentation violation|[Ee]xception caught|\[FATAL\]|uncaught exception|\(int\) ([1-9][0-9]*)|fair::FatalException" + local search_pattern="TASK-EXIT-CODE: ([1-9][0-9]*)|[Ss]egmentation violation|[Ss]egmentation fault|Program crashed|[Ee]xception caught|\[FATAL\]|uncaught exception|\(int\) ([1-9][0-9]*)|fair::FatalException" local error_files=$(find ${search_dir} -maxdepth 4 -type f \( -name "*.log" -or -name "*serverlog*" -or -name "*workerlog*" -or -name "*mergerlog*" \) | xargs grep -l -E "${search_pattern}" | sort) for ef in ${error_files} ; do echo_red "Error found in log $(realpath ${ef})" diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index c4a2daeaa..adc610393 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -3,12 +3,14 @@ # The test parent dir to be cretaed in current directory TEST_PARENT_DIR_PWG="o2dpg_tests/workflows_pwgs" TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_bin" +TEST_PARENT_DIR_ANCHORED="o2dpg_tests/anchored" # a global counter for tests TEST_COUNTER=0 # unified names of log files LOG_FILE_WF="o2dpg-test-wf.log" +LOG_FILE_ANCHORED="o2dpg-test-anchored.log" # Prepare some colored output SRED="\033[0;31m" @@ -123,6 +125,26 @@ run_workflow_creation() return ${RET} } +test_anchored() +{ + local to_run="${1:-${O2DPG_ROOT}/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh}" + local RET=0 + for anchored_script in ${to_run} ; do + [[ ! -f ${anchored_script} ]] && { echo "Desired test script ${anchored_script} does not exist. Skip." 
; continue ; } + ((TEST_COUNTER++)) + local test_dir=${TEST_COUNTER}_$(basename ${anchored_script})_dir + rm -rf ${test_dir} 2> /dev/null + mkdir ${test_dir} + pushd ${test_dir} > /dev/null + echo -n "Test ${TEST_COUNTER}: ${anchored_script}" + ${anchored_script} >> ${LOG_FILE_ANCHORED} 2>&1 + local ret_this=${?} + [[ "${ret_this}" != "0" ]] && RET=${ret_this} + popd > /dev/null + done + return ${RET} +} + collect_changed_pwg_wf_files() { # Collect all INI files which have changed @@ -188,8 +210,9 @@ source ${REPO_DIR}/test/common/utils/utils.sh pushd ${REPO_DIR} > /dev/null # flag if anything changed in the sim workflow bin dir -changed_wf_bin=$(get_changed_files | grep "MC/bin") +changed_wf_bin=$(get_changed_files | grep -E "MC/bin") changed_wf_bin_related=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") +changed_anchored_related=$(get_changed_files | grep -E "MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py") # collect what has changed for PWGs @@ -215,6 +238,27 @@ REPO_DIR=$(realpath ${REPO_DIR}) export O2DPG_ROOT=${REPO_DIR} +############### +# ANCHORED MC # +############### +# prepare our local test directory for PWG tests +rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null +mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null +pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null + +# global return code for PWGs +ret_global_anchored=0 +if [[ "${changed_anchored_related}" != "" ]] ; then + echo "### Test anchored ###" + # Run an anchored test + test_anchored + ret_global_anchored=${?} + echo +fi + +# return to where we came from +popd > /dev/null + ######## # PWGs # ######## @@ -226,7 +270,7 @@ pushd ${TEST_PARENT_DIR_PWG} > /dev/null # global return code for PWGs ret_global_pwg=0 if [[ "${changed_wf_bin}" != "" ]] ; then - # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files eal=rlier + # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files earlier WF_FILES=$(get_all_workflows "MC/run/.*/") echo fi @@ -240,7 +284,6 @@ if [[ "${WF_FILES}" != "" ]] ; then echo fi - # return to where we came from popd > /dev/null @@ -285,9 +328,21 @@ if [[ "${ret_global_bin}" != "0" ]] ; then echo "###################################" echo print_error_logs ${TEST_PARENT_DIR_BIN} - exit ${ret_global_bin} fi +# However, if a central test fails, exit code will be !=0 +if [[ "${ret_global_anchored}" != "0" ]] ; then + echo + echo "##########################" + echo "# ERROR for anchored MCs #" + echo "##########################" + echo + print_error_logs ${TEST_PARENT_DIR_ANCHORED} +fi + +RET=$(( ret_global_bin + ret_global_anchored )) + echo -echo_green "All required workflow tests successful" -echo +[[ "${RET}" != "0" ]] && echo "There were errors, please check!" 
|| echo_green "All required workflow tests successful" + +exit ${RET} From 070a0f30bf57a53b44ace67876afd7bbbe2de915 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 12:32:08 +0100 Subject: [PATCH 15/53] [SimCI] Change $@ to $* (#1480) Co-authored-by: Benedikt Volkel --- test/run_analysisqc_tests.sh | 4 ++-- test/run_generator_tests.sh | 4 ++-- test/run_relval_tests.sh | 4 ++-- test/run_workflow_tests.sh | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/run_analysisqc_tests.sh b/test/run_analysisqc_tests.sh index fab8dd083..bd57493fd 100755 --- a/test/run_analysisqc_tests.sh +++ b/test/run_analysisqc_tests.sh @@ -14,13 +14,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index 34efd5443..d5a4d3c74 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -37,13 +37,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_relval_tests.sh b/test/run_relval_tests.sh index 6294e1361..a14c3e0ed 100755 --- a/test/run_relval_tests.sh +++ b/test/run_relval_tests.sh @@ -14,13 +14,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index adc610393..e96b07141 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -20,13 +20,13 @@ SEND="\033[0m" echo_green() { - echo -e "${SGREEN}$@${SEND}" + echo -e "${SGREEN}${*}${SEND}" } echo_red() { - echo -e "${SRED}$@${SEND}" + echo -e "${SRED}${*}${SEND}" } get_git_repo_directory() From eb41cb7c60d1a1cf5544e4e321dbb298a7ae19ea Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 20 Feb 2024 12:40:52 +0100 Subject: [PATCH 16/53] [RelVa] Fix imported function name (#1481) Co-authored-by: Benedikt Volkel --- RelVal/o2dpg_overlay_plots.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/RelVal/o2dpg_overlay_plots.py b/RelVal/o2dpg_overlay_plots.py index 9f9a420b7..12682ca0b 100755 --- a/RelVal/o2dpg_overlay_plots.py +++ b/RelVal/o2dpg_overlay_plots.py @@ -17,7 +17,7 @@ o2dpg_release_validation = importlib.util.module_from_spec(spec) spec.loader.exec_module(o2dpg_release_validation) sys.modules["o2dpg_release_validation"] = o2dpg_release_validation -from o2dpg_release_validation import only_extract_impl +from o2dpg_release_validation import extract_and_flatten spec = importlib.util.spec_from_file_location("o2dpg_release_validation_plot", join(O2DPG_ROOT, "RelVal", "utils", 'o2dpg_release_validation_plot.py')) o2dpg_release_validation_plot = importlib.util.module_from_spec(spec) @@ -39,7 +39,7 @@ def run(args): ref_file = None for i, (input_file, label) in enumerate(zip(args.inputs, args.labels)): - _, config = only_extract_impl(input_file, args.output, label, prefix=i, reference_extracted=ref_file) + _, config = extract_and_flatten(input_file, args.output, label, prefix=i, reference_extracted=ref_file) if not config: print(f"ERROR: Problem with input file {input_file}, cannot extract") return 1 From 27b9a48382e6cd1ed4597d281fde7dcf09d82938 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 14:18:46 +0100 Subject: 
[PATCH 17/53] More bash syntax check fixes (#1479) --- DATA/production/calib/mch-badchannel-processing.sh | 1 - DATA/production/calib/tpc-pulser-long.sh | 1 - DATA/production/calib/tpc-pulser.sh | 1 - .../configurations/2021/ctf_recreation/ctf_recreation.sh | 2 +- .../configurations/2022/LHC22f/apass1/async_pass.sh | 4 ++-- 5 files changed, 3 insertions(+), 6 deletions(-) diff --git a/DATA/production/calib/mch-badchannel-processing.sh b/DATA/production/calib/mch-badchannel-processing.sh index 771aeab87..738020262 100755 --- a/DATA/production/calib/mch-badchannel-processing.sh +++ b/DATA/production/calib/mch-badchannel-processing.sh @@ -1,4 +1,3 @@ - #!/bin/bash source common/setenv.sh diff --git a/DATA/production/calib/tpc-pulser-long.sh b/DATA/production/calib/tpc-pulser-long.sh index e1aaab4d4..e7124fd28 100755 --- a/DATA/production/calib/tpc-pulser-long.sh +++ b/DATA/production/calib/tpc-pulser-long.sh @@ -1,4 +1,3 @@ - #!/usr/bin/env bash source common/setenv.sh diff --git a/DATA/production/calib/tpc-pulser.sh b/DATA/production/calib/tpc-pulser.sh index 6b68a030b..6b62e559f 100755 --- a/DATA/production/calib/tpc-pulser.sh +++ b/DATA/production/calib/tpc-pulser.sh @@ -1,4 +1,3 @@ - #!/usr/bin/env bash source common/setenv.sh diff --git a/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh b/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh index bbf361c95..be5085e9f 100755 --- a/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh +++ b/DATA/production/configurations/2021/ctf_recreation/ctf_recreation.sh @@ -72,7 +72,7 @@ fi # if "$DETCONFIG" is set explicitly, this has the second highest priority # last option is to have it from the JDL if [[ -z "$DETCONFIG" ]]; then - if [[ -z "ALIEN_JDL_DETCONFIG" ]]; then + if [[ -z "$ALIEN_JDL_DETCONFIG" ]]; then echo "nothing set the detector configuration to use, exiting" exit 4 else diff --git a/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh b/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh index 3dc46a02c..ce764852f 100755 --- a/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh +++ b/DATA/production/configurations/2022/LHC22f/apass1/async_pass.sh @@ -504,7 +504,7 @@ else echo "nCTFsFilesInspected_step1 = $nCTFsFilesInspected_step1, nCTFsFilesInspected_step2 = $nCTFsFilesInspected_step2" > validation_error.message echo "nCTFsFilesOK_step1 = $nCTFsFilesOK_step1, nCTFsFilesOK_step2 = $nCTFsFilesOK_step2" > validation_error.message echo "nCTFsProcessed_step1 = $nCTFsProcessed_step1, nCTFsProcessed_step2 = $nCTFsProcessed_step2" > validation_error.message - exit 1000 + exit 255 fi fi fi @@ -663,7 +663,7 @@ if [[ $ALIEN_JDL_AODOFF != 1 ]]; then CURRENT_POOL_SIZE=`jobs -r | wc -l` done < $JOB_LIST # collecting return codes of the merging processes - for i in ${!arr[@]}; do + for i in "${!arr[@]}"; do wait ${arr[$i]} exitcode=$? if [[ $exitcode -ne 0 ]]; then From 6d2d239238e99971d9856682d97e5f71fea7bcfb Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Tue, 20 Feb 2024 14:22:18 +0100 Subject: [PATCH 18/53] Fix Bash syntax issues found by shellcheck (#1482) Fix issues found in https://github.com/AliceO2Group/O2DPG/pull/1469, in addition to those fixed in https://github.com/AliceO2Group/O2DPG/pull/1479. 
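For the mytar wrapper fixed below, the difference matters as soon as an argument contains
whitespace — a sketch of the failure mode with the old unquoted form:

    mytar () { tar $@; }               # old form: word-splits its arguments
    mytar -cf "my archive.tar" logs/   # tar sees four words: -cf, my, archive.tar, logs/

With tar "$@" every argument is forwarded verbatim as a single word.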
--- GRID/utils/extractErroredLogFiles.sh | 4 ++-- GRID/utils/grid_submit.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/GRID/utils/extractErroredLogFiles.sh b/GRID/utils/extractErroredLogFiles.sh index 81c21839d..20a0750ba 100755 --- a/GRID/utils/extractErroredLogFiles.sh +++ b/GRID/utils/extractErroredLogFiles.sh @@ -4,13 +4,13 @@ # Beware that errors might occur outside of O2DPG tasks such as in preprocessing etc or not visible in logs mytar () { - tar $@ + tar "$@" } if [[ $(uname) == "Darwin" ]]; then echo "Running on macOS. This needs gnu-tar" $(which gtar) mytar () { - gtar $@ + gtar "$@" } fi diff --git a/GRID/utils/grid_submit.sh b/GRID/utils/grid_submit.sh index 7913f83ad..f364bed68 100755 --- a/GRID/utils/grid_submit.sh +++ b/GRID/utils/grid_submit.sh @@ -401,7 +401,7 @@ EOF spin[1]="|" spin[0]="\\" JOBSTATUS="I" - if [ "{WAITFORALIEN}" ]; then + if [ "${WAITFORALIEN}" ]; then echo -n "Waiting for jobs to return ... Last status : ${spin[0]} ${JOBSTATUS}" fi counter=0 From ba2ab9450929a2c14706ca87b3235fde3a285a22 Mon Sep 17 00:00:00 2001 From: Timo Wilken Date: Tue, 20 Feb 2024 16:54:14 +0100 Subject: [PATCH 19/53] Add syntax checkers for Bash and Python (#1469) --- .github/workflows/check-json-syntax.yml | 37 ---------- .github/workflows/syntax-checks.yml | 95 +++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 37 deletions(-) delete mode 100644 .github/workflows/check-json-syntax.yml create mode 100644 .github/workflows/syntax-checks.yml diff --git a/.github/workflows/check-json-syntax.yml b/.github/workflows/check-json-syntax.yml deleted file mode 100644 index fae51ae62..000000000 --- a/.github/workflows/check-json-syntax.yml +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Validate JSON syntax - -# Run on any commit or PR that changes any JSON file. -'on': - push: - paths: - - '**.json' - pull_request: - paths: - - '**.json' - -permissions: {} - -jobs: - json-syntax: - name: validate syntax - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Validate syntax for JSON files - run: | - error=0 - readarray -d '' json_files < \ - <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0) - for jsonf in "${json_files[@]}"; do - echo "::debug::Checking $jsonf..." - if ! errmsg=$(jq . "$jsonf" 2>&1 >/dev/null); then - error=1 - echo "Invalid JSON syntax found in $jsonf:" >&2 - printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg" - fi - done - exit "$error" diff --git a/.github/workflows/syntax-checks.yml b/.github/workflows/syntax-checks.yml new file mode 100644 index 000000000..54164102d --- /dev/null +++ b/.github/workflows/syntax-checks.yml @@ -0,0 +1,95 @@ +--- +name: Validate syntax + +'on': + - push + - pull_request + +permissions: {} + +jobs: + json-syntax: + name: JSON + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Validate syntax for JSON files + run: | + error=0 + readarray -d '' json_files < \ + <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0) + for jsonf in "${json_files[@]}"; do + echo "::debug::Checking $jsonf..." + if ! errmsg=$(jq . 
"$jsonf" 2>&1 >/dev/null); then + error=1 + echo "Invalid JSON syntax found in $jsonf:" >&2 + printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg" + fi + done + exit "$error" + + bash-syntax: + name: Bash + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Validate syntax with bash -n + run: | + error=0 + readarray -d '' files < \ + <(find . -path ./.git -prune -false -or -type f -name '*.sh' -print0) + for bashf in "${files[@]}"; do + echo "::debug::Checking $bashf..." + if ! errmsg=$(bash -n "$bashf" 2>&1 >/dev/null); then + error=1 + echo "Invalid Bash syntax found in $bashf:" >&2 + printf '::error file=%s,title=%s::%s\n' "$bashf" 'Invalid syntax' "$errmsg" + fi + done + exit "$error" + + shellcheck: + name: Shellcheck + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run Shellcheck to find errors + run: | + error=0 + find . -path ./.git -prune -false -or -type f -name '*.sh' -print0 | + xargs -0 shellcheck -xf json1 -S error -s bash > errors.json || error=$? + # Produce code annotations in GitHub's format. + jq -r '.comments[] | "Error found in \(.file) line \(.line):\n::error file=\(.file),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn)::\(.message)"' errors.json + exit "$error" + + pylint: + name: Pylint + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install prerequisites + run: | + sudo apt update -y + sudo apt install -y pylint + + - name: Run Pylint to find errors + run: | + error=0 + find . -path ./.git -prune -false -or -type f -name '*.py' -print0 | + # "import-errors" are shown for valid modules like ROOT, so ignore them. + xargs -0 pylint -E -f json --disable import-error > errors.json || error=$? + # Produce code annotations in GitHub's format. + jq -r '.[] | "Error found in \(.path) line \(.line):\n::error file=\(.path),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn),title=Pylint \(.type) \(.symbol)::\(.message)"' errors.json + exit "$error" From fe36cb8f8e27faee1f39c45b3912931d0905a77c Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 16:10:48 +0100 Subject: [PATCH 20/53] Set beam type dependent defaults for TPC_CORR_SCALING --- DATA/common/setenv.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index 74ae5640e..ae7e276f2 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -110,7 +110,6 @@ if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs if [[ -z "${CALIB_DIR:-}" ]]; then CALIB_DIR="/dev/null"; fi # Directory where to store output from calibration workflows, /dev/null : skip their writing if [[ -z "${EPN2EOS_METAFILES_DIR:-}" ]]; then EPN2EOS_METAFILES_DIR="/dev/null"; fi # Directory where to store epn2eos files metada, /dev/null : skip their writing -if [[ -z "${TPC_CORR_SCALING:-}" ]]; then export TPC_CORR_SCALING=""; fi # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... 
configurable param if [[ $EPNSYNCMODE == 0 ]]; then if [[ -z "${SHMSIZE:-}" ]]; then export SHMSIZE=$(( 8 << 30 )); fi # Size of shared memory for messages if [[ -z "${NGPUS:-}" ]]; then export NGPUS=1; fi # Number of GPUs to use, data distributed round-robin @@ -164,6 +163,13 @@ DISABLE_ROOT_INPUT="--disable-root-input" : ${DISABLE_DIGIT_CLUSTER_INPUT="--clusters-from-upstream"} # Special detector related settings +if [[ -z "${TPC_CORR_SCALING:-}" ]]; then # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param + TPC_CORR_SCALING= + if [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]]; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi + if [[ $BEAMTYPE == "cosmic" ]]; then TPC_CORR_SCALING=" TPCCorrMap.lumiMean=-1;"; fi # for COSMICS we disable all corrections + export TPC_CORR_SCALING=$TPC_CORR_SCALING +fi + MID_FEEID_MAP="$FILEWORKDIR/mid-feeId_mapper.txt" ITSMFT_STROBES="" From 7313180ebb96573bc6fc08d48f5b5e852cec702e Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Tue, 20 Feb 2024 16:11:14 +0100 Subject: [PATCH 21/53] ED_VERTEX_MODE has been removed --- DATA/production/workflow-multiplicities.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DATA/production/workflow-multiplicities.sh b/DATA/production/workflow-multiplicities.sh index 81db5f102..275200d4f 100644 --- a/DATA/production/workflow-multiplicities.sh +++ b/DATA/production/workflow-multiplicities.sh @@ -222,7 +222,7 @@ if [[ -z ${EVE_NTH_EVENT:-} ]]; then EVE_NTH_EVENT=2 elif [[ "$HIGH_RATE_PP" == "1" ]]; then EVE_NTH_EVENT=10 - elif [[ $BEAMTYPE == "pp" && "${ED_VERTEX_MODE:-}" == "1" ]]; then + elif [[ $BEAMTYPE == "pp" ]]; then EVE_NTH_EVENT=$((4 * 250 / $RECO_NUM_NODES_WORKFLOW_CMP)) else # COSMICS / TECHNICALS / ... EVE_NTH_EVENT=1 From 62aaa86dcc059b172221debf96ca8e2d1033e9b2 Mon Sep 17 00:00:00 2001 From: Mattia Faggin Date: Wed, 21 Feb 2024 20:07:14 +0100 Subject: [PATCH 22/53] Change eta cut for TPC tracks in GLO QC. 
(#1487) Co-authored-by: Mattia Faggin --- DATA/production/qc-async/itstpc.json | 2 +- MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/DATA/production/qc-async/itstpc.json b/DATA/production/qc-async/itstpc.json index 5bce54d2e..b294b8cf9 100644 --- a/DATA/production/qc-async/itstpc.json +++ b/DATA/production/qc-async/itstpc.json @@ -49,7 +49,7 @@ "minNITSClustersCut": "0", "maxChi2PerClusterITS": "100000", "minPtTPCCut": "0.1f", - "etaTPCCut": "1.4f", + "etaTPCCut": "0.9f", "minNTPCClustersCut": "60", "minDCACut": "100.f", "minDCACutY": "10.f", diff --git a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json index a005c00de..27dff87b8 100644 --- a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json +++ b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json @@ -46,7 +46,7 @@ "minNITSClustersCut": "0", "maxChi2PerClusterITS": "100000", "minPtTPCCut": "0.1f", - "etaTPCCut": "1.4f", + "etaTPCCut": "0.9f", "minNTPCClustersCut": "60", "minDCACut": "100.f", "minDCACutY": "10.f", From 30afe4927ae7bf1ad556305aaecd1c1dde9588a4 Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Thu, 22 Feb 2024 15:13:22 +0100 Subject: [PATCH 23/53] Enable IDC and SAC processing by default (#1485) * Enable IDC and SAC processing by default * Add missing fi --- DATA/common/setenv_calib.sh | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index bdb836509..ae07638c6 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -76,15 +76,25 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi fi - # IDCs + # IDCs (by default we enable it for running the synch. reco on the EPNs) if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then - if [[ -z ${CALIB_TPC_IDC+x} ]] || [[ $CALIB_TPC_IDC == 0 ]]; then - CALIB_TPC_IDC=0; # default is off + if [[ -z ${CALIB_TPC_IDC+x} ]]; then + if [[ $EPNSYNCMODE == 1 ]]; then + CALIB_TPC_IDC=1; + else + CALIB_TPC_IDC=0; + fi fi fi - # SAC + # SAC (by default we enable it for running the synch. 
reco on the EPNs) if [[ $CAN_DO_CALIB_TPC_SAC == 1 ]]; then - if [[ -z ${CALIB_TPC_SAC+x} ]]; then CALIB_TPC_SAC=0; fi # default is off + if [[ -z ${CALIB_TPC_SAC+x} ]]; then + if [[ $EPNSYNCMODE == 1 ]]; then + CALIB_TPC_SAC=1; + else + CALIB_TPC_SAC=0; + fi + fi fi # calibrations for TRD From 6d4924eef63aaf6f4922b2fe2691fffbd0387fb7 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Wed, 21 Feb 2024 14:55:11 +0100 Subject: [PATCH 24/53] Update hypernuclei gun for pp --- MC/config/PWGLF/pythia8/generator/hypernuclei.gun | 14 +++++++------- MC/run/PWGLF/run_HyperNucleiInjectedGap.sh | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei.gun index 55af09ba2..af21f0faf 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 1 0.2 6 -1 1 -1000010030 1 0.2 6 -1 1 -1000020030 1 0.2 6 -1 1 -1000020040 1 0.2 6 -1 1 -1010010030 1 0.2 6 -1 1 -1010010040 1 0.2 6 -1 1 -1010020040 1 0.2 6 -1 1 +1000010020 1 0.2 10 -1 1 +1000010030 1 0.2 10 -1 1 +1000020030 1 0.2 10 -1 1 +1000020040 1 0.2 10 -1 1 +1010010030 1 0.2 10 -1 1 +1010010040 1 0.2 10 -1 1 +1010020040 1 0.2 10 -1 1 diff --git a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh index 027f6d0a0..cbf094547 100644 --- a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh +++ b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh @@ -15,7 +15,7 @@ NWORKERS=${NWORKERS:-8} MODULES="--skipModules ZDC" -SIMENGINE=${SIMENGINE:-TGeant3} +SIMENGINE=${SIMENGINE:-TGeant4} NSIGEVENTS=${NSIGEVENTS:-1} NBKGEVENTS=${NBKGEVENTS:-1} NTIMEFRAMES=${NTIMEFRAMES:-1} From 31573f5c3ef96868fe8ce33f1c157a390fa67657 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 17:55:49 +0100 Subject: [PATCH 25/53] Update hypernuclei gun for pbpb --- MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini | 2 +- MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun diff --git a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini index f548c57cf..feceea039 100644 --- a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini +++ b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini @@ -1,6 +1,6 @@ [GeneratorExternal] fileName=${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator_pythia8_longlived_gaptriggered.C -funcName=generateLongLivedGapTriggered({1000010020, 1000010030, 1000020030, 1000020040, 1010010030}, 5, 10) +funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei.gun", 1) [GeneratorPythia8] config=${O2_ROOT}/share/Generators/egconfig/pythia8_hi.cfg diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun new file mode 100644 index 000000000..d15d11554 --- /dev/null +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -0,0 +1,8 @@ +# PDG N ptMin ptMax yMin yMax +1000010020 4 0.2 10 -1 1 +1000010030 4 0.2 10 -1 1 +1000020030 4 0.2 10 -1 1 +1000020040 4 0.2 10 -1 1 +1010010030 4 0.2 10 -1 1 +1010010040 4 0.2 10 -1 1 +1010020040 4 0.2 10 -1 1 From 763bc731607f5710a9d31b8dafa7608b3c9517f5 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 17:57:38 +0100 Subject: [PATCH 26/53] fix --- 
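The .gun tables edited in the patches around here are plain whitespace-separated columns: PDG code, number of particles to inject per event, then the pt and rapidity ranges. As a quick sanity check after hand-editing such a table one could run the following; check_gun_file is a hypothetical helper sketched here for illustration, not part of O2DPG:

check_gun_file() {
    local gun=${1:?usage: check_gun_file <file.gun>}
    # every non-comment, non-empty row must have exactly six columns
    awk '!/^#/ && NF > 0 && NF != 6 {
             printf "line %d: expected 6 columns, got %d\n", NR, NF; bad = 1
         }
         END { exit bad }' "$gun"
}

check_gun_file "${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun" \
    && echo "gun table looks consistent"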
MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini index feceea039..4acc1d268 100644 --- a/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini +++ b/MC/config/PWGLF/ini/GeneratorLFHyperNucleiPbPbGap.ini @@ -1,6 +1,6 @@ [GeneratorExternal] fileName=${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator_pythia8_longlived_gaptriggered.C -funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei.gun", 1) +funcName=generateLongLivedGapTriggered("${O2DPG_ROOT}/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun", 1) [GeneratorPythia8] config=${O2_ROOT}/share/Generators/egconfig/pythia8_hi.cfg From c9f8bc00168c7461ec73326ecd7a2cbe5fe72de5 Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Tue, 20 Feb 2024 18:04:24 +0100 Subject: [PATCH 27/53] Increase number of injected particles --- .../PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun index d15d11554..cc68fa63b 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 4 0.2 10 -1 1 -1000010030 4 0.2 10 -1 1 -1000020030 4 0.2 10 -1 1 -1000020040 4 0.2 10 -1 1 -1010010030 4 0.2 10 -1 1 -1010010040 4 0.2 10 -1 1 -1010020040 4 0.2 10 -1 1 +1000010020 10 0.2 10 -1 1 +1000010030 10 0.2 10 -1 1 +1000020030 10 0.2 10 -1 1 +1000020040 10 0.2 10 -1 1 +1010010030 10 0.2 10 -1 1 +1010010040 10 0.2 10 -1 1 +1010020040 10 0.2 10 -1 1 From c399748f3c79bc39f06d46604101a1e55914d71a Mon Sep 17 00:00:00 2001 From: fmazzasc Date: Thu, 22 Feb 2024 15:31:02 +0100 Subject: [PATCH 28/53] Inject more particles per event --- .../PWGLF/pythia8/generator/hypernuclei_pbpb.gun | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun index cc68fa63b..3b4a8d274 100644 --- a/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun +++ b/MC/config/PWGLF/pythia8/generator/hypernuclei_pbpb.gun @@ -1,8 +1,8 @@ # PDG N ptMin ptMax yMin yMax -1000010020 10 0.2 10 -1 1 -1000010030 10 0.2 10 -1 1 -1000020030 10 0.2 10 -1 1 -1000020040 10 0.2 10 -1 1 -1010010030 10 0.2 10 -1 1 -1010010040 10 0.2 10 -1 1 -1010020040 10 0.2 10 -1 1 +1000010020 20 0.2 10 -1 1 +1000010030 20 0.2 10 -1 1 +1000020030 20 0.2 10 -1 1 +1000020040 20 0.2 10 -1 1 +1010010030 20 0.2 10 -1 1 +1010010040 20 0.2 10 -1 1 +1010020040 20 0.2 10 -1 1 From 045895f64490c42027828618818b5e3f5bbf3981 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 14:41:07 +0100 Subject: [PATCH 29/53] Add asyn-label workflows for PRs (#1489) * Add asyn-label workflows for PRs --------- Co-authored-by: Benedikt Volkel --- .github/workflows/async-auto-label.yml | 18 ++++++++++++++++++ .github/workflows/async-list-label.yml | 20 ++++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 .github/workflows/async-auto-label.yml create mode 100644 .github/workflows/async-list-label.yml diff --git a/.github/workflows/async-auto-label.yml b/.github/workflows/async-auto-label.yml new file mode 100644 index 000000000..8ec0659ca --- /dev/null +++ 
b/.github/workflows/async-auto-label.yml @@ -0,0 +1,18 @@ +--- +name: Apply requested async label + +'on': + issue_comment: + types: + - created + - edited + +permissions: + pull-requests: write # to update labels + +jobs: + apply_async_labels: + name: Apply requested async label + uses: alisw/ali-bot/.github/workflows/async-auto-label.yml@master + permissions: + pull-requests: write # to update labels diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml new file mode 100644 index 000000000..97144b6b1 --- /dev/null +++ b/.github/workflows/async-list-label.yml @@ -0,0 +1,20 @@ +--- +name: Collect and print async labels + +'on': + pull_request: + types: + - opened + - reopened + branches: + - master + +permissions: + pull-requests: write # to update labels + +jobs: + list_async_labels: + name: Collect and print async labels + uses: alisw/ali-bot/.github/workflows/async-list-label.yml@master + permissions: + pull-requests: write # to update labels From e8511152b4dd58dbc3098b380d17cc468761d17c Mon Sep 17 00:00:00 2001 From: Ole Schmidt Date: Fri, 23 Feb 2024 15:40:23 +0100 Subject: [PATCH 30/53] Topology generation fixes for staging (#1491) * Only enable CTP lumi scaling if its available * Disable CALIB_TPC_IDC on staging due to lack of resources --- DATA/common/setenv.sh | 2 +- DATA/common/setenv_calib.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index ae7e276f2..61c817a54 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -165,7 +165,7 @@ DISABLE_ROOT_INPUT="--disable-root-input" # Special detector related settings if [[ -z "${TPC_CORR_SCALING:-}" ]]; then # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param TPC_CORR_SCALING= - if [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]]; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi + if ( [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]] ) && has_detector CTP; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi if [[ $BEAMTYPE == "cosmic" ]]; then TPC_CORR_SCALING=" TPCCorrMap.lumiMean=-1;"; fi # for COSMICS we disable all corrections export TPC_CORR_SCALING=$TPC_CORR_SCALING fi diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index ae07638c6..95db74f33 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -76,10 +76,10 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi fi - # IDCs (by default we enable it for running the synch. reco on the EPNs) + # IDCs (by default we enable it for running the synch. 
reco on the EPNs, but not on staging since we have only 1 calibration node available) if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then if [[ -z ${CALIB_TPC_IDC+x} ]]; then - if [[ $EPNSYNCMODE == 1 ]]; then + if [[ $EPNSYNCMODE == 1 ]] && [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" != "ALICE_STAGING" ]]; then CALIB_TPC_IDC=1; else CALIB_TPC_IDC=0; From fbbe5cd3002e47430e632840bba3c60db7067516 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 15:43:46 +0100 Subject: [PATCH 31/53] [WF] Take back unneccessary permissions (#1492) Co-authored-by: Benedikt Volkel --- .github/workflows/async-auto-label.yml | 3 +-- .github/workflows/async-list-label.yml | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/async-auto-label.yml b/.github/workflows/async-auto-label.yml index 8ec0659ca..b0a17c7fa 100644 --- a/.github/workflows/async-auto-label.yml +++ b/.github/workflows/async-auto-label.yml @@ -7,8 +7,7 @@ name: Apply requested async label - created - edited -permissions: - pull-requests: write # to update labels +permissions: {} jobs: apply_async_labels: diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 97144b6b1..6b6f6f57d 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -9,8 +9,7 @@ name: Collect and print async labels branches: - master -permissions: - pull-requests: write # to update labels +permissions: {} jobs: list_async_labels: From 77bda58b7e2f0b9988966bb6f74a6d793d3836c6 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 17:37:13 +0100 Subject: [PATCH 32/53] [WF] Raise write permission (#1495) --- .github/workflows/async-list-label.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 6b6f6f57d..97144b6b1 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -9,7 +9,8 @@ name: Collect and print async labels branches: - master -permissions: {} +permissions: + pull-requests: write # to update labels jobs: list_async_labels: From 1ce5e589d811175cbd5fccfa56e37bc74362d896 Mon Sep 17 00:00:00 2001 From: shahoian Date: Fri, 23 Feb 2024 17:30:20 +0100 Subject: [PATCH 33/53] Use alio2-cr1-flp199-ib:8083 as DCS-CCDB server, define in setenv.sh --- DATA/common/setenv.sh | 2 ++ DATA/production/calib/hmp-pedestals-processing.sh | 2 +- DATA/production/calib/its-noise-aggregator.sh | 2 +- DATA/production/calib/its-threshold-aggregator.sh | 2 +- DATA/production/calib/mch-badchannel-aggregator.sh | 2 +- DATA/production/calib/mft-noise-aggregator.sh | 2 +- DATA/production/calib/mid-badchannels.sh | 2 +- DATA/testing/detectors/MID/mid-calib-workflow.sh | 2 +- 8 files changed, 9 insertions(+), 7 deletions(-) diff --git a/DATA/common/setenv.sh b/DATA/common/setenv.sh index 61c817a54..18307ebf2 100755 --- a/DATA/common/setenv.sh +++ b/DATA/common/setenv.sh @@ -110,6 +110,8 @@ if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs if [[ -z "${CALIB_DIR:-}" ]]; then CALIB_DIR="/dev/null"; fi # Directory where to store output from calibration workflows, /dev/null : skip their writing if [[ -z "${EPN2EOS_METAFILES_DIR:-}" ]]; then EPN2EOS_METAFILES_DIR="/dev/null"; fi # Directory where to store epn2eos files metada, /dev/null : skip their writing +if [[ -z "${DCSCCDBSERVER:-}" ]]; then export 
DCSCCDBSERVER="http://alio2-cr1-flp199-ib:8083"; fi # server for transvering calibration data to DCS + if [[ $EPNSYNCMODE == 0 ]]; then if [[ -z "${SHMSIZE:-}" ]]; then export SHMSIZE=$(( 8 << 30 )); fi # Size of shared memory for messages if [[ -z "${NGPUS:-}" ]]; then export NGPUS=1; fi # Number of GPUs to use, data distributed round-robin diff --git a/DATA/production/calib/hmp-pedestals-processing.sh b/DATA/production/calib/hmp-pedestals-processing.sh index f7c48d737..b14d38660 100755 --- a/DATA/production/calib/hmp-pedestals-processing.sh +++ b/DATA/production/calib/hmp-pedestals-processing.sh @@ -57,7 +57,7 @@ fi SPEC_PARAM="" if [ $HMP_NODCSCCDB_REC == 'false' ]; then - SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri 'http://alio2-cr1-flp199.cern.ch:8083' --dcsccdb-alivehours 3 " + SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri $DCSCCDBSERVER --dcsccdb-alivehours 3 " fi if [ $HMP_CCDB_REC == 'true' ]; then diff --git a/DATA/production/calib/its-noise-aggregator.sh b/DATA/production/calib/its-noise-aggregator.sh index 97bfcf696..e6aeb04d9 100755 --- a/DATA/production/calib/its-noise-aggregator.sh +++ b/DATA/production/calib/its-noise-aggregator.sh @@ -17,7 +17,7 @@ fi if [[ -z $NTHREADS ]] ; then NTHREADS=1; fi CCDBPATH1="http://o2-ccdb.internal" -CCDBPATH2="http://alio2-cr1-flp199.cern.ch:8083" +CCDBPATH2="$DCSCCDBSERVER" if [[ $RUNTYPE == "SYNTHETIC" || "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" || ! -z $ISTEST ]]; then CCDBPATH1="http://ccdb-test.cern.ch:8080" CCDBPATH2="http://ccdb-test.cern.ch:8080" diff --git a/DATA/production/calib/its-threshold-aggregator.sh b/DATA/production/calib/its-threshold-aggregator.sh index cc788d79e..81e1b11c6 100755 --- a/DATA/production/calib/its-threshold-aggregator.sh +++ b/DATA/production/calib/its-threshold-aggregator.sh @@ -11,7 +11,7 @@ PROXY_INSPEC="tunestring:ITS/TSTR;runtype:ITS/RUNT;fittype:ITS/FITT;scantype:ITS CCDBPATH1="" CCDBPATH2="" if [ $RUNTYPE_ITS == "tuning" ] || [ $RUNTYPE_ITS == "digital" ] || [ $RUNTYPE_ITS == "tuningbb" ]; then - CCDBPATH1="http://alio2-cr1-flp199.cern.ch:8083" + CCDBPATH1="$DCSCCDBSERVER" CCDBPATH2="http://o2-ccdb.internal" else CCDBPATH1="http://o2-ccdb.internal" diff --git a/DATA/production/calib/mch-badchannel-aggregator.sh b/DATA/production/calib/mch-badchannel-aggregator.sh index e362e373a..8af400b9d 100755 --- a/DATA/production/calib/mch-badchannel-aggregator.sh +++ b/DATA/production/calib/mch-badchannel-aggregator.sh @@ -20,7 +20,7 @@ BADCHANNEL_CONFIG="${ARGS_ALL_CONFIG};MCHBadChannelCalibratorParam.maxPed=${MCH_ WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --proxy-name mch-badchannel-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=mch-badchannel-input-proxy,method=bind,type=pull,rateLogging=0,transport=zeromq\" | " WORKFLOW+="o2-calibration-mch-badchannel-calib-workflow $ARGS_ALL --configKeyValues \"$BADCHANNEL_CONFIG\" | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://ali-calib-dcs.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " add_QC_from_consul "/o2/components/qc/ANY/any/mch-badchannel" "" WORKFLOW+="o2-dpl-run $ARGS_ALL 
$GLOBALDPLOPT" diff --git a/DATA/production/calib/mft-noise-aggregator.sh b/DATA/production/calib/mft-noise-aggregator.sh index 3fa908025..8c2ef1290 100755 --- a/DATA/production/calib/mft-noise-aggregator.sh +++ b/DATA/production/calib/mft-noise-aggregator.sh @@ -11,7 +11,7 @@ PROXY_INSPEC="A:MFT/DIGITS/0;B:MFT/DIGITSROF/0" WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --proxy-name mft-noise-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=mft-noise-input-proxy,method=bind,type=pull,rateLogging=0,transport=zeromq\" | " WORKFLOW+="o2-calibration-mft-calib-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --useDigits --prob-threshold 1e-5 --send-to-server DCS-CCDB --path-CCDB \"/MFT/Calib/NoiseMap\" --path-DCS \"/MFT/Config/NoiseMap\" | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://ali-calib-dcs.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT" if [ $WORKFLOWMODE == "print" ]; then diff --git a/DATA/production/calib/mid-badchannels.sh b/DATA/production/calib/mid-badchannels.sh index 6060b4116..edc18eaec 100755 --- a/DATA/production/calib/mid-badchannels.sh +++ b/DATA/production/calib/mid-badchannels.sh @@ -26,7 +26,7 @@ if [[ -z ${CCDB_POPULATOR_UPLOAD_PATH} ]]; then CCDB_POPULATOR_UPLOAD_PATH_DCS="$CCDB_POPULATOR_UPLOAD_PATH" else CCDB_POPULATOR_UPLOAD_PATH="http://o2-ccdb.internal" - CCDB_POPULATOR_UPLOAD_PATH_DCS="http://alio2-cr1-flp199.cern.ch:8083" + CCDB_POPULATOR_UPLOAD_PATH_DCS="$DCSCCDBSERVER" fi fi if [[ "${GEN_TOPO_VERBOSE:-}" == "1" ]]; then diff --git a/DATA/testing/detectors/MID/mid-calib-workflow.sh b/DATA/testing/detectors/MID/mid-calib-workflow.sh index e846ea94f..a9ad5a7a7 100755 --- a/DATA/testing/detectors/MID/mid-calib-workflow.sh +++ b/DATA/testing/detectors/MID/mid-calib-workflow.sh @@ -7,7 +7,7 @@ WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --dataspec \"$MID_RAW_PROXY_INSPEC\" --inje WORKFLOW+="o2-mid-raw-to-digits-workflow $ARGS_ALL $MID_RAW_TO_DIGITS_OPTS | " WORKFLOW+="o2-mid-calibration-workflow $ARGS_ALL | " WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | " -WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://alio2-cr1-flp199.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " +WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"${DCSCCDBSERVER:-http://alio2-cr1-flp199-ib:8083}\" --sspec-min 1 --sspec-max 1 --name-extention dcs | " WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT" if [ "$WORKFLOWMODE" == "print" ]; then From 340b4fe7720e0b87b377d1a16910f4adf2f5daa8 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 23 Feb 2024 17:56:21 +0100 Subject: [PATCH 34/53] [WF] Run on pull_request_target (#1496) Co-authored-by: Benedikt Volkel --- .github/workflows/async-list-label.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/async-list-label.yml b/.github/workflows/async-list-label.yml index 97144b6b1..b53ec6e6e 100644 --- a/.github/workflows/async-list-label.yml +++ b/.github/workflows/async-list-label.yml @@ -2,15 +2,14 @@ name: Collect and print async labels 'on': - pull_request: + pull_request_target: types: - opened - reopened branches: - master -permissions: - pull-requests: write # to update labels +permissions: {} jobs: list_async_labels: From a09b91ff6db85564d63d3f186b0aebdf01956978 Mon Sep 17 00:00:00 2001 From: Francesco Mazzaschi <43742195+fmazzasc@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:44:03 +0100 Subject: [PATCH 35/53] Enable strangeness tracking in strange particle simulations (#1499) --- MC/run/PWGLF/run_HyperNucleiInjectedGap.sh | 2 +- MC/run/PWGLF/run_StrangenessInjected.sh | 2 +- MC/run/PWGLF/run_StrangenessTriggered.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh index cbf094547..04659f024 100644 --- a/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh +++ b/MC/run/PWGLF/run_HyperNucleiInjectedGap.sh @@ -26,7 +26,7 @@ ENERGY=${ENERGY:-13600} # create workflow ${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -eCM ${ENERGY} -col ${SYSTEM} -gen external -j ${NWORKERS} -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} -confKey "Diamond.width[0]=0.1;Diamond.width[1]=0.1;Diamond.width[2]=6." -e ${SIMENGINE} ${SEED} -mod "--skipModules ZDC" \ - -ini ${O2DPG_ROOT}/MC/config/PWGLF/ini/GeneratorLFHyperNuclei${SYSTEM}Gap.ini + -ini ${O2DPG_ROOT}/MC/config/PWGLF/ini/GeneratorLFHyperNuclei${SYSTEM}Gap.ini --with-strangeness-tracking # run workflow # allow increased timeframe parallelism with --cpu-limit 32 diff --git a/MC/run/PWGLF/run_StrangenessInjected.sh b/MC/run/PWGLF/run_StrangenessInjected.sh index 9c07c9271..e8cb17068 100755 --- a/MC/run/PWGLF/run_StrangenessInjected.sh +++ b/MC/run/PWGLF/run_StrangenessInjected.sh @@ -35,7 +35,7 @@ O2_SIM_WORKFLOW=${O2_SIM_WORKFLOW:-"${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py"} $O2_SIM_WORKFLOW -eCM ${ENERGY} -col ${SYSTEM} -gen external \ -j ${NWORKERS} \ -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} \ - -confKey "Diamond.width[2]=6." \ + -confKey "Diamond.width[2]=6." --with-strangeness-tracking \ ${SEED} \ -procBkg "inel" -colBkg $SYSTEM --embedding -nb ${NBKGEVENTS} -genBkg pythia8 \ -e ${SIMENGINE} \ diff --git a/MC/run/PWGLF/run_StrangenessTriggered.sh b/MC/run/PWGLF/run_StrangenessTriggered.sh index 412edf995..39ff2ff1e 100755 --- a/MC/run/PWGLF/run_StrangenessTriggered.sh +++ b/MC/run/PWGLF/run_StrangenessTriggered.sh @@ -34,7 +34,7 @@ O2_SIM_WORKFLOW=${O2_SIM_WORKFLOW:-"${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py"} $O2_SIM_WORKFLOW -eCM ${ENERGY} -col ${SYSTEM} -gen external \ -j ${NWORKERS} \ -ns ${NSIGEVENTS} -tf ${NTIMEFRAMES} -interactionRate ${INTRATE} \ - -confKey "Diamond.width[2]=6." \ + -confKey "Diamond.width[2]=6." 
--with-strangeness-tracking \ ${SEED} \ -e ${SIMENGINE} \ -ini $CFGINIFILE From f568fd496d33b39d46bc6f6582f56269f946f2bd Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 27 Feb 2024 14:09:19 +0100 Subject: [PATCH 36/53] [SimCI] Make mem limit configurable in WF tests (#1502) --- test/run_workflow_tests.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index e96b07141..15746b3ce 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -74,10 +74,11 @@ test_single_wf() if [[ "${ret_this}" != "0" ]] ; then echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF} elif [[ "${execute}" != "" ]] ; then - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod >> ${LOG_FILE_WF} 2>&1 + local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit O2DPG_TEST_WORKFLOW_MEMLIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod ${memlimit} >> ${LOG_FILE_WF} 2>&1 ret_this=${?} - [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } - [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; } + [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } + [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; } ret_this=$((ret_this + ret_this_qc + ret_this_analysis)) [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF} fi @@ -173,6 +174,8 @@ print_usage() echo " If also not set, this will be set to HEAD. However, if there are unstaged" echo " changes, it will left blank." 
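The memory limit added in this patch is forwarded through bash's ${VAR:+word} expansion: the whole --mem-limit flag materializes only when the variable is set and non-empty, and disappears entirely otherwise. A minimal illustration (the value 4000 is made up):

O2DPG_TEST_WORKFLOW_MEMLIMIT=4000
memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}}
echo "runner args: ${memlimit:-<none>}"    # -> runner args: --mem-limit 4000

unset O2DPG_TEST_WORKFLOW_MEMLIMIT
memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}}
echo "runner args: ${memlimit:-<none>}"    # -> runner args: <none>

Note that the inner ${...} is easy to drop by accident, which would paste the variable's name rather than its value onto the command line; the follow-up patch below fixes exactly that.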
echo + echo " O2DPG_TEST_WORKFLOW_MEMLIMIT : The memory limit that is passed to the workflow runner in case a workflow is executed (optional)" + echo } while [ "$1" != "" ] ; do From 8c1886416bdef6ea80d0f0cbe976c6211e34e72a Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Tue, 27 Feb 2024 15:47:12 +0100 Subject: [PATCH 37/53] [SimCI] Make a variable (#1503) --- test/run_workflow_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index 15746b3ce..2acde102e 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -74,7 +74,7 @@ test_single_wf() if [[ "${ret_this}" != "0" ]] ; then echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF} elif [[ "${execute}" != "" ]] ; then - local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit O2DPG_TEST_WORKFLOW_MEMLIMIT} + local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}} ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 -tt aod ${memlimit} >> ${LOG_FILE_WF} 2>&1 ret_this=${?} [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; } From dfc147c8f73c02a975003baa196b00fbdca153b1 Mon Sep 17 00:00:00 2001 From: David Rohr Date: Thu, 15 Feb 2024 18:39:39 +0100 Subject: [PATCH 38/53] Revert "Temporary workaround, to be reverted once O2 PR 12412 is in all async tags" This reverts commit fd619fe62c5c4f622492992a367da6945f1d5186. --- DATA/production/configurations/asyncReco/async_pass.sh | 8 -------- 1 file changed, 8 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index 9d1f49025..a49a941b0 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -371,8 +371,6 @@ if [[ -n "$ALIEN_JDL_USEGPUS" && $ALIEN_JDL_USEGPUS != 0 ]] ; then if [[ $ALIEN_JDL_UNOPTIMIZEDGPUSETTINGS != 1 ]]; then export OPTIMIZED_PARALLEL_ASYNC=pp_1gpu # sets the multiplicities to optimized defaults for this configuration (1 job with 1 gpu on EPNs) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=8 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=30000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags else # forcing multiplicities to be 1 export MULTIPLICITY_PROCESS_tof_matcher=1 @@ -392,13 +390,9 @@ if [[ -n "$ALIEN_JDL_USEGPUS" && $ALIEN_JDL_USEGPUS != 0 ]] ; then if [[ $BEAMTYPE == "pp" ]]; then export OPTIMIZED_PARALLEL_ASYNC=pp_4gpu # sets the multiplicities to optimized defaults for this configuration (1 Numa, pp) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=45 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=100000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags else # PbPb export OPTIMIZED_PARALLEL_ASYNC=PbPb_4gpu # sets the multiplicities to optimized defaults for this configuration (1 Numa, PbPb) export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=30 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=100000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags fi fi fi @@ -428,8 +422,6 @@ else else export OPTIMIZED_PARALLEL_ASYNC=pp_64cpu # to use EPNs with full NUMA domain but without GPUs 
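The SHMSIZE values removed by this revert are literal decimal byte counts, while setenv.sh builds its default with a bit shift. For reference, both conventions expressed in shell arithmetic (illustration only, not part of the patch):

echo $(( 8 << 30 ))        # 8589934592  -> 8 GiB, the style used for the setenv.sh default
echo $(( 90 * 1000**3 ))   # 90000000000 -> 90 GB, the literal style removed here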
export OPTIMIZED_PARALLEL_ASYNC_AUTO_SHM_LIMIT=1 - export TIMEFRAME_RATE_LIMIT=32 # WORKAROUND: Needed until O2 PR 12412 is in all async tags - export SHMSIZE=90000000000 # WORKAROUND: Needed until O2 PR 12412 is in all async tags fi fi fi From ffe6861adc210e9828278702dd5b372342224685 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Mon, 26 Feb 2024 14:56:46 +0100 Subject: [PATCH 39/53] Do not ask for geometry in GLO ITS-TPC and TOF matching QC --- DATA/production/qc-async/itstpc.json | 2 +- DATA/production/qc-async/itstpctof.json | 2 +- DATA/production/qc-async/itstpctofwtrd.json | 4 ++-- DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json | 2 +- DATA/production/qc-sync/itstpctof.json | 2 +- DATA/production/qc-sync/itstpctrdtof.json | 2 +- MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json | 4 ++-- MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json | 2 +- .../QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/DATA/production/qc-async/itstpc.json b/DATA/production/qc-async/itstpc.json index b294b8cf9..59607bc4d 100644 --- a/DATA/production/qc-async/itstpc.json +++ b/DATA/production/qc-async/itstpc.json @@ -58,7 +58,7 @@ "etaCut": "1e10f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-async/itstpctof.json b/DATA/production/qc-async/itstpctof.json index a843da3ea..c5c8697af 100644 --- a/DATA/production/qc-async/itstpctof.json +++ b/DATA/production/qc-async/itstpctof.json @@ -46,7 +46,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-async/itstpctofwtrd.json b/DATA/production/qc-async/itstpctofwtrd.json index 55094c2ad..2c397df57 100644 --- a/DATA/production/qc-async/itstpctofwtrd.json +++ b/DATA/production/qc-async/itstpctofwtrd.json @@ -46,7 +46,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", @@ -55,7 +55,7 @@ "askOnceAllButField": "true", "needPropagatorD": "false" }, - "saveObjectsToFile" : "TOFmatchedITSTPCTOF_TPCTOF.root", + "saveObjectsToFile" : "TOFmatchedITSTPCTOF_TPCTOF_wTRD.root", "" : "For debugging, path to the file where to save. If empty or missing it won't save." 
} } diff --git a/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json b/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json index da8964a41..53a1dca39 100644 --- a/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json +++ b/DATA/production/qc-sync/glo-itstpc-mtch-qcmn-epn.json @@ -44,7 +44,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-sync/itstpctof.json b/DATA/production/qc-sync/itstpctof.json index dcc986436..fefe85c87 100644 --- a/DATA/production/qc-sync/itstpctof.json +++ b/DATA/production/qc-sync/itstpctof.json @@ -53,7 +53,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/DATA/production/qc-sync/itstpctrdtof.json b/DATA/production/qc-sync/itstpctrdtof.json index 2fb6363ba..087449715 100644 --- a/DATA/production/qc-sync/itstpctrdtof.json +++ b/DATA/production/qc-sync/itstpctrdtof.json @@ -53,7 +53,7 @@ "minDCACutY" : "10.f" }, "grpGeomRequest" : { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json index 27dff87b8..5aba15f8a 100644 --- a/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json +++ b/MC/config/QC/json/ITSTPCmatchedTracks_direct_MC.json @@ -56,7 +56,7 @@ "isMC": "true" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", @@ -72,4 +72,4 @@ } }, "dataSamplingPolicies": [] -} \ No newline at end of file +} diff --git a/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json b/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json index 4d5acc87a..7ae8f16e6 100644 --- a/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json +++ b/MC/config/QC/json/tofMatchedTracks_AllTypes_direct_MC.json @@ -49,7 +49,7 @@ "minDCACutY": "10.f" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", diff --git a/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json b/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json index ef0d21311..95f6f9008 100644 --- a/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json +++ b/MC/config/QC/json/tofMatchedTracks_ITSTPCTOF_TPCTOF_direct_MC.json @@ -50,7 +50,7 @@ "minDCACutY": "10.f" }, "grpGeomRequest": { - "geomRequest": "Aligned", + "geomRequest": "None", "askGRPECS": "false", "askGRPLHCIF": "false", "askGRPMagField": "true", From 95c2b213b346569b6c93f7cde096cfd8dd5ea6ff Mon Sep 17 00:00:00 2001 From: shahoian Date: Wed, 28 Feb 2024 17:48:27 +0100 Subject: [PATCH 40/53] Move RECO_NUM_NODES_WORKFLOW_CMP definition to the beginning --- DATA/production/workflow-multiplicities.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/DATA/production/workflow-multiplicities.sh b/DATA/production/workflow-multiplicities.sh index 275200d4f..dd04d93ba 100644 --- a/DATA/production/workflow-multiplicities.sh +++ b/DATA/production/workflow-multiplicities.sh @@ -30,6 +30,8 @@ if [[ $SYNCMODE == 1 ]]; then NTRDTRKTHREADS=1; else NTRDTRKTHREADS=; fi : ${NGPURECOTHREADS:=-1} # -1 = auto-detect 
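The assignment added just below clamps $RECO_NUM_NODES_WORKFLOW into the range [15, 230] before scaling by the NUMA factor. Unrolled into a readable sketch (illustration only, not part of the patch):

clamp_nodes() {
    local n=$1 numa=$2 lo=15 hi=230
    (( n < lo )) && n=$lo
    (( n > hi )) && n=$hi
    (( numa != 0 )) && n=$(( 2 * n ))   # two NUMA domains double the factor
    echo "$n"
}

clamp_nodes 8 0      # -> 15   raised to the lower bound
clamp_nodes 250 1    # -> 460  capped at 230, then doubled for two NUMA domains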
+RECO_NUM_NODES_WORKFLOW_CMP=$((($RECO_NUM_NODES_WORKFLOW > 15 ? ($RECO_NUM_NODES_WORKFLOW < 230 ? $RECO_NUM_NODES_WORKFLOW : 230) : 15) * ($NUMAGPUIDS != 0 ? 2 : 1))) # Limit the lower scaling factor, multiply by 2 if we have 2 NUMA domains + # --------------------------------------------------------------------------------------------------------------------- # Process multiplicities @@ -157,7 +159,6 @@ elif [[ $EPNPIPELINES != 0 ]]; then NTRDTRKTHREADS=2 ITSTRK_THREADS=2 ITSTPC_THREADS=2 - RECO_NUM_NODES_WORKFLOW_CMP=$((($RECO_NUM_NODES_WORKFLOW > 15 ? ($RECO_NUM_NODES_WORKFLOW < 230 ? $RECO_NUM_NODES_WORKFLOW : 230) : 15) * ($NUMAGPUIDS != 0 ? 2 : 1))) # Limit the lower scaling factor, multiply by 2 if we have 2 NUMA domains # Tuned multiplicities for sync pp / Pb-Pb processing if [[ $BEAMTYPE == "pp" ]]; then N_ITSRAWDEC=$(math_max $((6 * $EPNPIPELINES * $NGPUS / 4)) 1) From d47944257ead124fdd7b33b1430c8a8e5956da59 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Wed, 28 Feb 2024 23:48:07 +0100 Subject: [PATCH 41/53] To allow to set the trackQC sampling from the JDL, or have a subsample with full sampling --- .../configurations/asyncReco/async_pass.sh | 2 +- .../configurations/asyncReco/setenv_extra.sh | 24 ++++++++++++++++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index a49a941b0..ce6d3ce5e 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -20,7 +20,7 @@ run_AOD_merging() { timeStartFullProcessing=`date +%s` # to skip positional arg parsing before the randomizing part. -inputarg="${1}" +export inputarg="${1}" if [[ "${1##*.}" == "root" ]]; then #echo ${1##*.} diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index 4f9992a67..c1d82bbca 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -622,7 +622,29 @@ fi # ad-hoc settings for AOD echo ALIEN_JDL_LPMPRODUCTIONTAG = $ALIEN_JDL_LPMPRODUCTIONTAG echo ALIEN_JDL_LPMPASSNAME = $ALIEN_JDL_LPMPASSNAME -export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --aod-writer-maxfilesize $AOD_FILE_SIZE --lpmp-prod-tag $ALIEN_JDL_LPMPRODUCTIONTAG --reco-pass $ALIEN_JDL_LPMPASSNAME" +# Track QC table sampling +if [[ -n $ALIEN_JDL_TRACKQCFRACTION ]]; then + TRACKQC_FRACTION=$ALIEN_JDL_TRACKQCFRACTION +else + if [[ $ALIEN_JDL_ENABLEPERMILFULLTRACKQC == "1" ]]; then + PERMIL_FULLTRACKQC=${ALIEN_JDL_PERMILFULLTRACKQC:-100} + INVERSE_PERMIL_FULLTRACKQC=$((1000/PERMIL_FULLTRACKQC)) + if [[ -f wn.xml ]]; then + HASHCODE=`grep alien:// wn.xml | tr ' ' '\n' | grep ^lfn | cut -d\" -f2 | head -1 | cksum | cut -d ' ' -f 1` + else + HASHCODE=`echo "${inputarg}" | cksum | cut -d ' ' -f 1` + fi + if [[ "$((HASHCODE%INVERSE_PERMIL_FULLTRACKQC))" -eq "0" ]]; then + TRACKQC_FRACTION=1 + else + TRACKQC_FRACTION=0.1 + fi + else + TRACKQC_FRACTION=0.1 + fi +fi +echo TRACKQC_FRACTION = $TRACKQC_FRACTION +export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --aod-writer-maxfilesize $AOD_FILE_SIZE --lpmp-prod-tag $ALIEN_JDL_LPMPRODUCTIONTAG --reco-pass $ALIEN_JDL_LPMPASSNAME --trackqc-fraction $TRACKQC_FRACTION" if [[ $PERIOD == "LHC22c" ]] || [[ $PERIOD == "LHC22d" ]] || [[ $PERIOD == "LHC22e" ]] || [[ $PERIOD 
== "JUN" ]] || [[ $PERIOD == "LHC22f" ]] || [[ $PERIOD == "LHC22m" ]] || [[ "$RUNNUMBER" == @(526463|526465|526466|526467|526468|526486|526505|526508|526510|526512|526525|526526|526528|526534|526559|526596|526606|526612|526638|526639|526641|526643|526647|526649|526689|526712|526713|526714|526715|526716|526719|526720|526776|526886|526926|526927|526928|526929|526934|526935|526937|526938|526963|526964|526966|526967|526968|527015|527016|527028|527031|527033|527034|527038|527039|527041|527057|527076|527108|527109|527228|527237|527259|527260|527261|527262|527345|527347|527349|527446|527518|527523|527734) ]] ; then export ARGS_EXTRA_PROCESS_o2_aod_producer_workflow="$ARGS_EXTRA_PROCESS_o2_aod_producer_workflow --ctpreadout-create 1" fi From 2b9b08709777b7f85bdd1282eb8c06519c361c34 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Thu, 29 Feb 2024 09:47:08 +0100 Subject: [PATCH 42/53] Do not split the metrics, as it is too slow, unless requested --- .../configurations/asyncReco/async_pass.sh | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index ce6d3ce5e..bd6e9ce89 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -610,21 +610,23 @@ else fi # now extract all performance metrics -IFS=$'\n' -timeStart=`date +%s` -for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do - suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` - if [[ -f "performanceMetrics.json" ]]; then - for workflow in `grep ': {' $perfMetricsFiles`; do - strippedWorkflow=`echo $workflow | cut -d\" -f2` - cat $perfMetricsFiles | jq '.'\"${strippedWorkflow}\"'' > ${strippedWorkflow}_metrics${suffix}.json - done - fi -done -timeEnd=`date +%s` -timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) -delta=$(( $timeEnd-$timeStart )) -echo "Time spent in splitting the metrics files = $delta s" +if [[ $ALIEN_JDL_EXTRACTMETRICS == "1" ]]; then + IFS=$'\n' + timeStart=`date +%s` + for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do + suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` + if [[ -f "performanceMetrics.json" ]]; then + for workflow in `grep ': {' $perfMetricsFiles`; do + strippedWorkflow=`echo $workflow | cut -d\" -f2` + cat $perfMetricsFiles | jq '.'\"${strippedWorkflow}\"'' > ${strippedWorkflow}_metrics${suffix}.json + done + fi + done + timeEnd=`date +%s` + timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) + delta=$(( $timeEnd-$timeStart )) + echo "Time spent in splitting the metrics files = $delta s" +fi if [[ $ALIEN_JDL_AODOFF != 1 ]]; then # flag to possibly enable Analysis QC From b04143af8dbefc4befc2853eba74a2ce63e21846 Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Wed, 28 Feb 2024 22:20:45 +0100 Subject: [PATCH 43/53] Running extraction of TPC time series in anchored MC --- MC/run/ANCHOR/anchorMC.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 2bab586de..c30597eb6 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -235,6 +235,11 @@ echo "Ready to start main workflow" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit 
${ALIEN_JDL_CPULIMIT:-8} MCRC=$? # <--- we'll report back this code +if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then + echo "Running TPC time series" + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes +fi + [[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then From b9cb67a0845587b3d34bb6d320711dc0478c4022 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Thu, 29 Feb 2024 17:49:32 +0100 Subject: [PATCH 44/53] [Anchor] Add some more help messages (#1511) * help messages * set ALIEN_JDL_ADDTIMESERIESINMC to a value at the beginning --- MC/run/ANCHOR/anchorMC.sh | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index c30597eb6..13512fea7 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -41,10 +41,13 @@ print_help() echo "PRODSPLIT." echo echo "Optional are:" - echo "NWORKERS," - echo "ALIEN_JDL_CPULIMIT or CPULIMIT," - echo "ALIEN_JDL_SIMENGINE or SIMENGINE." - echo "DISABLE_QC (set this to disable QC, e.g. DISABLE_QC=1)" + echo "ALIEN_JDL_CPULIMIT or CPULIMIT, set the CPU limit of the workflow runner, default: 8," + echo "NWORKERS, set the number of workers during detector transport, default: 8," + echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," + echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," + echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Default: 1, switch off by setting to 0," + echo "DISABLE_QC, set this to disable QC, e.g. to 1" } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -93,6 +96,8 @@ export ALIEN_JDL_LPMPRODUCTIONTAG=${ALIEN_JDL_LPMPRODUCTIONTAG:-${PRODUCTIONTAG} export ALIEN_JDL_LPMANCHORRUN=${ALIEN_JDL_LPMANCHORRUN:-${ANCHORRUN}} export ALIEN_JDL_LPMANCHORPRODUCTION=${ALIEN_JDL_LPMANCHORPRODUCTION:-${ANCHORPRODUCTION}} export ALIEN_JDL_LPMANCHORYEAR=${ALIEN_JDL_LPMANCHORYEAR:-${ANCHORYEAR}} +# decide whether to run TPC time series; on by default, switched off by setting to 0 +export ALIEN_JDL_ADDTIMESERIESINMC=${ALIEN_JDL_ADDTIMESERIESINMC:-1} # cache the production tag, will be set to a special anchor tag; reset later in fact ALIEN_JDL_LPMPRODUCTIONTAG_KEEP=$ALIEN_JDL_LPMPRODUCTIONTAG @@ -236,6 +241,7 @@ ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_ MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then + # Default value is 1 so this is run by default. 
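Most of the knobs in anchorMC.sh follow the nested-default idiom visible in this file, ${A:-${B:-default}}: a JDL variable overrides a plain environment variable, which overrides the built-in default. A short sketch of the precedence chain (the engine names are only examples):

pick_engine() { echo "${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}}"; }

unset ALIEN_JDL_SIMENGINE SIMENGINE
pick_engine     # -> TGeant4  built-in default
SIMENGINE=TGeant3
pick_engine     # -> TGeant3  environment beats the default
ALIEN_JDL_SIMENGINE=TGeant4
pick_engine     # -> TGeant4  JDL variable beats both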
echo "Running TPC time series" ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi From b818d3447fc3f65c4b4cf8f33088b9232862887c Mon Sep 17 00:00:00 2001 From: shahoian Date: Thu, 29 Feb 2024 23:39:42 +0100 Subject: [PATCH 45/53] Add ITS/MFT time-dependent DeadMaps calibration --- DATA/common/setenv_calib.sh | 22 ++++++++++++++++++- .../configurations/asyncReco/setenv_extra.sh | 2 ++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/DATA/common/setenv_calib.sh b/DATA/common/setenv_calib.sh index 95db74f33..a2da3ec46 100755 --- a/DATA/common/setenv_calib.sh +++ b/DATA/common/setenv_calib.sh @@ -13,6 +13,8 @@ SOURCE_GUARD_SETENV_CALIB=1 # define the conditions for each calibration if has_detector_calib ITS && has_detectors_reco ITS && has_detector_matching PRIMVTX && [[ ! -z "$VERTEXING_SOURCES" ]]; then CAN_DO_CALIB_PRIMVTX_MEANVTX=1; else CAN_DO_CALIB_PRIMVTX_MEANVTX=0; fi +if has_detector_calib ITS ; then CAN_DO_CALIB_ITS_DEADMAP_TIME=1; else CAN_DO_CALIB_ITS_DEADMAP_TIME=0; fi +if has_detector_calib MFT ; then CAN_DO_CALIB_MFT_DEADMAP_TIME=1; else CAN_DO_CALIB_MFT_DEADMAP_TIME=0; fi if has_detector_calib TOF && has_detector_reco TOF; then CAN_DO_CALIB_TOF_DIAGNOSTICS=1; CAN_DO_CALIB_TOF_INTEGRATEDCURR=1; else CAN_DO_CALIB_TOF_DIAGNOSTICS=0; CAN_DO_CALIB_TOF_INTEGRATEDCURR=0; fi if has_detector_calib TOF && has_detector_reco TOF && ( ( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF ) ); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi if has_detector_calib TPC && has_detectors ITS TPC TOF TRD && has_detector_matching ITSTPCTRDTOF; then CAN_DO_CALIB_TPC_SCDCALIB=1; else CAN_DO_CALIB_TPC_SCDCALIB=0; fi @@ -48,7 +50,17 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then if [[ $CAN_DO_CALIB_PRIMVTX_MEANVTX == 1 ]]; then if [[ -z ${CALIB_PRIMVTX_MEANVTX+x} ]]; then CALIB_PRIMVTX_MEANVTX=1; fi fi - + + # calibrations for ITS + if [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 1 ]]; then + if [[ -z ${CALIB_ITS_DEADMAP_TIME+x} ]]; then CALIB_ITS_DEADMAP_TIME=1; fi + fi + + # calibrations for MFT + if [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 1 ]]; then + if [[ -z ${CALIB_MFT_DEADMAP_TIME+x} ]]; then CALIB_MFT_DEADMAP_TIME=1; fi + fi + # calibrations for TOF if [[ $CAN_DO_CALIB_TOF_DIAGNOSTICS == 1 ]]; then if [[ -z ${CALIB_TOF_DIAGNOSTICS+x} ]]; then CALIB_TOF_DIAGNOSTICS=1; fi @@ -185,6 +197,8 @@ fi ( [[ -z ${CALIB_PHS_L1PHASE:-} ]] || [[ $CAN_DO_CALIB_PHS_L1PHASE == 0 ]] ) && CALIB_PHS_L1PHASE=0 ( [[ -z ${CALIB_CPV_GAIN:-} ]] || [[ $CAN_DO_CALIB_CPV_GAIN == 0 ]] ) && CALIB_CPV_GAIN=0 ( [[ -z ${CALIB_ZDC_TDC:-} ]] || [[ $CAN_DO_CALIB_ZDC_TDC == 0 ]] ) && CALIB_ZDC_TDC=0 +( [[ -z ${CALIB_ITS_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 0 ]] ) && CALIB_ITS_DEADMAP_TIME=0 +( [[ -z ${CALIB_MFT_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 0 ]] ) && CALIB_MFT_DEADMAP_TIME=0 # for async: ( [[ -z ${CALIB_EMC_ASYNC_RECALIB:-} ]] || [[ $CAN_DO_CALIB_EMC_ASYNC_RECALIB == 0 ]] ) && CALIB_EMC_ASYNC_RECALIB=0 ( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0 @@ -228,6 +242,12 @@ if [[ -z ${CALIBDATASPEC_BARREL_TF:-} ]]; then # prim vtx if [[ $CALIB_PRIMVTX_MEANVTX == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "pvtx:GLO/PVTX/0"; fi + # ITS + if [[ 
$CALIB_ITS_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "itsChipStatus:ITS/CHIPSSTATUS/0"; fi + + # MFT + if [[ $CALIB_MFT_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "mftChipStatus:MFT/CHIPSSTATUS/0"; fi + # TOF if [[ $CALIB_TOF_LHCPHASE == 1 ]] || [[ $CALIB_TOF_CHANNELOFFSETS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "calibTOF:TOF/CALIBDATA/0"; fi if [[ $CALIB_TOF_DIAGNOSTICS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "diagWords:TOF/DIAFREQ/0"; fi diff --git a/DATA/production/configurations/asyncReco/setenv_extra.sh b/DATA/production/configurations/asyncReco/setenv_extra.sh index c1d82bbca..3a26bc4d8 100644 --- a/DATA/production/configurations/asyncReco/setenv_extra.sh +++ b/DATA/production/configurations/asyncReco/setenv_extra.sh @@ -557,6 +557,8 @@ if [[ $ADD_CALIB == "1" ]]; then export CALIB_FV0_INTEGRATEDCURR=0 export CALIB_FDD_INTEGRATEDCURR=0 export CALIB_TOF_INTEGRATEDCURR=0 + export CALIB_ITS_DEADMAP_TIME=0 + export CALIB_MFT_DEADMAP_TIME=0 if [[ $DO_TPC_RESIDUAL_EXTRACTION == "1" ]]; then export CALIB_TPC_SCDCALIB=1 export CALIB_TPC_SCDCALIB_SENDTRKDATA=1 From 5510ade973ccd2092cc18554ea075f902b21129e Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 12:20:05 +0100 Subject: [PATCH 46/53] [AnalysisQC] Make common args better adjustable (#1514) Co-authored-by: Benedikt Volkel --- MC/analysis_testing/o2dpg_analysis_test_utils.py | 9 ++++++++- MC/analysis_testing/o2dpg_analysis_test_workflow.py | 2 +- MC/config/analysis_testing/json/analyses_config.json | 3 +++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_utils.py b/MC/analysis_testing/o2dpg_analysis_test_utils.py index 1a3901fe6..ee896f12f 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_utils.py +++ b/MC/analysis_testing/o2dpg_analysis_test_utils.py @@ -69,7 +69,7 @@ def full_ana_name(raw_ana_name): return f"{ANALYSIS_LABEL}_{raw_ana_name}" -def get_common_args_as_string(analysis_name, all_common_args): +def get_common_args_as_string(ana, all_common_args): """ all_common_args is of the form [-shm-segment-size , -readers , ...] 
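The common args handled here arrive as alternating flag/value tokens, which is why the Python below walks the list in steps of two. The same walk sketched in shell, for illustration only:

args=(-shm-segment-size 2500000000 -readers 2)
for (( i = 0; i < ${#args[@]}; i += 2 )); do
    key=${args[i]#-}        # drop the leading dash, like the split/join in the Python
    value=${args[i+1]}
    echo "common arg: ${key} = ${value}"
done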
@@ -88,6 +88,11 @@ def make_args_string(args_map_in): "readers": 1, "aod-memory-rate-limit": 500000000} + # get common args from analysis configuration and add to args_map + common_args_from_config = ana.get("common_args", {}) + for key, value in common_args_from_config.items(): + args_map[key] = value + # arguments dedicated for this analysis args_map_overwrite = {} @@ -98,6 +103,8 @@ def make_args_string(args_map_in): print("ERROR: Cannot digest common args.") return None + analysis_name = ana["name"] + for i in range(0, len(all_common_args), 2): tokens = all_common_args[i].split("-") key = "-".join(tokens[1:]) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index 75058219e..c50ed6999 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -251,7 +251,7 @@ def add_analysis_tasks(workflow, input_aod="./AO2D.root", output_dir="./Analysis continue print(f"INFO: Analysis {ana['name']} uses configuration {configuration}") - add_common_args_ana = get_common_args_as_string(ana["name"], add_common_args) + add_common_args_ana = get_common_args_as_string(ana, add_common_args) if not add_common_args_ana: print(f"ERROR: Cannot parse common args for analysis {ana['name']}") continue diff --git a/MC/config/analysis_testing/json/analyses_config.json b/MC/config/analysis_testing/json/analyses_config.json index 63c67b285..6be135b8b 100644 --- a/MC/config/analysis_testing/json/analyses_config.json +++ b/MC/config/analysis_testing/json/analyses_config.json @@ -233,6 +233,9 @@ "expected_output": ["AnalysisResults.root"], "valid_mc": true, "valid_data": true, + "common_args": { + "shm-segment-size": 2500000000 + }, "tasks": ["o2-analysis-je-emc-eventselection-qa", "o2-analysis-je-emc-cellmonitor", "o2-analysis-je-emcal-correction-task", From 0bb1b31c03377dc1caebf22e5e5ba75923ad2f05 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 12:29:52 +0100 Subject: [PATCH 47/53] [Anchor] Remove -k from QC run, return error code (#1513) Co-authored-by: Benedikt Volkel --- MC/run/ANCHOR/anchorMC.sh | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index 13512fea7..c1096e7ac 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -46,8 +46,8 @@ print_help() echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," - echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Default: 1, switch off by setting to 0," - echo "DISABLE_QC, set this to disable QC, e.g. to 1" + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Switch off by setting to 0, default: 1," + echo "ALIEN_JDL_ANCHOR_SIM_DISABLE_QC|ANCHOR_SIM_DISABLE_QC, set this to disable QC, e.g. 
to 1, default: 0," } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -85,6 +85,7 @@ export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} # can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} +export ALIEN_JDL_ANCHOR_SIM_DISABLE_QC=${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC:-${ANCHOR_SIM_DISABLE_QC:-0}} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -237,7 +238,7 @@ export FAIRMQ_IPC_PREFIX=./ echo "Ready to start main workflow" -${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} +${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT} MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then @@ -246,14 +247,12 @@ if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi -[[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." +[[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" != "0" ]] && echo "INFO: QC is disabled, skip it." -if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then +if [[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" == "0" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then # do QC tasks echo "Doing QC" - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k - # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. - # That means, even if there is a failing QC task, the return code will be 0 + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT} MCRC=$? fi From 928d82f57b81889dbdf0b71e55921cb4d5cfc483 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Fri, 1 Mar 2024 13:06:07 +0100 Subject: [PATCH 48/53] Revert "[Anchor] Remove -k from QC run, return error code (#1513)" (#1515) This reverts commit 0bb1b31c03377dc1caebf22e5e5ba75923ad2f05. --- MC/run/ANCHOR/anchorMC.sh | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/MC/run/ANCHOR/anchorMC.sh b/MC/run/ANCHOR/anchorMC.sh index c1096e7ac..13512fea7 100755 --- a/MC/run/ANCHOR/anchorMC.sh +++ b/MC/run/ANCHOR/anchorMC.sh @@ -46,8 +46,8 @@ print_help() echo "ALIEN_JDL_SIMENGINE or SIMENGINE, choose the transport engine, default: TGeant4," echo "ALIEN_JDL_WORKFLOWDETECTORS, set detectors to be taken into account, default: ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP," echo "ALIEN_JDL_ANCHOR_SIM_OPTIONS, additional options that are passed to the workflow creation, default: -gen pythia8," - echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. Switch off by setting to 0, default: 1," - echo "ALIEN_JDL_ANCHOR_SIM_DISABLE_QC|ANCHOR_SIM_DISABLE_QC, set this to disable QC, e.g. to 1, default: 0," + echo "ALIEN_JDL_ADDTIMESERIESINMC, run TPC time series. 
Default: 1, switch off by setting to 0," + echo "DISABLE_QC, set this to disable QC, e.g. to 1" } # Prevent the script from being soured to omit unexpected surprises when exit is used @@ -85,7 +85,6 @@ export ALIEN_JDL_SIMENGINE=${ALIEN_JDL_SIMENGINE:-${SIMENGINE:-TGeant4}} export ALIEN_JDL_WORKFLOWDETECTORS=${ALIEN_JDL_WORKFLOWDETECTORS:-ITS,TPC,TOF,FV0,FT0,FDD,MID,MFT,MCH,TRD,EMC,PHS,CPV,HMP,CTP} # can be passed to contain additional options that will be passed to o2dpg_sim_workflow_anchored.py and eventually to o2dpg_sim_workflow.py export ALIEN_JDL_ANCHOR_SIM_OPTIONS=${ALIEN_JDL_ANCHOR_SIM_OPTIONS:--gen pythia8} -export ALIEN_JDL_ANCHOR_SIM_DISABLE_QC=${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC:-${ANCHOR_SIM_DISABLE_QC:-0}} # all others MUST be set by the user/on the outside export ALIEN_JDL_LPMANCHORPASSNAME=${ALIEN_JDL_LPMANCHORPASSNAME:-${ANCHORPASSNAME}} export ALIEN_JDL_MCANCHOR=${ALIEN_JDL_MCANCHOR:-${MCANCHOR}} @@ -238,7 +237,7 @@ export FAIRMQ_IPC_PREFIX=./ echo "Ready to start main workflow" -${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT} +${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt ${ALIEN_JDL_O2DPGWORKFLOWTARGET:-aod} --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} MCRC=$? # <--- we'll report back this code if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then @@ -247,12 +246,14 @@ if [[ "${ALIEN_JDL_ADDTIMESERIESINMC}" != "0" ]]; then ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt tpctimes fi -[[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" != "0" ]] && echo "INFO: QC is disabled, skip it." +[[ ! -z "${DISABLE_QC}" ]] && echo "INFO: QC is disabled, skip it." -if [[ "${ALIEN_JDL_ANCHOR_SIM_DISABLE_QC}" == "0" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then +if [[ -z "${DISABLE_QC}" && "${MCRC}" = "0" && "${remainingargs}" == *"--include-local-qc"* ]] ; then # do QC tasks echo "Doing QC" - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels QC --cpu-limit ${ALIEN_JDL_CPULIMIT:-8} -k + # NOTE that with the -k|--keep-going option, the runner will try to keep on executing even if some tasks fail. + # That means, even if there is a failing QC task, the return code will be 0 MCRC=$? 
 fi

From cf41e3246dcd6accdda8493135cd276b72479507 Mon Sep 17 00:00:00 2001
From: benedikt-voelkel
Date: Fri, 1 Mar 2024 18:15:09 +0100
Subject: [PATCH 49/53] [SimCI] Revise tests (#1517)

* standalone AnalysisQC test has been removed in favor of developments in workflow test

* generators
  * do not blindly test all INI files when run_generator_tests.sh changes
  * find files correctly when there are other changed files not related to generator testing

* workflows
  revised logic such that
  * if MC/bin changes
    --> run anchored,
    --> check correct creation of workflows implemented by PWGs,
    --> test AnalysisQC and QC
  * if MC/analysis_testing or MC-related QC configurations change
    --> test AnalysisQC and QC,
    --> test O2DPG AnalysisQC CLI
  * if anchored-related shell scripts change
    --> run anchored

* relval
  * no changes

Co-authored-by: Benedikt Volkel
---
 test/common/utils/utils.sh   |  34 +++-
 test/run_analysisqc_tests.sh | 158 +-----------------
 test/run_generator_tests.sh  |  48 +++---
 test/run_workflow_tests.sh   | 314 +++++++++++++++++------------------
 4 files changed, 207 insertions(+), 347 deletions(-)

diff --git a/test/common/utils/utils.sh b/test/common/utils/utils.sh
index 0c34c5395..03c01d96f 100644
--- a/test/common/utils/utils.sh
+++ b/test/common/utils/utils.sh
@@ -4,6 +4,33 @@
 # Test utility functionality
 #
 
+# a global counter for tests
+TEST_COUNTER=0
+
+# Prepare some colored output
+SRED="\033[0;31m"
+SGREEN="\033[0;32m"
+SYELLOW="\033[0;33m"
+SEND="\033[0m"
+
+echo_green()
+{
+    echo -e "${SGREEN}${*}${SEND}"
+}
+
+
+echo_red()
+{
+    echo -e "${SRED}${*}${SEND}"
+}
+
+
+echo_yellow()
+{
+    echo -e "${SYELLOW}${*}${SEND}"
+}
+
+
 remove_artifacts()
 {
     [[ "${KEEP_ONLY_LOGS}" == "1" ]] && find . -type f ! -name '*.log' -and ! -name "*serverlog*" -and ! -name "*mergerlog*" -and ! -name "*workerlog*" -delete
@@ -25,7 +52,12 @@ get_changed_files()
     [[ ! -z "$(git diff)" && -z ${ALIBUILD_HEAD_HASH+x} && -z ${O2DPG_TEST_HASH_HEAD+x} ]] && hash_head="" # if there are unstaged changes and no base from user, set to HEAD
     [[ ! 
-z "$(git diff)" && -z ${ALIBUILD_HEAD_HASH+x} && -z ${O2DPG_TEST_HASH_BASE+x} ]] && hash_base="HEAD" - git diff --diff-filter=AMR --name-only ${hash_base} ${hash_head} + local paths=$(git diff --diff-filter=AMR --name-only ${hash_base} ${hash_head}) + local absolute_paths= + for p in ${paths} ; do + absolute_paths+="$(realpath ${p}) " + done + echo "${absolute_paths}" } diff --git a/test/run_analysisqc_tests.sh b/test/run_analysisqc_tests.sh index bd57493fd..a4064ba30 100755 --- a/test/run_analysisqc_tests.sh +++ b/test/run_analysisqc_tests.sh @@ -1,160 +1,4 @@ #!/bin/bash -# The test parent dir to be cretaed in current directory -TEST_PARENT_DIR="o2dpg_tests/analysisqc" - -# unified names of log files -LOG_FILE="o2dpg-test-analysisqc.log" - -# Prepare some colored output -SRED="\033[0;31m" -SGREEN="\033[0;32m" -SEND="\033[0m" - - -echo_green() -{ - echo -e "${SGREEN}${*}${SEND}" -} - - -echo_red() -{ - echo -e "${SRED}${*}${SEND}" -} - - -get_git_repo_directory() -{ - local repo= - if [[ -d .git ]] ; then - pwd - else - repo=$(git rev-parse --git-dir 2> /dev/null) - fi - [[ "${repo}" != "" ]] && repo=${repo%%/.git} - echo ${repo} -} - - -test_analysisqc() -{ - echo "### Testing AnalysisQC creation for MC ###" > ${LOG_FILE} - ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root --is-mc -o wokflow_test_mc.json >> ${LOG_FILE} 2>&1 - local ret=${?} - [[ "${ret}" != "0" ]] && echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE} - echo "### Testing AnalysisQC creation for data ###" >> ${LOG_FILE} - ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root -o wokflow_test_data.json >> ${LOG_FILE} 2>&1 - local ret_data=${?} - [[ "${ret_data}" != "0" ]] && { echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE} ; ret=${ret_data} ; } - return ${ret} -} - - -print_usage() -{ - echo - echo "usage: run_workflow_tests.sh" - echo - echo " ENVIRONMENT VARIABLES:" - echo - echo " O2DPG_TEST_REPO_DIR : Point to the source repository you want to test." - echo " O2DPG_TEST_HASH_BASE : The base hash you want to use for comparison (optional)" - echo " O2DPG_TEST_HASH_HEAD : The head hash you want to use for comparison (optional)" - echo - echo " If O2DPG_TEST_HASH_BASE is not set, it will be looked for ALIBUILD_BASE_HASH." - echo " If also not set, this will be set to HEAD~1. However, if there are unstaged" - echo " changes, it will be set to HEAD." - echo - echo " If O2DPG_TEST_HASH_HEAD is not set, it will be looked for ALIBUILD_HEAD_HASH." - echo " If also not set, this will be set to HEAD. However, if there are unstaged" - echo " changes, it will left blank." - echo -} - -while [ "$1" != "" ] ; do - case $1 in - --help|-h ) print_usage - exit 1 - ;; - * ) echo "Unknown argument ${1}" - exit 1 - ;; - esac -done - -echo -echo "################################" -echo "# Run O2DPG AnalysisQC testing #" -echo "################################" -echo - -REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} -if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." - exit 1 -fi - -if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." 
- exit 1 -fi - -# source the utilities -source ${REPO_DIR}/test/common/utils/utils.sh - -# Do the initial steps in the source dir where we have the full git repo -pushd ${REPO_DIR} > /dev/null - -# flag if anything changed for AnalysisQC -need_testing=$(get_changed_files | grep "MC/.*analysis_testing") - -# go back to where we came from -popd > /dev/null -REPO_DIR=$(realpath ${REPO_DIR}) - -# Now, do the trick: -# We just use the source dir since O2DPG's installation is basically just a copy of the whole repo. -# This makes sense in particular for local testing but also in the CI it works in the same way. We could do -# [[ -z {ALIBUILD_HEAD_HASH+x} ]] && export O2DPG_ROOT=${REPO_DIR} -# but let's do the same for both local and CI consistently -export O2DPG_ROOT=${REPO_DIR} - - -############### -# Let's do it # -############### -ret_global=0 -# prepare our local test directory for PWG tests -rm -rf ${TEST_PARENT_DIR} 2>/dev/null -mkdir -p ${TEST_PARENT_DIR} 2>/dev/null -pushd ${TEST_PARENT_DIR} > /dev/null - -# Test what we found -if [[ "${need_testing}" != "" ]] ; then - test_analysisqc - ret_global=${?} -else - echo "Nothing to test" - exit 0 -fi - -# return to where we came from -popd > /dev/null - -# However, if a central test fails, exit code will be !=0 -if [[ "${ret_global}" != "0" ]] ; then - echo - echo "########################" - echo "# ERROR for AnalysisQC #" - echo "########################" - echo - print_error_logs ${TEST_PARENT_DIR} - exit ${ret_global} -fi - -echo -echo_green "AnalysisQC tests successful" -echo - +# for now, obsolete exit 0 diff --git a/test/run_generator_tests.sh b/test/run_generator_tests.sh index d5a4d3c74..79addd3ea 100755 --- a/test/run_generator_tests.sh +++ b/test/run_generator_tests.sh @@ -250,35 +250,26 @@ add_ini_files_from_tests() done } -add_ini_files_from_all_tests() -{ - # Collect also those INI files for which the test has been changed - local all_tests=$(find ${REPO_DIR} -name "*.C" | grep "MC/.*/ini/tests") - local repo_dir_head=${REPO_DIR} - for t in ${all_tests} ; do - local this_test=$(realpath ${t}) - this_test=${this_test##${repo_dir_head}/} - local tc=$(basename ${this_test}) - this_test=${this_test%%/tests/*} - tc=${tc%.C}.ini - tc=${this_test}/${tc} - [[ "${INI_FILES}" == *"${tc}"* ]] && continue - INI_FILES+=" ${tc} " - done -} - collect_ini_files() { # Collect all INI files which have changed - local ini_files=$(get_changed_files | grep ".ini$" | grep "MC/config") - for ini in ${ini_files} ; do + local changed_files=$(get_changed_files) + for ini in ${changed_files} ; do + [[ "${ini}" != *"MC/config"*".ini" ]] && continue [[ "${INI_FILES}" == *"${ini}"* ]] && continue || INI_FILES+=" ${ini} " done # this relies on INI_FILES and MACRO_FILES_POTENTIALLY_INCLUDED # collect all INI files that might include some changed macros - add_ini_files_from_macros $(get_changed_files | grep ".C$" | grep "MC/config") + changed_files=$(get_changed_files) + local macros= + for m in ${changed_files} ; do + [[ "${m}" != *"MC/config"*".C" ]] && continue + macros+=" ${m} " + done + + add_ini_files_from_macros ${macros} # this relies on MACRO_FILES_POTENTIALLY_INCLUDED # collect all INI files that might contain macros which in turn include changed macros @@ -286,7 +277,13 @@ collect_ini_files() add_ini_files_from_macros $(find_including_macros) # also tests might have changed in which case we run them - add_ini_files_from_tests $(get_changed_files | grep ".C$" | grep "MC/.*/ini/tests") + changed_files=$(get_changed_files) + local macros= + for m in 
${changed_files} ; do + [[ "${m}" != *"MC/"*"ini/tests"*".C" ]] && continue + macros+=" ${m} " + done + add_ini_files_from_tests ${macros} } @@ -361,12 +358,12 @@ echo REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." + echo "ERROR: Directory \"${REPO_DIR}\" is not a git repository." exit 1 fi if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." + echo "ERROR: O2DPG is not loaded, probably other packages are missing as well in this environment." exit 1 fi @@ -376,11 +373,6 @@ source ${REPO_DIR}/test/common/utils/utils.sh # Do the initial steps in the source dir where we have the full git repo pushd ${REPO_DIR} > /dev/null -# First check, if testing itself has changed. In that case this will add INI files -# for which a test can be found -global_testing_changed=$(get_changed_files | grep -E "common/kine_tests/test_generic_kine.C|run_generator_tests.sh" | grep "^test/") -[[ "${global_testing_changed}" != "" ]] && add_ini_files_from_all_tests - # Then add the ini files that have changed as well. We need to do that so we get information # about missing tests etc. collect_ini_files diff --git a/test/run_workflow_tests.sh b/test/run_workflow_tests.sh index 2acde102e..9962b3293 100755 --- a/test/run_workflow_tests.sh +++ b/test/run_workflow_tests.sh @@ -2,56 +2,30 @@ # The test parent dir to be cretaed in current directory TEST_PARENT_DIR_PWG="o2dpg_tests/workflows_pwgs" -TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_bin" +TEST_PARENT_DIR_BIN="o2dpg_tests/workflows_analysisqc" TEST_PARENT_DIR_ANCHORED="o2dpg_tests/anchored" -# a global counter for tests -TEST_COUNTER=0 - # unified names of log files LOG_FILE_WF="o2dpg-test-wf.log" LOG_FILE_ANCHORED="o2dpg-test-anchored.log" - -# Prepare some colored output -SRED="\033[0;31m" -SGREEN="\033[0;32m" -SEND="\033[0m" - - -echo_green() -{ - echo -e "${SGREEN}${*}${SEND}" -} +LOG_FILE_ANALYSISQC="o2dpg-test_analysisqc.log" -echo_red() -{ - echo -e "${SRED}${*}${SEND}" -} - get_git_repo_directory() { + local look_dir=${1:-$(pwd)} + look_dir=$(realpath "${look_dir}") + look_dir=${look_dir%%/.git} local repo= - if [[ -d .git ]] ; then - pwd - else + ( + cd "${look_dir}" repo=$(git rev-parse --git-dir 2> /dev/null) - fi - [[ "${repo}" != "" ]] && repo=${repo%%/.git} + [[ "${repo}" != "" ]] && { repo=$(realpath "${repo}") ; repo=${repo%%/.git} ; } + ) echo ${repo} } -get_all_workflows() -{ - # Collect also those INI files for which the test has been changed - local repo_dir_head=${REPO_DIR} - local grep_dir=${1} - local all_workflows=$(find ${repo_dir_head} -name "*.sh" | grep "${grep_dir}") - echo ${all_workflows} -} - - test_single_wf() { local wf_script=${1} @@ -59,12 +33,12 @@ test_single_wf() make_wf_creation_script ${wf_script} ${wf_script_local} local has_wf_script_local=${?} echo -n "Test ${TEST_COUNTER}: ${wfs}" - [[ "${has_wf_script_local}" != "0" ]] && { echo "No WF creation in script ${wfs} ##########" ; return 1 ; } + [[ "${has_wf_script_local}" != "0" ]] && { echo -n " (No WF creation in script)" ; echo_red " -> FAILED" ; return 1 ; } # Check if there is an "exit" other than the usual # [ ! "${O2DPG_ROOT}" ] && echo "Error: This needs O2DPG loaded" && exit 1 # like ones. 
# This is not perfect but might prevent us from running into some checks the WF script does before launching the WF creation
-    [[ "$(grep exit ${wf_script_local} | grep -v "This needs")" != "" ]] && { echo -e -n "\nFound \"exit\" in ${wfs} so will not test automatically" ; return 0 ; }
+    [[ "$(grep exit ${wf_script_local} | grep -v "This needs")" != "" ]] && { echo -n " (Found \"exit\" in script, not testing automatically)" ; echo_yellow " -> WARNING" ; return 0 ; }
     # one single test
     echo "Test ${wf_line} from ${wfs}" > ${LOG_FILE_WF}
     bash ${wf_script_local} >> ${LOG_FILE_WF} 2>&1
@@ -72,6 +46,7 @@
     local ret_this_qc=0
     local ret_this_analysis=0
     if [[ "${ret_this}" != "0" ]] ; then
+      echo_red " -> FAILED"
       echo "[FATAL]: O2DPG_TEST Workflow creation failed" >> ${LOG_FILE_WF}
     elif [[ "${execute}" != "" ]] ; then
       local memlimit=${O2DPG_TEST_WORKFLOW_MEMLIMIT:+--mem-limit ${O2DPG_TEST_WORKFLOW_MEMLIMIT}}
@@ -80,7 +55,9 @@
       [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels QC ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_qc=${?} ; }
       [[ "${ret_this}" == "0" ]] && { ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --cpu-limit 8 --target-labels Analysis ${memlimit} >> ${LOG_FILE_WF} 2>&1 ; ret_this_analysis=${?} ; }
       ret_this=$((ret_this + ret_this_qc + ret_this_analysis))
-      [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF}
+      [[ "${ret_this}" != "0" ]] && echo "[FATAL]: O2DPG_TEST Workflow execution failed" >> ${LOG_FILE_WF} || echo_green " -> PASSED"
+    else
+      echo_green " -> PASSED"
     fi
     return ${ret_this}
 }
@@ -91,8 +68,8 @@ run_workflow_creation()
     local execute=
     while [ "$1" != "" ] ; do
         case $1 in
-        --execute ) shift
-                    execute=1
+        --execute ) execute=1
+                    shift
                     ;;
         * ) wf_scripts+="${1} "
            shift
@@ -116,16 +93,34 @@
         local ret_this=${?}
         [[ "${ret_this}" != "0" ]] && RET=${ret_this}
         popd > /dev/null
-        if [[ "${ret_this}" != "0" ]] ; then
-            echo_red " -> FAILED"
-        else
-            echo_green " -> PASSED"
-        fi
     done
     return ${RET}
 }
+
+
+test_analysisqc_cli()
+{
+    ((TEST_COUNTER++))
+    local test_dir="${TEST_COUNTER}_analysisqc_cli"
+    rm -rf ${test_dir} 2> /dev/null
+    mkdir ${test_dir}
+    pushd ${test_dir} > /dev/null
+    echo "### Testing AnalysisQC creation for MC ###" > ${LOG_FILE_ANALYSISQC}
+    echo -n "Test ${TEST_COUNTER}: Running AnalysisQC CLI"
+    ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root --is-mc -o workflow_test_mc.json >> ${LOG_FILE_ANALYSISQC} 2>&1
+    local ret=${?}
+    [[ "${ret}" != "0" ]] && echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE_ANALYSISQC}
+    echo "### Testing AnalysisQC creation for data ###" >> ${LOG_FILE_ANALYSISQC}
+    ${O2DPG_ROOT}/MC/analysis_testing/o2dpg_analysis_test_workflow.py -f AO2D.root -o workflow_test_data.json >> ${LOG_FILE_ANALYSISQC} 2>&1
+    local ret_data=${?}
+    [[ "${ret_data}" != "0" ]] && { echo "[FATAL]: O2DPG_TEST failed" >> ${LOG_FILE_ANALYSISQC} ; ret=${ret_data} ; }
+    popd > /dev/null
+    [[ "${ret}" != "0" ]] && echo_red " -> FAILED" || echo_green " -> PASSED"
+    return ${ret}
+}
+
+
 test_anchored()
 {
     local to_run="${1:-${O2DPG_ROOT}/MC/run/ANCHOR/tests/test_anchor_2023_apass2_pp.sh}"
@@ -140,27 +135,24 @@ test_anchored()
     echo -n "Test ${TEST_COUNTER}: ${anchored_script}"
     ${anchored_script} >> ${LOG_FILE_ANCHORED} 2>&1
     local ret_this=${?}
-    [[ "${ret_this}" != "0" ]] && RET=${ret_this}
+    if [[ "${ret_this}" != "0" ]] ; then
+      echo_red " -> FAILED"
+      
RET=${ret_this} + else + echo_green " -> PASSED" + fi popd > /dev/null done return ${RET} } -collect_changed_pwg_wf_files() -{ - # Collect all INI files which have changed - local wf_scripts=$(get_changed_files | grep ".sh$" | grep "MC/run") - for wfs in ${wf_scripts} ; do - [[ "${WF_FILES}" == *"${wfs}"* ]] && continue || WF_FILES+=" ${wfs} " - done -} - print_usage() { + echo echo "usage: run_workflow_tests.sh" echo - echo " ENVIRONMENT VARIABLES:" + echo " ENVIRONMENT VARIABLES TO DETERMINE WHAT TO COMPARE:" echo echo " O2DPG_TEST_REPO_DIR : Point to the source repository you want to test." echo " O2DPG_TEST_HASH_BASE : The base hash you want to use for comparison (optional)" @@ -174,10 +166,15 @@ print_usage() echo " If also not set, this will be set to HEAD. However, if there are unstaged" echo " changes, it will left blank." echo + echo " SPECIFIC ENVIRONMENT VARIABLES FOR THIS TEST:" echo " O2DPG_TEST_WORKFLOW_MEMLIMIT : The memory limit that is passed to the workflow runner in case a workflow is executed (optional)" echo } + +############# +# Main part # +############# while [ "$1" != "" ] ; do case $1 in --help|-h ) print_usage @@ -189,47 +186,70 @@ while [ "$1" != "" ] ; do esac done -echo -echo "##############################" -echo "# Run O2DPG workflow testing #" -echo "##############################" -echo - +# determine the repository directory REPO_DIR=${O2DPG_TEST_REPO_DIR:-$(get_git_repo_directory)} if [[ ! -d ${REPO_DIR}/.git ]] ; then - echo_red "Directory \"${REPO_DIR}\" is not a git repository." + echo "ERROR: Directory \"${REPO_DIR}\" is not a git repository." exit 1 fi if [[ -z ${O2DPG_ROOT+x} ]] ; then - echo_red "O2DPG is not loaded, probably other packages are missing as well in this environment." + echo "ERROR: O2DPG is not loaded, probably other packages are missing as well in this environment." exit 1 fi # source the utilities source ${REPO_DIR}/test/common/utils/utils.sh + +echo "##############################" +echo "# Run O2DPG workflow testing #" +echo "##############################" + # Do the initial steps in the source dir where we have the full git repo pushd ${REPO_DIR} > /dev/null # flag if anything changed in the sim workflow bin dir -changed_wf_bin=$(get_changed_files | grep -E "MC/bin") -changed_wf_bin_related=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") -changed_anchored_related=$(get_changed_files | grep -E "MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py") - +changed_sim_bin=$(get_changed_files | grep -E "MC/bin") +# collect if anything has changed related to AnalysisQC +changed_analysis_qc=$(get_changed_files | grep -E "MC/analysis_testing|MC/config/analysis_testing/json|MC/config/QC/json") +# check if anything has changed concerning anchoring +changed_anchored=$(get_changed_files | grep -E "MC/bin|MC/run/ANCHOR/anchorMC.sh|MC/run/ANCHOR/tests|MC/bin|UTILS/parse-async-WorkflowConfig.py|DATA/production/configurations/asyncReco/setenv_extra.sh|DATA/production/configurations/asyncReco/async_pass.sh|DATA/common/setenv.sh|DATA/production/workflow-multiplicities.sh") +# collect changed workflow scripts +changed_workflows= +# workflows to be executed +execute_workflows= +echo "==> Test outline" +if [[ "${changed_sim_bin}" != "" ]] ; then + # in this case, something central has changed, test creation of all workflows against it + echo " - The creation of simulation workflows from all run scripts (MC/run/**/*.sh) will be tested." 
+    for p in $(find MC/run -name "*.sh") ; do
+        changed_workflows+="$(realpath ${p}) "
+    done
+    # definitely run anchored if central python scripts have changed
+    echo " - Changes in MC/bin/ detected, mark anchored MC test to be run."
+    changed_anchored="1"
+else
+    # otherwise, only take the changed shell scripts
+    changed_workflows=
+    changed_files=$(get_changed_files)
+    for cf in ${changed_files} ; do
+        [[ "${cf}" != *"MC/run"*".sh" ]] && continue
+        changed_workflows+="${cf} "
+    done
+    [[ "${changed_workflows}" != "" ]] && echo " - The creation of simulation workflows from changed run scripts (subset of MC/run/**/*.sh) will be tested."
+fi
 
-# collect what has changed for PWGs
-collect_changed_pwg_wf_files
+if [[ "${changed_analysis_qc}" != "" || "${changed_sim_bin}" ]] ; then
+    for p in $(find "MC/bin/tests" -name "*.sh") ; do
+        execute_workflows+="$(realpath ${p}) "
+    done
+    echo " - Test AnalysisQC CLI and execution with a simulation."
+fi
 
-# get realpaths for all changes
-wf_files_tmp=${WF_FILES}
-WF_FILES=
-for wf_tmp in ${wf_files_tmp} ; do
-    # convert to full path so that we can find it from anywhere
-    WF_FILES+="$(realpath ${wf_tmp}) "
-done
+[[ "${changed_anchored}" != "" ]] && echo " - Test anchored simulation."
 
-# go back to where we came from
+# everything collected, go back to where we came from
 popd > /dev/null
 REPO_DIR=$(realpath ${REPO_DIR})
@@ -241,111 +261,83 @@ REPO_DIR=$(realpath ${REPO_DIR})
 export O2DPG_ROOT=${REPO_DIR}
 
-###############
-# ANCHORED MC #
-###############
-# prepare our local test directory for PWG tests
-rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null
-mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null
-pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null
-
-# global return code for PWGs
-ret_global_anchored=0
-if [[ "${changed_anchored_related}" != "" ]] ; then
-    echo "### Test anchored ###"
-    # Run an anchored test
-    test_anchored
-    ret_global_anchored=${?}
-    echo
-fi
-
-# return to where we came from
-popd > /dev/null
-
-########
-# PWGs #
-########
-# prepare our local test directory for PWG tests
-rm -rf ${TEST_PARENT_DIR_PWG} 2>/dev/null
-mkdir -p ${TEST_PARENT_DIR_PWG} 2>/dev/null
-pushd ${TEST_PARENT_DIR_PWG} > /dev/null
-
+##############################
+# PWG workflow shell scripts #
+##############################
 # global return code for PWGs
 ret_global_pwg=0
-if [[ "${changed_wf_bin}" != "" ]] ; then
-    # Run all the PWG related WF creations, hence overwrite what was collected by collect_changed_pwg_wf_files earlier
-    WF_FILES=$(get_all_workflows "MC/run/.*/")
-    echo
-fi
 # Test what we found
-if [[ "${WF_FILES}" != "" ]] ; then
-    echo "### Test PWG-related workflow creation ###"
+if [[ "${changed_workflows}" != "" ]] ; then
+    # prepare our local test directory for PWG tests
+    rm -rf ${TEST_PARENT_DIR_PWG} 2>/dev/null
+    mkdir -p ${TEST_PARENT_DIR_PWG} 2>/dev/null
+    pushd ${TEST_PARENT_DIR_PWG} > /dev/null
+
     echo
-    run_workflow_creation ${WF_FILES}
+    echo "==> START BLOCK: Test PWG-related workflow creation <=="
+    run_workflow_creation ${changed_workflows}
     ret_global_pwg=${?}
-    echo
+    [[ "${ret_global_pwg}" != "0" ]] && { echo "WARNING for workflow creations, some could not be built." ; print_error_logs ./ ; }
+    echo "==> END BLOCK: Test PWG-related workflow creation <=="
+
+    # return to where we came from
+    popd > /dev/null
 fi
 
-# return to where we came from
-popd > /dev/null
 
-####################
-# sim workflow bin #
-####################
+####################################
+# sim workflow bin with AnalysisQC #
+####################################
 # prepare our local test directory for bin tests
-rm -rf ${TEST_PARENT_DIR_BIN} 2>/dev/null
-mkdir -p ${TEST_PARENT_DIR_BIN} 2>/dev/null
-pushd ${TEST_PARENT_DIR_BIN} > /dev/null
-
 # global return code for AnalysisQC
-ret_global_bin=0
-if [[ "${changed_wf_bin}" != "" || "${changed_wf_bin_related}" != "" ]] ; then
-    echo "### Test bin-related workflow creation ###"
+ret_analysis_qc=0
+if [[ "${changed_analysis_qc}" != "" ]] ; then
+    rm -rf ${TEST_PARENT_DIR_BIN} 2>/dev/null
+    mkdir -p ${TEST_PARENT_DIR_BIN} 2>/dev/null
+    pushd ${TEST_PARENT_DIR_BIN} > /dev/null
+
+    echo
+    echo "==> START BLOCK: Test running workflow with AnalysisQC <=="
+    # test command line interface
+    test_analysisqc_cli
+    ret_analysis_qc=${?}
    # Run all the bin test WF creations
-    run_workflow_creation $(get_all_workflows "MC/bin/tests") --execute
-    ret_global_bin=${?}
-    echo
+    [[ "${ret_analysis_qc}" == "0" ]] && { run_workflow_creation ${execute_workflows} --execute ; ret_analysis_qc=${?} ; }
+    [[ "${ret_analysis_qc}" != "0" ]] && { echo "ERROR for workflow execution and AnalysisQC." ; print_error_logs ./ ; }
+    echo "==> END BLOCK: Test running workflow with AnalysisQC <=="
+
+    # return to where we came from
+    popd > /dev/null
 fi
-# return to where we came from
-popd > /dev/null
 
-# final printing of log files of failed tests
-# For PWG workflows, this triggers only a warning at the moment
-if [[ "${ret_global_pwg}" != "0" ]] ; then
-    echo
-    echo "#####################################"
-    echo "# WARNING for PWG-related workflows #"
-    echo "#####################################"
-    echo
-    print_error_logs ${TEST_PARENT_DIR_PWG}
-fi
+###############
+# ANCHORED MC #
+###############
+# global return code for anchored MC
+ret_global_anchored=0
+if [[ "${changed_anchored}" != "" ]] ; then
+    # prepare our local test directory for anchored tests
+    rm -rf ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null
+    mkdir -p ${TEST_PARENT_DIR_ANCHORED} 2>/dev/null
+    pushd ${TEST_PARENT_DIR_ANCHORED} > /dev/null
 
-# However, if a central test fails, exit code will be !=0
-if [[ "${ret_global_bin}" != "0" ]] ; then
-    echo
-    echo "###################################"
-    echo "# ERROR for bin-related workflows #"
-    echo "###################################"
     echo
-    print_error_logs ${TEST_PARENT_DIR_BIN}
-fi
+    echo "==> START BLOCK: Test anchored simulation <=="
+    # Run an anchored test
+    test_anchored
+    ret_global_anchored=${?}
+    [[ "${ret_global_anchored}" != "0" ]] && { echo "ERROR executing anchored simulation." ; print_error_logs ./ ; }
+    echo "==> END BLOCK: Test anchored simulation <=="
 
-# However, if a central test fails, exit code will be !=0
-if [[ "${ret_global_anchored}" != "0" ]] ; then
-    echo
-    echo "##########################"
-    echo "# ERROR for anchored MCs #"
-    echo "##########################"
-    echo
-    print_error_logs ${TEST_PARENT_DIR_ANCHORED}
+    # return to where we came from
+    popd > /dev/null
 fi
 
-RET=$(( ret_global_bin + ret_global_anchored ))
+RET=$(( ret_analysis_qc + ret_global_anchored ))
 echo
-[[ "${RET}" != "0" ]] && echo "There were errors, please check!" || echo_green "All required workflow tests successful"
+[[ "${RET}" != "0" ]] && echo_red "There were errors, please check!" 
|| echo_green "All required workflow tests successful" exit ${RET} From 77e6a0613fae0c61f33d65d00b316f9bd2f2d54d Mon Sep 17 00:00:00 2001 From: Chiara Zampolli Date: Fri, 1 Mar 2024 14:36:01 +0100 Subject: [PATCH 50/53] Possibility to take QC alone when we split the wf --- .../configurations/asyncReco/async_pass.sh | 46 +++++++++++++++++-- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/DATA/production/configurations/asyncReco/async_pass.sh b/DATA/production/configurations/asyncReco/async_pass.sh index bd6e9ce89..8f1e9e653 100755 --- a/DATA/production/configurations/asyncReco/async_pass.sh +++ b/DATA/production/configurations/asyncReco/async_pass.sh @@ -575,11 +575,17 @@ else fi if ([[ -z "$ALIEN_JDL_SSPLITSTEP" ]] && [[ -z "$ALIEN_JDL_SSPLITSTEP" ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq 3 ]] || ( [[ -n $ALIEN_JDL_STARTSPLITSTEP ]] && [[ "$ALIEN_JDL_STARTSPLITSTEP" -le 3 ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq "all" ]]; then - # 3. matching, QC, calib, AOD + # 3. matching, calib, AOD, potentially QC WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS_START + if [[ "$ALIEN_JDL_KEEPQCSEPARATE" == "1" ]]; then + echo "QC will be run as last step, removing it from 3rd step" + for i in QC; do + export WORKFLOW_PARAMETERS=$(echo $WORKFLOW_PARAMETERS | sed -e "s/,$i,/,/g" -e "s/^$i,//" -e "s/,$i"'$'"//" -e "s/^$i"'$'"//") + done + fi echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" - echo "Step 3) matching, QC, calib, AOD" - echo -e "\nStep 3) matching, QC, calib, AOD" >> workflowconfig.log + echo "Step 3) matching, calib, AOD, potentially QC" + echo -e "\nStep 3) matching, calib, AOD, potentially QC" >> workflowconfig.log export TIMEFRAME_RATE_LIMIT=0 echo "Removing detectors $DETECTORS_EXCLUDE" READER_DELAY=${ALIEN_JDL_READERDELAY:-30} @@ -607,13 +613,45 @@ else fi fi fi + if [[ "$ALIEN_JDL_KEEPQCSEPARATE" == "1" ]]; then + if ([[ -z "$ALIEN_JDL_SSPLITSTEP" ]] && [[ -z "$ALIEN_JDL_SSPLITSTEP" ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq 4 ]] || ( [[ -n $ALIEN_JDL_STARTSPLITSTEP ]] && [[ "$ALIEN_JDL_STARTSPLITSTEP" -le 4 ]]) || [[ "$ALIEN_JDL_SSPLITSTEP" -eq "all" ]]; then + # 4. QC + WORKFLOW_PARAMETERS="QC" + echo "WORKFLOW_PARAMETERS=$WORKFLOW_PARAMETERS" + echo "Step 4) QC" + echo -e "\nStep 4) QC" >> workflowconfig.log + export TIMEFRAME_RATE_LIMIT=0 + echo "Removing detectors $DETECTORS_EXCLUDE" + env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=print TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list >> workflowconfig.log + # run it + if [[ "0$RUN_WORKFLOW" != "00" ]]; then + timeStart=`date +%s` + time env $SETTING_ROOT_OUTPUT IS_SIMULATED_DATA=0 WORKFLOWMODE=run TFDELAY=$TFDELAYSECONDS WORKFLOW_DETECTORS=ALL WORKFLOW_DETECTORS_EXCLUDE=$DETECTORS_EXCLUDE WORKFLOW_DETECTORS_USE_GLOBAL_READER_TRACKS=ALL WORKFLOW_DETECTORS_USE_GLOBAL_READER_CLUSTERS=ALL WORKFLOW_DETECTORS_EXCLUDE_GLOBAL_READER_TRACKS=HMP WORKFLOW_DETECTORS_EXCLUDE_QC=CPV,$DETECTORS_EXCLUDE ./run-workflow-on-inputlist.sh $INPUT_TYPE list.list + exitcode=$? 
+ timeEnd=`date +%s` + timeUsed=$(( $timeUsed+$timeEnd-$timeStart )) + delta=$(( $timeEnd-$timeStart )) + echo "Time spent in running the workflow, Step 4 = $delta s" + echo "exitcode = $exitcode" + if [[ $exitcode -ne 0 ]]; then + echo "exit code from Step 4 of processing is " $exitcode > validation_error.message + echo "exit code from Step 4 of processing is " $exitcode + exit $exitcode + fi + mv latest.log latest_reco_4.log + if [[ -f performanceMetrics.json ]]; then + mv performanceMetrics.json performanceMetrics_4.json + fi + fi + fi + fi fi # now extract all performance metrics if [[ $ALIEN_JDL_EXTRACTMETRICS == "1" ]]; then IFS=$'\n' timeStart=`date +%s` - for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json ; do + for perfMetricsFiles in performanceMetrics.json performanceMetrics_1.json performanceMetrics_2.json performanceMetrics_3.json performanceMetrics_4.json ; do suffix=`echo $perfMetricsFiles | sed 's/performanceMetrics\(.*\).json/\1/'` if [[ -f "performanceMetrics.json" ]]; then for workflow in `grep ': {' $perfMetricsFiles`; do From c95a0484615cc6970332498d1d437f40ca84c173 Mon Sep 17 00:00:00 2001 From: Jeremy Wilkinson Date: Sat, 2 Mar 2024 10:11:30 +0100 Subject: [PATCH 51/53] Fix converter logic for o2-analysis-v0converter in test workflow (#1493) * fix converter logic for o2v0converter in test workflow * add ft0-corrected-table --- MC/analysis_testing/o2dpg_analysis_test_workflow.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index c50ed6999..de50128ed 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -190,8 +190,9 @@ def get_additional_workflows(input_aod): o2_analysis_converters = {"O2collision_001": "o2-analysis-collision-converter --doNotSwap", "O2zdc_001": "o2-analysis-zdc-converter", "O2bc_001": "o2-analysis-bc-converter", - "O2v0_001": "o2-analysis-v0converter", - "O2trackextra_001": "o2-analysis-tracks-extra-converter"} + "O2v0_002": "o2-analysis-v0converter", + "O2trackextra_001": "o2-analysis-tracks-extra-converter", + "O2ft0corrected": "o2-analysis-ft0-corrected-table"} for i in froot.GetListOfKeys(): if "DF_" not in i.GetName(): continue From 3d0840e5094d8ed12cc73b586aa91a6b05970a1e Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 4 Mar 2024 12:13:49 +0100 Subject: [PATCH 52/53] [SimWF] Use __global_init_task__ more consistently (#1518) * centralise function that creates the task * apply also when using AnalysisQC CLI Co-authored-by: Benedikt Volkel --- .../o2dpg_analysis_test_workflow.py | 8 +++-- MC/bin/o2dpg_sim_workflow.py | 18 ++-------- MC/bin/o2dpg_workflow_utils.py | 33 +++++++++++++++++-- 3 files changed, 39 insertions(+), 20 deletions(-) diff --git a/MC/analysis_testing/o2dpg_analysis_test_workflow.py b/MC/analysis_testing/o2dpg_analysis_test_workflow.py index de50128ed..2bcd2038a 100755 --- a/MC/analysis_testing/o2dpg_analysis_test_workflow.py +++ b/MC/analysis_testing/o2dpg_analysis_test_workflow.py @@ -80,7 +80,7 @@ o2dpg_workflow_utils = importlib.util.module_from_spec(spec) sys.modules[module_name] = o2dpg_workflow_utils spec.loader.exec_module(o2dpg_workflow_utils) -from o2dpg_workflow_utils import createTask, dump_workflow +from o2dpg_workflow_utils import createTask, dump_workflow, createGlobalInitTask module_name = "o2dpg_analysis_test_utils" spec = 
importlib.util.spec_from_file_location(module_name, join(O2DPG_ROOT, "MC", "analysis_testing", "o2dpg_analysis_test_utils.py")) @@ -322,7 +322,9 @@ def run(args): print("ERROR: QC upload was requested, however in that case a --pass-name and --period-name are required") return 1 - workflow = [] + ### setup global environment variables which are valid for all tasks, set as first task + global_env = {"ALICEO2_CCDB_CONDITION_NOT_AFTER": args.condition_not_after} if args.condition_not_after else None + workflow = [createGlobalInitTask(global_env)] add_analysis_tasks(workflow, args.input_file, expanduser(args.analysis_dir), is_mc=args.is_mc, analyses_only=args.only_analyses, autoset_converters=args.autoset_converters, include_disabled_analyses=args.include_disabled, timeout=args.timeout, collision_system=args.collision_system, add_common_args=args.add_common_args) if args.with_qc_upload: add_analysis_qc_upload_tasks(workflow, args.period_name, args.run_number, args.pass_name) @@ -350,6 +352,8 @@ def main(): parser.add_argument("--timeout", type=int, default=None, help="Timeout for analysis tasks in seconds.") parser.add_argument("--collision-system", dest="collision_system", help="Set the collision system. If not set, tried to be derived from ALIEN_JDL_LPMInterationType. Fallback to pp") parser.add_argument("--add-common-args", dest="add_common_args", nargs="*", help="Pass additional common arguments per analysis, for instance --add-common-args EMCAL-shm-segment-size 2500000000 will add --shm-segment-size 2500000000 to the EMCAL analysis") + parser.add_argument('--condition-not-after', dest="condition_not_after", type=int, help="only consider CCDB objects not created after this timestamp (for TimeMachine)", default=3385078236000) + parser.set_defaults(func=run) args = parser.parse_args() return(args.func(args)) diff --git a/MC/bin/o2dpg_sim_workflow.py b/MC/bin/o2dpg_sim_workflow.py index d0812d42c..31d2ad06a 100755 --- a/MC/bin/o2dpg_sim_workflow.py +++ b/MC/bin/o2dpg_sim_workflow.py @@ -20,7 +20,7 @@ import sys import importlib.util import argparse -from os import environ, mkdir, getcwd +from os import environ, mkdir from os.path import join, dirname, isdir, isabs import random import json @@ -326,20 +326,8 @@ def extractVertexArgs(configKeyValuesStr, finalDiamondDict): workflow['stages'] = [] ### setup global environment variables which are valid for all tasks -globalenv = {} -if args.condition_not_after: - # this is for the time-machine CCDB mechanism - globalenv['ALICEO2_CCDB_CONDITION_NOT_AFTER'] = args.condition_not_after - # this is enforcing the use of local CCDB caching - if environ.get('ALICEO2_CCDB_LOCALCACHE') == None: - print ("ALICEO2_CCDB_LOCALCACHE not set; setting to default " + getcwd() + '/ccdb') - globalenv['ALICEO2_CCDB_LOCALCACHE'] = getcwd() + "/ccdb" - else: - # fixes the workflow to use and remember externally provided path - globalenv['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE') - globalenv['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}' - -globalinittask = createGlobalInitTask(globalenv) +global_env = {'ALICEO2_CCDB_CONDITION_NOT_AFTER': args.condition_not_after} if args.condition_not_after else None +globalinittask = createGlobalInitTask(global_env) globalinittask['cmd'] = 'o2-ccdb-cleansemaphores -p ${ALICEO2_CCDB_LOCALCACHE}' workflow['stages'].append(globalinittask) #### diff --git a/MC/bin/o2dpg_workflow_utils.py b/MC/bin/o2dpg_workflow_utils.py index 748129de2..18fd600c9 100755 --- 
a/MC/bin/o2dpg_workflow_utils.py +++ b/MC/bin/o2dpg_workflow_utils.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 +from os import environ, getcwd from copy import deepcopy import json @@ -84,18 +85,44 @@ def createTask(name='', needs=[], tf=-1, cwd='./', lab=[], cpu=1, relative_cpu=N 'cwd' : cwd } -def createGlobalInitTask(envdict): +def createGlobalInitTask(keys_values=None, set_defaults=True): """Returns a special task that is recognized by the executor as a task whose environment section is to be globally applied to all tasks of a workflow. - envdict: dictionary of environment variables and values to be globally applied to all tasks + Args: + keys_values: dict or None + dictionary of environment variables and values to be globally applied to all tasks + if sharing keys with defaults, keys_values takes precedence + set_defaults: bool + whether or not some default values will be added + + Returns: + dict: task dictionary """ + + # dictionary holding global environment to be passed to task + env_dict = {} + + if set_defaults: + if environ.get('ALICEO2_CCDB_LOCALCACHE') is None: + print ("ALICEO2_CCDB_LOCALCACHE not set; setting to default " + getcwd() + '/ccdb') + env_dict['ALICEO2_CCDB_LOCALCACHE'] = getcwd() + "/ccdb" + else: + # fixes the workflow to use and remember externally provided path + env_dict['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE') + env_dict['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}' + + if keys_values: + # keys_values takes priority in case of same keys + env_dict |= keys_values + t = createTask(name = '__global_init_task__') t['cmd'] = 'NO-COMMAND' - t['env'] = envdict + t['env'] = env_dict return t + def summary_workflow(workflow): print("=== WORKFLOW SUMMARY ===\n") print(f"-> There are {len(workflow)} tasks") From 826526fdecf78c66359eba4f3fa3ef4c1cdc6973 Mon Sep 17 00:00:00 2001 From: benedikt-voelkel Date: Mon, 4 Mar 2024 15:29:35 +0100 Subject: [PATCH 53/53] Run only analyses, no QCDB upload (#1519) Achieved by changing `-tt Analysis_` to `--target-labels Analysis`. Upload tasks to not have that label, instead they can be triggered with `--target-labels AnalysisUpload` --- MC/run/examples/O2DPG_pp_minbias.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MC/run/examples/O2DPG_pp_minbias.sh b/MC/run/examples/O2DPG_pp_minbias.sh index be23c9d80..f9b2a99c7 100755 --- a/MC/run/examples/O2DPG_pp_minbias.sh +++ b/MC/run/examples/O2DPG_pp_minbias.sh @@ -58,7 +58,7 @@ fi RETANA=0 if [ "${DOANALYSIS}" != "" ] && [ "${RETMC}" = "0" ]; then # run test analyses if requested - ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json -tt "Analysis_" ${MEMLIMIT} ${CPULIMIT} + ${O2DPG_ROOT}/MC/bin/o2_dpg_workflow_runner.py -f workflow.json --target-labels Analysis ${MEMLIMIT} ${CPULIMIT} RETANA=${?} fi
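
A note on the createGlobalInitTask() rework in PATCH 52/53 above: the merge order is deliberate — the CCDB-cache defaults are filled in first, and any caller-supplied keys are applied on top, so the caller always wins on conflicting keys. The following standalone sketch reproduces just that precedence so it can be exercised in isolation. It is an illustration, not part of the patch series: it assumes Python >= 3.9 (for the dict-merge operator |=), and the helper name merge_global_env is hypothetical, while the environment-variable names mirror the patch.

#!/usr/bin/env python3
# Sketch of the precedence rules applied by the reworked
# createGlobalInitTask() in MC/bin/o2dpg_workflow_utils.py.
# merge_global_env is a hypothetical name used for illustration only.

from os import environ, getcwd

def merge_global_env(keys_values=None, set_defaults=True):
    env_dict = {}
    if set_defaults:
        # default local CCDB cache: either the externally provided
        # ALICEO2_CCDB_LOCALCACHE or <cwd>/ccdb, as in the patch
        env_dict['ALICEO2_CCDB_LOCALCACHE'] = environ.get('ALICEO2_CCDB_LOCALCACHE', getcwd() + '/ccdb')
        env_dict['IGNORE_VALIDITYCHECK_OF_CCDB_LOCALCACHE'] = '${ALICEO2_CCDB_LOCALCACHE:+"ON"}'
    if keys_values:
        env_dict |= keys_values  # caller-provided keys take precedence (Python >= 3.9)
    return env_dict

if __name__ == '__main__':
    # caller side as in o2dpg_sim_workflow.py after the patch: only the
    # TimeMachine timestamp is passed; the CCDB defaults are filled in here
    print(merge_global_env({'ALICEO2_CCDB_CONDITION_NOT_AFTER': 3385078236000}))

This also shows why o2dpg_analysis_test_workflow.py can now simply prepend createGlobalInitTask(global_env) as the first workflow task: the CCDB-cache setup no longer has to be duplicated at each call site.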