Commit fa1a669

Merge branch 'AliceO2Group:master' into master
nasirmehdimalik authored Mar 9, 2024
2 parents f9cba95 + 1cde53e commit fa1a669
Showing 67 changed files with 1,083 additions and 518 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/async-auto-label.yml
@@ -0,0 +1,17 @@
---
name: Apply requested async label

'on':
  issue_comment:
    types:
      - created
      - edited

permissions: {}

jobs:
  apply_async_labels:
    name: Apply requested async label
    uses: alisw/ali-bot/.github/workflows/async-auto-label.yml@master
    permissions:
      pull-requests: write # to update labels
19 changes: 19 additions & 0 deletions .github/workflows/async-list-label.yml
@@ -0,0 +1,19 @@
---
name: Collect and print async labels

'on':
  pull_request_target:
    types:
      - opened
      - reopened
    branches:
      - master

permissions: {}

jobs:
  list_async_labels:
    name: Collect and print async labels
    uses: alisw/ali-bot/.github/workflows/async-list-label.yml@master
    permissions:
      pull-requests: write # to update labels
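
Both new workflows only call reusable workflows maintained in alisw/ali-bot, so the labelling logic itself is not part of this diff. As a rough, hypothetical illustration of the kind of operation they delegate (the real implementation may differ), the same effect can be reproduced manually with the GitHub CLI:

#!/usr/bin/env bash
# Hypothetical sketch only: apply a requested label to a pull request and
# list the labels afterwards. PR_NUMBER and LABEL are placeholders; the real
# logic lives in alisw/ali-bot's reusable workflows.
set -euo pipefail

PR_NUMBER="${1:?usage: $0 <pr-number> <label>}"
LABEL="${2:?usage: $0 <pr-number> <label>}"

# Requires an authenticated gh CLI with permission to edit the PR;
# the workflows above grant pull-requests: write for the same reason.
gh pr edit "$PR_NUMBER" --add-label "$LABEL"

# Roughly what the "Collect and print async labels" job reports.
gh pr view "$PR_NUMBER" --json labels --jq '.labels[].name'
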
37 changes: 0 additions & 37 deletions .github/workflows/check-json-syntax.yml

This file was deleted.

95 changes: 95 additions & 0 deletions .github/workflows/syntax-checks.yml
@@ -0,0 +1,95 @@
---
name: Validate syntax

'on':
  - push
  - pull_request

permissions: {}

jobs:
  json-syntax:
    name: JSON
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Validate syntax for JSON files
        run: |
          error=0
          readarray -d '' json_files < \
            <(find . \( -path ./.git -or -path ./DATA/testing/private \) -prune -false -or -type f -name '*.json' -print0)
          for jsonf in "${json_files[@]}"; do
            echo "::debug::Checking $jsonf..."
            if ! errmsg=$(jq . "$jsonf" 2>&1 >/dev/null); then
              error=1
              echo "Invalid JSON syntax found in $jsonf:" >&2
              printf '::error file=%s,title=%s::%s\n' "$jsonf" 'Invalid JSON syntax' "$errmsg"
            fi
          done
          exit "$error"

  bash-syntax:
    name: Bash
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Validate syntax with bash -n
        run: |
          error=0
          readarray -d '' files < \
            <(find . -path ./.git -prune -false -or -type f -name '*.sh' -print0)
          for bashf in "${files[@]}"; do
            echo "::debug::Checking $bashf..."
            if ! errmsg=$(bash -n "$bashf" 2>&1 >/dev/null); then
              error=1
              echo "Invalid Bash syntax found in $bashf:" >&2
              printf '::error file=%s,title=%s::%s\n' "$bashf" 'Invalid syntax' "$errmsg"
            fi
          done
          exit "$error"

  shellcheck:
    name: Shellcheck
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Run Shellcheck to find errors
        run: |
          error=0
          find . -path ./.git -prune -false -or -type f -name '*.sh' -print0 |
            xargs -0 shellcheck -xf json1 -S error -s bash > errors.json || error=$?
          # Produce code annotations in GitHub's format.
          jq -r '.comments[] | "Error found in \(.file) line \(.line):\n::error file=\(.file),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn)::\(.message)"' errors.json
          exit "$error"

  pylint:
    name: Pylint
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install prerequisites
        run: |
          sudo apt update -y
          sudo apt install -y pylint

      - name: Run Pylint to find errors
        run: |
          error=0
          find . -path ./.git -prune -false -or -type f -name '*.py' -print0 |
            # "import-errors" are shown for valid modules like ROOT, so ignore them.
            xargs -0 pylint -E -f json --disable import-error > errors.json || error=$?
          # Produce code annotations in GitHub's format.
          jq -r '.[] | "Error found in \(.path) line \(.line):\n::error file=\(.path),line=\(.line),endLine=\(.endLine),col=\(.column),endColumn=\(.endColumn),title=Pylint \(.type) \(.symbol)::\(.message)"' errors.json
          exit "$error"
4 changes: 2 additions & 2 deletions CODEOWNERS
@@ -8,9 +8,9 @@
/DATA/testing/detectors/FV0
/DATA/testing/detectors/HMP
/DATA/testing/detectors/ITS
/DATA/testing/detectors/MCH @aphecethce
/DATA/testing/detectors/MCH @aphecetche
/DATA/testing/detectors/MFT
/DATA/testing/detectors/MID @aphecethce
/DATA/testing/detectors/MID @aphecetche
/DATA/testing/detectors/PHS
/DATA/testing/detectors/TOF @noferini @chiarazampolli
/DATA/testing/detectors/TPC @wiechula
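
The CODEOWNERS change only fixes a misspelled GitHub handle (@aphecethce becomes @aphecetche). As a suggestion that is not part of this commit, such typos can be caught by checking every handle against the GitHub API:

#!/usr/bin/env bash
# Suggestion only (not part of this commit): verify that each @handle in
# CODEOWNERS resolves to an existing GitHub account. Team entries of the
# form @org/team are not covered by this simple check.
set -u

grep -oE '@[A-Za-z0-9_-]+' CODEOWNERS | sort -u | while read -r handle; do
  if ! gh api "users/${handle#@}" >/dev/null 2>&1; then
    echo "Unknown GitHub user in CODEOWNERS: $handle" >&2
  fi
done
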
14 changes: 7 additions & 7 deletions DATA/common/gen_topo_helper_functions.sh
@@ -104,37 +104,37 @@ _check_multiple()

has_detectors()
{
_check_multiple has_detector $@
_check_multiple has_detector "$@"
}

has_detectors_qc()
{
_check_multiple has_detector_qc $@
_check_multiple has_detector_qc "$@"
}

has_detectors_calib()
{
_check_multiple has_detector_calib $@
_check_multiple has_detector_calib "$@"
}

has_detectors_reco()
{
_check_multiple has_detector_reco $@
_check_multiple has_detector_reco "$@"
}

has_detectors_ctf()
{
_check_multiple has_detector_ctf $@
_check_multiple has_detector_ctf "$@"
}

has_detectors_flp_processing()
{
_check_multiple has_detector_flp_processing $@
_check_multiple has_detector_flp_processing "$@"
}

workflow_has_parameters()
{
_check_multiple workflow_has_parameter $@
_check_multiple workflow_has_parameter "$@"
}

add_comma_separated()
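
All wrapper functions in gen_topo_helper_functions.sh now forward their arguments as "$@" instead of $@. The quoted form preserves each argument exactly, while the unquoted form re-splits arguments on whitespace; a small standalone demonstration, not taken from the repository:

#!/usr/bin/env bash
# Demonstrates why _check_multiple is now called with "$@" rather than $@.

count_args() { echo "received $# argument(s)"; }

forward_unquoted() { count_args $@; }    # old pattern: word splitting applies
forward_quoted()   { count_args "$@"; }  # new pattern: arguments preserved

forward_unquoted "ITS TPC" TOF   # prints: received 3 argument(s)
forward_quoted   "ITS TPC" TOF   # prints: received 2 argument(s)

In these helpers the arguments are detector names without spaces, so behaviour does not change in practice, but the quoted form is the robust one and avoids the corresponding ShellCheck finding.
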
13 changes: 11 additions & 2 deletions DATA/common/setenv.sh
@@ -104,12 +104,14 @@ if [[ -z "${RAWINPUTDIR:-}" ]]; then export RAWINPUTDIR=$FILEWORKDIR; fi #
if [[ -z "${EPNSYNCMODE:-}" ]]; then export EPNSYNCMODE=0; fi # Is this workflow supposed to run on EPN for sync processing? Will enable InfoLogger / metrics / fetching QC JSONs from consul...
if [[ -z "${BEAMTYPE:-}" ]]; then export BEAMTYPE=PbPb; fi # Beam type, must be PbPb, pp, pPb, cosmic, technical
if [[ -z "${RUNTYPE:-}" ]]; then export RUNTYPE=Standalone; fi # Run Type, standalone for local tests, otherwise PHYSICS, COSMICS, TECHNICAL, SYNTHETIC
if [[ $RUNTYPE == "SYNTHETIC" ]]; then export IS_SIMULATED_DATA=1; fi # For SYNTHETIC runs we always process simulated data
if [[ -z "${IS_SIMULATED_DATA:-}" ]]; then export IS_SIMULATED_DATA=1; fi # processing simulated data
if [[ -z "${IS_TRIGGERED_DATA:-}" ]]; then export IS_TRIGGERED_DATA=0; fi # processing triggered data (TPC triggered instead of continuous)
if [[ -z "${CTF_DIR:-}" ]]; then CTF_DIR=$FILEWORKDIR; fi # Directory where to store CTFs
if [[ -z "${CALIB_DIR:-}" ]]; then CALIB_DIR="/dev/null"; fi # Directory where to store output from calibration workflows, /dev/null : skip their writing
if [[ -z "${EPN2EOS_METAFILES_DIR:-}" ]]; then EPN2EOS_METAFILES_DIR="/dev/null"; fi # Directory where to store epn2eos files metada, /dev/null : skip their writing
if [[ -z "${TPC_CORR_SCALING:-}" ]]; then export TPC_CORR_SCALING=""; fi # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param
if [[ -z "${DCSCCDBSERVER:-}" ]]; then export DCSCCDBSERVER="http://alio2-cr1-flp199-ib:8083"; fi # server for transvering calibration data to DCS

if [[ $EPNSYNCMODE == 0 ]]; then
if [[ -z "${SHMSIZE:-}" ]]; then export SHMSIZE=$(( 8 << 30 )); fi # Size of shared memory for messages
if [[ -z "${NGPUS:-}" ]]; then export NGPUS=1; fi # Number of GPUs to use, data distributed round-robin
@@ -134,7 +136,7 @@ else # Defaults when running on the EPN
if [[ -z "${SHMTHROW:-}" ]]; then export SHMTHROW=0; fi
if [[ -z "${TIMEFRAME_SHM_LIMIT:-}" ]]; then export TIMEFRAME_SHM_LIMIT=$(( $SHMSIZE / 2 )); fi
if [[ -z "${EDJSONS_DIR:-}" ]]; then export EDJSONS_DIR="/scratch/services/ed/jsons_${RUNTYPE}"; fi
if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="TOF,CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF
if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then export WORKFLOW_DETECTORS_FLP_PROCESSING="CTP"; fi # Current default in sync processing is that FLP processing is only enabled for TOF
if [[ -z "${GEN_TOPO_AUTOSCALE_PROCESSES:-}" ]]; then export GEN_TOPO_AUTOSCALE_PROCESSES=1; fi # On the EPN we should make sure to always use the node to the full extent
fi
# Some more options for running on the EPN
@@ -163,6 +165,13 @@ DISABLE_ROOT_INPUT="--disable-root-input"
: ${DISABLE_DIGIT_CLUSTER_INPUT="--clusters-from-upstream"}

# Special detector related settings
if [[ -z "${TPC_CORR_SCALING:-}" ]]; then # TPC corr.map lumi scaling options, any combination of --lumi-type <0,1,2> --corrmap-lumi-mode <0,1> and TPCCorrMap... configurable param
TPC_CORR_SCALING=
if ( [[ $BEAMTYPE == "pp" ]] || [[ $BEAMTYPE == "PbPb" ]] ) && has_detector CTP; then TPC_CORR_SCALING+="--lumi-type 1 TPCCorrMap.lumiInstFactor=2.414"; fi
if [[ $BEAMTYPE == "cosmic" ]]; then TPC_CORR_SCALING=" TPCCorrMap.lumiMean=-1;"; fi # for COSMICS we disable all corrections
export TPC_CORR_SCALING=$TPC_CORR_SCALING
fi

MID_FEEID_MAP="$FILEWORKDIR/mid-feeId_mapper.txt"

ITSMFT_STROBES=""
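
setenv.sh relies on two related parameter-expansion idioms: ${VAR:-} applies the default when the variable is unset or empty, while ${VAR+x} tests only whether it is set at all, so an explicitly empty value is respected. A short sketch using two defaults taken from the lines above:

#!/usr/bin/env bash
# Sketch of the two default-setting idioms used in setenv.sh.

# ${VAR:-}: unset and empty are treated the same; both receive the default.
if [[ -z "${BEAMTYPE:-}" ]]; then export BEAMTYPE=PbPb; fi

# ${VAR+x}: only a truly unset variable gets the default, so exporting
# WORKFLOW_DETECTORS_FLP_PROCESSING="" beforehand keeps FLP processing
# disabled instead of being overwritten with the default.
if [[ -z "${WORKFLOW_DETECTORS_FLP_PROCESSING+x}" ]]; then
  export WORKFLOW_DETECTORS_FLP_PROCESSING="CTP"
fi

echo "BEAMTYPE=$BEAMTYPE"
echo "FLP processing detectors: '$WORKFLOW_DETECTORS_FLP_PROCESSING'"
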
49 changes: 40 additions & 9 deletions DATA/common/setenv_calib.sh
@@ -13,8 +13,10 @@ SOURCE_GUARD_SETENV_CALIB=1

# define the conditions for each calibration
if has_detector_calib ITS && has_detectors_reco ITS && has_detector_matching PRIMVTX && [[ ! -z "$VERTEXING_SOURCES" ]]; then CAN_DO_CALIB_PRIMVTX_MEANVTX=1; else CAN_DO_CALIB_PRIMVTX_MEANVTX=0; fi
if has_detector_calib ITS ; then CAN_DO_CALIB_ITS_DEADMAP_TIME=1; else CAN_DO_CALIB_ITS_DEADMAP_TIME=0; fi
if has_detector_calib MFT ; then CAN_DO_CALIB_MFT_DEADMAP_TIME=1; else CAN_DO_CALIB_MFT_DEADMAP_TIME=0; fi
if has_detector_calib TOF && has_detector_reco TOF; then CAN_DO_CALIB_TOF_DIAGNOSTICS=1; CAN_DO_CALIB_TOF_INTEGRATEDCURR=1; else CAN_DO_CALIB_TOF_DIAGNOSTICS=0; CAN_DO_CALIB_TOF_INTEGRATEDCURR=0; fi
if has_detector_calib TOF && has_detector_reco TOF && (( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF )); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi
if has_detector_calib TOF && has_detector_reco TOF && ( ( has_detectors_reco ITS TPC && has_detector_matching ITSTPCTOF ) || ( has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPCTRDTOF ) ); then CAN_DO_CALIB_TOF_LHCPHASE=1; CAN_DO_CALIB_TOF_CHANNELOFFSETS=1; else CAN_DO_CALIB_TOF_LHCPHASE=0; CAN_DO_CALIB_TOF_CHANNELOFFSETS=0; fi
if has_detector_calib TPC && has_detectors ITS TPC TOF TRD && has_detector_matching ITSTPCTRDTOF; then CAN_DO_CALIB_TPC_SCDCALIB=1; else CAN_DO_CALIB_TPC_SCDCALIB=0; fi
if has_detector_calib TPC && has_processing_step TPC_DEDX; then CAN_DO_CALIB_TPC_TIMEGAIN=1; CAN_DO_CALIB_TPC_RESPADGAIN=1; else CAN_DO_CALIB_TPC_TIMEGAIN=0; CAN_DO_CALIB_TPC_RESPADGAIN=0; fi
if has_detector_calib TPC && has_detectors ITS TPC && has_detector_matching ITSTPC; then CAN_DO_CALIB_TPC_VDRIFTTGL=1; else CAN_DO_CALIB_TPC_VDRIFTTGL=0; fi
@@ -36,8 +38,9 @@ if [[ $SYNCMODE != 1 ]] && has_detector_reco TPC && has_detector_reco ITS && has
# additional individual settings for calibration workflows
if has_detector CTP; then export CALIB_TPC_SCDCALIB_CTP_INPUT="--enable-ctp"; else export CALIB_TPC_SCDCALIB_CTP_INPUT=""; fi
if [[ ${DISABLE_TRD_PH:-} == 1 ]]; then CAN_DO_CALIB_TRD_T0=0; fi
# the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here
: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600}

: ${CALIB_TPC_SCDCALIB_SLOTLENGTH:=600} # the slot length needs to be known both on the aggregator and the processing nodes, therefore it is defined (in seconds!) here
: ${CALIB_TPC_SCDCALIB_SENDTRKDATA:=1} # by default, we want to write the track information in addition to unbinned residuals to allow finer filtering offline

if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then

Expand All @@ -47,7 +50,17 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then
if [[ $CAN_DO_CALIB_PRIMVTX_MEANVTX == 1 ]]; then
if [[ -z ${CALIB_PRIMVTX_MEANVTX+x} ]]; then CALIB_PRIMVTX_MEANVTX=1; fi
fi


# calibrations for ITS
if [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 1 ]]; then
if [[ -z ${CALIB_ITS_DEADMAP_TIME+x} ]]; then CALIB_ITS_DEADMAP_TIME=1; fi
fi

# calibrations for MFT
if [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 1 ]]; then
if [[ -z ${CALIB_MFT_DEADMAP_TIME+x} ]]; then CALIB_MFT_DEADMAP_TIME=1; fi
fi

# calibrations for TOF
if [[ $CAN_DO_CALIB_TOF_DIAGNOSTICS == 1 ]]; then
if [[ -z ${CALIB_TOF_DIAGNOSTICS+x} ]]; then CALIB_TOF_DIAGNOSTICS=1; fi
@@ -75,15 +88,25 @@ if [[ $BEAMTYPE != "cosmic" ]] || [[ ${FORCECALIBRATIONS:-} == 1 ]] ; then
if [[ $CAN_DO_CALIB_TPC_VDRIFTTGL == 1 ]]; then
if [[ -z ${CALIB_TPC_VDRIFTTGL+x} ]]; then CALIB_TPC_VDRIFTTGL=1; fi
fi
# IDCs
# IDCs (by default we enable it for running the synch. reco on the EPNs, but not on staging since we have only 1 calibration node available)
if [[ $CAN_DO_CALIB_TPC_IDC == 1 ]]; then
if [[ -z ${CALIB_TPC_IDC+x} ]] || [[ $CALIB_TPC_IDC == 0 ]]; then
CALIB_TPC_IDC=0; # default is off
if [[ -z ${CALIB_TPC_IDC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]] && [[ "${GEN_TOPO_DEPLOYMENT_TYPE:-}" != "ALICE_STAGING" ]]; then
CALIB_TPC_IDC=1;
else
CALIB_TPC_IDC=0;
fi
fi
fi
# SAC
# SAC (by default we enable it for running the synch. reco on the EPNs)
if [[ $CAN_DO_CALIB_TPC_SAC == 1 ]]; then
if [[ -z ${CALIB_TPC_SAC+x} ]]; then CALIB_TPC_SAC=0; fi # default is off
if [[ -z ${CALIB_TPC_SAC+x} ]]; then
if [[ $EPNSYNCMODE == 1 ]]; then
CALIB_TPC_SAC=1;
else
CALIB_TPC_SAC=0;
fi
fi
fi

# calibrations for TRD
@@ -174,6 +197,8 @@ fi
( [[ -z ${CALIB_PHS_L1PHASE:-} ]] || [[ $CAN_DO_CALIB_PHS_L1PHASE == 0 ]] ) && CALIB_PHS_L1PHASE=0
( [[ -z ${CALIB_CPV_GAIN:-} ]] || [[ $CAN_DO_CALIB_CPV_GAIN == 0 ]] ) && CALIB_CPV_GAIN=0
( [[ -z ${CALIB_ZDC_TDC:-} ]] || [[ $CAN_DO_CALIB_ZDC_TDC == 0 ]] ) && CALIB_ZDC_TDC=0
( [[ -z ${CALIB_ITS_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_ITS_DEADMAP_TIME == 0 ]] ) && CALIB_ITS_DEADMAP_TIME=0
( [[ -z ${CALIB_MFT_DEADMAP_TIME:-} ]] || [[ $CAN_DO_CALIB_MFT_DEADMAP_TIME == 0 ]] ) && CALIB_MFT_DEADMAP_TIME=0
# for async:
( [[ -z ${CALIB_EMC_ASYNC_RECALIB:-} ]] || [[ $CAN_DO_CALIB_EMC_ASYNC_RECALIB == 0 ]] ) && CALIB_EMC_ASYNC_RECALIB=0
( [[ -z ${CALIB_ASYNC_EXTRACTTPCCURRENTS:-} ]] || [[ $CAN_DO_CALIB_ASYNC_EXTRACTTPCCURRENTS == 0 ]] ) && CALIB_ASYNC_EXTRACTTPCCURRENTS=0
@@ -217,6 +242,12 @@ if [[ -z ${CALIBDATASPEC_BARREL_TF:-} ]]; then
# prim vtx
if [[ $CALIB_PRIMVTX_MEANVTX == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "pvtx:GLO/PVTX/0"; fi

# ITS
if [[ $CALIB_ITS_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "itsChipStatus:ITS/CHIPSSTATUS/0"; fi

# MFT
if [[ $CALIB_MFT_DEADMAP_TIME == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "mftChipStatus:MFT/CHIPSSTATUS/0"; fi

# TOF
if [[ $CALIB_TOF_LHCPHASE == 1 ]] || [[ $CALIB_TOF_CHANNELOFFSETS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "calibTOF:TOF/CALIBDATA/0"; fi
if [[ $CALIB_TOF_DIAGNOSTICS == 1 ]]; then add_semicolon_separated CALIBDATASPEC_BARREL_TF "diagWords:TOF/DIAFREQ/0"; fi
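
setenv_calib.sh follows a three-step pattern for every calibration, including the new ITS and MFT dead-map ones: a CAN_DO_* flag derived from the available detectors and matching steps, an optional user-provided CALIB_* override, and a final guard that forces the flag to 0 when it is unset or cannot run. A sketch of that pattern with a made-up calibration name (CALIB_XYZ_EXAMPLE is purely illustrative):

#!/usr/bin/env bash
# Sketch of the enable/override/guard pattern used in setenv_calib.sh.
# CALIB_XYZ_EXAMPLE and CAN_DO_CALIB_XYZ_EXAMPLE are illustrative names only.

CAN_DO_CALIB_XYZ_EXAMPLE=1   # step 1: normally derived from has_detector_calib etc.

# step 2: default to "on" only if the user did not set the variable explicitly
if [[ $CAN_DO_CALIB_XYZ_EXAMPLE == 1 ]]; then
  if [[ -z ${CALIB_XYZ_EXAMPLE+x} ]]; then CALIB_XYZ_EXAMPLE=1; fi
fi

# step 3: force "off" when unset or not possible, so later code can rely on 0/1
( [[ -z ${CALIB_XYZ_EXAMPLE:-} ]] || [[ $CAN_DO_CALIB_XYZ_EXAMPLE == 0 ]] ) && CALIB_XYZ_EXAMPLE=0

echo "CALIB_XYZ_EXAMPLE=$CALIB_XYZ_EXAMPLE"
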
2 changes: 1 addition & 1 deletion DATA/production/calib/hmp-pedestals-processing.sh
@@ -57,7 +57,7 @@ fi
SPEC_PARAM=""
if [ $HMP_NODCSCCDB_REC == 'false' ];
then
SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri 'http://alio2-cr1-flp199.cern.ch:8083' --dcsccdb-alivehours 3 "
SPEC_PARAM+="--use-dcsccdb --dcsccdb-uri $DCSCCDBSERVER --dcsccdb-alivehours 3 "
fi
if [ $HMP_CCDB_REC == 'true' ];
then
2 changes: 1 addition & 1 deletion DATA/production/calib/its-noise-aggregator.sh
@@ -17,7 +17,7 @@ fi
if [[ -z $NTHREADS ]] ; then NTHREADS=1; fi

CCDBPATH1="http://o2-ccdb.internal"
CCDBPATH2="http://alio2-cr1-flp199.cern.ch:8083"
CCDBPATH2="$DCSCCDBSERVER"
if [[ $RUNTYPE == "SYNTHETIC" || "${GEN_TOPO_DEPLOYMENT_TYPE:-}" == "ALICE_STAGING" || ! -z $ISTEST ]]; then
CCDBPATH1="http://ccdb-test.cern.ch:8080"
CCDBPATH2="http://ccdb-test.cern.ch:8080"
2 changes: 1 addition & 1 deletion DATA/production/calib/its-threshold-aggregator.sh
@@ -11,7 +11,7 @@ PROXY_INSPEC="tunestring:ITS/TSTR;runtype:ITS/RUNT;fittype:ITS/FITT;scantype:ITS
CCDBPATH1=""
CCDBPATH2=""
if [ $RUNTYPE_ITS == "tuning" ] || [ $RUNTYPE_ITS == "digital" ] || [ $RUNTYPE_ITS == "tuningbb" ]; then
CCDBPATH1="http://alio2-cr1-flp199.cern.ch:8083"
CCDBPATH1="$DCSCCDBSERVER"
CCDBPATH2="http://o2-ccdb.internal"
else
CCDBPATH1="http://o2-ccdb.internal"
2 changes: 1 addition & 1 deletion DATA/production/calib/mch-badchannel-aggregator.sh
@@ -20,7 +20,7 @@ BADCHANNEL_CONFIG="${ARGS_ALL_CONFIG};MCHBadChannelCalibratorParam.maxPed=${MCH_
WORKFLOW="o2-dpl-raw-proxy $ARGS_ALL --proxy-name mch-badchannel-input-proxy --dataspec \"$PROXY_INSPEC\" --network-interface ib0 --channel-config \"name=mch-badchannel-input-proxy,method=bind,type=pull,rateLogging=0,transport=zeromq\" | "
WORKFLOW+="o2-calibration-mch-badchannel-calib-workflow $ARGS_ALL --configKeyValues \"$BADCHANNEL_CONFIG\" | "
WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://o2-ccdb.internal\" --sspec-min 0 --sspec-max 0 | "
WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"http://ali-calib-dcs.cern.ch:8083\" --sspec-min 1 --sspec-max 1 --name-extention dcs | "
WORKFLOW+="o2-calibration-ccdb-populator-workflow $ARGS_ALL --configKeyValues \"$ARGS_ALL_CONFIG\" --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs | "
add_QC_from_consul "/o2/components/qc/ANY/any/mch-badchannel" ""
WORKFLOW+="o2-dpl-run $ARGS_ALL $GLOBALDPLOPT"

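
Several calibration scripts above replace a hard-coded DCS CCDB URL with $DCSCCDBSERVER, whose default is now defined once in DATA/common/setenv.sh. A minimal sketch of the resulting pattern; the populator invocation is abbreviated from the mch-badchannel script above, and the override at the end is a hypothetical example:

#!/usr/bin/env bash
# Centralised DCS CCDB endpoint: the default is defined once (as in
# DATA/common/setenv.sh) and every calibration script reuses it.

if [[ -z "${DCSCCDBSERVER:-}" ]]; then
  export DCSCCDBSERVER="http://alio2-cr1-flp199-ib:8083"
fi

# Abbreviated populator invocation built the same way as in the scripts above.
WORKFLOW="o2-calibration-ccdb-populator-workflow --ccdb-path=\"$DCSCCDBSERVER\" --sspec-min 1 --sspec-max 1 --name-extention dcs"
echo "$WORKFLOW"

# Hypothetical override for a test setup: export before sourcing the scripts.
#   export DCSCCDBSERVER="http://ccdb-test.cern.ch:8080"
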
(Diffs for the remaining changed files are not shown.)