
Commit ba5a233
Merge branch 'AliceO2Group:master' into master
2 parents: 986c8ba + c359dc3

File tree: 114 files changed (+2132 / -133 lines)

Note: large commits have some content hidden by default, so several files below appear without their file names.


DATA/common/setenv.sh

Lines changed: 3 additions & 6 deletions
@@ -195,6 +195,7 @@ TRD_SOURCES=
 TOF_SOURCES=
 HMP_SOURCES=
 TRACK_SOURCES=
+: ${TRACK_SOURCES_GLO:=}
 has_detectors_reco ITS TPC && has_detector_matching ITSTPC && add_comma_separated TRACK_SOURCES "ITS-TPC"
 has_detectors_reco TPC TRD && has_detector_matching TPCTRD && { add_comma_separated TRD_SOURCES TPC; add_comma_separated TRACK_SOURCES "TPC-TRD"; }
 has_detectors_reco ITS TPC TRD && has_detector_matching ITSTPC && has_detector_matching ITSTPCTRD && { add_comma_separated TRD_SOURCES ITS-TPC; add_comma_separated TRACK_SOURCES "ITS-TPC-TRD"; }
@@ -211,6 +212,8 @@ has_detectors_reco HMP TPC TOF && has_detector_matching TPCTOF && add_comma_sepa
 has_detectors_reco HMP TPC TRD TOF && has_detector_matching TPCTRD && has_detector_matching TPCTRDTOF && add_comma_separated HMP_SOURCES "TPC-TRD-TOF"
 has_detectors_reco MFT MCH && has_detector_matching MFTMCH && add_comma_separated TRACK_SOURCES "MFT-MCH"
 has_detectors_reco MCH MID && has_detector_matching MCHMID && add_comma_separated TRACK_SOURCES "MCH-MID"
+[[ "0$TRACK_SOURCES_GLO" == "0" ]] && TRACK_SOURCES_GLO=$TRACK_SOURCES
+
 for det in `echo $LIST_OF_DETECTORS | sed "s/,/ /g"`; do
   if [[ $LIST_OF_ASYNC_RECO_STEPS =~ (^| )${det}( |$) ]]; then
     has_detector ${det} && has_processing_step ${det}_RECO && add_comma_separated TRACK_SOURCES "$det"
@@ -241,12 +244,6 @@ fi
 [[ ! -z $VERTEXING_SOURCES ]] && PVERTEX_CONFIG+=" --vertexing-sources $VERTEXING_SOURCES"
 [[ ! -z $VERTEX_TRACK_MATCHING_SOURCES ]] && PVERTEX_CONFIG+=" --vertex-track-matching-sources $VERTEX_TRACK_MATCHING_SOURCES"

-if [[ -z ${SVERTEXING_SOURCES:-} ]]; then
-  SVERTEXING_SOURCES="$VERTEXING_SOURCES"
-elif [[ "${SVERTEXING_SOURCES^^}" == "NONE" ]]; then
-  SVERTEXING_SOURCES=
-fi
-
 # this option requires well calibrated timing beween different detectors, at the moment suppress it
 #has_detector_reco FT0 && PVERTEX_CONFIG+=" --validate-with-ft0"
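One part of this change introduces TRACK_SOURCES_GLO: the variable is declared empty if the environment does not set it, and later falls back to the full TRACK_SOURCES list when still empty. A minimal sketch of that defaulting pattern, with an illustrative TRACK_SOURCES value (the detector checks of the real script are omitted):

: ${TRACK_SOURCES_GLO:=}                   # define the variable (empty) unless the caller already exported it
TRACK_SOURCES="ITS-TPC,TPC-TRD"            # illustrative value; normally assembled from the enabled detectors
[[ "0$TRACK_SOURCES_GLO" == "0" ]] && TRACK_SOURCES_GLO=$TRACK_SOURCES   # empty -> inherit the full list
echo "$TRACK_SOURCES_GLO"                  # prints the exported override if there was one, otherwise ITS-TPC,TPC-TRD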

GRID/utils/grid_submit.sh

Lines changed: 24 additions & 3 deletions
@@ -208,7 +208,7 @@ while [ $# -gt 0 ] ; do
     --cores) CPUCORES=$2; shift 2 ;; # allow to specify the CPU cores (check compatibility with partition !)
     --dry) DRYRUN="ON"; shift 1 ;; # do a try run and not actually interact with the GRID (just produce local jdl file)
     --o2tag) O2TAG=$2; shift 2 ;; #
-    --packagespec) PACKAGESPEC=$2; shift 2 ;; # the alisw, cvmfs package list (command separated - example: '"VO_ALICE@FLUKA_VMC::4-1.1-vmc3-1","VO_ALICE@O2::daily-20230628-0200-1"')
+    --packagespec) PACKAGESPEC=$2; shift 2 ;; # the alisw, cvmfs package list (command separated - example: '"VO_ALICE@FLUKA_VMC::4-1.1-vmc3-1","VO_ALICE@O2::daily-20230628-0200-1"')
     --asuser) ASUSER=$2; shift 2 ;; #
     --label) JOBLABEL=$2; shift 2 ;; # label identifying the production (e.g. as a production identifier)
     --mattermost) MATTERMOSTHOOK=$2; shift 2 ;; # if given, status and metric information about the job will be sent to this hook
@@ -218,7 +218,8 @@ while [ $# -gt 0 ] ; do
     --wait) WAITFORALIEN=ON; shift 1 ;; #wait for alien jobs to finish
     --wait-any) WAITFORALIENANY=ON; WAITFORALIEN=ON; shift 1 ;; #wait for any good==done alien jobs to return
     --outputspec) OUTPUTSPEC=$2; shift 2 ;; #provide comma separate list of JDL file specs to be put as part of JDL Output field (example '"*.log@disk=1","*.root@disk=2"')
-    -h) Usage ; exit ;;
+    --split-on-collection) DATACOLLECTION=$2; shift 2 ;; # this will split the jobs on InputDataCollection and "file" mode
+    -h) Usage ; exit ;;
     --help) Usage ; exit ;;
     --fetch-output) FETCHOUTPUT=ON; shift 1 ;; # if to fetch all JOB output locally (to make this job as if it ran locally); only works when we block until all JOBS EXIT
     *) break ;;
@@ -355,13 +356,21 @@ if [[ "${IS_ALIEN_JOB_SUBMITTER}" ]]; then
   cd "${GRID_SUBMIT_WORKDIR}"

   QUOT='"'
+  SPLITMODE="production:1-${PRODSPLIT}"
+  if [ "${DATACOLLECTION}" ]; then
+    SPLITMODE="file"
+  fi
   # ---- Generate JDL ----------------
   # TODO: Make this configurable or read from a preamble section in the jobfile
   cat > "${MY_JOBNAMEDATE}.jdl" <<EOF
 Executable = "${MY_BINDIR}/${MY_JOBNAMEDATE}.sh";
 Arguments = "${CONTINUE_WORKDIR:+"-c ${CONTINUE_WORKDIR}"} --local ${O2TAG:+--o2tag ${O2TAG}} --ttl ${JOBTTL} --label ${JOBLABEL:-label} --prodsplit ${PRODSPLIT} ${MATTERMOSTHOOK:+--mattermost ${MATTERMOSTHOOK}} ${CONTROLSERVER:+--controlserver ${CONTROLSERVER}}";
 InputFile = "LF:${MY_JOBWORKDIR}/alien_jobscript.sh";
-${PRODSPLIT:+Split = ${QUOT}production:1-${PRODSPLIT}${QUOT};}
+${DATACOLLECTION:+InputDataList = ${QUOT}input.list${QUOT};}
+${DATACOLLECTION:+InputDataListFormat = ${QUOT}txt-list${QUOT};}
+${DATACOLLECTION:+InputDataCollection = ${QUOT}LF:${MY_JOBWORKDIR}/collection.xml,nodownload${QUOT};}
+${PRODSPLIT:+Split = ${QUOT}${SPLITMODE}${QUOT};}
+${DATACOLLECTION:+SplitMaxInputFileNumber = 1;}
 OutputDir = "${MY_JOBWORKDIR}/${PRODSPLIT:+#alien_counter_03i#}";
 Requirements = member(other.GridPartitions,"${GRIDPARTITION:-multicore_8}");
 CPUCores = "${CPUCORES}";
@@ -382,6 +391,15 @@ EOF
 if [ ! "${DRYRUN}" ]; then
   command_file="alien_commands.txt"

+  if [ "$DATACOLLECTION" ]; then
+    pok "Preparing data collection XML file"
+    alien.py toXml $(cat ${DATACOLLECTION}) > collection.xml
+    if [ "$?" != "0" ]; then
+      per "Problem with data collection preparation"
+      exit 1
+    fi
+  fi
+
   pok "Preparing job \"$MY_JOBNAMEDATE\""
   (
     # assemble all GRID interaction in a single script / transaction
@@ -396,6 +414,9 @@ EOF
     echo "rm ${MY_BINDIR}/${MY_JOBNAMEDATE}.sh" >> ${command_file} # remove current job script
     echo "cp file:${PWD}/${MY_JOBNAMEDATE}.jdl alien://${MY_JOBWORKDIR}/${MY_JOBNAMEDATE}.jdl@DISK=1" >> ${command_file} # copy the jdl
     echo "cp file:${THIS_SCRIPT} alien://${MY_BINDIR}/${MY_JOBNAMEDATE}.sh@DISK=1" >> ${command_file} # copy current job script to AliEn
+    if [ "${DATACOLLECTION}" ]; then
+      echo "cp file:collection.xml alien://${MY_JOBWORKDIR}/collection.xml" >> ${command_file}
+    fi
     [ ! "${CONTINUE_WORKDIR}" ] && echo "cp file:${MY_JOBSCRIPT} alien://${MY_JOBWORKDIR}/alien_jobscript.sh" >> ${command_file}
   ) > alienlog.txt 2>&1
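The new --split-on-collection option takes a local text file listing AliEn LFNs; the script converts it into collection.xml with alien.py toXml, copies it to the job work directory, references it in the JDL via InputDataCollection, and switches the Split mode from production:1-N to "file" with SplitMaxInputFileNumber = 1, so each input file becomes one subjob. A hedged usage sketch (the file name and label are made up, and the other arguments of a normal submission are left out):

# lfns.txt: one AliEn logical file name per line (hypothetical content)
./GRID/utils/grid_submit.sh --split-on-collection lfns.txt --label per-file-splitting ...

# what the script then runs internally for that list (taken from the diff above):
alien.py toXml $(cat lfns.txt) > collection.xml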

MC/bin/o2dpg_sim_workflow.py

Lines changed: 38 additions & 14 deletions
@@ -8,12 +8,12 @@
 #
 # Execution examples:
 # - pp PYTHIA jets, 2 events, triggered on high pT decay photons on all barrel calorimeters acceptance, eCMS 13 TeV
-#   ./o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -mod "--skipModules ZDC" -col pp -eCM 13000 \
+#   ./o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -col pp -eCM 13000 \
 #      -proc "jets" -ptHatBin 3 \
 #      -trigger "external" -ini "\$O2DPG_ROOT/MC/config/PWGGAJE/ini/trigger_decay_gamma_allcalo_TrigPt3_5.ini"
 #
 # - pp PYTHIA ccbar events embedded into heavy-ion environment, 2 PYTHIA events into 1 bkg event, beams energy 2.510
-#   ./o2dpg_sim_workflow.py -e TGeant3 -nb 1 -ns 2 -j 8 -tf 1 -mod "--skipModules ZDC" \
+#   ./o2dpg_sim_workflow.py -e TGeant3 -nb 1 -ns 2 -j 8 -tf 1 \
 #      -col pp -eA 2.510 -proc "ccbar" --embedding
 #

@@ -108,7 +108,8 @@
 parser.add_argument('--force-n-workers', dest='force_n_workers', action='store_true', help='by default, number of workers is re-computed '
                     'for given interaction rate; '
                     'pass this to avoid that')
-parser.add_argument('-mod',help='Active modules (deprecated)', default='--skipModules ZDC')
+parser.add_argument('--skipModules',nargs="*", help="List of modules to skip in geometry budget (and therefore processing)", default=["ZDC"])
+parser.add_argument('--skipReadout',nargs="*", help="List of modules to take out from readout", default=[""])
 parser.add_argument('--with-ZDC', action='store_true', help='Enable ZDC in workflow')
 parser.add_argument('-seed',help='random seed number', default=None)
 parser.add_argument('-o',help='output workflow file', default='workflow.json')
@@ -124,6 +125,7 @@
 # power features (for playing) --> does not appear in help message
 # help='Treat smaller sensors in a single digitization')
 parser.add_argument('--pregenCollContext', action='store_true', help=argparse.SUPPRESS) # Now the default, giving this option or not makes not difference. We keep it for backward compatibility
+parser.add_argument('--data-anchoring', type=str, default='', help="Take collision contexts (from data) stored in this path")
 parser.add_argument('--no-combine-smaller-digi', action='store_true', help=argparse.SUPPRESS)
 parser.add_argument('--no-combine-dpl-devices', action='store_true', help=argparse.SUPPRESS)
 parser.add_argument('--no-mc-labels', action='store_true', default=False, help=argparse.SUPPRESS)
@@ -256,6 +258,19 @@ def load_external_config(configfile):
   print(f"INFO: Written additional config key parameters to JSON {config_key_param_path}")
   json.dump(anchorConfig, f, indent=2)

+# Processing skipped material budget (modules):
+# - If user did NOT specify --with-ZDC
+# - AND ZDC is not already in the list
+# --> append ZDC automatically
+if args.with_ZDC:
+    # User wants ZDC to *not* be skipped → ensure it's removed
+    args.skipModules = [m for m in args.skipModules if m != "ZDC"]
+else:
+    # If user did not request --with-ZDC,
+    # auto-append ZDC unless already present
+    if "ZDC" not in args.skipModules:
+        args.skipModules.append("ZDC")
+
 # with this we can tailor the workflow to the presence of
 # certain detectors
 # these are all detectors that should be assumed active
@@ -266,14 +281,14 @@ def load_external_config(configfile):
    # if "all" here, there was in fact nothing in the anchored script, set to what is passed to this script (which it either also "all" or a subset)
    activeDetectors = readout_detectors
 elif readout_detectors != 'all' and activeDetectors != 'all':
-   # in this case both are comma-seperated lists. Take intersection
+   # in this case both are comma-separated lists. Take intersection
    r = set(readout_detectors.split(','))
    a = set(activeDetectors.split(','))
    activeDetectors = ','.join(r & a)
 # the last case: simply take what comes from the anchored config

 # convert to set/hashmap
-activeDetectors = { det:1 for det in activeDetectors.split(',') }
+activeDetectors = { det:1 for det in activeDetectors.split(',') if det not in args.skipModules and det not in args.skipReadout}
 for det in activeDetectors:
    activate_detector(det)

@@ -407,7 +422,9 @@ def extractVertexArgs(configKeyValuesStr, finalDiamondDict):

 NTIMEFRAMES=int(args.tf)
 NWORKERS=args.n_workers
-MODULES = "--skipModules ZDC" if not isActive("ZDC") else ""
+
+
+SKIPMODULES = " ".join(["--skipModules"] + args.skipModules) if len(args.skipModules) > 0 else ""
 SIMENGINE=args.e
 BFIELD=args.field
 RNDSEED=args.seed # typically the argument should be the jobid, but if we get None the current time is used for the initialisation
@@ -623,7 +640,8 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True, runcommand=True):
                       + ' --extract-per-timeframe tf:sgn' \
                       + ' --with-vertices ' + vtxmode_precoll \
                       + ' --maxCollsPerTF ' + str(args.ns) \
-                      + ' --orbitsEarly ' + str(args.orbits_early)
+                      + ' --orbitsEarly ' + str(args.orbits_early) \
+                      + ('',f" --import-external {args.data_anchoring}")[len(args.data_anchoring) > 0]

 PreCollContextTask['cmd'] += ' --bcPatternFile ccdb' # <--- the object should have been set in (local) CCDB
 if includeQED:
@@ -714,7 +732,7 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True, runcommand=True):
 bkgsimneeds = [BKG_CONFIG_task['name'], GRP_TASK['name'], PreCollContextTask['name']]
 BKGtask=createTask(name='bkgsim', lab=["GEANT"], needs=bkgsimneeds, cpu=NWORKERS)
 BKGtask['cmd']='${O2_ROOT}/bin/o2-sim -e ' + SIMENGINE + ' -j ' + str(NWORKERS) + ' -n ' + str(NBKGEVENTS) \
-               + ' -g ' + str(GENBKG) + ' ' + str(MODULES) + ' -o bkg ' + str(INIBKG) \
+               + ' -g ' + str(GENBKG) + ' ' + str(SKIPMODULES) + ' -o bkg ' + str(INIBKG) \
                + ' --field ccdb ' + str(CONFKEYBKG) \
                + ('',' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run) \
                + ' --vertexMode ' + vtxmode_sgngen \
@@ -945,7 +963,7 @@ def getDPL_global_options(bigshm=False, ccdbbackend=True, runcommand=True):
 sgnmem = 6000 if COLTYPE == 'PbPb' else 4000
 SGNtask=createTask(name='sgnsim_'+str(tf), needs=signalneeds, tf=tf, cwd='tf'+str(tf), lab=["GEANT"],
                    relative_cpu=7/8, n_workers=NWORKERS_TF, mem=str(sgnmem))
-sgncmdbase = '${O2_ROOT}/bin/o2-sim -e ' + str(SIMENGINE) + ' ' + str(MODULES) + ' -n ' + str(NSIGEVENTS) + ' --seed ' + str(TFSEED) \
+sgncmdbase = '${O2_ROOT}/bin/o2-sim -e ' + str(SIMENGINE) + ' ' + str(SKIPMODULES) + ' -n ' + str(NSIGEVENTS) + ' --seed ' + str(TFSEED) \
              + ' --field ccdb -j ' + str(NWORKERS_TF) + ' ' + str(CONFKEY) + ' ' + str(INIFILE) + ' -o ' + signalprefix + ' ' + embeddinto \
              + ' --detectorList ' + args.detectorList \
              + ('', ' --timestamp ' + str(args.timestamp))[args.timestamp!=-1] + ' --run ' + str(args.run)
@@ -1105,6 +1123,7 @@ def putConfigValues(listOfMainKeys=[], localCF = {}, globalTFConfig = True):
   + ' --onlyDet TPC --TPCuseCCDB --interactionRate ' + str(INTRATE) + ' --tpc-lanes ' + str(NWORKERS_TF) \
   + ' --incontext ' + str(CONTEXTFILE) + ' --disable-write-ini --early-forward-policy always --forceSelectedDets ' \
   + ' --tpc-distortion-type ' + str(tpcDistortionType) \
+  + ' --n-threads-distortions 1 ' \
   + putConfigValues(["TPCGasParam","TPCGEMParam","TPCEleParam","TPCITCorr","TPCDetParam"],
                     localCF=tpcLocalCF)
 TPCDigitask['cmd'] += (' --tpc-chunked-writer','')[args.no_tpc_digitchunking]
@@ -1427,7 +1446,7 @@ def getDigiTaskName(det):
                     TPCRECOtask['name'],
                     ITSTPCMATCHtask['name'],
                     TRDTRACKINGtask2['name'] if isActive("TRD") else None]
-toftracksrcdefault = dpl_option_from_config(anchorConfig, 'o2-tof-matcher-workflow', '--track-sources', default_value='TPC,ITS-TPC,TPC-TRD,ITS-TPC-TRD')
+toftracksrcdefault = cleanDetectorInputList(dpl_option_from_config(anchorConfig, 'o2-tof-matcher-workflow', '--track-sources', default_value='TPC,ITS-TPC,TPC-TRD,ITS-TPC-TRD'))
 tofusefit = option_if_available('o2-tof-matcher-workflow', '--use-fit', envfile=async_envfile)
 TOFTPCMATCHERtask = createTask(name='toftpcmatch_'+str(tf), needs=toftpcmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1000')
 tofmatcher_cmd_parts = [
@@ -1495,7 +1514,8 @@ def getDigiTaskName(det):
 MIDRECOtask['cmd'] += task_finalizer(['${O2_ROOT}/bin/o2-mid-reco-workflow',
                                       getDPL_global_options(),
                                       putConfigValues(),('',' --disable-mc')[args.no_mc_labels]])
-workflow['stages'].append(MIDRECOtask)
+if isActive('MID'):
+    workflow['stages'].append(MIDRECOtask)

 #<--------- FDD reco workflow
 FDDRECOtask = createTask(name='fddreco_'+str(tf), needs=[getDigiTaskName("FDD")], tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1500')
@@ -1583,15 +1603,19 @@ def getDigiTaskName(det):
                                       getDPL_global_options(ccdbbackend=False),
                                       putConfigValues(),
                                       ('',' --disable-mc')[args.no_mc_labels]])
-workflow['stages'].append(MCHMIDMATCHtask)
+if isActive("MID") and isActive("MCH"):
+    workflow['stages'].append(MCHMIDMATCHtask)

 #<--------- MFT-MCH forward matching
-MFTMCHMATCHtask = createTask(name='mftmchMatch_'+str(tf), needs=[MCHMIDMATCHtask['name'], MFTRECOtask['name']], tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1500')
+forwardmatchneeds = [MCHRECOtask['name'],
+                     MFTRECOtask['name'],
+                     MCHMIDMATCHtask['name'] if isActive("MID") else None]
+MFTMCHMATCHtask = createTask(name='mftmchMatch_'+str(tf), needs=forwardmatchneeds, tf=tf, cwd=timeframeworkdir, lab=["RECO"], mem='1500')
 MFTMCHMATCHtask['cmd'] = task_finalizer(
     ['${O2_ROOT}/bin/o2-globalfwd-matcher-workflow',
      putConfigValues(['ITSAlpideConfig',
                       'MFTAlpideConfig',
-                      'FwdMatching'],{"FwdMatching.useMIDMatch":"true"}),
+                      'FwdMatching'],{"FwdMatching.useMIDMatch": "true" if isActive("MID") else "false"}),
      ('',' --disable-mc')[args.no_mc_labels]])

 if args.fwdmatching_assessment_full == True:
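Taken together, these changes retire the deprecated -mod option: modules are skipped via the --skipModules list (ZDC is appended automatically unless --with-ZDC is passed), --skipReadout additionally drops detectors from the active/readout list, and --data-anchoring feeds a path with data-derived collision contexts to the context generator through --import-external. A hedged invocation sketch based on the examples in the script header (the detector choices and the anchoring path are illustrative only):

# skip ZDC (the default) and, for illustration, also PHS from geometry and readout
${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -col pp -eCM 13000 \
    --skipModules ZDC PHS --skipReadout PHS -proc "jets"

# keep ZDC in the simulation instead
${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -col pp -eCM 13000 --with-ZDC -proc "jets"

# use collision contexts extracted from data (placeholder path)
${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -e TGeant3 -ns 2 -tf 1 -col pp -eCM 13000 \
    --data-anchoring /path/to/extracted_contexts -proc "jets"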
Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+[Diamond]
+width[2]=6.0
+
+[GeneratorExternal]
+fileName=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator_pythia8_ALICE3.C
+funcName=generator_pythia8_ALICE3()
+
+[GeneratorPythia8]
+config=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator/pythia8_pp_clr-blc0_13tev.cfg

Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+[Diamond]
+width[2]=6.0
+
+[GeneratorExternal]
+fileName=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator_pythia8_ALICE3.C
+funcName=generator_pythia8_ALICE3()
+
+[GeneratorPythia8]
+config=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator/pythia8_pp_clr-blc2_13tev.cfg

Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+[Diamond]
+width[2]=6.0
+
+[GeneratorExternal]
+fileName=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator_pythia8_ALICE3.C
+funcName=generator_pythia8_ALICE3()
+
+[GeneratorPythia8]
+config=${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/pythia8/generator/pythia8_pp_clr-blc3_13tev.cfg
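The three new files above are generator configurations in the usual O2DPG ini format: a [Diamond] vertex width, an external PYTHIA8 generator macro under [GeneratorExternal], and a PYTHIA8 .cfg under [GeneratorPythia8], one each for the blc0, blc2 and blc3 variants. Their file names are hidden in this large-commit view; such a configuration would normally be passed to the workflow through -ini, roughly as sketched below with a placeholder path:

# the .ini path is a placeholder; the real location is not shown in this commit view
${O2DPG_ROOT}/MC/bin/o2dpg_sim_workflow.py -e TGeant3 -ns 2 -j 8 -tf 1 -col pp -eCM 13000 \
    -ini ${O2DPG_MC_CONFIG_ROOT}/MC/config/ALICE3/ini/pythia8_pp_clr-blc0_13tev.ini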
Lines changed: 19 additions & 0 deletions

@@ -0,0 +1,19 @@
+int External()
+{
+  std::string path{"o2sim_Kine.root"};
+
+  TFile file(path.c_str(), "read");
+  if (file.IsZombie()) {
+    std::cerr << "Cannot open ROOT file " << path << std::endl;
+    return 1;
+  }
+
+  TTree* tree = (TTree*)file.Get("o2sim");
+
+  if (!tree) {
+    std::cerr << "Cannot find tree o2sim in file " << path << "\n";
+    return 1;
+  }
+
+  return 0;
+}
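The 19 added lines are a minimal kinematics check: the macro opens o2sim_Kine.root and returns non-zero if the file cannot be opened or its o2sim tree is missing. A hedged way to run it by hand, assuming the macro is saved as External.C (the actual file name is hidden in this view):

# ROOT executes the function whose name matches the macro file; a non-zero return signals a missing file or tree
root -l -b -q External.C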
Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+pythia8_pp_clr-blc0_13tev.C

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+pythia8_pp_clr-blc0_13tev.C
