 cd ${WORKING_DIR}

 # now we submit all the jobs in the background and wait for them to return
+declare -A logfiles
+declare -A urls
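+# logfiles maps each submit script to its local log file; urls maps it to the AliEn monitoring URL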
 for s in `ls submit*.sh`; do
   echo "submitting ${s}"
   export GRID_SUBMIT_WORKDIR="${WORKING_DIR}/${s}_workdir"
   (
     bash ${s} &> log_${s}
     echo "Job ${s} returned"
   ) &
+  logfiles["$s"]="log_${s}"
 done

-# for for all (GRID) jobs to return
+# Next stage is to wait until all jobs are actually running on
+# AliEn
+waitcounter=0
+maxwait=100
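+# Poll each pending log once per second for the AliEn monitoring URL; stop after maxwait
+# iterations so a job that never prints the URL does not block the script forever.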
+while (( ${#logfiles[@]} > 0 && waitcounter < maxwait )); do
+  for script in "${!logfiles[@]}"; do
+    logfile=${logfiles["$script"]}
+    if grep -q "https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=" "$logfile" 2> /dev/null; then
+      # Extract URL: strip ANSI codes, find URL, take first match
+      url=$(sed 's/\x1B\[[0-9;]*[a-zA-Z]//g' "$logfile" \
+            | grep -o 'https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=[0-9]*' \
+            | head -n1)
+
+      echo "Job ${script} has AliEn job URL: ${url}"
+      urls["$script"]=${url}
+      unset logfiles["$script"]
+    fi
+  done
+  sleep 1
+  (( waitcounter++ ))
+done
+
+# wait for all (GRID) jobs to return
 echo "Waiting for jobs to return/finish"
 wait

@@ -123,7 +148,7 @@ for s in `ls submit*.sh`; do
   WORKFLOWS_FOUND=$(alien.py find ${ALIEN_OUTPUT_FOLDER} workflow.json)

   if [[ -z ${WORKFLOWS_FOUND} || -z ${AODS_FOUND} ]]; then
-    echo "❌ Missing files for case $s"
+    echo "❌ Missing files for case $s: Check here for logs ${urls[${s}]}"
     FINAL_SUCCESS=1 # mark as failure
   else
     echo "✅ Files found in $s"