29 changes: 27 additions & 2 deletions MC/run/ANCHOR/tests/test_looper.sh
@@ -92,16 +92,41 @@ done
cd ${WORKING_DIR}

# now we submit all the jobs in the background and wait for them to return
declare -A logfiles
declare -A urls
for s in `ls submit*.sh`; do
echo "submitting ${s}"
export GRID_SUBMIT_WORKDIR="${WORKING_DIR}/${s}_workdir"
(
bash ${s} &> log_${s}
echo "Job ${s} returned"
) &
logfiles["$s"]="log_${s}"
done

# for for all (GRID) jobs to return
# Next stage is to wait until all jobs are actually running on
# AliEn
waitcounter=0
maxwait=100
while (( ${#logfiles[@]} > 0 && waitcounter < maxwait )); do
  for script in "${!logfiles[@]}"; do
    logfile=${logfiles["$script"]}
    if grep -q "https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=" "$logfile" 2>/dev/null; then
      # Extract URL: strip ANSI codes, find URL, take first match
      url=$(sed 's/\x1B\[[0-9;]*[a-zA-Z]//g' "$logfile" \
        | grep -o 'https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=[0-9]*' \
        | head -n1)

      echo "Job ${script} has AliEn job URL: ${url}"
      urls["$script"]=${url}
      unset logfiles["$script"]
    fi
  done
  sleep 1
  ((waitcounter++))
done

# wait for all (GRID) jobs to return
echo "Waiting for jobs to return/finish"
wait

@@ -123,7 +148,7 @@ for s in `ls submit*.sh`; do
  WORKFLOWS_FOUND=$(alien.py find ${ALIEN_OUTPUT_FOLDER} workflow.json)

  if [[ -z ${WORKFLOWS_FOUND} || -z ${AODS_FOUND} ]]; then
    echo "❌ Missing files for case $s"
    echo "❌ Missing files for case $s: Check here for logs ${urls[${s}]}"
    FINAL_SUCCESS=1 # mark as failure
  else
    echo "✅ Files found in $s"
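For context only, and not part of the diff above: a minimal standalone sketch of the URL-extraction pipeline the new wait loop relies on. The log file name, the ANSI colour codes and the pid value are invented for the demo, and GNU sed/grep are assumed.

# create a fake submission log containing a colour-coded alimonitor link (demo data only)
printf 'submitting...\n\033[1;32mhttps://alimonitor.cern.ch/agent/jobs/details.jsp?pid=123456\033[0m\n' > log_submit_demo.sh

# strip ANSI escape sequences, pick out the alimonitor URL, keep the first match
url=$(sed 's/\x1B\[[0-9;]*[a-zA-Z]//g' log_submit_demo.sh \
  | grep -o 'https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=[0-9]*' \
  | head -n1)

echo "extracted: ${url}"   # expected: https://alimonitor.cern.ch/agent/jobs/details.jsp?pid=123456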