#!/bin/bash

# Loops over all test cases defined in a CSV file and executes them.
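#
# Overview (summarizing the steps implemented below):
#   1) build a JDL requirement string from the GRID sites listed in test_GRID_sites.dat
#   2) read the test cases from test_anchor_cases.csv (its header defines the placeholder names)
#   3) instantiate test_anchor_2tag_template.sh once per CSV row
#   4) write a submit_case<N>.sh helper that submits each test via grid_submit.sh
#   5) submit all tests, wait for them, and validate the output (currently disabled by an early exit)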

# The CSV file with the test cases, the template, and the naming of the generated scripts
INPUT_FILE="test_anchor_cases.csv"
INPUT_FILE_STRIPPED=${INPUT_FILE}_clean
TEMPLATE_FILE="test_anchor_2tag_template.sh"
OUTPUT_FILE="test_anchor_generated"

SITES_FILE="test_GRID_sites.dat"

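# Build a JDL requirement string restricting the test jobs to the computing elements (CEs)
# listed in ${SITES_FILE}, one CE name per line. The result has the form
#   ( (other.CE == "SITE_A") || (other.CE == "SITE_B"));
# (SITE_A / SITE_B are placeholders for illustration, not actual entries of the file).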
REQUIRE_STRING=""
{
  while read -r -a values; do
    if [ ! "${REQUIRE_STRING}" == "" ]; then
      REQUIRE_STRING="${REQUIRE_STRING} ||"
    fi
    REQUIRE_STRING="${REQUIRE_STRING} (other.CE == \"${values}\")"
  done
} < ${SITES_FILE}
REQUIRE_STRING="(${REQUIRE_STRING});"

echo "REQUIRE STRING ${REQUIRE_STRING}"

# strip comments from the CSV file
grep -v '#' ${INPUT_FILE} > ${INPUT_FILE_STRIPPED}

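# The first line of the stripped CSV is a header naming the placeholders used in the template;
# each further line is one test case, e.g. (illustrative values only, not taken from the file):
#   %{SOFTWARETAG_SIM},%{SOFTWARETAG_ASYNC},%{PASSNAME},%{COL_SYSTEM},%{RUN_NUMBER},...
#   O2sim/v1,O2Physics/v1,apass1,pp,123456,...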
# Read the header line and convert it into variable names
IFS=',' read -r -a headers < "$INPUT_FILE_STRIPPED"

# Replace placeholders in the header (e.g., %{VAR} → VAR)
for i in "${!headers[@]}"; do
  headers[$i]=$(echo "${headers[$i]}" | sed -E 's/#?%\{//;s/\}//g')
done

# Read and process each subsequent line
{
  read # Skip the header line

  count=1 # Counter for output files
  while IFS=',' read -r -a values; do
    # Assign each value to its corresponding variable
    for i in "${!headers[@]}"; do
      declare "${headers[$i]}"="${values[$i]}"
    done
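    # (declare turns each CSV column into a shell variable named after its header entry,
    #  so the fields of a row become available as ${SOFTWARETAG_SIM}, ${RUN_NUMBER}, ...)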

    # Example: Print assigned variables
    echo "SOFTWARETAG_SIM: $SOFTWARETAG_SIM"
    echo "SOFTWARETAG_ASYNC: $SOFTWARETAG_ASYNC"
    echo "PASSNAME: $PASSNAME"
    echo "COL_SYSTEM: $COL_SYSTEM"
    echo "RUN_NUMBER: $RUN_NUMBER"
    echo "INTERACTIONTYPE: $INTERACTIONTYPE"
    echo "PRODUCTION_TAG: $PRODUCTION_TAG"
    echo "ANCHOR_PRODUCTION: $ANCHOR_PRODUCTION"
    echo "ANCHORYEAR: $ANCHORYEAR"
    echo "SIM_OPTIONS: $SIM_OPTIONS"
    echo "--------------------------------"

    OUTPUT_FILE_FINAL="${OUTPUT_FILE}_case${count}.sh"

    # create final test script with these values
    cp "$TEMPLATE_FILE" "$OUTPUT_FILE_FINAL"
    for var in "${headers[@]}"; do
      sed -i "s|%{$var}|${!var}|g" "$OUTPUT_FILE_FINAL"
    done
    # put the require spec
    sed -i "s/%{JDL_REQUIREMENT}/${REQUIRE_STRING}/g" "$OUTPUT_FILE_FINAL"
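    # (note: the per-variable sed uses '|' as its delimiter, presumably so that values
    #  containing '/' do not break the substitution; the JDL substitution uses '/', which
    #  assumes REQUIRE_STRING contains no slashes)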

    # THIS COULD BE DONE CONDITIONALLY
    # we write a small helper script that submits this test to the GRID
    echo "${O2DPG_ROOT}/GRID/utils/grid_submit.sh --prodsplit 1 --local --singularity --ttl 360 --script ${OUTPUT_FILE_FINAL} --jobname anchorTest${count} --wait --fetch-output" > submit_case${count}.sh
    # TODO: optional local execution
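    # (note: ${O2DPG_ROOT} is expanded here, at generation time, so the generated
    #  submit_case<N>.sh contains the absolute path to grid_submit.sh of the current
    #  O2DPG installation)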

    (( count++ )) # Increment counter for next row
  done
} < "${INPUT_FILE_STRIPPED}" # Redirect file input here to avoid subshell issues
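
# NOTE: everything below this early exit (job submission and output validation)
# is currently not executed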
exit 0

# now we submit all the jobs in the background and wait for them to return
for s in submit*.sh; do
  echo "submitting ${s}"
  bash ${s} &> log_${s} &
done
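# (each submission runs as a background job with its full output captured in log_<script>;
#  these logs are parsed below to locate the local working directories of the jobs)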

# wait for all (GRID) jobs to return
wait

# verify / validate the output produced by these jobs.
# The test is successful if at least one subjob of each test
# produced the AO2D output.
echo "-- Jobs done ... validating --"

for s in submit*.sh; do
  # find out the output path of this submission;
  # the corresponding log contains a line such as
  # "Local working directory is /tmp/alien_work/anchorTest1-20250306-052755"
  TEST_OUTPUT_PATH=$(grep "Local working directory is" log_${s} | awk '{print $5}')

  # see if there is an AO2D.root and a workflow.json in one of the jobs in that folder;
  # find exits with 0 even when nothing matches, so check that it printed at least one path
  find ${TEST_OUTPUT_PATH} -name "AO2D.root" | grep -q .
  SUCCESS_AOD=$?

  find ${TEST_OUTPUT_PATH} -name "workflow.json"
done


echo "-- Cleaning up ..."