Stats of processed input files and of uploaded output files, available as CSV or JSON (up to 10000 files included)
Jobscript
#!/bin/bash
:<<'EOF'
To use this jobscript to process 10 files from the dc4-vd-coldbox-bottom
data and put the output in the usertests namespace (MetaCat) and
scope (Rucio), and in the usertests:output-test-01 dataset in MetaCat and
Rucio, use this command to create the workflow:
justin simple-workflow \
--mql "files from justin-tutorial:justin-tutorial-2024 limit 10" \
--jobscript dc4-vd-coldbox-bottom.jobscript --max-distance 30 --rss-mb 4000 \
--scope usertests --output-pattern '*_reco_data_*.root:output-test-01' \
--lifetime-days 1
The following optional environment variables can be set when creating the
workflow/stage: FCL_FILE, NUM_EVENTS, DUNE_VERSION, DUNE_QUALIFIER
EOF
# fcl file and DUNE software version/qualifier to be used; each can be
# overridden from the workflow/stage environment.
FCL_FILE=${FCL_FILE:-myreco_beam.fcl}
DUNE_VERSION=${DUNE_VERSION:-v09_91_02d01}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}
# BUG FIX: was ${HAS_ART_OUTPUT:true} — that is substring expansion from
# offset 0 (the arithmetic value of 'true'), so the default was never
# applied and the variable stayed empty when unset. ':-' gives the
# intended default of "true".
HAS_ART_OUTPUT=${HAS_ART_OUTPUT:-true}
# number of events to process from the input file (all events if unset)
if [ "$NUM_EVENTS" != "" ] ; then
  events_option="-n $NUM_EVENTS"
fi
# First get an unprocessed file from this stage. justin-get-file prints
# "<did> <pfn> <rse>" for the next file, or nothing when the stage has no
# files left to process.
did_pfn_rse=$("$JUSTIN_PATH/justin-get-file")
if [ "$did_pfn_rse" = "" ] ; then
  echo "Nothing to process - exit jobscript"
  exit 0
fi
# pfn (second space-separated field) is also needed when creating
# justin-processed-pfns.txt at the end of the job
pfn=$(echo "$did_pfn_rse" | cut -f2 -d' ')
echo "Input PFN = $pfn"
# Setup DUNE environment
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
# the xroot lib for streaming non-root files is in testproducts,
# so add it to the start of the path
#export PRODUCTS=/cvmfs/dune.opensciencegrid.org/products/dune/testproducts:${PRODUCTS}
# CODE_TAR_DIR_LOCAL is where justIN unpacked the user's code tarball;
# alias it as INPUT_TAR_DIR_LOCAL and prepend the user-built localProducts
# area to PRODUCTS so 'setup' resolves those packages before cvmfs releases.
# NOTE(review): the localProducts path hardcodes v09_91_02/e26/prof — it must
# match DUNE_VERSION/DUNE_QUALIFIER above; confirm when bumping versions.
export INPUT_TAR_DIR_LOCAL=${CODE_TAR_DIR_LOCAL}
export PRODUCTS="${INPUT_TAR_DIR_LOCAL}/localProducts_larsoft_v09_91_02_e26_prof/:$PRODUCTS"
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
# Alternative for a full MRB development area (kept for reference):
#if [ ! -z "$CODE_TAR_DIR_LOCAL" ]; then
# echo "Using custom sources from $CODE_TAR_DIR_LOCAL"
# source ${CODE_TAR_DIR_LOCAL}/localProducts*/setup
# mrbslp
#fi
# Construct outFile from input $pfn: take the basename, drop everything
# from the first '.', and append a UTC timestamp so re-processing the same
# input never collides. Pure parameter expansion replaces the previous
# echo|awk pipelines (same result, no forked processes).
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
Ffname=${pfn##*/}     # basename of the PFN
fname=${Ffname%%.*}   # strip extension(s)
outFile=${fname}_reco_data_${now}.root
campaign="justIN.w${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
# Art (-o) output for reco-style fcl, TFile (-T) output otherwise
if [ "$HAS_ART_OUTPUT" = true ];then
  OUTPUT_CMD="-o $outFile"
  echo "Art output set to $outFile"
else
  OUTPUT_CMD="-T ${fname}_ana_${now}.root"
fi
# Debug aid: show what came with the code tarball. Use INPUT_TAR_DIR_LOCAL
# consistently (it is exported above as an alias of CODE_TAR_DIR_LOCAL;
# the previous 'ls' listed CODE_TAR_DIR_LOCAL while the banner named
# INPUT_TAR_DIR_LOCAL).
echo "Contents of $INPUT_TAR_DIR_LOCAL:"
ls ${INPUT_TAR_DIR_LOCAL}
export FW_SEARCH_PATH=${INPUT_TAR_DIR_LOCAL}:$FW_SEARCH_PATH #Necessary for the local path to be searched first for xml files.
#echo "$FW_SEARCH_PATH"
(
  # Do the scary preload stuff in a subshell so LD_PRELOAD never leaks
  # into the rest of the jobscript.
  export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
  echo "$LD_PRELOAD"
  lar -c $FCL_FILE $events_option $OUTPUT_CMD "$pfn" > ${fname}_reco_${now}.log 2>&1
  larStatus=$?
  # BUG FIX: the unconditional 'mv' used to be the subshell's last command,
  # so a missing Validation_ccnc.root made the subshell report mv's failure
  # instead of lar's status. Guard the rename and exit with lar's status.
  if [ -f Validation_ccnc.root ]; then
    mv Validation_ccnc.root "Validation_ccnc_${fname}.root"
  fi
  exit $larStatus
)
# Subshell exits with lar's exit code
larExit=$?
echo "lar exit code $larExit"
echo '=== Start last 100 lines of lar log file ==='
tail -100 ${fname}_reco_${now}.log
echo '=== End last 100 lines of lar log file ==='
if [ "$larExit" -eq 0 ] ; then
  # Success ! Record the PFN so justIN marks this input as processed.
  echo "$pfn" > justin-processed-pfns.txt
  jobscriptExit=0
else
  # Oh :( — leave the PFN unrecorded so another job can retry it.
  jobscriptExit=1
fi
# Debug listings of the working directory and the unpacked code tarball
ls
ls ${CODE_TAR_DIR_LOCAL}
# Create compressed tar file with all log files. jobsub IDs contain '@',
# which we map to '_' in the archive name (bash substitution replaces the
# old echo|sed pipeline with identical output).
tar zcf "${JUSTIN_JOBSUB_ID//@/_}.logs.tgz" *.log
exit $jobscriptExit
justIN time: 2024-11-21 15:40:13 UTC justIN version: 01.01.09