Jobscript
#!/bin/bash
#
FCL_FILE=/cvmfs/fifeuser4.opensciencegrid.org/sw/dune/ac3164fcb1e76377d6d1ee7523a1957713af7671/atmo_test/srcs/duneana/duneana/AtmoAnalysis/jobAtmoAnalysis.fcl
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
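# Point the metacat client at the DUNE production metadata catalog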
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune
echo $INPUT_TAR_DIR_LOCAL
# Set up recent LArSoft/dunesw software suite
setup dunesw \
"${DUNE_VERSION:-v09_91_02d01}" \
-q "${DUNE_QUALIFIER:-e26:prof}"
echo "printing env"
export PRODUCTS="${INPUT_TAR_DIR_LOCAL}/atmo_test/localProducts_larsoft_v09_91_02_e26_prof/:$PRODUCTS"
setup duneana v09_91_02d01 -q e26:prof
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
echo $FCL_FILE
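# Cap the thread counts of common numerical libraries at the number of
# processors allocated to this job by justIN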
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
# number of events to process from the input file
if [ "$NUM_EVENTS" != "" ] ; then
events_option="-n $NUM_EVENTS"
fi
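# Ask justIN for up to 10 unprocessed input files for this stage.
# justin-get-file prints "DID PFN RSE" for one file, or nothing when no
# files are left; record the DIDs and PFNs in did.list and file.list.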
for nf in {1..10}
do
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
# Check that a file was returned
if [ "${DID_PFN_RSE}" == "" ] ; then
echo "Could not get file"
# exit 0
continue
fi
FILE=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
DID=`echo ${DID_PFN_RSE} | cut -f1 -d' '`
echo ${DID} >> did.list
echo ${FILE} >> file.list
done
# First get an unprocessed file from this stage
#did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
#if [ "$did_pfn_rse" = "" ] ; then
# echo "Nothing to process - exit jobscript"
# exit 0
#fi
# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
#echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt
# pfn is also needed when creating justin-processed-pfns.txt
#pfn=`echo $did_pfn_rse | cut -f2 -d' '`
#echo "Input PFN = $pfn"
# Construct a UTC timestamp used to name the output and log files
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
#Ffname=`echo $pfn | awk -F/ '{print $NF}'`
#fname=`echo $Ffname | awk -F. '{print $1}'`
#campaign="justIN.w${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
# Here is where the LArSoft command is called
(
# Do the scary preload stuff in a subshell!
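# libXrdPosixPreload.so intercepts POSIX file calls so that lar can
# stream the xrootd PFNs listed in file.list directly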
export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
echo "$LD_PRELOAD"
lar -c $FCL_FILE $events_option -S file.list > ana_${now}.log
larExit=$?
mv atmo_ana_wreco2_lbl_model.root atmo_ana_${now}.root
exit $larExit
)
# Subshell exits with the exit code saved from lar
larExit=$?
echo "lar exit code $larExit"
if [ $larExit -eq 0 ] ; then
# Success !
cat file.list > justin-processed-pfns.txt
jobscriptExit=0
else
# Oh :(
jobscriptExit=1
fi
# Create compressed tar file with all log files
tar zcf `echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g'` *.log
exit $jobscriptExit