Jobscript
#!/bin/bash
:<<'EOF'
To use this jobscript to process 10 files from the dc4-vd-coldbox-bottom
data and put the output in the usertests namespace (MetaCat) and
scope (Rucio), and in the usertests:output-test-01 dataset in MetaCat and
Rucio, use this command to create the workflow:
justin simple-workflow \
--mql \
"files from dune:all where core.run_type='dc4-vd-coldbox-bottom' and dune.campaign='dc4' limit 10" \
--jobscript dc4-vd-coldbox-bottom.jobscript --max-distance 30 --rss-mb 4000 \
--scope usertests --output-pattern '*_reco_data_*.root:output-test-01'
The following optional environment variables can be set when creating the
workflow/stage: FCL_FILE, NUM_EVENTS, DUNE_VERSION, DUNE_QUALIFIER
EOF
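# Note: the usage block above is the generic justIN template text; the --jobscript name
# and --output-pattern should match what this script actually writes out (assumption:
# a pattern like '*_PNSfiles_*.root:output-test-01' would match the filenames built below).
# The optional variables listed above can be set when creating the workflow (with justin's
# --env NAME=VALUE option, if available in your justIN version), e.g.
#   --env NUM_EVENTS=10 --env FCL_FILE=my_override.fcl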
# fcl file and DUNE software version/qualifier to be used
FCL_FILE=crp6_daphne_data_decoder.fcl
ANA_FILE=runPNSCRP.fcl   # currently unused: the analysis lar stage below is commented out
#DUNE_VERSION=${DUNE_VERSION:-v09_91_02d01}
DUNE_VERSION=${DUNE_VERSION:-v09_91_02d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}
# number of events to process from the input file
if [ "$NUM_EVENTS" != "" ] ; then
events_option="-n $NUM_EVENTS"
fi
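# (if NUM_EVENTS is unset, events_option stays empty and lar processes all events)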
# First get an unprocessed file from this stage
did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
if [ "$did_pfn_rse" = "" ] ; then
echo "Nothing to process - exit jobscript"
exit 0
fi
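# justin-get-file prints one space-separated line per file: "<DID> <PFN> <RSE>",
# which is split with cut below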
# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt
# pfn is also needed when creating justin-processed-pfns.txt
pfn=`echo $did_pfn_rse | cut -f2 -d' '`
echo "Input PFN = $pfn"
# Setup DUNE environment
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
#export PRODUCTS="${INPUT_TAR_DIR_LOCAL}/localProducts_larsoft_v09_91_03d00_e26_prof/:$PRODUCTS" #working
#source $INPUT_TAR_DIR_LOCAL/localProducts_larsoft_v09_91_03d00_e26_prof/setup #not working
export PRODUCTS=$INPUT_TAR_DIR_LOCAL:${PRODUCTS}
echo "PRODUCTS: ${PRODUCTS}"
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
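# Optional sanity check (a sketch, not part of the original flow): confirm dunesw is
# actually set up before running lar, e.g.
#   ups active | grep -q dunesw || { echo "dunesw setup failed" ; exit 1 ; }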
#mrbslp
#echo "DUNESW_DIR: ${DUNESW_DIR}"
export FHICL_FILE_PATH=${INPUT_TAR_DIR_LOCAL}:${FHICL_FILE_PATH}
export FW_SEARCH_PATH=${INPUT_TAR_DIR_LOCAL}/config_data:$FW_SEARCH_PATH
export FW_SEARCH_PATH=${INPUT_TAR_DIR_LOCAL}:$FW_SEARCH_PATH
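# FHICL_FILE_PATH is searched by lar for fcl files and their #include's;
# FW_SEARCH_PATH is searched for auxiliary data files (e.g. the contents of config_data)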
# Custom code shipped in INPUT_TAR_DIR_LOCAL is picked up via the PRODUCTS,
# FHICL_FILE_PATH and FW_SEARCH_PATH settings above
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
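# JUSTIN_PROCESSORS is provided by justIN and reflects the number of processors
# allocated to this job slot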
# Construct outFile from input $pfn
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
Ffname=`echo $pfn | awk -F/ '{print $NF}'`   # filename part of the input PFN
fname=`echo $Ffname | awk -F. '{print $1}'`  # filename truncated at the first '.'
outRecoFile=${fname}_crp6_daphne_data_reco_${now}
outAnaFile=${fname}_ana_${now}
outFile=${fname}_PNSfiles_${now}.root
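# For a (hypothetical) input PFN ending in run012345_0001.hdf5 this gives, e.g.,
# outFile = run012345_0001_PNSfiles_<timestamp>.root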
#campaign="justIN.r${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
campaign="justIN.w${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
(
  # Do the scary preload stuff in a subshell!
  export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
  echo "$LD_PRELOAD"
  #lar -c $FCL_FILE $events_option -T ${outRecoFile}.root -o ${fname}_crpcb_bottom_reco.root $pfn > ${outRecoFile}.log 2>&1
  #lar -c $FCL_FILE $events_option -o ${outRecoFile}.root "$pfn" > ${fname}_myreco2_${now}.log 2>&1
  lar -c $FCL_FILE $events_option -T ${outRecoFile}.root -o $outFile $pfn > ${outFile}.log 2>&1
  #lar -c $ANA_FILE $events_option -T ${outAnaFile}_hist.root -o ${outAnaFile}.root ${fname}_crpcb_bottom_reco.root > ${fname}_myana_${now}.log 2>&1
)
# Subshell exits with exit code of last command
larExit=$?
echo "lar exit code $larExit"
echo '=== Start last 100 lines of lar log file ==='
tail -100 ${outFile}.log
echo '=== End last 100 lines of lar log file ==='
jobscriptExit=1
if [ $larExit -eq 0 ] ; then
  # write metadata file if lar succeeded
  extractor_prod.py --infile "$outFile" --no_crc --appname reco \
    --appversion ${DUNE_VERSION} --appfamily art \
    --campaign ${campaign} > $outFile.ext.json
  extractorExit=$?
  echo "extractor_prod.py exit code $extractorExit"

  # Run pdjson2meta. THIS SHOULD MOVE TO SOMEWHERE LIKE duneutil ?
  /cvmfs/dune.opensciencegrid.org/products/dune/justin/pro/NULL/jobutils/pdjson2metadata \
    $outFile.ext.json all-input-dids.txt > $outFile.json
  p2mExit=$?
  echo "pdjson2metadata exit code $p2mExit"

  if [ $extractorExit -eq 0 -a $p2mExit -eq 0 ] ; then
    echo "Metadata extraction succeeded"
    echo "$pfn" > justin-processed-pfns.txt
    echo "===Metadata JSON==="
    cat $outFile.json
    echo
    echo "==================="
    jobscriptExit=0
  fi
fi
ls -lRS
# Create compressed tar file with all log files
tar zcf `echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g'` *.log
exit $jobscriptExit