Workflow 2988, Stage 1
Priority | 50 |
Processors | 1 |
Wall seconds | 18000 |
RSS bytes | 8388608000 (8000 MiB) |
Max distance for inputs | 100.0 |
Enabled input RSEs |
CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
Enabled output RSEs |
CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
Enabled sites |
BR_CBPF, CA_SFU, CA_Victoria, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IN_TIFR, IT_CNAF, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Imperial, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_RAL-Tier1, UK_Sheffield, US_BNL, US_Caltech, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_MIT, US_Nebraska, US_NotreDame, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
Scope | usertests |
Events for this stage |
Output patterns
| Destination | Pattern | Lifetime | For next stage |
---|---|---|---|---|
1 | https://fndcadoor.fnal.gov:2880/dune/scratch/users/chappell/pdhd_1gev/02988/1 | hits*.root | | |
2 | https://fndcadoor.fnal.gov:2880/dune/scratch/users/chappell/pdhd_1gev/02988/1 | mc*.root | | |
3 | https://fndcadoor.fnal.gov:2880/dune/scratch/users/chappell/pdhd_1gev/02988/1 | events*.root | | |
Environment variables
Name | Value |
---|---|
INPUT_TAR_DIR_LOCAL | /cvmfs/fifeuser2.opensciencegrid.org/sw/dune/d1d52c8acc5260159f447c5d7ede9bffb2926baf |
NUM_EVENTS | 10 |
File states
Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
---|---|---|---|---|---|---|---|
100 | 0 | 1 | 0 | 0 | 97 | 0 | 2 |
Job states
Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
---|---|---|---|---|---|---|---|---|---|---|---|
195 | 0 | 0 | 0 | 0 | 130 | 0 | 17 | 13 | 25 | 3 | 7 |
RSEs used
Name | Inputs | Outputs |
---|---|---|
MONTECARLO | 133 | 0 |
Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)
File reset events, by site
Site | Allocated | Outputting |
---|---|---|
UK_RAL-Tier1 | 3 | 3 |
US_FNAL-FermiGrid | 1 | 0 |
US_PuertoRico | 1 | 0 |
US_Wisconsin | 1 | 0 |
CERN | 0 | 2 |
Jobscript
#!/bin/bash
# justIN jobscript: ProtoDUNE-HD 1 GeV beam+cosmics MC chain
# (gen -> g4 -> detsim -> reco), run once per input file obtained
# from justin-get-file.
#
# Environment expected from justIN / the workflow definition:
#   INPUT_TAR_DIR_LOCAL  - cvmfs-staged dir with *.fcl, *.xml, setup-grid
#                          and localProducts* (for mrbslp)
#   NUM_EVENTS           - optional cap on events per file (lar -n)
#   JUSTIN_PATH, JUSTIN_WORKFLOW_ID, JUSTIN_STAGE_ID,
#   JUSTIN_JOBSUB_ID, JUSTIN_PROCESSORS - supplied by the justIN wrapper

# fcl file and DUNE software version/qualifier to be used
FCL_FILE=prod_beam_cosmics_1GeV_protodunehd.fcl
DUNE_VERSION=${DUNE_VERSION:-v09_85_00d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}

# Make the tarball contents visible to art/LArSoft searches
export FW_SEARCH_PATH=$FW_SEARCH_PATH:$INPUT_TAR_DIR_LOCAL
export FHICL_FILE_PATH=.:$FHICL_FILE_PATH
echo "$FW_SEARCH_PATH"
cp "$INPUT_TAR_DIR_LOCAL"/*.fcl .
cp "$INPUT_TAR_DIR_LOCAL"/*.xml .
cp "$INPUT_TAR_DIR_LOCAL"/setup-grid .
cp -r "$INPUT_TAR_DIR_LOCAL"/localProducts* .

# number of events to process from the input file
# (initialise so a stray value inherited from the environment cannot leak in)
events_option=""
if [ "$NUM_EVENTS" != "" ] ; then
  events_option="-n $NUM_EVENTS"
fi

# First get an unprocessed file from this stage
did_pfn_rse=$("$JUSTIN_PATH/justin-get-file")
if [ "$did_pfn_rse" = "" ] ; then
  echo "Nothing to process - exit jobscript"
  exit 0
fi

# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt

# pfn is also needed when creating justin-processed-pfns.txt
pfn=$(echo "$did_pfn_rse" | cut -f2 -d' ')
echo "Input PFN = $pfn"

# Setup DUNE environment
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
# the xroot lib for streaming non-root files is in testproducts,
# so add it to the start of the path
export PRODUCTS=/cvmfs/dune.opensciencegrid.org/products/dune/testproducts:${PRODUCTS}
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
source setup-grid
mrbslp

# Construct per-file name fragments from the input $pfn
# (parameter expansion instead of two awk processes)
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
Ffname=${pfn##*/}       # basename of the PFN
fname=${Ffname%%.*}     # basename with extension(s) stripped
campaign="justIN.r${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"

(
  # Do the scary preload stuff in a subshell!
  # The xrootd POSIX preload shim lets lar stream non-root inputs.
  export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
  echo "$LD_PRELOAD"
  # $events_option is intentionally unquoted: it must split into "-n N"
  lar -c "$FCL_FILE" $events_option -o pdhd_gen.root > "${fname}_pdhd_${now}.log" 2>&1 \
    && lar -c standard_g4_protodunehd.fcl -s pdhd_gen.root -o pdhd_g4.root > "${fname}_pdhd_g4_${now}.log" 2>&1 \
    && lar -c standard_detsim_protodunehd.fcl -s pdhd_g4.root -o pdhd_detsim.root > "${fname}_pdhd_detsim_${now}.log" 2>&1 \
    && lar -c standard_reco_protodunehd.fcl -s pdhd_detsim.root -o pdhd_reco.root > "${fname}_pdhd_reco_${now}.log" 2>&1
)
# Subshell exits with exit code of last command
larExit=$?
echo "lar exit code $larExit"

# BUG FIX: only record the PFN as processed (and only rename the outputs
# for upload) when the full lar chain succeeded. Previously the PFN was
# written unconditionally, so failed inputs were counted as processed and
# never retried by justIN.
if [ $larExit -eq 0 ] ; then
  echo "$pfn" > justin-processed-pfns.txt
  # Rename outputs to per-input names matching the stage output patterns
  # (hits3D_* also matches the hits*.root pattern)
  mv hits.root "hits_${fname}.root"
  mv hits3D.root "hits3D_${fname}.root"
  mv mc.root "mc_${fname}.root"
  mv events.root "events_${fname}.root"
fi

ls -lRS

# Create compressed tar file with all log files; '@' in the jobsub id is
# not filename-safe, so map it to '_'
tar zcf "$(echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g')" *.log
exit $larExit