Workflow 3548, Stage 1
| Priority | 50 |
| Processors | 1 |
| Wall seconds | 36000 |
| RSS bytes | 4194304000 (4000 MiB) |
| Max distance for inputs | 100.0 |
| Enabled input RSEs | CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled output RSEs | CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled sites | BR_CBPF, CA_SFU, CA_Victoria, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IN_TIFR, IT_CNAF, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Imperial, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_RAL-Tier1, UK_Sheffield, US_BNL, US_Caltech, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_MIT, US_Nebraska, US_NotreDame, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
| Scope | usertests |
| Events for this stage | |
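The resource settings and scope above correspond to options of `justin simple-workflow`, the command documented in the jobscript header further down. A rough sketch of a creation command for a stage like this is shown below; `--jobscript`, `--max-distance`, `--rss-mb` and `--scope` are taken from that documentation, whereas `--monte-carlo`, `--wall-seconds` and `--env` (and the jobscript file name) are assumptions about the justIN CLI and should be checked against the justIN documentation.

```bash
# Hypothetical creation command for a stage with the settings shown above.
# Only --jobscript, --max-distance, --rss-mb and --scope are confirmed by the
# jobscript header below; the other options are assumptions.
justin simple-workflow \
  --monte-carlo 50 \
  --jobscript pdvd_electron.jobscript \
  --max-distance 100 --rss-mb 4000 --wall-seconds 36000 \
  --scope usertests \
  --env KEY=electron_2 --env NUM_EVENTS=20
# (the output pattern and scratch destination shown in the next table would be
#  added with the appropriate --output-* option)
```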
Output patterns

| | Destination | Pattern | Lifetime | For next stage |
| --- | --- | --- | --- | --- |
| 1 | https://fndcadoor.fnal.gov:2880/dune/scratch/users/ykermaid/03548/1 | pdvd_electron_2_reco_*.* | | |
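justIN decides what to upload by matching files created in the job's working directory against this pattern; a quick local check of which files would match, assuming ordinary shell-glob semantics, is:

```bash
# List files in the current (job working) directory matching the declared
# output pattern; the glob interpretation is an assumption here.
shopt -s nullglob
matches=( pdvd_electron_2_reco_*.* )
echo "Files matching the output pattern: ${matches[*]:-none}"
```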
Environment variables

| Name | Value |
| --- | --- |
| INPUT_TAR_DIR_LOCAL | /cvmfs/fifeuser4.opensciencegrid.org/sw/dune/691e4fd918a73bcf81856627b6bc9c6163bb1086 |
| KEY | electron_2 |
| NUM_EVENTS | 20 |
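These values appear in each job's environment and drive the jobscript below: INPUT_TAR_DIR_LOCAL points at the unpacked code tarball on cvmfs, KEY selects the generator fcl file and the output file names, and NUM_EVENTS becomes the `-n` option passed to `lar`. A minimal, hypothetical guard block illustrating how the jobscript consumes them (the submitted script uses the variables directly, without these checks):

```bash
# Hypothetical defensive checks; not part of the submitted jobscript.
: "${INPUT_TAR_DIR_LOCAL:?must point at the unpacked code tarball on cvmfs}"
: "${KEY:?must be set, e.g. electron_2}"
NUM_EVENTS=${NUM_EVENTS:-20}   # default shown only for illustration
echo "KEY=$KEY NUM_EVENTS=$NUM_EVENTS"
```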
File states

| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
| --- | --- | --- | --- | --- | --- | --- | --- |
| 50 | 0 | 50 | 0 | 0 | 0 | 0 | 0 |
Job states

| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 171 | 0 | 0 | 0 | 0 | 45 | 0 | 2 | 2 | 119 | 0 | 3 |
RSEs used

| Name | Inputs | Outputs |
| --- | --- | --- |
| MONTECARLO | 121 | 0 |
File reset events, by site

| Site | Allocated | Outputting |
| --- | --- | --- |
| CZ_FZU | 1 | 0 |
| UK_Manchester | 1 | 0 |
Jobscript
#!/bin/bash
:<<'EOF'
To use this jobscript to process 10 files from the dc4-vd-coldbox-bottom
data and put the output in the usertests namespace (MetaCat) and
scope (Rucio), and in the usertests:output-test-01 dataset in MetaCat and
Rucio, use this command to create the workflow:
justin simple-workflow \
  --mql \
  "files from dune:all where core.run_type='dc4-vd-coldbox-bottom' and dune.campaign='dc4' limit 10" \
  --jobscript dc4-vd-coldbox-bottom.jobscript --max-distance 30 --rss-mb 4000 \
  --scope usertests --output-pattern '*_reco_data_*.root:output-test-01'
The following optional environment variables can be set when creating the
workflow/stage: FCL_FILE, NUM_EVENTS, DUNE_VERSION, DUNE_QUALIFIER
EOF
# fcl files and DUNE software version/qualifier to be used
FCL_GEN_FILE=gen_protodunevd_${KEY}GeV.fcl
FCL_ST1_FILE=protodunevd_refactored_g4_stage1.fcl
FCL_ST2_FILE=protodunevd_refactored_g4_stage2.fcl
FCL_DET_FILE=protodunevd_refactored_detsim.fcl
FCL_REC_FILE=protodunevd_reco.fcl
FCL_ANA_FILE=runProtoDUNEelectronWireAna2mcGun.fcl
DUNE_VERSION=${DUNE_VERSION:-v09_91_04d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}
# number of events to generate and process in each stage
if [ "$NUM_EVENTS" != "" ] ; then
  events_option="-n $NUM_EVENTS"
fi
# First get an unprocessed file from this stage
did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
if [ "$did_pfn_rse" = "" ] ; then
echo "Nothing to process - exit jobscript"
exit 0
fi
# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt
# pfn is also needed when creating justin-processed-pfns.txt
pfn=`echo $did_pfn_rse | cut -f2 -d' '`
echo "Input PFN = $pfn"
# Setup DUNE environment
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
export PRODUCTS=$INPUT_TAR_DIR_LOCAL:${PRODUCTS}
echo "PRODUCTS: ${PRODUCTS}"
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
# Point the experiment software at the versions unpacked from the uploaded
# tarball in INPUT_TAR_DIR_LOCAL, overriding what setup dunesw provided
export DUNESW_DIR=${INPUT_TAR_DIR_LOCAL}/dunesw/${DUNE_VERSION}
export DUNESW_FQ_DIR=${DUNESW_DIR}/slf7.x86_64.e26.prof
export DUNESW_LIB=${DUNESW_FQ_DIR}/lib
export PROTODUNEANA_DIR=${INPUT_TAR_DIR_LOCAL}/protoduneana/${DUNE_VERSION}
export PROTODUNEANA_INC=${PROTODUNEANA_DIR}/include
export PROTODUNEANA_FQ_DIR=${PROTODUNEANA_DIR}/slf7.x86_64.e26.prof
export PROTODUNEANA_LIB=${PROTODUNEANA_FQ_DIR}/lib
export DUNERECO_DIR=${INPUT_TAR_DIR_LOCAL}/dunereco/${DUNE_VERSION}
export DUNERECO_INC=${DUNERECO_DIR}/include
export DUNERECO_FQ_DIR=${DUNERECO_DIR}/slf7.x86_64.e26.prof
export DUNERECO_LIB=${DUNERECO_FQ_DIR}/lib
export DUNEPROTOTYPES_DIR=${INPUT_TAR_DIR_LOCAL}/duneprototypes/${DUNE_VERSION}
export DUNEPROTOTYPES_INC=${DUNEPROTOTYPES_DIR}/include
export DUNEPROTOTYPES_FQ_DIR=${DUNEPROTOTYPES_DIR}/slf7.x86_64.e26.prof
export DUNEPROTOTYPES_LIB=${DUNEPROTOTYPES_FQ_DIR}/lib
export FHICL_FILE_PATH=${DUNESW_DIR}/fcl:${FHICL_FILE_PATH}
export FHICL_FILE_PATH=${DUNEPROTOTYPES_DIR}/fcl:${FHICL_FILE_PATH}
export FHICL_FILE_PATH=${PROTODUNEANA_DIR}/job:${FHICL_FILE_PATH}
export FHICL_FILE_PATH=${DUNERECO_DIR}/fcl:${FHICL_FILE_PATH}
export FHICL_FILE_PATH=${INPUT_TAR_DIR_LOCAL}:${FHICL_FILE_PATH}
echo "DUNESW_DIR: ${DUNESW_DIR}"
echo "PROTODUNEANA_DIR: ${PROTODUNEANA_DIR}"
echo "DUNERECO_DIR: ${DUNERECO_DIR}"
echo "DUNEPROTOTYPES_DIR: ${DUNEPROTOTYPES_DIR}"
echo "FHICL_FILE_PATH: ${FHICL_FILE_PATH}"
# Limit OpenMP to the number of processors allocated to this job
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
# Construct output file names from $KEY and a UTC timestamp
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
outGenFile=pdvd_${KEY}GeV_gen_${now}
outSt1File=pdvd_${KEY}GeV_g4_stage1_${now}
outSt2File=pdvd_${KEY}GeV_g4_stage2_${now}
outDetFile=pdvd_${KEY}GeV_detsim_${now}
outRecFile=pdvd_${KEY}GeV_reco_${now}
outAnaFile=pdvd_${KEY}GeV_ana_${now}
campaign="justIN.r${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
(
  # Run the full gen -> g4 stage1 -> g4 stage2 -> detsim -> reco chain in a
  # subshell so that its exit code can be captured below
  lar -c $FCL_GEN_FILE $events_option -T ${outGenFile}_hist.root -o ${outGenFile}.root > ${outGenFile}.log 2>&1
  lar -c $FCL_ST1_FILE $events_option -T ${outSt1File}_hist.root -o ${outSt1File}.root ${outGenFile}.root > ${outSt1File}.log 2>&1
  lar -c $FCL_ST2_FILE $events_option -T ${outSt2File}_hist.root -o ${outSt2File}.root ${outSt1File}.root > ${outSt2File}.log 2>&1
  lar -c $FCL_DET_FILE $events_option -T ${outDetFile}_hist.root -o ${outDetFile}.root ${outSt2File}.root > ${outDetFile}.log 2>&1
  lar -c $FCL_REC_FILE $events_option -T ${outRecFile}_hist.root -o ${outRecFile}.root ${outDetFile}.root > ${outRecFile}.log 2>&1
  #lar -c $FCL_ANA_FILE $events_option -T ${outAnaFile}_hist.root -o ${outAnaFile}.root ${outRecFile}.root > ${outAnaFile}.log 2>&1
)
# Subshell exits with exit code of last command
larExit=$?
echo "lar exit code $larExit"
echo "$pfn" > justin-processed-pfns.txt
ls -lRS
# Create compressed tar file with all log files
tar zcf `echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g'` *.log
#exit $jobscriptExit
exit $larExit
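
Note that the subshell above only propagates the exit code of its last command, so larExit reflects the final reco stage; a failure in an earlier stage shows up only in its log file (or indirectly, if the missing input also breaks the reco stage). A minimal sketch of a fail-fast variant, not part of the submitted jobscript, could look like this:

```bash
# Hypothetical fail-fast variant of the lar chain: stop at the first stage
# that exits non-zero and propagate that exit code to larExit.
run_stage() {
  local fcl=$1 out=$2 input=$3
  lar -c "$fcl" $events_option -T "${out}_hist.root" -o "${out}.root" $input > "${out}.log" 2>&1
}
(
  set -e
  run_stage "$FCL_GEN_FILE" "$outGenFile"
  run_stage "$FCL_ST1_FILE" "$outSt1File" "${outGenFile}.root"
  run_stage "$FCL_ST2_FILE" "$outSt2File" "${outSt1File}.root"
  run_stage "$FCL_DET_FILE" "$outDetFile" "${outSt2File}.root"
  run_stage "$FCL_REC_FILE" "$outRecFile" "${outDetFile}.root"
)
larExit=$?
```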