justIN           Dashboard       Workflows       Jobs       AWT       Sites       Storages       Docs       Login

Workflow 4089, Stage 1

Priority 50
Processors 1
Wall seconds 3600
RSS bytes 6291456000 (6000 MiB)
Max distance for inputs 30.0
Enabled input RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites BR_CBPF, CA_SFU, CA_Victoria, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IN_TIFR, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Imperial, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_BNL, US_Caltech, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_MIT, US_Nebraska, US_NotreDame, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope usertests
Events for this stage

Output patterns

 DestinationPatternLifetimeFor next stage
1https://fndcadoor.fnal.gov:2880/dune/scratch/users/chappell/sec_vtx/04089/1*.csv

Environment variables

NameValue
INPUT_TAR_DIR_LOCAL/cvmfs/fifeuser1.opensciencegrid.org/sw/dune/e3881a0d258a2bb4055531c871f414306dbaa817
NUM_EVENTS100

File states

Total filesFindingUnallocatedAllocatedOutputtingProcessedNot foundFailed
6000006000

Job states

TotalSubmittedStartedProcessingOutputtingFinishedNotusedAbortedStalledJobscript errorOutputting failedNone processed
1610000158002001
Files processed0010102020303040405050Nov-13 16:00Nov-13 17:00Nov-13 18:00Nov-13 19:00Files processedBin start timesNumber per binUS_ColoradoCERNUS_UChicagoUS_PuertoRicoUS_WisconsinNL_SURFsaraNL_NIKHEFUK_RAL-Tier1US_FNAL-T1US_FNAL-FermiG…US_FNAL-FermiGridUK_QMULUK_Manchester
Replicas per RSE60477.3821972418682198.359628376933860306.95681025071735292.63771081146886302.56199733499176159.323007485126144318.40926307873025143.989549425909164333.260466971698134.44526021992843347.45336377971745128.336734525634372358.08180739676305125.342354052231832366.78105676316324123.797491683000841373.3761472307346123.143359963124041377.79062751598815122.94927002638474Replicas per RSEFNAL_DCACHE (41%)DUNE_US_FNAL_DISK_S…DUNE_US_FNAL_DISK_STAGE (41%)SURFSARA (4%)NIKHEF (2%)RAL_ECHO (2%)QMUL (2%)RAL-PP (1%)DUNE_US_BNL_SDCC (1…DUNE_US_BNL_SDCC (1%)PRAGUE (0%)DUNE_FR_CCIN2P3_DIS…DUNE_FR_CCIN2P3_DISK (0%)

RSEs used

NameInputsOutputs
DUNE_US_FNAL_DISK_STAGE850
SURFSARA80
NIKHEF50
RAL_ECHO40
QMUL30
RAL-PP20
DUNE_US_BNL_SDCC20
PRAGUE10
DUNE_FR_CCIN2P3_DISK10

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

File reset events, by site

SiteAllocatedOutputting
US_PuertoRico20

Jobscript

#!/bin/bash
# justIN jobscript: run pndr.fcl reconstruction over input files handed out
# by justin-get-file, using the DUNE software stack from CVMFS.
#
# Environment supplied by justIN / the workflow definition:
#   INPUT_TAR_DIR_LOCAL - unpacked input tarball (fcl, setup-grid, localProducts)
#   NUM_EVENTS          - optional; events to process per file (all if unset)
#   JUSTIN_PROCESSORS   - processor count for this job slot

# fcl file and DUNE software version/qualifier to be used
FCL_FILE=pndr.fcl
DUNE_VERSION=${DUNE_VERSION:-v09_92_00d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}

# Exported so the child lar process actually sees the search paths
# (plain assignments are invisible to child processes).
# BUG FIX: FHICL_FILE_PATH had a literal "INPUT_TAR_DIR_LOCAL" (missing $),
# so fcl files shipped in the tarball were never found via includes.
export FW_SEARCH_PATH=.:$INPUT_TAR_DIR_LOCAL:$FW_SEARCH_PATH
export FHICL_FILE_PATH=.:$INPUT_TAR_DIR_LOCAL:$FHICL_FILE_PATH
echo "$FW_SEARCH_PATH"

cp "$INPUT_TAR_DIR_LOCAL/pndr.fcl" .
cp "$INPUT_TAR_DIR_LOCAL/setup-grid" .
cp -r "$INPUT_TAR_DIR_LOCAL"/localProducts* .

# number of events to process from the input file (unset/empty -> all events)
if [ "$NUM_EVENTS" != "" ] ; then
 events_option="-n $NUM_EVENTS"
fi

# Setup DUNE environment
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh

# the xroot lib for streaming non-root files is in testproducts,
# so add it to the start of the path
export PRODUCTS=/cvmfs/dune.opensciencegrid.org/products/dune/testproducts:${PRODUCTS}
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}

source setup-grid
source localProducts*/setup
mrbslp

# Process up to 20 files per job; justin-get-file returns an empty string
# when the stage has nothing left, which ends the job cleanly.
for i in {0..19}; do
    # First get an unprocessed file from this stage
    did_pfn_rse=$("$JUSTIN_PATH/justin-get-file")

    if [ "$did_pfn_rse" = "" ] ; then
      echo "Nothing to process - exit jobscript"
      exit 0
    fi

    # Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
    echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt

    # pfn is also needed when creating justin-processed-pfns.txt
    pfn=$(echo "$did_pfn_rse" | cut -f2 -d' ')
    echo "Input PFN = $pfn"

    # Construct log-file stem from input $pfn:
    # basename without extension, plus a UTC timestamp.
    now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
    Ffname=${pfn##*/}
    fname=${Ffname%%.*}

    campaign="justIN.r${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"

    (
    # Do the scary preload stuff in a subshell so LD_PRELOAD does not
    # leak into the rest of the jobscript!
    export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
    echo "$LD_PRELOAD"

    # $events_option is intentionally unquoted: empty -> no extra args,
    # otherwise it word-splits into "-n <N>".
    lar -c "$FCL_FILE" $events_option "$pfn" > "${fname}_reco_${now}.log" 2>&1
    )

    # Subshell exits with exit code of last command (lar)
    larExit=$?
    echo "lar exit code $larExit"

    # BUG FIX: append with '>>' — the original '>' truncated the file on
    # every iteration, so only the last PFN was reported to justIN as
    # processed and earlier files were later reset and reprocessed.
    echo "$pfn" >> justin-processed-pfns.txt
done

# Tag the per-view CSV outputs with the input file's base name so the
# stage's "*.csv" output pattern uploads uniquely-named files.
# NOTE(review): $fname is set inside the loop, so after multiple inputs it
# holds only the LAST file's stem — presumably the module appends across
# inputs; confirm against the fcl configuration.
mv SecVtx_CaloHitListU.csv "SecVtx_CaloHitListU_${fname}.csv"
mv SecVtx_CaloHitListV.csv "SecVtx_CaloHitListV_${fname}.csv"
mv SecVtx_CaloHitListW.csv "SecVtx_CaloHitListW_${fname}.csv"

#rm -f SecVtx_CaloHitList_*_*.csv

ls -lRS

# Create compressed tar file with all log files; '@' in the jobsub ID is
# not filesystem-friendly, so map it to '_'.
tar zcf "$(echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g')" *.log

# Propagate the (last) lar exit code so justIN can flag jobscript errors.
exit $larExit
justIN time: 2024-11-17 05:50:37 UTC       justIN version: 01.01.09