Workflow 3377, Stage 1
Priority | 50 |
Processors | 1 |
Wall seconds | 80000 |
RSS bytes | 4193255424 (3999 MiB) |
Max distance for inputs | 0.0 |
Enabled input RSEs | CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
Enabled output RSEs | CERN_PDUNE_EOS, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MANCHESTER, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
Enabled sites | BR_CBPF, CA_SFU, CA_Victoria, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IN_TIFR, IT_CNAF, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Imperial, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_RAL-Tier1, UK_Sheffield, US_BNL, US_Caltech, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_MIT, US_Nebraska, US_NotreDame, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
Scope | usertests |
Events for this stage |
Output patterns
| | Destination | Pattern | Lifetime (seconds) | For next stage |
| --- | --- | --- | --- | --- |
| 1 | Rucio usertests:fardet-vd-reco_3377 | *reco.root | 864000 | False |
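The pattern is a filename wildcard matched against the files the jobscript leaves behind. As an illustration only (not part of the stage configuration, and assuming it is run from the job's working directory), a minimal shell check of which local files would fall under this pattern:

for f in *reco.root; do
  # skip the literal pattern when nothing matches
  [ -e "$f" ] || continue
  echo "matches *reco.root, destined for usertests:fardet-vd-reco_3377 (lifetime 864000 s): $f"
done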
Environment variables
| Name | Value |
| --- | --- |
| INPUT_DIR | /cvmfs/fifeuser2.opensciencegrid.org/sw/dune/0d1a13df3faca10e92b85bed1c6933ab2bd9a74b |
File states
| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
| --- | --- | --- | --- | --- | --- | --- | --- |
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
Job states
| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
Stats of processed input files and of uploaded output files are available as CSV or JSON (up to 10000 files included).
Jobscript
#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune
#Setup recent lar software suite
setup dunesw \
"${DUNE_VERSION:-v09_91_04d00}" \
-q "${DUNE_QUALIFIER:-e26:prof}"
echo "printing env"
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
#env
echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
if [ "${DID_PFN_RSE}" == "" ] ; then
echo "Could not get file"
exit 0
fi
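# DID_PFN_RSE is one line of the form "<DID> <PFN> <RSE>"; keep the PFN (second space-separated field)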
pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
if [ -z "${LINENUMBER}" ] ; then
LINENUMBER=$pfn
fi
# echo "linenumber " $LINENUMBER
ex_code=0
pstep=10
tstep=0
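# Exit-code bookkeeping: each step gets tcode = (pstep*tstep + istep)*1000, and on failure
# the script reports ex_code = step_exit_code + tcode so the message identifies the failing step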
now=$(date -u +"%Y%m%dT%H%M%SZ")
namespace=${JUSTIN_SCOPE:-"usertests"}
# Run Generator
echo "============generator========================="
prodname="prodmarley_nue_cc_flat_radiological_decay0_dunevd10kt_1x8x14_3view_30deg_${now}_gen_${pfn}"
istep=1
tstep=$((tstep+1))
nstep=$(($pstep*$tstep))
tcode=$((nstep+istep))
tcode=$((tcode*1000))
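# Generator: produce a single event (-n 1) with the MARLEY nue CC + radiologicals FHiCL
# for the dunevd10kt 1x8x14 3-view 30-degree geometry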
lar -c prodmarley_nue_cc_flat_radiological_decay0_dunevd10kt_1x8x14_3view_30deg.fcl -o ${prodname}.root -n 1
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: lar (generation) exit code: $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
# Stage 1 G4
echo "============G4 stage1========================="
g4stage1_name="${prodname}_supernova_g4stage1"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep))
istep=1
tcode=$((nstep+istep))
tcode=$((tcode*1000))
lar -c supernova_g4stage1_dunevd10kt_1x8x14_3view_30deg.fcl ${prodname}.root -o ${g4stage1_name}.root
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: lar (geant4 step1) exit code: $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
# Stage 2 G4
echo "============G4 stage2========================="
g4stage2_name="${g4stage1_name}_g4stage2"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep))
istep=1
tcode=$((nstep+istep))
tcode=$((tcode*1000))
lar -c standard_g4stage2_dunevd10kt_1x8x14_3view_30deg.fcl ${g4stage1_name}.root -o ${g4stage2_name}.root
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: lar (geant4 step2) exit code: $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
# Detsim
echo "============detsim========================="
detsim_name="${g4stage2_name}_detsim"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep))
istep=1
tcode=$((nstep+istep))
tcode=$((tcode*1000))
lar -c standard_detsim_dunevd10kt_1x8x14_3view_30deg.fcl ${g4stage2_name}.root -o ${detsim_name}.root
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: lar (detsim) exit code: $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
# Reco
echo "============reco ========================="
reco_name="${detsim_name}_reco"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep))
istep=1
tcode=$((nstep+istep))
tcode=$((tcode*1000))
lar -c standard_reco1_dunevd10kt_1x8x14_3view_30deg.fcl ${detsim_name}.root -o ${reco_name}.root
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: lar (reco) exit code: $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
istep=$((istep+1))
tcode=$((nstep+istep))
tcode=$((tcode*1000))
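# Build the extended metadata JSON for the reco output, then replace the 'stepfcl'
# placeholder with the actual reco1 FHiCL name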
extractor_prod.py --infile ${reco_name}.root --no_crc --appfamily art \
  --appname reco --appversion v09_91_04d00 --requestid ritm2205749 \
  --strip_parents --input_json ${INPUT_DIR}/le_input.json > ${reco_name}.root.ext.json \
  && sed -i -e 's/stepfcl/standard_reco1_dunevd10kt_1x8x14_3view_30deg.fcl/g' ${reco_name}.root.ext.json
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -gt 1 ]; then
echo "ERROR: metadata generation $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
rm -fr all-input-dids.txt
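# Parents were stripped above (--strip_parents), so provide a placeholder DID list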
echo "noparents:noparents.root" > all-input-dids.txt
python ${INPUT_DIR}/pdjson2metadata ${reco_name}.root.ext.json all-input-dids.txt usertests > ${reco_name}.root.json
exit_code=$?
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`
if [ $exit_code -ne 0 ]; then
echo "ERROR: metadata writing $ex_code "
echo "output files size: "
for f in $files
do
size=`stat -c %s $f`
echo $f $size
done
exit $ex_code
fi
if [ $exit_code -ne 0 ]
then
echo "Exiting with error"
exit 1
else
files=`ls *_${now}_*`
for f in $files
do
size=`stat -c %s $f`
echo "written output file: $f $size"
done
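# Record the input PFN so justIN marks this file as successfully processed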
echo "$pfn" > justin-processed-pfns.txt
fi