Workflow 1764, Stage 1
Parameter | Value |
---|---|
Priority | 50 |
Processors | 1 |
Wall seconds | 21000 |
RSS bytes | 5767168000 (5500 MiB) |
Max distance for inputs | 100.0 |
Enabled input RSEs | |
Enabled output RSEs | |
Enabled sites | |
Scope | hd-protodune |
Events for this stage | |
Output patterns
| # | Destination | Pattern | Lifetime (s) | For next stage |
|---|---|---|---|---|
0 | Rucio hd-protodune:pdhd_1gev_beam_cosmics_official_sce_e500_reco_limit5000_skip18000_1764 | *sce_E500*reco.root | 7776000 | False |
0 | Rucio hd-protodune:pdhd_1gev_beam_cosmics_official_sce_off_reco_limit5000_skip18000_1764 | *sce_off*reco.root | 7776000 | False |
0 | Rucio hd-protodune:pdhd_1gev_beam_cosmics_official_sce_e500_pandora_limit5000_skip18000_1764 | *sce_E500*.pndr | 7776000 | False |
0 | Rucio hd-protodune:pdhd_1gev_beam_cosmics_official_sce_off_pandora_limit5000_skip18000_1764 | *sce_off*.pndr | 7776000 | False |
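Each uploaded output is routed to one of the four Rucio datasets above by a wildcard match on its file name (lifetime 7776000 s, i.e. 90 days). A minimal illustration of that matching, using hypothetical file names built the way the jobscript below names its outputs; this only illustrates the glob patterns themselves, not justIN's internal matching logic:
# Hypothetical output names, abbreviated from the jobscript's naming scheme
for f in prod_X_sce_E500_detsim_reco.root prod_X_sce_off_detsim_reco.root \
prod_X_sce_E500_detsim_reco_Pandora_Events.pndr prod_X_sce_off_detsim_reco_Pandora_Events.pndr ; do
case "$f" in
*sce_E500*reco.root) echo "$f -> ..._sce_e500_reco_... dataset" ;;
*sce_off*reco.root) echo "$f -> ..._sce_off_reco_... dataset" ;;
*sce_E500*.pndr) echo "$f -> ..._sce_e500_pandora_... dataset" ;;
*sce_off*.pndr) echo "$f -> ..._sce_off_pandora_... dataset" ;;
esac
done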
Environment variables
Name | Value |
---|---|
INPUT_DIR | /cvmfs/fifeuser2.opensciencegrid.org/sw/dune/dced620ad7bd201a97f7ad5df759f50927786057 |
STARTLINE | 18000 |
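STARTLINE is added to the monte-carlo counter that justIN hands each job, turning it into an absolute line number of ${INPUT_DIR}/all_h4_prod_nums.txt, so this stage works through a 5000-line window offset by 18000 lines into that list. A minimal sketch, assuming a hypothetical counter value N, of reproducing that lookup by hand (same arithmetic as the jobscript below):
N=42                                  # hypothetical monte-carlo counter for one job
STARTLINE=18000                       # from the table above
LINENUMBER=$(( 10#$N + STARTLINE ))
sed -n "${LINENUMBER}p" ${INPUT_DIR}/all_h4_prod_nums.txt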
File states
Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
---|---|---|---|---|---|---|---|
5000 | 0 | 0 | 1 | 0 | 4243 | 0 | 756 |
Job states
Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
---|---|---|---|---|---|---|---|---|---|---|---|
20054 | 0 | 0 | 0 | 0 | 4381 | 373 | 19 | 7709 | 7572 | 0 | 0 |
RSEs used
Name | Inputs | Outputs |
---|---|---|
MONTECARLO | 14993 | 0 |
SURFSARA | 0 | 3182 |
DUNE_CERN_EOS | 0 | 2714 |
DUNE_US_BNL_SDCC | 0 | 2196 |
DUNE_US_FNAL_DISK_STAGE | 0 | 1996 |
RAL-PP | 0 | 1570 |
QMUL | 0 | 1334 |
RAL_ECHO | 0 | 1192 |
DUNE_FR_CCIN2P3_DISK | 0 | 990 |
PRAGUE | 0 | 854 |
NIKHEF | 0 | 506 |
DUNE_ES_PIC | 0 | 430 |
Stats of the processed input files and of the uploaded output files are available as CSV or JSON downloads (up to 10000 files included).
File reset events, by site
Site | Allocated | Outputting |
---|---|---|
IT_CNAF | 613 | 1 |
NL_SURFsara | 554 | 0 |
CERN | 531 | 0 |
US_NotreDame | 236 | 0 |
CA_Victoria | 208 | 0 |
US_BNL | 153 | 0 |
FR_CCIN2P3 | 88 | 0 |
UK_RAL-PPD | 75 | 0 |
UK_Brunel | 69 | 0 |
US_UChicago | 55 | 0 |
UK_Manchester | 52 | 0 |
US_FNAL-FermiGrid | 48 | 1 |
CZ_FZU | 46 | 0 |
BR_CBPF | 38 | 0 |
UK_QMUL | 36 | 0 |
UK_Bristol | 25 | 0 |
US_Colorado | 19 | 0 |
CA_SFU | 18 | 0 |
UK_Lancaster | 15 | 0 |
NL_NIKHEF | 14 | 0 |
UK_Edinburgh | 13 | 0 |
US_FNAL-T1 | 13 | 0 |
US_UCSD | 12 | 0 |
UK_Liverpool | 11 | 0 |
UK_Oxford | 10 | 0 |
US_UConn-HPC | 10 | 0 |
UK_Sheffield | 10 | 0 |
UK_Imperial | 7 | 0 |
US_PuertoRico | 2 | 0 |
Jobscript
#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
# Set up a recent dunesw (LArSoft) release
DUNE_VERSION=${DUNE_VERSION:-v09_85_00d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}
setup dunesw \
"${DUNE_VERSION}" \
-q "${DUNE_QUALIFIER}"
if [ $? -ne 0 ]; then
echo "Failed to setup dunesw ${DUNE_VERSION} ${DUNE_QUALIFIER}"
exit 1
fi
echo "DUNESW loc:"
ups active | grep dunesw
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
echo "printing env"
env
echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
if [ "${DID_PFN_RSE}" == "" ] ; then
echo "Could not get file"
exit 0
fi
pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
if [ -z "${LINENUMBER}" ] ; then
LINENUMBER=$pfn
fi
STARTLINE=${STARTLINE:-0}
echo "Adding ${STARTLINE} to linenumber ${LINENUMBER}"
LINENUMBER=$(( 10#$LINENUMBER + $STARTLINE ))
echo $LINENUMBER
# Get the file number and iteration within the file that we want to use
get_nums=true
line=`sed -n "${LINENUMBER}p" ${INPUT_DIR}/all_h4_prod_nums.txt` || get_nums=false
echo "using ${LINENUMBER} from input file"
echo ${line}
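# Each line of all_h4_prod_nums.txt is expected to hold three whitespace-separated
# fields, unpacked below as: <filenum> <iter> <nevents>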
split=($line)
iter=${split[1]}
filenum=${split[0]}
nevents=${split[2]}
# The bare assignments above always succeed, so explicitly check that all three fields were present
if [ ${#split[@]} -lt 3 ]; then get_nums=false; fi
#art Event counting is 1-indexed,
#but need 0-indexed for particle vector
#so add 1 here -- 1 is subtracted in module
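# e.g. iter=0 -> eventnum=1, iter=3 -> eventnum=31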
eventnum=$(( iter * 10 + 1 )) || get_nums=false
#run=$((10**6 + 1)) || get_nums=false
#subrun=$(($run*10**5 + $filenum)) || get_nums=false
if [ $get_nums = false ]; then
echo "Failed to get numbers from input file"
exit 1
fi
echo "File: ${filenum}"
echo "Iter: ${iter}"
echo "eventum: ${eventnum}"
echo "subrun: ${subrun}"
echo "jobsub_id: ${JUSTIN_JOBSUB_ID:-1}"
run=`echo "${JUSTIN_JOBSUB_ID:-1}" | awk -F '.' '{print $1}'`
echo "run: $run"
#-e "s/StartEvent: 0/StartEvent: ${iter}/" \
#-e "s/StartEvent: 1/StartEvent: ${eventnum}/" \
sed ${INPUT_DIR}/pdhd_prod.fcl \
-e "s/_1.root/_${filenum}.root/" \
-e "s/firstRun: 1/firstRun: ${run}/" \
-e "s/firstSubRun: 1/firstSubRun: ${filenum}/" \
-e "s/firstEvent: 1/firstEvent: ${eventnum}/" > prod.fcl
if [ $? -ne 0 ]; then
echo "Could not edit pdhd_prod.fcl numbers"
exit 1
fi
echo "prod.fcl"
cat prod.fcl
nevents=${NEVENTS:-$nevents} #Set from env override or from above
echo "nevents: $nevents"
if [ $nevents -eq 0 ]; then
echo "No events in this line -- exiting safely"
echo "$pfn" > justin-processed-pfns.txt
exit
fi
# Run Generator
echo "Running prod"
now=$(date -u +"%Y%m%dT%H%M%SZ")
prodname="prod_beam_p1GeV_cosmics_protodunehd_${now}_${run}_${pfn}"
lar -c prod.fcl \
-o ${prodname}.root \
-n ${nevents} >prod.log 2>&1
prodExit=$?
if [ $prodExit -ne 0 ]; then
echo "Error in prod"
tail -100 prod.log
exit $prodExit
fi
# Stage 1 G4
echo "Running g4stage1"
g4stage1_name="${prodname}_g4_stage1"
g4start=`date +"%s"`.0
lar -c standard_g4_protodunehd_stage1.fcl \
${prodname}.root \
-o ${g4stage1_name}.root >g4stage1.log 2>&1
g4stage1Exit=$?
if [ $g4stage1Exit -ne 0 ]; then
echo "Error in g4stage1"
tail -100 g4stage1.log
exit $g4stage1Exit
fi
g4end=`date +"%s"`.0
# Make metadata
#overrides="dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
# core.start_time=${g4start} \
# core.end_time=${g4end} \
#"
#python ${INPUT_DIR}/pdhd_meta_writer.py \
# --json ${INPUT_DIR}/pdhd_base_meta.json \
# --overrides ${overrides} \
# --run ${filenum} \
# --event ${eventnum} \
# --nevents ${nevents} \
# -o ${g4stage1_name}.root.json
#if [ $? -ne 0 ]; then
# echo "Error writing g4stage1 json"
# exit 1
#fi
if [ -z "${NO_SCE_OFF}" ]; then
# Stage 2 G4 -- No SCE
echo "Running g4stage2"
g4stage2_name="${g4stage1_name}_g4_stage2_sce_off"
lar -c standard_g4_protodunehd_stage2.fcl \
${g4stage1_name}.root \
-o ${g4stage2_name}.root >g4stage2.log 2>&1
g4stage2Exit=$?
if [ $g4stage2Exit -ne 0 ]; then
echo "Error in g4stage2"
tail -100 g4stage2.log
exit $g4stage2Exit
fi
# Detsim
echo "Running detsim"
detsim_name="${g4stage2_name}_detsim"
lar -c standard_detsim_protodunehd.fcl \
${g4stage2_name}.root \
-o ${detsim_name}.root >detsim.log 2>&1
detsimExit=$?
if [ $detsimExit -ne 0 ]; then
echo "Error in detsim"
tail -100 detsim.log
exit $detsimExit
fi
# Reco
echo "Running reco"
reco_name="${detsim_name}_reco"
recostart=`date +"%s"`.0
lar -c standard_reco_protodunehd.fcl \
${detsim_name}.root \
-o ${reco_name}.root >reco.log 2>&1
recoExit=$?
if [ $recoExit -ne 0 ]; then
echo "Error in reco"
tail -100 reco.log
exit $recoExit
fi
recoend=`date +"%s"`.0
# Make metadata
overrides="core.data_tier=full-reconstructed \
core.application.version=${DUNE_VERSION} \
dune.config_file=standard_reco_protodunehd.fcl \
core.start_time=${recostart} \
core.end_time=${recoend} \
core.application.name=reco \
core.application=art.reco \
dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
"
namespace=${JUSTIN_SCOPE:-"usertests"}
#-- ${filenum} \
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${INPUT_DIR}/pdhd_base_meta.json \
--overrides ${overrides} \
--event ${eventnum} \
--nevents ${nevents} \
--filenum ${filenum} \
--jobid ${JUSTIN_JOBSUB_ID} \
--past_fcls prod_beam_cosmics_1GeV_protodunehd.fcl \
standard_g4_protodunehd_stage1.fcl \
standard_g4_protodunehd_stage2.fcl \
standard_detsim_protodunehd.fcl \
--past_apps gen g4_stage1 g4_stage2 detsim \
-o ${reco_name}.root.json
if [ $? -ne 0 ]; then
echo "Error writing reco json"
exit 1
fi
## TODO -- CHECK WITH LEIGH IF WE NEED EVERY PNDR FILE
mv Pandora_Events.pndr ${reco_name}_Pandora_Events.pndr
if [ $? -ne 0 ]; then
echo "Error mving/renaming pndr file"
exit 1
fi
## Write-out PNDR metadata
## Copy from reco json but change data tier and file format
overrides="core.data_tier=pandora_info \
core.file_format=binary \
"
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${reco_name}.root.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
-o ${reco_name}_Pandora_Events.pndr.json
echo "All logs:"
echo "-----------G4 STAGE2------------"
cat g4stage2.log
echo "--------------------------------"
echo "-----------DETSIM---------------"
cat detsim.log
echo "--------------------------------"
echo "-----------RECO-----------------"
cat reco.log
echo "--------------------------------"
fi
# Stage 2 G4 -- With SCE
if [ -z "${NO_SCE_ON}" ]; then
echo "Running g4stage2 sce on"
g4stage2_name="${g4stage1_name}_g4_stage2_sce_E500"
lar -c standard_g4_protodunehd_stage2_sce_E500.fcl \
${g4stage1_name}.root \
-o ${g4stage2_name}.root >g4stage2.log 2>&1
g4stage2Exit=$?
if [ $g4stage2Exit -ne 0 ]; then
echo "Error in g4stage2"
tail -100 g4stage2.log
exit $g4stage2Exit
fi
# Detsim
echo "Running detsim sce on"
detsim_name="${g4stage2_name}_detsim"
lar -c standard_detsim_protodunehd.fcl \
${g4stage2_name}.root \
-o ${detsim_name}.root >detsim.log 2>&1
detsimExit=$?
if [ $detsimExit -ne 0 ]; then
echo "Error in detsim"
tail -100 detsim.log
exit $detsimExit
fi
# Reco -- With SCE Corrections
##TODO -- MAKE SURE TO REPLACE WITH THE SCE ON ONE
echo "Running reco sce on"
reco_name="${detsim_name}_reco"
#reco_sce_fcl=${INPUT_DIR}/test_sce_E500.fcl
reco_sce_fcl=standard_reco_protodunehd_sce_E500.fcl
recostart=`date +"%s"`.0
lar -c $reco_sce_fcl \
${detsim_name}.root \
-o ${reco_name}.root >reco.log 2>&1
recoExit=$?
if [ $recoExit -ne 0 ]; then
echo "Error in reco"
tail -100 reco.log
exit $recoExit
fi
recoend=`date +"%s"`.0
# Make metadata
overrides="core.data_tier=full-reconstructed \
core.application.version=${DUNE_VERSION} \
dune.config_file=${reco_sce_fcl} \
core.start_time=${recostart} \
core.end_time=${recoend} \
core.application.name=reco \
core.application=art.reco \
dune_mc.space_charge=yes \
dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
"
namespace=${JUSTIN_SCOPE:-"usertests"}
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${INPUT_DIR}/pdhd_base_meta.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
--past_fcls prod_beam_cosmics_1GeV_protodunehd.fcl \
standard_g4_protodunehd_stage1.fcl \
standard_g4_protodunehd_stage2_sce_E500.fcl \
standard_detsim_protodunehd.fcl \
--past_apps gen g4_stage1 g4_stage2 detsim \
-o ${reco_name}.root.json
#--parent "${namespace}:${g4stage1_name}.root" \
if [ $? -ne 0 ]; then
echo "Error writing reco json"
exit 1
fi
## TODO -- CHECK WITH LEIGH IF WE NEED EVERY PNDR FILE
mv Pandora_Events.pndr ${reco_name}_Pandora_Events.pndr
if [ $? -ne 0 ]; then
echo "Error mving/renaming pndr file"
exit 1
fi
## Write-out PNDR metadata
## Copy from reco json but change data tier and file format
overrides="core.data_tier=pandora_info \
core.file_format=binary \
"
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${reco_name}.root.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
-o ${reco_name}_Pandora_Events.pndr.json
echo "All logs:"
echo "-----------G4 STAGE2------------"
cat g4stage2.log
echo "--------------------------------"
echo "-----------DETSIM---------------"
cat detsim.log
echo "--------------------------------"
echo "-----------RECO-----------------"
cat reco.log
echo "--------------------------------"
fi
echo "$pfn" > justin-processed-pfns.txt