Workflow 1679, Stage 1
| Priority | 50 |
| Processors | 1 |
| Wall seconds | 80000 |
| RSS bytes | 5767168000 (5500 MiB) |
| Max distance for inputs | 100.0 |
| Enabled input RSEs | |
| Enabled output RSEs | |
| Enabled sites | |
| Scope | usertests |
| Events for this stage | |
Output patterns

| | Destination | Pattern | Lifetime (s) | For next stage |
|---|---|---|---|---|
| 0 | Rucio usertests:pdhd_1gev_beam_cosmics_test_sce_e500_pndr_06 | *sce_E500*.pndr | 86400 | False |
| 0 | Rucio usertests:pdhd_1gev_beam_cosmics_test_sce_e500_reco_06 | *sce_E500*reco.root | 86400 | False |
| 0 | Rucio usertests:pdhd_1gev_beam_cosmics_test_sce_off_pndr_06 | *sce_off*.pndr | 86400 | False |
| 0 | Rucio usertests:pdhd_1gev_beam_cosmics_test_sce_off_reco_06 | *sce_off*reco.root | 86400 | False |
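Files written by the jobscript that match one of these patterns are uploaded to the corresponding Rucio dataset in the usertests scope. Once jobs have finished, the uploaded outputs can be inspected with the standard Rucio client, for example (assuming a configured Rucio client with DUNE credentials; the dataset name is taken from one of the reco rows above):

    # list the files attached to one of the reco output datasets
    rucio list-files usertests:pdhd_1gev_beam_cosmics_test_sce_e500_reco_06
    # download them to the current directory
    rucio download usertests:pdhd_1gev_beam_cosmics_test_sce_e500_reco_06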
Environment variables

| Name | Value |
|---|---|
| INPUT_DIR | /cvmfs/fifeuser1.opensciencegrid.org/sw/dune/ce118761db32e8ea5459948798ec92e12aaed034 |
| NEVENTS | 1 |
| STARTLINE | 100 |
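The jobscript below consumes these as follows: INPUT_DIR is the CVMFS area holding the lookup file all_h4_prod_nums.txt together with the fcl and metadata templates, NEVENTS overrides the number of events generated per job, and STARTLINE is added to the job's line number before the lookup. A minimal standalone sketch of that lookup, mirroring the script (the LINENUMBER value here is illustrative; in the job it is derived from the allocated input):

    INPUT_DIR=/cvmfs/fifeuser1.opensciencegrid.org/sw/dune/ce118761db32e8ea5459948798ec92e12aaed034
    LINENUMBER=1                                  # illustrative; normally taken from the allocated input
    STARTLINE=100                                 # value configured for this stage (see table above)
    LINENUMBER=$(( 10#$LINENUMBER + STARTLINE ))  # force base 10, apply the offset
    # each line of all_h4_prod_nums.txt holds "filenum iter nevents" for one job
    sed -n "${LINENUMBER}p" "${INPUT_DIR}/all_h4_prod_nums.txt"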
File states

| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
|---|---|---|---|---|---|---|---|
| 20 | 0 | 0 | 0 | 0 | 20 | 0 | 0 |
Job states

| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 41 | 0 | 0 | 0 | 0 | 36 | 0 | 4 | 1 | 0 | 0 | 0 |
RSEs used

| Name | Inputs | Outputs |
|---|---|---|
| MONTECARLO | 25 | 0 |
| RAL_ECHO | 0 | 16 |
| RAL-PP | 0 | 16 |
| NIKHEF | 0 | 16 |
| DUNE_CERN_EOS | 0 | 12 |
| QMUL | 0 | 8 |
| DUNE_US_FNAL_DISK_STAGE | 0 | 8 |
| DUNE_US_BNL_SDCC | 0 | 4 |
Stats of processed input files and of uploaded output files are available for download as CSV or JSON (up to 10000 files included).
File reset events, by site

| Site | Allocated | Outputting |
|---|---|---|
| US_UConn-HPC | 1 | 0 |
| UK_Brunel | 1 | 0 |
| NL_NIKHEF | 1 | 0 |
| UK_Lancaster | 1 | 0 |
| BR_CBPF | 1 | 0 |
Jobscript
#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
# Set up a recent LArSoft software suite (dunesw)
DUNE_VERSION=${DUNE_VERSION:-v09_85_00d00}
setup dunesw \
"${DUNE_VERSION}" \
-q "${DUNE_QUALIFIER:-e26:prof}"
if [ $? -ne 0 ]; then
echo "Failed to setup dunesw $DUNE_VERSION $DUNE_QUALIFIER"
exit 1
fi
echo "DUNESW loc:"
ups active | grep dunesw
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
echo "printing env"
env
echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
if [ "${DID_PFN_RSE}" == "" ] ; then
echo "Could not get file"
exit 0
fi
pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
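# If LINENUMBER is not set in the environment it falls back to the pfn itself:
# with MONTECARLO placeholder input the "pfn" is effectively just a counter,
# so it can be used directly as a line number into the lookup file.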
if [ -z "${LINENUMBER}" ] ; then
LINENUMBER=$pfn
fi
STARTLINE=${STARTLINE:-0}
echo "Adding ${STARTLINE} to linenumber ${LINENUMBER}"
LINENUMBER=$(( 10#$LINENUMBER + $STARTLINE ))
echo $LINENUMBER
# Get the file number and iteration within the file that we want to use
get_nums=true
line=`sed -n "${LINENUMBER}p" ${INPUT_DIR}/all_h4_prod_nums.txt` || get_nums=false
echo "using ${LINENUMBER} from input file"
echo ${line}
split=($line)
filenum=${split[0]}
iter=${split[1]}
nevents=${split[2]}
# Plain assignments always succeed, so the old "|| get_nums=false" guards could
# never fire; check the parsed fields explicitly instead.
if [ -z "${filenum}" ] || [ -z "${iter}" ] || [ -z "${nevents}" ]; then
get_nums=false
fi
eventnum=$(( iter * 10 ))
#run=$((10**6 + 1))
#subrun=$(($run*10**5 + $filenum))
if [ "${get_nums}" = false ]; then
echo "Failed to get numbers from input file"
exit 1
fi
echo "File: ${filenum}"
echo "Iter: ${iter}"
echo "eventum: ${eventnum}"
echo "subrun: ${subrun}"
echo "jobsub_id: ${JUSTIN_JOBSUB_ID:-1}"
run=`echo "${JUSTIN_JOBSUB_ID:-1}" | awk -F '.' '{print $1}'`
echo "run: $run"
#-e "s/StartEvent: 0/StartEvent: ${iter}/" \
#-e "s/StartEvent: 1/StartEvent: ${eventnum}/" \
sed -e "s/_1.root/_${filenum}.root/" \
-e "s/firstRun: 1/firstRun: ${run}/" \
-e "s/firstSubRun: 1/firstSubRun: ${filenum}/" \
-e "s/firstEvent: 1/firstEvent: ${eventnum}/" \
"${INPUT_DIR}/pdhd_prod.fcl" > prod.fcl
if [ $? -ne 0 ]; then
echo "Could not edit pdhd_prod.fcl numbers"
exit 1
fi
echo "prod.fcl"
cat prod.fcl
nevents=${NEVENTS:-$nevents} #Set from env override or from above
echo "nevents: $nevents"
# Run Generator
echo "Running prod"
now=$(date -u +"%Y%m%dT%H%M%SZ")
prodname="prod_beam_p1GeV_cosmics_protodunehd_${now}"
lar -c prod.fcl \
-o ${prodname}.root \
-n ${nevents} >prod.log 2>&1
prodExit=$?
if [ $prodExit -ne 0 ]; then
echo "Error in prod"
tail -100 prod.log
exit 1
fi
# Stage 1 G4
echo "Running g4stage1"
g4stage1_name="${prodname}_g4_stage1"
g4start=`date +"%s"`.0
lar -c standard_g4_protodunehd_stage1.fcl \
${prodname}.root \
-o ${g4stage1_name}.root >g4stage1.log 2>&1
g4stage1Exit=$?
if [ $g4stage1Exit -ne 0 ]; then
echo "Error in g4stage1"
tail -100 g4stage1.log
exit 1
fi
g4end=`date +"%s"`.0
# Make metadata
#overrides="dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
# core.start_time=${g4start} \
# core.end_time=${g4end} \
#"
#python ${INPUT_DIR}/pdhd_meta_writer.py \
# --json ${INPUT_DIR}/pdhd_base_meta.json \
# --overrides ${overrides} \
# --run ${filenum} \
# --event ${eventnum} \
# --nevents ${nevents} \
# -o ${g4stage1_name}.root.json
#if [ $? -ne 0 ]; then
# echo "Error writing g4stage1 json"
# exit 1
#fi
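# The space-charge-off chain (G4 stage 2, detsim, reco, metadata) runs unless
# NO_SCE_OFF is set in the job environment.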
if [ -z "${NO_SCE_OFF}" ]; then
# Stage 2 G4 -- No SCE
echo "Running g4stage2"
g4stage2_name="${g4stage1_name}_g4_stage2_sce_off"
lar -c standard_g4_protodunehd_stage2.fcl \
${g4stage1_name}.root \
-o ${g4stage2_name}.root >g4stage2.log 2>&1
g4stage2Exit=$?
if [ $g4stage2Exit -ne 0 ]; then
echo "Error in g4stage2"
tail -100 g4stage2.log
exit 1
fi
# Detsim
echo "Running detsim"
detsim_name="${g4stage2_name}_detsim"
lar -c standard_detsim_protodunehd.fcl \
${g4stage2_name}.root \
-o ${detsim_name}.root >detsim.log 2>&1
detsimExit=$?
if [ $detsimExit -ne 0 ]; then
echo "Error in detsim"
tail -100 detsim.log
exit 1
fi
# Reco
echo "Running reco"
reco_name="${detsim_name}_reco"
recostart=`date +"%s"`.0
lar -c standard_reco_protodunehd.fcl \
${detsim_name}.root \
-o ${reco_name}.root >reco.log 2>&1
recoExit=$?
if [ $recoExit -ne 0 ]; then
echo "Error in reco"
tail -100 reco.log
exit 1
fi
recoend=`date +"%s"`.0
# Make metadata
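# overrides is a whitespace-separated list of key=value pairs handed to
# pdhd_meta_writer.py; the core.* and dune_mc.* keys appear to follow the usual
# DUNE file-metadata field naming used in pdhd_base_meta.json.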
overrides="core.data_tier=full-reconstructed \
core.application.version=${DUNE_VERSION} \
dune.config_file=standard_reco_protodunehd.fcl \
core.start_time=${recostart} \
core.end_time=${recoend} \
core.application.name=reco \
core.application=art.reco \
dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
"
namespace=${JUSTIN_SCOPE:-"usertests"}
#-- ${filenum} \
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${INPUT_DIR}/pdhd_base_meta.json \
--overrides ${overrides} \
--event ${eventnum} \
--nevents ${nevents} \
--filenum ${filenum} \
--jobid ${JUSTIN_JOBSUB_ID} \
--past_fcls prod_beam_cosmics_1GeV_protodunehd.fcl \
standard_g4_protodunehd_stage1.fcl \
standard_g4_protodunehd_stage2.fcl \
standard_detsim_protodunehd.fcl \
--past_apps prod g4_stage1 g4_stage2 detsim \
-o ${reco_name}.root.json
if [ $? -ne 0 ]; then
echo "Error writing reco json"
exit 1
fi
## TODO -- CHECK WITH LEIGH IF WE NEED EVERY PNDR FILE
mv Pandora_Events.pndr ${reco_name}_Pandora_Events.pndr
if [ $? -ne 0 ]; then
echo "Error mving/renaming pndr file"
exit 1
fi
## Write-out PNDR metadata
## Copy from reco json but change data tier and file format
overrides="core.data_tier=pandora_info \
core.file_format=binary \
"
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${reco_name}.root.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
-o ${reco_name}_Pandora_Events.pndr.json
fi
# Stage 2 G4 -- With SCE
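# As above, the space-charge-on (E500) chain runs unless NO_SCE_ON is set in
# the job environment.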
if [ -z "${NO_SCE_ON}" ]; then
echo "Running g4stage2 sce on"
g4stage2_name="${g4stage1_name}_g4_stage2_sce_E500"
lar -c standard_g4_protodunehd_stage2_sce_E500.fcl \
${g4stage1_name}.root \
-o ${g4stage2_name}.root >g4stage2.log 2>&1
g4stage2Exit=$?
if [ $g4stage2Exit -ne 0 ]; then
echo "Error in g4stage2"
tail -100 g4stage2.log
exit 1
fi
# Detsim
echo "Running detsim sce on"
detsim_name="${g4stage2_name}_detsim"
lar -c standard_detsim_protodunehd.fcl \
${g4stage2_name}.root \
-o ${detsim_name}.root >detsim.log 2>&1
detsimExit=$?
if [ $detsimExit -ne 0 ]; then
echo "Error in detsim"
tail -100 detsim.log
exit 1
fi
# Reco -- With SCE Corrections
##TODO -- MAKE SURE TO REPLACE WITH THE SCE ON ONE
echo "Running reco sce on"
reco_name="${detsim_name}_reco"
#reco_sce_fcl=${INPUT_DIR}/test_sce_E500.fcl
reco_sce_fcl=standard_reco_protodunehd_sce_E500.fcl
recostart=`date +"%s"`.0
lar -c $reco_sce_fcl \
${detsim_name}.root \
-o ${reco_name}.root >reco.log 2>&1
recoExit=$?
if [ $recoExit -ne 0 ]; then
echo "Error in reco"
tail -100 reco.log
exit 1
fi
recoend=`date +"%s"`.0
# Make metadata
overrides="core.data_tier=full-reconstructed \
core.application.version=${DUNE_VERSION} \
dune.config_file=${reco_sce_fcl} \
core.start_time=${recostart} \
core.end_time=${recoend} \
core.application.name=reco \
core.application=art.reco \
dune_mc.space_charge=yes \
dune_mc.h4_input_file=H4_v34b_1GeV_-27.7_10M_${filenum}.root \
"
namespace=${JUSTIN_SCOPE:-"usertests"}
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${INPUT_DIR}/pdhd_base_meta.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
--past_fcls prod_beam_cosmics_1GeV_protodunehd.fcl \
standard_g4_protodunehd_stage1.fcl \
standard_g4_protodunehd_stage2_sce_E500.fcl \
standard_detsim_protodunehd.fcl \
--past_apps prod g4_stage1 g4_stage2 detsim \
-o ${reco_name}.root.json
#--parent "${namespace}:${g4stage1_name}.root" \
if [ $? -ne 0 ]; then
echo "Error writing reco json"
exit 1
fi
## TODO -- CHECK WITH LEIGH IF WE NEED EVERY PNDR FILE
mv Pandora_Events.pndr ${reco_name}_Pandora_Events.pndr
if [ $? -ne 0 ]; then
echo "Error mving/renaming pndr file"
exit 1
fi
## Write-out PNDR metadata
## Copy from reco json but change data tier and file format
overrides="core.data_tier=pandora_info \
core.file_format=binary \
"
python ${INPUT_DIR}/pdhd_meta_writer.py \
--json ${reco_name}.root.json \
--overrides ${overrides} \
--filenum ${filenum} \
--event ${eventnum} \
--nevents ${nevents} \
--jobid ${JUSTIN_JOBSUB_ID} \
-o ${reco_name}_Pandora_Events.pndr.json
fi
echo "$pfn" > justin-processed-pfns.txt