Stats of processed input files and of uploaded output files, available as CSV or JSON (up to 10,000 files included)
File reset events, by site
Site
Allocated
Outputting
CERN
20
0
UK_Manchester
11
0
UK_RAL-PPD
5
0
ES_PIC
5
0
UK_QMUL
2
0
UK_Oxford
1
0
UK_RAL-Tier1
1
0
Jobscript
#!/bin/bash
# ---------------------------------------------------------------------------
# Jobscript: generates CaloHitList-based graph data from input reco2 ROOT
# files using a custom LArSoft setup.
#
# Required environment variables:
#   FCL_FILE, CODE_TAR_DIR_LOCAL, DUNE_VERSION, DUNE_QUALIFIER,
#   XML_MASTER, XML_NEUTRINO
# Optional:
#   NUM_EVENTS - limit the number of events processed
# ---------------------------------------------------------------------------
# === Setup FCL and version info ===
# Each default applies only when the variable is unset or empty.
: "${FCL_FILE:=atm-training-extract.fcl}"
: "${DUNE_VERSION:=v10_04_06d00}"
: "${DUNE_QUALIFIER:=e26:prof}"
# === Number of events option ===
# Expands to "-n <N>" when NUM_EVENTS is non-empty, nothing otherwise; it is
# consumed unquoted later so it word-splits into two lar arguments.
events_option=${NUM_EVENTS:+"-n $NUM_EVENTS"}
# === Get a file from justIN ===
# justin-get-file (provided under $JUSTIN_PATH on the worker node) prints one
# line of the form "<DID> <PFN> <RSE>" for the next input file, or nothing
# when no file is currently assigned to this job.
did_pfn_rse=$($JUSTIN_PATH/justin-get-file)
if [ -z "$did_pfn_rse" ]; then
# No work assigned: exit 0 so the job ends cleanly rather than as a failure.
echo "No file assigned. Exiting jobscript."
exit 0
fi
# === Track input DID for MetaCat ===
# First space-separated field is the DID; appended so output metadata can
# record every input this job consumed.
echo "$did_pfn_rse" | cut -f1 -d' ' >> all-input-dids.txt
# === Parse PFN from DID ===
# Second field is the physical file name (e.g. an xrootd URL) handed to lar.
pfn=$(echo "$did_pfn_rse" | cut -d' ' -f2)
echo "Input PFN = $pfn"
# === Setup DUNE software ===
# Bring the ups 'setup' function into the environment from CVMFS, then
# activate the requested dunesw release/qualifier (defaults set above).
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
# === Mirror CODE_TAR_DIR_LOCAL ===
# Alias kept purely for logging; both names refer to the unpacked code
# tarball directory provided by justIN.
INPUT_TAR_DIR_LOCAL="$CODE_TAR_DIR_LOCAL"
echo "INPUT_TAR_DIR_LOCAL = $INPUT_TAR_DIR_LOCAL"
# === Setup custom code ===
if [ -n "$CODE_TAR_DIR_LOCAL" ]; then
echo "Using local products from $CODE_TAR_DIR_LOCAL"
# setup-grid points ups at the locally built larsoft products; mrbslp then
# sets up all local products so they override the base dunesw release.
source "$CODE_TAR_DIR_LOCAL/larsoft_graph_V1_2025/localProducts_larsoft_v10_04_06_e26_prof/setup-grid"
mrbslp
fi
# === Generate common timestamp and random suffix for output renaming ===
# One UTC timestamp shared by every file this job writes, plus a small
# random suffix (1..10) to make renamed outputs less likely to collide.
timestamp="$(date -u '+%Y-%m-%dT_%H%M%SZ')"
rand_suffix=$(( (RANDOM % 10) + 1 ))
# === Output file naming ===
# The art output file and the lar log both derive from the input basename
# with its .root suffix stripped.
fname="$(basename "$pfn" .root)"
outFile="${fname}_graph_${timestamp}.root"
logFile="${fname}_graph_${timestamp}.log"
# === Set FW search path ===
# Prepend the directories holding the two Pandora XML configurations so the
# framework's FW_SEARCH_PATH lookup can resolve them.
XML_DIR_MASTER="$(dirname "$XML_MASTER")"
XML_DIR_NEUTRINO="$(dirname "$XML_NEUTRINO")"
export FW_SEARCH_PATH="${XML_DIR_MASTER}:${XML_DIR_NEUTRINO}:${FW_SEARCH_PATH}"
# === Run lar (primary) ===
# Preload the xrootd POSIX shim so lar can open root:// PFNs via ordinary
# file I/O.
export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
echo "Running LArSoft with FCL: $FCL_FILE"
# $events_option is deliberately unquoted: it is either empty or "-n <N>"
# and must word-split into separate arguments.
lar -c "$FCL_FILE" $events_option -o "$outFile" "$pfn" > "$logFile" 2>&1
larExit=$?
# === Run lar (secondary) if needed ===
if [ -n "$FCL_SECONDARY" ]; then
secondary_out="secondary_${outFile}"
secondary_log="secondary_${logFile}"
echo "Running LArSoft with secondary FCL: $FCL_SECONDARY"
# Fix: secondary_out was previously defined but never passed to lar, so the
# secondary stage wrote art's default output name; route it explicitly.
lar -c "$FCL_SECONDARY" $events_option -o "$secondary_out" "$pfn" > "$secondary_log" 2>&1
fi
if [ -f "$secondary_log" ]; then
echo '=== Start last 100 lines of secondary lar log file ==='
tail -100 "$secondary_log"
echo '=== End last 100 lines of secondary lar log file ==='
fi
# === Rename .data and .root files with timestamp and suffix ===
# Only rename when the primary lar stage succeeded. Both glob patterns are
# handled in a single pass; an unmatched pattern survives as a literal
# string and is filtered out by the -f test.
if [ $larExit -eq 0 ]; then
for f in *.data *eid.root; do
if [ -f "$f" ]; then
newname="graph_output_${timestamp}_${rand_suffix}_$f"
mv -f "$f" "$newname"
echo "Renamed $f -> $newname"
fi
done
fi
# === Show lar log tail ===
echo '=== Start last 100 lines of lar log file ==='
tail -n 100 "$logFile"
echo '=== End last 100 lines of lar log file ==='
# === Mark processed ===
# A zero lar status tells justIN this PFN is done (via the marker file);
# any other status makes the whole jobscript report failure.
case $larExit in
0)
echo "$pfn" > justin-processed-pfns.txt
jobscriptExit=0
;;
*)
jobscriptExit=1
;;
esac
# === Package logs ===
# '@' in the jobsub ID is replaced with '_' to keep the archive name tidy.
tar zcf "${JUSTIN_JOBSUB_ID//[@]/_}.logs.tgz" *.log
# === Display output summary ===
echo "=== Generated output files ==="
ls -1 *.* 2>/dev/null | grep -v 'all-input-dids.txt' || echo "No output files found."
exit "$jobscriptExit"
justIN time: 2025-05-22 14:16:16 UTC justIN version: 01.03.01