Workflow 10321, Stage 1
| Workflow | 10321 |
| Priority | 50 |
| Processors | 1 |
| Wall seconds | 80000 |
| Image | /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest |
| RSS bytes | 8388608000 (8000 MiB) |
| Max distance for inputs | 0.0 |
| Enabled input RSEs |
CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled output RSEs |
CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled sites |
BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
| Scope | usertests |
| Events for this stage |
Environment variables
| Name | Value |
|---|---|
| FINAL_OUTPUT_DIR | /pnfs/dune/scratch/users/szh2/pdhd_DATA_Michel/output_justIN |
| INPUT_TAR_DIR_LOCAL | /cvmfs/fifeuser4.opensciencegrid.org/sw/dune/b5e92ef62cbb0d46ffb94520c375047d4432e511 |
| MQL_QUERY | files from dune:all where core.file_type=detector and core.run_type=hd-protodune and core.data_tier=full-reconstructed and core.data_stream=cosmics and core.runs[any]=28116 limit 3 |
File states
| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
|---|---|---|---|---|---|---|---|
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
Job states
| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)
Jobscript
#!/bin/bash
############################################################
# ProtoDUNE-HD Michel Processing justIN Jobscript
#
# Steps:
#   1. Run deconvolution FHiCL on the raw ART file
#   2. Run Michel-time FHiCL on deconv_gen.root
#   3. Extract filename fields 3-8 for output naming
#   4. Copy the final analysis ROOT file to PNFS via ifdh
#   5. Mark the input PFN as processed
#
# Environment (supplied by justIN):
#   JUSTIN_PATH, JUSTIN_WORKFLOW_ID, JUSTIN_STAGE_ID, JUSTIN_JOBSUB_ID
#   INPUT_TAR_DIR_LOCAL - unpacked user tarball (on CVMFS)
#   FINAL_OUTPUT_DIR    - optional; destination directory on PNFS
#
# Exit codes: 0 ok / nothing to do, 1 lar stage failed,
#   2 tarball missing, 3 localProducts setup missing,
#   4 lar not on PATH, 80/81 FHiCL missing, 82 output copy failed
############################################################
echo "----- justIN DEBUG -----"
echo "WFID: ${JUSTIN_WORKFLOW_ID}"
echo "SID: ${JUSTIN_STAGE_ID}"
echo "MQL: ${MQL_QUERY:-'NOT PROVIDED TO JOBSCRIPT'}"
echo "-------------------------"

############## USER SETTINGS ################################
# LArSoft version
DUNE_VERSION=v10_04_00d00
DUNE_QUALIFIER=e26:prof
# Output directory (default used when justIN does not export it)
FINAL_OUTPUT_DIR=${FINAL_OUTPUT_DIR:-/pnfs/dune/scratch/users/szh2/pdhd_DATA_Michel/output_justIN}

echo "[INFO]=========================================================="
echo "[INFO] Initial working directory:"
pwd
echo "[INFO] Listing working directory:"
ls -lh

# The user tarball must already be unpacked on CVMFS by justIN.
if [ ! -d "${INPUT_TAR_DIR_LOCAL}" ]; then
echo "[ERROR] INPUT_TAR_DIR_LOCAL is missing tarball not unpacked!"
exit 2
fi
echo "[INFO] Listing tarball contents at INPUT_TAR_DIR_LOCAL:"
ls -lh "${INPUT_TAR_DIR_LOCAL}"
echo "[INFO] Output dir = ${FINAL_OUTPUT_DIR}"
echo ""

############## GET INPUT PFN FROM JUSTIN #####################
did_pfn_rse="$(${JUSTIN_PATH}/justin-get-file)"
echo "[DEBUG] Raw output from justin-get-file: '${did_pfn_rse}'"
# Empty output means there are no unallocated files left — not an error.
if [ -z "${did_pfn_rse}" ]; then
echo "[INFO] No file to process. Exiting."
exit 0
fi
# justin-get-file emits one space-separated line: "<did> <pfn> <rse>"
did=$(echo "${did_pfn_rse}" | cut -f1 -d' ')
pfn=$(echo "${did_pfn_rse}" | cut -f2 -d' ')
rse=$(echo "${did_pfn_rse}" | cut -f3 -d' ')
echo "[INFO] DID : ${did}"
echo "[INFO] PFN : ${pfn}"
echo "[INFO] RSE : ${rse}"

############## EXTRACT RAW BASENAME ##########################
raw_base=$(basename "${pfn}")
# Example filename:
# np04hd_raw_run028116_0390_dataflow5_datawriter_0_20240724T121733_reco_stage1_reco_stage2_20240911T044206_keepup.root
#
# We want underscore-separated fields 3-8:
# run028116_0390_dataflow5_datawriter_0_20240724T121733
extracted=$(echo "${raw_base}" | awk -F'_' '{print $3"_"$4"_"$5"_"$6"_"$7"_"$8}')
echo "[INFO] Extracted pattern: ${extracted}"

############## SETUP ENVIRONMENT #############################
# Prevent UPS from using a stale MRB environment inherited from dunegpvm.
unset MRB_TOP
unset MRB_SOURCE
unset MRB_BUILDDIR
unset MRB_INSTALL
unset CETPKG_INSTALL
unset PRODUCTS
unset FHICL_FILE_PATH # optional, but recommended
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup dunesw ${DUNE_VERSION} -q ${DUNE_QUALIFIER}

echo "[INFO] Loading localProducts from tarball..."
LOCALPROD_SETUP="${INPUT_TAR_DIR_LOCAL}/localProducts_larsoft_v10_04_00d00_e26_prof/setup"
if [ ! -f "${LOCALPROD_SETUP}" ]; then
echo "[ERROR] localProducts setup script missing!"
exit 3
fi
source "${LOCALPROD_SETUP}"

# Verify 'lar' exists ('command -v' is the portable form of 'which').
if ! command -v lar >/dev/null 2>&1; then
echo "[ERROR] lar not found after environment setup!"
exit 4
fi
command -v lar

# Add the FHiCL directory inside the UNPACKED tarball.
echo "[INFO] Setting FHICL search path..."
export FHICL_FILE_PATH="${INPUT_TAR_DIR_LOCAL}:${FHICL_FILE_PATH}"

############## STEP 1 DECONVOLUTION ########################
FCL_DECON="${INPUT_TAR_DIR_LOCAL}/work/michel_DATA_deconv/newProcess202511/scripts/protodunehd_deconvolution_run.fcl"
if [ ! -f "${FCL_DECON}" ]; then
echo "ERROR: $FCL_DECON not found!"
exit 80
fi
echo "[STEP 1] Running deconvolution..."
rm -f deconv_gen.root
# Fail if lar exits non-zero OR the expected output file was not produced.
if ! lar -c "${FCL_DECON}" -s "${pfn}" > deconvolution.log 2>&1 \
   || [ ! -f "deconv_gen.root" ]; then
echo "[ERROR] Deconvolution failed!"
tail -50 deconvolution.log
exit 1
fi
echo "[INFO] Deconvolution finished."
echo ""

############## STEP 2 MICHEL TIME ##########################
FCL_MICHEL="${INPUT_TAR_DIR_LOCAL}/work/michel_DATA_deconv/newProcess202511/scripts/runmicheltime.fcl"
if [ ! -f "${FCL_MICHEL}" ]; then
echo "ERROR: $FCL_MICHEL not found!"
exit 81
fi
echo "[STEP 2] Running Michel time matching..."
rm -f michelt0_Decon.root
if ! lar -c "${FCL_MICHEL}" -s deconv_gen.root > michel.log 2>&1 \
   || [ ! -f "michelt0_Decon.root" ]; then
echo "[ERROR] Michel processing failed!"
tail -50 michel.log
exit 1
fi
echo "[INFO] Michel-time processing finished."
echo ""

############## STEP 3 SAVE OUTPUT ##########################
final_output="michelt0_decon_${extracted}.root"
echo "[INFO] Saving final file to ${FINAL_OUTPUT_DIR}"
# PNFS is generally not POSIX-mounted on grid workers, so create the
# destination via ifdh; fall back to mkdir for interactive/local runs.
# Creation failure is non-fatal here — the copy below is the real check.
ifdh mkdir_p "${FINAL_OUTPUT_DIR}" 2>/dev/null || mkdir -p "${FINAL_OUTPUT_DIR}" || true
# The copy MUST be checked: previously a failed upload was silently
# ignored and the input was still marked processed.
if ! ifdh cp -D michelt0_Decon.root "${FINAL_OUTPUT_DIR}/${final_output}"; then
echo "[ERROR] ifdh cp to ${FINAL_OUTPUT_DIR}/${final_output} failed!"
exit 82
fi
echo "[INFO] File saved successfully."

############## MARK PROCESSED ################################
# Only reached after a verified upload; justIN reads this file to
# record the input PFN as successfully processed.
echo "${pfn}" > justin-processed-pfns.txt

############## PACKAGE LOGS ##################################
tar zcf "${JUSTIN_JOBSUB_ID}.logs.tgz" *.log
echo "[INFO] Job completed successfully."
exit 0