justIN           Dashboard       Workflows       Jobs       AWT       Sites       Storages       Docs       Login

Workflow 10586, Stage 1

Workflow 10586
Priority 50
Processors 1
Wall seconds 80000
Image /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes 6815744000 (6500 MiB)
Max distance for inputs 100.0
Enabled input RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope usertests
Events for this stage

Output patterns

 DestinationPatternLifetimeFor next stageRSE expression
1Rucio usertests:pdvd-beam-test-fnal-w10586s1p1*merged.root2592000False

Environment variables

NameValue
MCJob1
NTUPLE_DIR/cvmfs/fifeuser4.opensciencegrid.org/sw/dune/b8bc301d8bfc965077077441dfb84225fedd133a
pipyaml1
RUN39273
WF_IDusertests_fnal-w8500s1p2

File states

Total filesFindingUnallocatedAllocatedOutputtingProcessedNot foundFailed
10100000

Job states

TotalSubmittedStartedProcessingOutputtingFinishedNotusedAbortedStalledJobscript errorOutputting failedNone processed
15000000000015

RSEs used

NameInputsOutputs
DUNE_US_FNAL_DISK_STAGE015

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

Jobscript

#!/bin/bash
#
# justIN jobscript: hadd-merges a pre-staged list of ntuple ROOT files
# into a single output file and produces metacat metadata for it.
#
# Required env vars: WF_ID, RUN, NTUPLE_DIR (set by the workflow definition).

#These must be defined
if [ -z "$WF_ID" ]; then
  echo "Fatal Must provide WF ID env var"
  exit 1
fi

if [ -z "$RUN" ]; then
  echo "Fatal Must provide RUN env var"
  exit 1
fi

if [ -z "$NTUPLE_DIR" ]; then
  echo "Fatal Must provide NTUPLE_DIR env var"
  exit 1
fi

# Exit 0 (not an error) so justIN treats a missing staging area as "no work"
# rather than a job failure.
if ! stat "${NTUPLE_DIR}"; then
  echo "Failed to stat $NTUPLE_DIR. Exiting safely"
  exit 0
fi

# Set up the DUNE software environment and the metacat client.
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune

#Setup recent lar software suite
DUNE_VERSION=${DUNE_VERSION:-v10_13_00d00}
setup dunesw \
   "${DUNE_VERSION}" \
   -q "${DUNE_QUALIFIER:-e26:prof}"

setup_exit=$?
# BUG FIX: this previously tested $? of the 'setup_exit=$?' assignment
# (always 0), so a failed dunesw setup was silently ignored.
if [ $setup_exit -ne 0 ]; then
  echo "Failed to setup dunesw $DUNE_VERSION $DUNE_QUALIFIER"
  exit $setup_exit
fi

echo "DUNESW loc:"
ups active | grep dunesw

#source /cvmfs/larsoft.opensciencegrid.org/products/root/v6_28_12/Linux64bit+3.10-2.17-e26-p3915-prof/bin/thisroot.sh

# Default to a single processor when justIN does not provide a slot size.
if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

# Cap the thread pools of common numeric libraries to the allotted CPU
# count so the job does not oversubscribe its batch slot.
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}

echo "printing env"
env

echo "Justin specific env vars"
env | grep JUSTIN

echo "Will use justin-get-file"
#

# NFILES is read for compatibility with the workflow definition but is not
# otherwise used below — the whole pre-staged list is merged.
nfiles=${NFILES:-1}

input_list="${NTUPLE_DIR}/input_pfns_${WF_ID}.list"

# 'wc -l < file' prints only the count, so no cut is needed and the result
# cannot be polluted by the filename.
n_files_retrieved=$(wc -l < "$input_list")
echo "Files retrieved: ${n_files_retrieved}"

if [ "$n_files_retrieved" -eq 0 ]; then
  echo "No files retrieved. Exiting safely"
  exit 0
fi

pfn_list=$(cat "$input_list")
echo "PFN list:"
# Deliberately unquoted: collapses the list onto one log line.
echo $pfn_list

now=$(date -u +"%Y%m%dT%H%M%SZ")

##TODO -- edit this
#jobid=$(echo "${JUSTIN_JOBSUB_ID:-1}" | awk -F '.' '{print $1}')
# jobid: jobsub id with the '@host' suffix removed and the first '.' -> '_'.
# NOTE(review): currently unused below — kept pending the TODO above.
jobid=$(echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/")

# Name the merged output after the first input PFN plus a UTC timestamp;
# the "_merged.root" suffix matches the stage's "*merged.root" output pattern.
PREFIX=$(basename "$(echo ${pfn_list} | cut -d ' ' -f1)" .root)
OUTFILE="${PREFIX}_${now}_merged.root"
echo "Output file: ${OUTFILE}"

# NEVENTS is read for compatibility; hadd always merges whole files.
nevents=${NEVENTS:--1}

echo "Running hadd"
touch ana.log

echo "hadd ${OUTFILE} ${pfn_list}"

starttime=$(date +"%s").0
# pfn_list is deliberately unquoted: it is a whitespace-separated file list
# that must word-split into individual hadd arguments.
hadd "${OUTFILE}" ${pfn_list} > merge.log 2>&1
larExit=$?
endtime=$(date +"%s").0

if [ $larExit -ne 0 ]; then
  echo "Error in ntuple production"
  cat merge.log
  exit $larExit
fi

echo "Ran successfully"

echo "Forming metadata"

### Get the metadata for the merged file (consumed input DIDs, version, run).
python "$NTUPLE_DIR/ntuple_prod_utils.py" \
  metadata \
  --root_file "${OUTFILE}" \
  --dids "$NTUPLE_DIR/input_dids_${WF_ID}.list" \
  --version "${DUNE_VERSION}" \
  --tree_name "beamevent/tree" \
  --run "${RUN}" \
  --fcl_name "hadd" \
  -o "${OUTFILE}.json"
  #--log_file md.log \
mdExit=$?

if [ $mdExit -ne 0 ]; then
  echo "Error in ntuple metadata production"
  # NOTE(review): md.log is only produced when --log_file is re-enabled
  # above; this cat is best-effort and may report a missing file.
  cat md.log
  exit $mdExit
fi

echo "formed"
cat "${OUTFILE}.json"

# Record the consumed PFNs so justIN marks them as processed.
echo "$pfn_list" > justin-processed-pfns.txt
justIN time: 2026-02-08 16:53:00 UTC       justIN version: 01.06.00