
Workflow 9631, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 6815744000 (6500 MiB; conversion check below this list)
Max distance for inputs: 30.0
Enabled input RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: usertests
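
For reference, the RSS limit above is just the 6500 MiB figure converted to bytes; a quick check using plain bash arithmetic (nothing justIN-specific):

# 6500 MiB expressed in bytes: 6500 * 1024 * 1024
echo $((6500 * 1024 * 1024))   # prints 6815744000
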
Events for this stage

Output patterns

 #  Destination                                                                Pattern     Lifetime  For next stage  RSE expression
 1  https://fndcadoor.fnal.gov:2880/dune/scratch/users/ykermaid/fnal/09631/1   *_ana.root
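
The pattern is matched against files the jobscript leaves in its working directory, and matching files are uploaded to the destination shown. A minimal sketch of the match in bash, using the ${PREFIX}_${now}_ana.root naming scheme from the jobscript below (the filename here is made up):

# Hypothetical output name in the jobscript's <prefix>_<timestamp>_ana.root form
OUTFILE="example_input_20251105T183542Z_ana.root"
case "$OUTFILE" in
  *_ana.root) echo "matches output pattern 1 -> uploaded to the scratch URL" ;;
  *)          echo "no matching pattern -> not uploaded" ;;
esac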

Environment variables

Name        Value
FCL_FILE    runSingleHit_PDVD.fcl
NTUPLE_DIR  /cvmfs/fifeuser1.opensciencegrid.org/sw/dune/f7cf64ee30ed1538f19cf08c7a2134cbc2c64147

Condor Class Ads

Name                           Value
HAS_CVMFS_dune_osgstorage_org  true

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
10           0        10           0          0           0          0          0

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Notused  Aborted  Stalled  Jobscript error  Outputting failed  None processed
76     0          0        0           0           40        0        0        4        0                0                  32
Replicas per RSE

DUNE_US_FNAL_DISK_STAGE (100%)

RSEs used

Name                     Inputs  Outputs
DUNE_US_FNAL_DISK_STAGE  36      0
None                     0       32

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

File reset events, by site

Site    Allocated  Outputting
CA_SFU  4          0

Jobscript

#!/bin/bash
#

# These must be defined
if [ -z "$FCL_FILE" ]; then
  echo "Fatal: Must provide FCL_FILE env var"
  exit 1
fi

if [ -z "$NTUPLE_DIR" ]; then
  echo "Fatal: Must provide NTUPLE_DIR env var"
  exit 1
fi

stat "${NTUPLE_DIR}"
if [ $? -ne 0 ]; then
  echo "Failed to stat $NTUPLE_DIR. Exiting safely"
  exit 0
fi

source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune

# Set up a recent dunesw release
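# DUNE_VERSION and DUNE_QUALIFIER can be overridden through the workflow's environment variables; the defaults below are used otherwise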
DUNE_VERSION=${DUNE_VERSION:-v10_12_01d00}
setup dunesw \
   "${DUNE_VERSION}" \
   -q "${DUNE_QUALIFIER:-e26:prof}"

setup_exit=$?
if [ $setup_exit -ne 0 ]; then
  echo "Failed to set up dunesw ${DUNE_VERSION} ${DUNE_QUALIFIER:-e26:prof}"
  exit $setup_exit
fi


echo "DUNESW loc:"
ups active | grep dunesw

if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

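# Cap the thread pools of common numerical libraries to the processor count allocated by justIN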
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  

echo "printing env"
env

echo "Justin specific env vars"
env | grep JUSTIN

echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
##Check that any file was returned
if [ "${DID_PFN_RSE}" == "" ] ; then
  echo "Could not get file"
  exit 0
fi

pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
did=`echo ${DID_PFN_RSE} | cut -f1 -d' '`
echo "pfn: ${pfn}"
echo "did: ${did}"

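# UTC timestamp keeps the output filename unique if the same input file is processed again after a reset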
now=$(date -u +"%Y%m%dT%H%M%SZ")

##TODO -- edit this
#jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | awk -F '.' '{print $1}'`
jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
PREFIX=$(basename ${pfn} .root)
OUTFILE="${PREFIX}_${now}_ana.root"
echo "Output file: ${OUTFILE}"

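# NEVENTS defaults to -1, which tells lar to process every event in the input file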
nevents=${NEVENTS:--1}

echo "Running singlehit"
touch ana.log

starttime=`date +"%s"`.0
lar -c ${FCL_FILE} \
    -n ${nevents} \
    -T ${OUTFILE} \
    ${pfn}  >ana.log 2>&1
    #-s ${pfn_list}  >ana.log 2>&1
larExit=$?
endtime=`date +"%s"`.0

if [ $larExit -ne 0 ]; then
  echo "Error in ntuple production"
  cat ana.log
  exit $larExit
fi

echo "Ran successfully"

#echo "Forming metadata"

### Get the metadata
#python $NTUPLE_DIR/ntuple_prod_utils.py \
#  metadata \
#  --root_file ${OUTFILE} \
#  --dids $NTUPLE_DIR/input_dids_${KEY}.list \
#  --version ${DUNE_VERSION} \
#  --fcl_name ${FCL_FILE} \
#  --tree_name ${TREE_NAME:-"r4/ClusterTree"} \
#  -o $OUTFILE.json
#  #--log_file md.log \
#mdExit=$?
#
#if [ $mdExit -ne 0 ]; then
#  echo "Error in ntuple metdata production"
#  cat md.log
#  exit $mdExit
#fi
#
#echo "formed"
#cat ${OUTFILE}.json

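# justIN marks as processed only the PFNs listed in justin-processed-pfns.txt; files not listed are eventually reset and retried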
echo "$pfn_list" > justin-processed-pfns.txt