
Workflow 9021, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 2097152000 (2000 MiB)
Max distance for inputs: 30.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: usertests
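
These limits and lists are fixed when the stage is created. For orientation, the sketch below shows how a comparable stage could be requested from the justin command line. It is illustrative only: it is not the command used for workflow 9021, the MQL selection is a placeholder, and the option names (--wall-seconds, --rss-mb, --scope, --env) are assumptions to verify against `justin --help` for the deployed version.

# Illustrative submission sketch only; option names below are assumptions
# to be checked against "justin --help".
./justin simple-workflow \
  --mql "<metacat query or rucio-dataset selecting the input files>" \
  --jobscript lar.jobscript \
  --max-distance 30 \
  --wall-seconds 80000 \
  --rss-mb 2000 \
  --scope usertests \
  --env beam_fcl=run_pdhd_beamevent_notof.fcl \
  --env nevents=-1 \
  --env NFILES=30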
Events for this stage

Output patterns

#  Destination                                                                              Pattern     Lifetime  For next stage  RSE expression
1  https://fndcadoor.fnal.gov:2880/dune/scratch/users/calcuttj/beam_inst_pdhd/fnal/09021/1  *beam*root  -         -               -
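
The pattern is matched against file names created in the job's working directory, and matching files are uploaded to the destination URL. Both ROOT files written by the jobscript below (pdhd_<jobid>_<stageid>_<timestamp>_beam.root and the corresponding _beam_hist.root) satisfy *beam*root. A quick local check of the wildcard, using made-up file names:

# Sketch: confirm the jobscript's output names match the stage's *beam*root pattern.
touch pdhd_12345_1_20251015T140000Z_beam.root pdhd_12345_1_20251015T140000Z_beam_hist.root
ls *beam*root   # lists both files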

Environment variables

Name            Value
beam_fcl        run_pdhd_beamevent_notof.fcl
extra_fcl_path  /cvmfs/fifeuser2.opensciencegrid.org/sw/dune/6f85814e7ccd57cb25b263f3ffe141f4c9ebeb12
nevents         -1
NFILES          30
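
The jobscript reads these with ${var:-default} fallbacks, so the same values can be exported by hand for a local dry run. A minimal sketch, assuming the jobscript is saved locally as lar.jobscript and a readable test input exists (the TESTFILE path is hypothetical); with TESTFILE set, the script skips justin-get-file entirely:

# Local dry-run sketch mirroring this stage's environment variables.
export beam_fcl=run_pdhd_beamevent_notof.fcl
export extra_fcl_path=/cvmfs/fifeuser2.opensciencegrid.org/sw/dune/6f85814e7ccd57cb25b263f3ffe141f4c9ebeb12
export nevents=-1
export NFILES=30
export TESTFILE=/path/to/a/local/test/file.root   # hypothetical local input
bash lar.jobscript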

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
7500         0        0            0          0           7500       0          0

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Notused  Aborted  Stalled  Jobscript error  Outputting failed  None processed
3148   0          0        0           0           3002      0        0        29       0                117                0
[Chart: Files processed per bin, Oct-15 14:00 through Oct-18 10:00 (4-hour bins), by site: UK_QMUL, UK_Oxford, UK_Sheffield, UK_Manchester, UK_RAL-Tier1, UK_RAL-PPD, CERN, UK_Bristol, NL_SURFsara, UK_Lancaster, NL_NIKHEF, FR_CCIN2P3, CZ_FZU, ES_PIC, ES_CIEMAT, UK_Brunel]
[Chart: Replicas per RSE: DUNE_CERN_EOS 7500 (50%), FNAL_DCACHE 7500 (50%)]

RSEs used

Name           Inputs  Outputs
DUNE_CERN_EOS  8621    0
None           0       946

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

File reset events, by site

Site           Allocated  Outputting
UK_Lancaster   163        20
CERN           133        86
UK_Sheffield   58         42
ES_PIC         54         10
UK_RAL-PPD     40         159
NL_SURFsara    40         10
UK_Manchester  40         12
UK_RAL-Tier1   20         81
CZ_FZU         0          59
UK_QMUL        0          45
NL_NIKHEF      0          25
UK_Bristol     0          14
UK_Oxford      0          6
FR_CCIN2P3     0          4

Jobscript

#!/bin/bash
#
# Example jobscript that runs lar for all the files
# referred to by the MQL expression given on the justin command line.
#
# Submit with something like this:
#
# ./justin simple-workflow \
#  --max-distance 30 \
#  --mql "rucio-dataset protodune-sp:np04_raw_run_number_5769" \
#  --jobscript lar.jobscript
#
# Then monitor with dashboard or ./justin show-jobs --workflow-id ID
# where ID is the value printed by the first command
#


# the xroot lib for streaming non-root files is in testproducts, 
# so add it to the start of the path
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  
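# The exports above cap common threading libraries (OpenMP, OpenBLAS, MKL,
# NumExpr, Julia, TensorFlow) at the processor count justIN allocated to this
# job, so the payload does not oversubscribe shared worker-node cores.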

# From jobsub
export CLUSTER=${CLUSTER:-1}
export PROCESS=${PROCESS:-1}

setup dunesw ${DUNESW_VERSION:-v10_10_04d00} -q e26:prof
setup_exit=$?
if [ "$setup_exit" != 0 ]; then
  echo "Error setting up dunesw ${DUNESW_VERSION}"
  exit
fi

export RUCIO_ACCOUNT=justinreadonly

if [ -n "${extra_fcl_path}" ]; then
  echo "ADDING ${extra_fcl_path} to fcl file path"
  sleep 1
  export FHICL_FILE_PATH=${extra_fcl_path}:${FHICL_FILE_PATH}
fi
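
# Optional sanity check (assumption: the fhicl-dump utility from the dunesw
# stack is on PATH after the setup above); uncomment to confirm the fcl
# resolves through the extended FHICL_FILE_PATH before lar runs:
# fhicl-dump ${beam_fcl:-run_pdhd_beamevent.fcl} > /dev/null || echo "fcl not found in FHICL_FILE_PATH"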

# Get an unprocessed file from this stage
#did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
#if [ "${did_pfn_rse}" == "" ] ; then
#  echo "Could not get file"
#  exit 0
#fi
#
#did=`echo $did_pfn_rse | cut -f1 -d' '`
#pfn=`echo $did_pfn_rse | cut -f2 -d' '`
#rse=`echo $did_pfn_rse | cut -f3 -d' '`
#name_only=$(echo $did | cut -f2 -d':')
#echo "name" $name_only

NFILES=${NFILES:-1}
files=()
nfiles=0
#if [ $NFILES -eq 1 ]; then
#  echo "Will use justin-get-file"
#  DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
#  if [ "${DID_PFN_RSE}" == "" ] ; then
#    echo "Could not get file"
#    exit 0
#  fi
#  export pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
#  export did=`echo ${DID_PFN_RSE} | cut -f1 -d' '`
#
#  if [ -z ${TESTFILE} ]; then
#    files+=($pfn)
#  else
#    files+=(${TESTFILE})
#  fi
#
if [ -n "${TESTFILE}" ]; then
  echo "Using testfile"
  files+=(${TESTFILE})
  nfiles=1
else
  nfiles=0
  for i in `seq 1 ${NFILES:-1}`; do
    echo $i
    DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`

    if [ "${DID_PFN_RSE}" == "" ] ; then
      echo "Could not get file -- exiting loop"
      break
    fi

    echo "did_pfn_rse: ${DID_PFN_RSE}"

    THISFILE=`echo ${DID_PFN_RSE} | cut -f2 -d' '`

    echo $THISFILE
    files+=(${THISFILE})
    nfiles=$(( nfiles + 1 ))
  done
fi

echo "Got $nfiles Files"
echo ${files[@]}

if [[ $nfiles == 0 ]]; then
  echo "No files exiting"
  exit 0
fi

echo "Found input file URL $pfn at $rse"

#Get the stage and momentum 
export stage=${stage:-"Gen"}
export momentum=${momentum:-1}

beam_fcl=${beam_fcl:-"run_pdhd_beamevent.fcl"}

now=$(date -u +"%Y%m%dT%H%M%SZ")
jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
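# Illustrative example (made-up value): a JUSTIN_JOBSUB_ID of
# "12345678.0@sched01.example" becomes jobid "12345678_0"; everything after '@'
# is dropped and the first '.' becomes '_', giving a filename-safe tag.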
stageid=${JUSTIN_STAGE_ID:-1}
output_preamble="pdhd_prod_beam_"

nevents=${nevents:--1}
nskip=${nskip:-0}
echo "XROOTD: $XROOTD_LIB"
#Generation
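# Preloading libXrdPosixPreload.so intercepts POSIX file I/O and routes
# root:// URLs through XRootD, so lar can stream the non-ROOT inputs mentioned
# at the top of this script directly from the storage element.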
LD_PRELOAD=$XROOTD_LIB/libXrdPosixPreload.so lar \
    -n  ${nevents} \
    --nskip ${nskip} \
    -c  ${beam_fcl} \
    -o  pdhd_${jobid}_${stageid}_${now}_beam.root \
    -T pdhd_${jobid}_${stageid}_${now}_beam_hist.root \
    -s "${files[@]}"

larReturnCode=$?
echo "gen lar returns $larReturnCode"
if [ "$larReturnCode" != 0 ] ; then
  exit $larReturnCode
fi
#Generation Done

####EDIT METADATA FOR THE FOLLOWING FILE
#${name_only}_${jobid}_${stageid}_${now}_detsim_reco1.root

# Record that we processed the input file ok (did we???)
#echo "$pfn" > justin-processed-pfns.txt
for i in "${files[@]}"; do
  echo "${i}" >> justin-processed-pfns.txt
done
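# justIN reads justin-processed-pfns.txt when the jobscript exits and marks the
# listed PFNs as Processed; any input obtained from justin-get-file but not
# listed here is reset and handed to a later job.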

# For debugging
for i in *.json
do
  echo "==== Start $i ===="
  cat $i
  echo "==== End $i ===="
done

ls -ltR

exit 0
justIN time: 2025-11-04 01:33:08 UTC       justIN version: 01.05.01