
Workflow 8497, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 4194304000 (4000 MiB)
Max distance for inputs: 100.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Glasgow, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: fardet-hd
Events for this stage

Output patterns

Destination: Rucio fardet-hd:fardet-hd-detsim_8497-fnal-w8497s1p1
Pattern: *detsim.root
Lifetime: 51840000 seconds
For next stage: False
RSE expression: (none)
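
The *detsim.root pattern matches the detsim output files written by the jobscript below, and the lifetime is given in seconds. As a quick sanity check of the retention period (a shell one-liner, not part of the workflow):

  # 51840000 seconds of Rucio rule lifetime correspond to 600 days
  echo $(( 51840000 / 86400 ))   # prints 600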

Environment variables

INPUT_DIR: /cvmfs/fifeuser1.opensciencegrid.org/sw/dune/0cae91efd80caa22be95220d05627d4f57dfe62e
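
INPUT_DIR points at a directory published on cvmfs that carries the helper files the jobscript relies on (trg_input.json and pdjson2metadata, used in the metadata steps below). A minimal check, run by hand rather than by the workflow, could look like this (purely illustrative):

  # Confirm the cvmfs-published helpers referenced by the jobscript are present
  ls "${INPUT_DIR}"
  test -f "${INPUT_DIR}/trg_input.json" && test -f "${INPUT_DIR}/pdjson2metadata" && echo "helpers found"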

File states

Total files: 6000
Finding: 0
Unallocated: 0
Allocated: 0
Outputting: 0
Processed: 6000
Not found: 0
Failed: 0

Job states

Total: 6467
Submitted: 0
Started: 0
Processing: 0
Outputting: 0
Finished: 6042
Notused: 0
Aborted: 31
Stalled: 394
Jobscript error: 0
Outputting failed: 0
None processed: 0
[Plot: "Files processed", number of files per bin vs bin start time (Oct-03 05:00 to Oct-03 13:00 UTC), broken down by site: UK_Bristol, NL_SURFsara, UK_Durham, UK_QMUL, UK_Manchester, UK_RAL-Tier1, UK_Lancaster, NL_NIKHEF, BR_CBPF, ES_PIC, CZ_FZU, CERN, IT_CNAF, UK_Glasgow, UK_Sheffield, UK_RAL-PPD, US_UChicago]

RSEs used

Name                      Inputs  Outputs
MONTECARLO                  6403        0
DUNE_FR_CCIN2P3_DISK           0     5989
QMUL                           0       11
DUNE_UK_MANCHESTER_CEPH        0        5
DUNE_US_FNAL_DISK_STAGE        0        2
DUNE_IT_INFN_CNAF              0        1
SURFSARA                       0        1
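
Nearly all outputs were uploaded to DUNE_FR_CCIN2P3_DISK. To inspect the uploaded files directly, one can query Rucio for the output dataset named in the output pattern above; this sketch assumes a configured DUNE Rucio client and valid credentials:

  # List the files in the output dataset and where their replicas ended up
  rucio list-files fardet-hd:fardet-hd-detsim_8497-fnal-w8497s1p1
  rucio list-file-replicas fardet-hd:fardet-hd-detsim_8497-fnal-w8497s1p1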

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

File reset events, by site

Site            Allocated  Outputting
UK_Manchester          72          25
CERN                   46          19
IT_CNAF                22           4
US_UChicago            21           4
ES_PIC                 21           4
UK_Lancaster           21           7
UK_QMUL                18           7
UK_RAL-Tier1           15           4
NL_SURFsara            12           2
CZ_FZU                 12           5
UK_RAL-PPD             11          18
UK_Glasgow              8           1
UK_Bristol              5           1
NL_NIKHEF               5           6
UK_Durham               2           3
BR_CBPF                 1           1

Jobscript

#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune

# Set up a recent LArSoft software release (dunesw)
setup dunesw \
   "${DUNE_VERSION:-v10_06_00d01}" \
   -q "${DUNE_QUALIFIER:-e26:prof}"
echo "printing env"


if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

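# Cap the thread pools of common numeric libraries (TensorFlow, OpenBLAS,
# Julia, MKL, numexpr, OpenMP) to the number of processors justIN allocated.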
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  

#env
#echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
if [ "${DID_PFN_RSE}" == "" ] ; then
  echo "Could not get file"
  exit 0
fi
pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '` 
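# justin-get-file prints one allocated file per call as "DID PFN RSE",
# space-separated; the second field (the PFN) is kept. For this Monte Carlo
# workflow the inputs come from the MONTECARLO pseudo-RSE and the PFN field is
# a per-file counter, used below to pick the event range. Illustrative value
# only (not taken from a real job log):
#   monte-carlo-008497-000042 000042 MONTECARLO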

if [ -z "${LINENUMBER}" ] ; then
  LINENUMBER=$pfn
fi

# echo "linenumber " $LINENUMBER
ex_code=0
pstep=10
tstep=0

now=$(date -u +"%Y%m%dT%H%M%SZ")
namespace=${JUSTIN_SCOPE:-"usertests"} 

echo "===============JUSTIN_JOBSUB_ID"
runid=$JUSTIN_WORKFLOW_ID
CLUSTER=`echo $JUSTIN_JOBSUB_ID | awk '{split($0,a,"."); print a[1]}'`
echo $CLUSTER

# define run number and set number of events 
firstsubrun=0
#nevts=100
#nevts=50
nevts=10
e_pfn=$(echo "$pfn" | sed 's/^0*//')
start_e_pfn=$((e_pfn -1))
firstevent=$((start_e_pfn * nevts + 1)) 
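# Worked example: with nevts=10, an allocated counter of pfn=000005 gives
# e_pfn=5, start_e_pfn=4 and firstevent=41, i.e. this job generates events
# 41-50 of run $runid, subrun $firstsubrun.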

echo "======checking===="
echo ${DID_PFN_RSE} 
echo ${pfn}
echo $runid 
echo $firstsubrun
echo $nevts
echo $firstevent

# Run Generator
echo "============generator=========================" 
prodname="prod_p_1-500MeV_dune10kt_1x2x6_${now}_gen_${pfn}" 
istep=1
tstep=$((tstep+1))
nstep=$(($pstep*$tstep)) 
tcode=$((nstep+istep))
tcode=$((tcode*1000)) 
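# The tcode offset encodes which stage failed: tcode = (pstep*tstep + istep)*1000.
# For this generator stage (pstep=10, tstep=1, istep=1) tcode is 11000, so a lar
# failure with exit code 1 would surface as ex_code=11001.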

lar -c prod_p_1-500MeV_dune10kt_1x2x6.fcl  -o ${prodname}.root -n $nevts -e ${runid}:${firstsubrun}:${firstevent}
 
exit_code=$?  
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`  
if [ $exit_code -ne 0 ]; then
  echo "ERROR: lar (generation) exit code: $ex_code"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo $f $size
  done
  exit $ex_code
fi
 
#  Stage 1 G4
echo "============G4 stage1=========================" 
g4_name="${prodname}_g4"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep)) 
istep=1 
tcode=$((nstep+istep))
tcode=$((tcode*1000))
 
lar -c standard_g4_dune10kt_1x2x6.fcl  ${prodname}.root -o ${g4_name}.root -n -1

exit_code=$?    
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`  
if [ $exit_code -ne 0 ]; then
  echo "ERROR: lar (geant4 step1) exit code: $ex_code"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo $f $size
  done
  exit $ex_code
fi
#  Stage 2 G4 
tstep=$((tstep+1))
nstep=$(($pstep*$tstep)) 
istep=1 
tcode=$((nstep+istep))
tcode=$((tcode*1000))
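# (No second G4 pass is actually run here; this leftover block only advances
# the step counter, which shifts the tcode offsets used by the stages below.)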
 
# Detsim
echo "============detsim========================="  
detsim_name="${g4_name}_detsim"
tstep=$((tstep+1))
nstep=$(($pstep*$tstep)) 
istep=1  
tcode=$((nstep+istep))
tcode=$((tcode*1000))

lar -c   detsim_dune10kt_1x2x6_notpcsigproc.fcl  ${g4_name}.root  -o ${detsim_name}.root -n -1

exit_code=$?    
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`  
if [ $exit_code -ne 0 ]; then
  echo "ERROR: lar (detsim) exit code: $ex_code"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo $f $size
  done
  exit $ex_code
fi
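# Metadata extraction: extractor_prod.py writes an art metadata JSON for the
# detsim output; the sed then swaps the "stepfcl" placeholder for the fcl used
# in this step and replaces "physics" with "fardet-hd".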
  
extractor_prod.py --infile ${detsim_name}.root --no_crc --appfamily art \
  --appname detsim --appversion v10_06_00d01 --requestid ritm2469557 \
  --strip_parents --input_json ${INPUT_DIR}/trg_input.json > ${detsim_name}.root.ext.json \
  && sed -i -e 's/stepfcl/detsim_dune10kt_1x2x6_notpcsigproc.fcl/g' \
            -e 's/physics/fardet-hd/g' ${detsim_name}.root.ext.json

exit_code=$?   
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*`  
if [ $exit_code -gt 1 ]; then
  echo "ERROR: metadata generation exit code: $ex_code"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo $f $size
  done
  exit $ex_code
fi
 

# Build the metacat metadata: convert the extractor JSON with pdjson2metadata
# (the generator stage has no real input parents, so a placeholder DID list is
# used), then lower-case any lines of the temporary JSON that contain "DUNE".
rm -fr all-input-dids.txt
echo "noparents:noparents.root" > all-input-dids.txt
python ${INPUT_DIR}/pdjson2metadata ${detsim_name}.root.ext.json all-input-dids.txt usertests > ${detsim_name}.root.temp.json
sed '/DUNE/ s/.*/\L&/' ${detsim_name}.root.temp.json > ${detsim_name}.root.json
exit_code=$?    
ex_code=$((exit_code+tcode))
files=`ls *_${now}_*` 
if [ $exit_code -ne 0 ]; then
  echo "ERROR: metadata writing exit code: $ex_code"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo $f $size
  done
  exit $ex_code
fi
 
# All stages above either succeeded or have already exited, so list the output
# files and record the processed PFN so justIN marks this input as done.
files=`ls *_${now}_*`
for f in $files
do
  size=`stat -c %s $f`
  echo "written output file: $f $size"
done

echo "$pfn" > justin-processed-pfns.txt

justIN time: 2025-11-03 18:43:11 UTC       justIN version: 01.05.01