
Workflow 9867, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 4194304000 (4000 MiB)
Max distance for inputs: 30.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: hd-protodune
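As a rough illustration of how a stage with these parameters is created, the sketch below follows the justin invocation shown in the jobscript comments further down. Only --max-distance, --mql and --jobscript appear there; the MQL expression, the jobscript name, and the remaining option names (--wall-seconds, --rss-mb, --scope, --env) are assumptions for illustration, not the command actually used for workflow 9867:

  ./justin simple-workflow \
    --max-distance 30 \
    --mql "rucio-dataset <input dataset>" \
    --jobscript pdhd_prod_beam.jobscript \
    --wall-seconds 80000 \
    --rss-mb 4000 \
    --scope hd-protodune \
    --env momentum=1 --env nskip=170 --env stage=Gen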
Events for this stage

Output patterns

1. Destination: Rucio dataset hd-protodune:gen_pdhd_-1GeV_ritm2592486-fnal-w9867s1p1
   Pattern: *IonScintPDExt.root
   Lifetime: 8640000
   For next stage: False
   RSE expression: (none)
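Files written by the jobscript that match the pattern are uploaded into the Rucio dataset named above. As a minimal sketch (the filename is an illustrative value, not a real output of this workflow), the Gen-stage output name built in the jobscript matches the wildcard like this:

  fn="pdhd_prod_beam__194559_0_1_20251107T120000Z_gen_g4_IonScintPDExt.root"   # illustrative
  case "$fn" in
    *IonScintPDExt.root) echo "matches the output pattern: uploaded to the dataset" ;;
    *)                   echo "does not match: not uploaded" ;;
  esac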

Environment variables

Name       Value
momentum   1
nskip      170
stage      Gen

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
1077         0        0            0          0           1077       0          0

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Not used  Aborted  Stalled  Jobscript error  Outputting failed  None processed
4761   0          0        0           0           1948      1296      591      439      0                1                  36
[Plot: "Files processed" - number of files processed per time bin (bin start times Nov-06 14:00 to Nov-10 18:00, axes "Bin start times" vs "Number per bin"), broken down by site: ES_PIC, UK_QMUL, US_FNAL-FermiGrid, US_FNAL-T1, US_Colorado, US_UChicago, US_Wisconsin, CA_SFU, US_PuertoRico, US_UCSD]
[Chart: "Replicas per RSE" - DUNE_US_FNAL_DISK_STAGE (49%), FNAL_DCACHE (49%), DUNE_FR_CCIN2P3_DISK (0%), RAL-PP (0%)]

RSEs used

Name                     Inputs  Outputs
DUNE_US_FNAL_DISK_STAGE  1542    1029
DUNE_FR_CCIN2P3_DISK     2       0
RAL-PP                   1       0
DUNE_US_BNL_SDCC         0       20

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

File reset events, by site

Site               Allocated  Outputting
US_UChicago        138        112
US_FNAL-FermiGrid  70         68
CA_SFU             26         2
US_UCSD            15         5
US_Wisconsin       12         13
US_Colorado        2          2
US_PuertoRico      2          0
NL_NIKHEF          0          1

Jobscript

#!/bin/sh
#
# Example jobscript that runs lar for all the files
# referred to by the MQL expression given on the justin command line.
#
# Submit with something like this:
#
# ./justin simple-workflow \
#  --max-distance 30 \
#  --mql "rucio-dataset protodune-sp:np04_raw_run_number_5769" \
#  --jobscript lar.jobscript
#
# Then monitor with dashboard or ./justin show-jobs --workflow-id ID
# where ID is the value printed by the first command
#


# the xroot lib for streaming non-root files is in testproducts, 
# so add it to the start of the path
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}

# From jobsub
export CLUSTER=${CLUSTER:-1}
export PROCESS=${PROCESS:-1}

setup dunesw ${DUNESW_VERSION:-v10_10_04d00} -q e26:prof
setup_exit=$?
if [ "$setup_exit" != 0 ]; then
  echo "Error setting up dunesw ${DUNESW_VERSION}"
  exit
fi

# Get an unprocessed file from this stage
did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
if [ "${did_pfn_rse}" == "" ] ; then
  echo "Could not get file"
  exit 0
fi
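# justin-get-file returns one space-separated line of the form "<DID> <PFN> <RSE>",
# e.g. (illustrative values only, not taken from this workflow):
#   hd-protodune:some_input_file.root root://xrootd.example.org//path/some_input_file.root DUNE_US_FNAL_DISK_STAGE
# An empty reply generally means no unallocated input files remain for this stage.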

# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt

did=`echo $did_pfn_rse | cut -f1 -d' '`
pfn=`echo $did_pfn_rse | cut -f2 -d' '`
rse=`echo $did_pfn_rse | cut -f3 -d' '`
name_only=$(echo $did | cut -f2 -d':')
echo "name" $name_only


echo "Found input file URL $pfn at $rse"

#Get the stage and momentum 
export stage=${stage:-"Gen"}
export momentum=${momentum:-1}

if [ "$momentum" == 1 ]; then
  export gen_fcl="pdhd_1GeV_h4input_cosmics.fcl"
elif [ "$momentum" == 5 ]; then
  export gen_fcl="pdhd_5GeV_h4input_cosmics.fcl"
else
  echo "Error. Provided invalid momentum: ${momentum}. Can only use 1 or 5"
  exit 1
fi
echo "Using gen fcl: ${gen_fcl}"

now=$(date -u +"%Y%m%dT%H%M%SZ")
jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
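# e.g. (illustrative) JUSTIN_JOBSUB_ID "12345678.0@justin-prod.example" gives jobid "12345678_0"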
stageid=${JUSTIN_STAGE_ID:-1}
output_preamble="pdhd_prod_beam_"

nevents=${nevents:--1}
nskip=${nskip:-0}
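# nevents and nskip are taken from the workflow's environment variables when set;
# this stage sets nskip=170 and leaves nevents at its default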
echo "RUNNING STAGE ${stage}"
#-------------- GEN STAGE --------------------
if [ "$stage" == "Gen" ]; then
  if [[ $nevents -gt 10 || $nevents -lt 1 ]]; then
    echo "Setting nevents to 10 in gen"
    nevents=10
  fi
  #Generation
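  # Preloading libXrdPosixPreload.so intercepts POSIX file I/O so lar can stream the
  # non-ROOT H4 beam input directly from the xrootd URL in $pfn; the later steps read
  # local ROOT files and do not need it.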
  LD_PRELOAD=$XROOTD_LIB/libXrdPosixPreload.so lar \
      -n  ${nevents} \
      --nskip ${nskip} \
      -c  ${gen_fcl} \
      -o temp_gen.root \
      $pfn

  larReturnCode=$?
  echo "gen lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Generation Done

  #G4
  lar \
      -c  standard_g4_protodunehd_stage1.fcl \
      -o temp_largeant.root \
      temp_gen.root

  larReturnCode=$?
  echo "g4 lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #G4 Done
  rm temp_gen.root #remove previous file
  #IonScintPDExt
  lar \
      -c  standard_g4_protodunehd_IonScint_PDExt.fcl \
      -o ${output_preamble}_${jobid}_${stageid}_${now}_gen_g4_IonScintPDExt.root \
      temp_largeant.root

  larReturnCode=$?
  echo "ionscint pdext lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #IonScintPDExt Done
  rm temp_largeant.root #remove the previous file
#---------------------------------------------

#-------------- PD STAGE --------------------
elif [ "$stage" = "PD" ]; then 
  #TPC1
  lar \
      -n ${nevents} \
      --nskip ${nskip} \
      -c  standard_g4_protodunehd_PDInt_TPC1.fcl \
      -o temp_PD_TPC1.root \
      $pfn

  larReturnCode=$?
  echo "PD TPC1 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC1 Done

  #TPC2
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC2.fcl \
      -o temp_PD_TPC2.root \
      temp_PD_TPC1.root

  larReturnCode=$?
  echo "PD TPC2 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC2 Done
  rm temp_PD_TPC1.root ##Clean up the previous file 
  #TPC5
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC5.fcl \
      -o temp_PD_TPC5.root \
      temp_PD_TPC2.root

  larReturnCode=$?
  echo "PD TPC5 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC5 Done
  rm temp_PD_TPC2.root ##Clean up the previous file 
  #TPC6
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC6.fcl \
      -o ${name_only}_${jobid}_${stageid}_${now}_PDInt.root \
      temp_PD_TPC5.root

  larReturnCode=$?
  echo "PD TPC6 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC6 Done
  rm temp_PD_TPC5.root ##Clean up the previous file 
#--------------------------------------------

#-------------- DetsimReco STAGE --------------------
elif [ "$stage" == "DetsimReco" ]; then
  #Detsim
  lar \
      -n ${nevents} \
      --nskip ${nskip} \
      -c  standard_detsim_protodunehd.fcl \
      -o temp_detsim.root \
      $pfn

  larReturnCode=$?
  echo "Detsim returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Detsim Done


  #Reco
  lar \
      -c  standard_reco_protodunehd_MC.fcl \
      -o ${output_preamble}_gen_g4_IonScintPDExt_PDInt_${jobid}_${stageid}_${now}_detsim_reco1.root \
      temp_detsim.root

  # Capture lar's exit status immediately, before any other command overwrites $?
  larReturnCode=$?
  echo "Reco returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Reco Done

  # Do NOT record parent info for this stage: replace the recorded input DIDs
  # with a placeholder so the metadata carries no parentage
  rm -fr all-input-dids.txt
  echo "noparents:noparents.root" > all-input-dids.txt

  rm temp_detsim.root #remove parent file
  OUTFILE=`ls *reco1.root`
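  # Metadata chain for the reco1 output (the tools come from the input tarball):
  #  1. extractor_prod.py builds the extended metadata JSON for ${OUTFILE}, and sed then
  #     rewrites the "physics" data-tier string to "fardet-hd"
  #  2. pdjson2metadata merges in the parent DIDs from all-input-dids.txt (here the
  #     "noparents" placeholder) and the "usertests" argument, presumably the namespace
  #  3. the final sed lowercases every line containing "DUNE" in the metadata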
  extractor_prod.py --infile ${OUTFILE} --no_crc --appfamily art --appname reco \
      --appversion 10_10_04d00 --requestid 2592486 \
      --input_json $INPUT_TAR_DIR_LOCAL/pdhd_input_${momentum}GeV.json > ${OUTFILE}.ext.json \
    && sed -i -e 's/physics/fardet-hd/g' ${OUTFILE}.ext.json

  python ${INPUT_TAR_DIR_LOCAL}/pdjson2metadata ${OUTFILE}.ext.json all-input-dids.txt usertests > ${OUTFILE}.temp.json
  sed '/DUNE/ s/.*/\L&/' ${OUTFILE}.temp.json > ${OUTFILE}.json
else
  echo "ERROR. Provided invalid stage ${stage}. Can only provide one of: (Gen, PD, DetsimReco)"
  exit 3
fi
 
# Record that we processed the input file successfully
echo "$pfn" > justin-processed-pfns.txt

# For debugging
for i in *.json
do
  echo "==== Start $i ===="
  cat $i
  echo "==== End $i ===="
done

ls -ltR

exit 0