
Workflow 9490, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 4194304000 (4000 MiB)
Max distance for inputs: 30.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: usertests
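
For local debugging it can help to enter the same worker-node image the jobs run in; a minimal sketch, assuming Apptainer is available and /cvmfs is mounted on the host (this is an illustration, not part of the workflow):

# Enter the unpacked fnal-wn-sl7 image from cvmfs, binding /cvmfs so that
# setup_dune.sh (sourced by the jobscript below) is visible inside
apptainer shell -B /cvmfs \
    /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest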
Events for this stage

Output patterns

1. Destination: Rucio usertests:gen_pdhd_1GeV_ritm2592486_001-fnal-w9490s1p1
   Pattern: *IonScintPDExt.root
   Lifetime: 1209600 seconds (14 days)
   For next stage: False
   RSE expression: (none)
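
The pattern is a filename wildcard matched against files the job leaves in its workspace. A quick sanity check that the Gen stage's final output name (built as in the jobscript below; the exact values here are illustrative) is picked up by *IonScintPDExt.root:

# Illustrative filename following the jobscript's naming scheme
f="pdhd_prod_beam__12345_1_20251103T120000Z_gen_g4_IonScintPDExt.root"
case "$f" in
  *IonScintPDExt.root) echo "matched by the output pattern" ;;
  *)                   echo "not matched" ;;
esac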

Environment variables

Name      Value
momentum  1
nskip     0
stage     Gen
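
The jobscript reads these with POSIX default expansions, so the workflow only needs to set values that differ from the script defaults; the same idiom in isolation:

# Defaulting idiom used by the jobscript below
stage=${stage:-"Gen"}     # which part of the chain to run: Gen, PD or DetsimReco
momentum=${momentum:-1}   # beam momentum in GeV/c: 1 or 5
nskip=${nskip:-0}         # events to skip in the input file
echo "stage=$stage momentum=$momentum nskip=$nskip"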

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
        399        0          399          0           0          0          0       0

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Notused  Aborted  Stalled  Jobscript error  Outputting failed  None processed
  576          0        0           0           0         0      555        0        0               21                  0               0
Replicas per RSE (pie chart): DUNE_US_FNAL_DISK_STAGE 399 (49%), FNAL_DCACHE 399 (49%), DUNE_UK_GLASGOW 1 (0%)

RSEs used

Name                     Inputs  Outputs
DUNE_US_FNAL_DISK_STAGE      21        0

Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10000 files included)

Jobscript

#!/bin/sh
#
# Example jobscript that runs lar for all the files
# referred to by the MQL expression given on the justin command line.
#
# Submit with something like this:
#
# ./justin simple-workflow \
#  --max-distance 30 \
#  --mql "rucio-dataset protodune-sp:np04_raw_run_number_5769" \
#  --jobscript lar.jobscript
#
# Then monitor with dashboard or ./justin show-jobs --workflow-id ID
# where ID is the value printed by the first command
#


# Set up the DUNE software environment from cvmfs; the xrootd POSIX preload
# library used below to stream the non-ROOT beam input comes with this stack
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

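# Cap the thread counts of the common numerical libraries (TensorFlow,
# OpenBLAS, Julia, MKL, numexpr, OpenMP) to the processor count allocated
# by justIN, so the job does not oversubscribe its slot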
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  

# From jobsub
export CLUSTER=${CLUSTER:-1}
export PROCESS=${PROCESS:-1}

setup dunesw ${DUNESW_VERSION:-v10_10_04d00} -q e26:prof
setup_exit=$?
if [ "$setup_exit" != 0 ]; then
  echo "Error setting up dunesw ${DUNESW_VERSION:-v10_10_04d00}"
  exit $setup_exit
fi

# Get an unprocessed file from this stage
did_pfn_rse=`$JUSTIN_PATH/justin-get-file`
if [ "${did_pfn_rse}" == "" ] ; then
  echo "Could not get file"
  exit 0
fi

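# justin-get-file prints a single line of the form "<did> <pfn> <rse>":
# the Rucio DID of the allocated input, its access URL, and the RSE the
# replica was taken from; the three fields are split out below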
# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt

did=`echo $did_pfn_rse | cut -f1 -d' '`
pfn=`echo $did_pfn_rse | cut -f2 -d' '`
rse=`echo $did_pfn_rse | cut -f3 -d' '`
name_only=$(echo $did | cut -f2 -d':')
echo "name" $name_only


echo "Found input file URL $pfn at $rse"

#Get the stage and momentum 
export stage=${stage:-"Gen"}
export momentum=${momentum:-1}

if [ "$momentum" == 1 ]; then
  export gen_fcl="pdhd_1GeV_h4input_cosmics.fcl"
elif [ "$momentum" == 5 ]; then
  export gen_fcl="pdhd_5GeV_h4input_cosmics.fcl"
else
  echo "Error. Provided invalid momentum: ${momentum}. Can only use 1 or 5"
  exit 1
fi
echo "Using gen fcl: ${gen_fcl}"

now=$(date -u +"%Y%m%dT%H%M%SZ")
jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
stageid=${JUSTIN_STAGE_ID:-1}
output_preamble="pdhd_prod_beam_"

nevents=${nevents:--1}
nskip=${nskip:-0}
echo "RUNNING STAGE ${stage}"
#-------------- GEN STAGE --------------------
if [ "$stage" == "Gen" ]; then
  if [[ $nevents -gt 10 || $nevents -lt 1 ]]; then
    echo "Setting nevents to 10 in gen"
    nevents=10
  fi
  #Generation
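  # Preloading the xrootd POSIX library lets the generator open the streamed
  # input URL ($pfn) through ordinary POSIX file calls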
  LD_PRELOAD=$XROOTD_LIB/libXrdPosixPreload.so lar \
      -n  ${nevents} \
      --nskip ${nskip} \
      -c  ${gen_fcl} \
      -o temp_gen.root \
      $pfn

  larReturnCode=$?
  echo "gen lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Generation Done

  #G4
  lar \
      -c  standard_g4_protodunehd_stage1.fcl \
      -o temp_largeant.root \
      temp_gen.root

  larReturnCode=$?
  echo "g4 lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #G4 Done
  rm temp_gen.root #remove previous file
  #IonScintPDExt
  lar \
      -c  standard_g4_protodunehd_IonScint_PDExt.fcl \
      -o ${output_preamble}_${jobid}_${stageid}_${now}_gen_g4_IonScintPDExt.root \
      temp_largeant.root

  larReturnCode=$?
  echo "ionscint pdext lar returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #IonScintPDExt Done
  rm temp_largeant.root #remove the previous file
#---------------------------------------------

#-------------- PD STAGE --------------------
elif [ "$stage" = "PD" ]; then 
  #TPC1
  lar \
      -n ${nevents} \
      --nskip ${nskip} \
      -c  standard_g4_protodunehd_PDInt_TPC1.fcl \
      -o temp_PD_TPC1.root \
      $pfn

  larReturnCode=$?
  echo "PD TPC1 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC1 Done

  #TPC2
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC2.fcl \
      -o temp_PD_TPC2.root \
      temp_PD_TPC1.root

  larReturnCode=$?
  echo "PD TPC2 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC2 Done
  rm temp_PD_TPC1.root ##Clean up the previous file 
  #TPC5
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC5.fcl \
      -o temp_PD_TPC5.root \
      temp_PD_TPC2.root

  larReturnCode=$?
  echo "PD TPC5 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC5 Done
  rm temp_PD_TPC2.root ##Clean up the previous file 
  #TPC6
  lar \
      -c  standard_g4_protodunehd_PDInt_TPC6.fcl \
      -o ${name_only}_${jobid}_${stageid}_${now}_PDInt.root \
      temp_PD_TPC5.root

  larReturnCode=$?
  echo "PD TPC6 returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #TPC6 Done
  rm temp_PD_TPC5.root ##Clean up the previous file 
#--------------------------------------------

#-------------- DetsimReco STAGE --------------------
elif [ "$stage" == "DetsimReco" ]; then
  #Detsim
  lar \
      -n ${nevents} \
      --nskip ${nskip} \
      -c  standard_detsim_protodunehd.fcl \
      -o temp_detsim.root \
      $pfn

  larReturnCode=$?
  echo "Detsim returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Detsim Done


  #Reco
  lar \
      -c  standard_reco_protodunehd_MC.fcl \
      -o ${output_preamble}_gen_g4_IonScintPDExt_PDInt_${jobid}_${stageid}_${now}_detsim_reco1.root \
      temp_detsim.root

  larReturnCode=$?
  echo "Reco returns $larReturnCode"
  if [ "$larReturnCode" != 0 ] ; then
    exit $larReturnCode
  fi
  #Reco Done
  rm temp_detsim.root #remove parent file
else
  echo "ERROR. Provided invalid stage ${stage}. Can only provide one of: (Gen, PD, DetsimReco)"
  exit 3
fi
# Pick up the final output file of whichever stage ran above; all three
# stages embed the ${now} timestamp in the name of the file they keep
OUTFILE=`ls *_${now}_*.root`
####EDIT METADATA FOR THE FOLLOWING FILE
#${name_only}_${jobid}_${stageid}_${now}_detsim_reco1.root
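# Extract metadata for the output file: extractor_prod.py reads the art file
# and the per-momentum input JSON shipped in the input tarball, then the sed
# call replaces every occurrence of "physics" with "fardet-hd" in the result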
extractor_prod.py --infile ${OUTFILE} --no_crc \
    --appfamily art --appname reco --appversion 10_10_04d00 \
    --requestid 2592486 \
    --input_json $INPUT_TAR_DIR_LOCAL/pdhd_input_${momentum}GeV.json \
    > ${OUTFILE}.ext.json \
  && sed -i -e 's/physics/fardet-hd/g' ${OUTFILE}.ext.json

extractorExit=$?
if [ $extractorExit -eq 0 ] ; then
  # Success !
  echo "Extracted metadata"
else
  # Error -- exit immediately 
  jobscriptExit=1
  echo "Failed to extract md"
  exit $extractorExit
fi

# Convert the extracted metadata into the final per-file JSON, then lowercase
# every line that contains the string "DUNE"
python $INPUT_TAR_DIR_LOCAL/pdjson2metadata ${OUTFILE}.ext.json all-input-dids.txt usertests > ${OUTFILE}.temp.json
p2m_exit=$?
sed '/DUNE/ s/.*/\L&/' ${OUTFILE}.temp.json > ${OUTFILE}.json
sed_exit=$?
files=`ls *_${now}_*`
if [ $p2m_exit -ne 0 ] || [ $sed_exit -ne 0 ]; then
  echo "ERROR: metadata writing failed (pdjson2metadata=$p2m_exit, sed=$sed_exit)"
  echo "output file sizes:"
  for f in $files
  do
    size=`stat -c %s $f`
    echo "$f $size"
  done
fi
 
# Record that we processed the input file ok (did we???)
echo "$pfn" > justin-processed-pfns.txt

# For debugging
for i in *.json
do
  echo "==== Start $i ===="
  cat $i
  echo "==== End $i ===="
done

ls -ltR

exit 0
justIN time: 2025-11-03 18:43:03 UTC       justIN version: 01.05.01