
Workflow 2461, Stage 1

Priority: 50
Processors: 1
Wall seconds: 18000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 4194304000 (4000 MiB)
Max distance for inputs: 100.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Glasgow, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: usertests
Events for this stage

Output patterns

   Destination                                                                     Pattern          Lifetime  For next stage  RSE expression
1  https://fndcadoor.fnal.gov:2880/dune/scratch/users/pgranger/cafs/fnal/02461/1  caf_fd_hd*.root  -         -               -

Environment variables

Name            Value
DUNE_QUALIFIER  e26:prof
DUNE_VERSION    v10_10_00d00
FCL_FILE        /cvmfs/fifeuser4.opensciencegrid.org/sw/dune/823ff6a419a8685db70967089497d4f07fb2d380/reco_caf.fcl
HAS_ART_OUTPUT  false

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
20           0        0            0          0           18         0          2

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Notused  Aborted  Stalled  Jobscript error  Outputting failed  None processed
61     0          0        0           0           55        0        0        5        1                0                  0
[Plot: Files processed per bin (number per bin vs. bin start time, Sep-10 09:00 to Sep-15 09:00), broken down by site: US_UChicago, ES_PIC, NL_SURFsara]
[Chart: Replicas per RSE - FNAL_DCACHE (37%), PRAGUE (37%), SURFSARA (9%), QMUL (7%), NIKHEF (5%), DUNE_ES_PIC (3%)]

RSEs used

Name         Inputs  Outputs
PRAGUE       23      0
SURFSARA     13      0
NIKHEF       3       0
DUNE_ES_PIC  1       0
None         0       3

Stats of processed input files are available as CSV or JSON, and stats of uploaded output files as CSV or JSON (up to 10000 files included).

File reset events, by site

Site           Allocated  Outputting
US_UChicago    10         0
ES_PIC         5          0
NL_SURFsara    3          0
UK_Manchester  2          0

Jobscript

#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune


##TODO -- write usage

#CAF_FCL="cafmaker_dunevd10kt_1x8x6_3view_30deg_runreco-nuenergy_geov3.fcl"
# RECO_FCL="reco2_atmos_dune10kt_1x2x6_geov5.fcl"
# CAF_FCL="cafmaker_atmos_dune10kt_1x2x6_runreco-nuenergy-nuangular_geov5.fcl"
# echo "Using CAF fcl: ${CAF_FCL}"

# export INPUT_TAR_DIR_LOCAL=${CODE_TAR_DIR_LOCAL}

# if [ ! -z "$FCL_TAR_DIR_LOCAL" ]; then
# 	echo "Using custom fcls from $FCL_TAR_DIR_LOCAL"
# 	source ${CODE_TAR_DIR_LOCAL}/*/localProducts*/setup-grid
# 	mrbslp
# fi

ls -lht $FCL_TAR_DIR_LOCAL

#Setup recent lar software suite
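# (DUNE_VERSION and DUNE_QUALIFIER come from the stage's environment variables
#  listed above; the ${VAR:-default} forms below are only fallbacks if unset)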
setup dunesw \
   "${DUNE_VERSION:-v09_91_01d00}" \
   -q "${DUNE_QUALIFIER:-e26:prof}"
#echo "printing env"
#env
 

if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

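# Cap the thread pools of common numerical libraries (TensorFlow, OpenBLAS,
# Julia, MKL, numexpr, OpenMP) at the processor count allocated by justIN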
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  

#
echo "Will use justin-get-file"

now=$(date -u +"%Y%m%dT%H%M%SZ")


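# Try to fetch up to 10 input files from justIN. Each justin-get-file call
# prints one line of the form "<DID> <PFN> <RSE>"; empty output is treated
# as no file being available.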
for nf in {1..10}
do
  DID_PFN_RSE=$("${JUSTIN_PATH}/justin-get-file")
  # Check that a file was returned
  if [ "${DID_PFN_RSE}" == "" ] ; then
    echo "Could not get file"
#   exit 0
    continue
  fi

  FILE=$(echo ${DID_PFN_RSE} | cut -f2 -d' ')
  DID=$(echo ${DID_PFN_RSE} | cut -f1 -d' ')
  echo ${DID} >> did.list
  echo ${FILE} >> file.list
done

#Exit if file.list does not exist
if [ ! -f file.list ] ; then
  echo "Nothing to process - exit jobscript"
  exit 0
fi


now=$(date -u +"%Y%m%dT%H%M%SZ")
 
####Run cafmaker
 
echo "now run lar on these files"

cat file.list
cat did.list
 
# echo $FILE 
# lar -c  "${CAF_FCL}" "$FILE" 
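# Run lar once over the whole input list: -S (--source-list) reads the input
# paths from file.list, and the log is captured for the tail below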
lar -c "${FCL_FILE}" -S file.list > caf_$now.log 2>&1

larExit=$?
echo "lar exit code $larExit"

echo '=== Start last 100 lines of lar log file ==='
tail -100 caf_$now.log
echo '=== End last 100 lines of lar log file ==='

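# justIN treats only the input files listed in justin-processed-pfns.txt as
# successfully processed; files left out of that list can be allocated again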
if [ $larExit -eq 0 ] ; then
  # Success: record every input in file.list as processed
  cat file.list > justin-processed-pfns.txt
  jobscriptExit=0
else
  # lar failed: leave the processed-PFNs list empty so the inputs can be retried
  > justin-processed-pfns.txt
  jobscriptExit=1
fi

  echo "processed files"
cat justin-processed-pfns.txt 

cp justin-processed-pfns.txt caf_$now.pfns
cat did.list >> caf_$now.did
cat file.list >> caf_$now.file
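# Rename the CAF output so it matches this stage's output pattern
# (caf_fd_hd*.root) and carries the workflow ID and a timestamp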
cp caf.root caf_fd_hd_atmo_${JUSTIN_WORKFLOW_ID}_$now.root
#  cp caf.root.json caf_$now.root.json

ls -lRS

# Create compressed tar file with all log files 
tar zcf `echo "$JUSTIN_JOBSUB_ID.logs.tgz" | sed 's/@/_/g'` *.log
exit $jobscriptExit
 

justIN time: 2025-09-18 20:47:12 UTC       justIN version: 01.05.00