
Workflow 9627, Stage 1

Priority: 50
Processors: 1
Wall seconds: 80000
Image: /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest
RSS bytes: 4194304000 (4000 MiB)
Max distance for inputs: 30.0
Enabled input RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled output RSEs: CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC
Enabled sites: BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin
Scope: fardet-hd
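
For reference, a stage with these settings would normally be created with the justin command-line client rather than through this page. The sketch below is only a hedged illustration: the option names (--jobscript, --wall-seconds, --rss-mib, --max-distance, --scope, --env, --output-pattern), the pattern:dataset syntax, and the jobscript filename are assumptions inferred from the parameters shown above, not confirmed by this page; the input-dataset options are omitted.

# Hypothetical submission sketch; option names and syntax are assumptions, not taken from this page
justin simple-workflow \
  --jobscript fardet-hd-tpg.jobscript \
  --wall-seconds 80000 \
  --rss-mib 4000 \
  --max-distance 30.0 \
  --scope fardet-hd \
  --env INPUT_DIR=/cvmfs/fifeuser3.opensciencegrid.org/sw/dune/bfc01f12eb33557b58642944529e6c76be004353 \
  --output-pattern '*reco.root:fardet-hd-reco-ritm2612698_9627-fnal'
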
Events for this stage

Output patterns

#  Destination                                                      Pattern     Lifetime (s)  For next stage  RSE expression
1  Rucio fardet-hd:fardet-hd-reco-ritm2612698_9627-fnal-w9627s1p1   *reco.root  51840000      False

Environment variables

Name       Value
INPUT_DIR  /cvmfs/fifeuser3.opensciencegrid.org/sw/dune/bfc01f12eb33557b58642944529e6c76be004353

File states

Total files  Finding  Unallocated  Allocated  Outputting  Processed  Not found  Failed
500          0        0            0          0           500        0          0

Job states

Total  Submitted  Started  Processing  Outputting  Finished  Notused  Aborted  Stalled  Jobscript error  Outputting failed  None processed
691    0          1        0           0           680       0        0        9        1                0                  0
[Histogram: files processed per bin vs. bin start time, Nov-05 13:00 to 18:00 UTC, broken down by site (NL_SURFsara, UK_QMUL, UK_RAL-Tier1, US_UChicago, ES_PIC, US_FNAL-FermiGrid, UK_Lancaster, UK_RAL-PPD).]
[Pie chart: replicas per RSE -- DUNE_US_FNAL_DISK_STAGE (47%), FNAL_DCACHE (47%), PRAGUE (4%), QMUL (0%), DUNE_US_BNL_SDCC (0%).]

RSEs used

Name                     Inputs  Outputs
DUNE_US_FNAL_DISK_STAGE  492     0
PRAGUE                   48      0
DUNE_US_BNL_SDCC         1       0
QMUL                     1       0
DUNE_FR_CCIN2P3_DISK     0       500

Stats of processed input files are available as CSV or JSON, and stats of uploaded output files as CSV or JSON (up to 10000 files included).

File reset events, by site

Site          Allocated  Outputting
US_Wisconsin  15         0
CA_SFU        12         0
US_UChicago   10         0

Jobscript

#!/bin/bash
#
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune
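
# The metacat client reads these variables to locate the DUNE production MetaCat server
# and its authentication endpoint.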


#Setup recent lar software suite

#setup dunesw v10_11_00d01 -q e26:prof
setup dunesw v10_12_01d01 -q e26:prof
#echo "printing env"
#env


if [ -z "${JUSTIN_PROCESSORS}" ]; then
  JUSTIN_PROCESSORS=1
fi

echo "Justin processors: ${JUSTIN_PROCESSORS}"

export TF_NUM_THREADS=${JUSTIN_PROCESSORS}   
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS} 
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS} 
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS} 
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS} 
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}  

#
echo "Will use justin-get-file" 
 

for nf in {1..5}
do
  DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
  # Check that a file was returned
  if [ "${DID_PFN_RSE}" == "" ] ; then
    echo "Could not get file"
    # exit 0
    continue
  fi

  FILE=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
  DID=`echo ${DID_PFN_RSE} | cut -f1 -d' '`
  echo ${DID} >> did.list
  echo ${FILE} >> file.list
done
 
while read inFile; do
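
# Rebuild each input's DID from its PFN: take the basename of the path and prefix
# the fardet-hd namespace.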

temp=`echo "$inFile" | awk -F'/' '{print $NF}'`
DID="fardet-hd:"$temp
FILE=$inFile 
 
 
metacat file show -mj ${DID} > old_md.json
mcExit=$?
if [ $mcExit -eq 0 ] ; then
  echo "old metadata:"
  cat old_md.json
else
  echo "Could not retrieve old metadata"
  exit 1 
fi

now=$(date -u +"%Y%m%dT%H%M%SZ")  
namespace=${JUSTIN_SCOPE:-"usertests"}
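
# JUSTIN_JOBSUB_ID is assumed to look like <cluster>.<process>@<schedd>; the awk
# below just keeps everything before the first dot as the cluster number for logging.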

echo "===============JUSTIN_JOBSUB_ID"
runid=$JUSTIN_WORKFLOW_ID
CLUSTER=`echo $JUSTIN_JOBSUB_ID | awk '{split($0,a,"."); print a[1]}'`
echo $CLUSTER
 
# reco2
echo "============TPG========================" 


infile=`echo $DID | awk -F ':' '{print $2}'`
reco="${infile%.root}" 
reco_name=${reco}_${now}_reco

echo "check  output filename "  ${reco_name}.root

lar -c  tpg_dune10kt_1x2x2.fcl $FILE -o ${reco_name}.root -n -1
larExit=$? 


#echo "Reco step lar exit code $larExit"


if [ $larExit -eq 0 ] ; then 
  echo "Moving on to metadata extractor "
else
  exit $larExit 
fi
 
#OUTFILE=`ls *reco.root`
OUTFILE=${reco_name}.root

#mv triggerAnaTree_hist.root triggerAnaTree_${reco_name}_hist.root


echo "============OUTPUT FILE: " $OUTFILE  
 
extractor_prod.py --infile ${OUTFILE} --no_crc --appfamily art --appname reco --appversion v10_12_01d01 --requestid ritm2612698 --input_json ${INPUT_DIR}/trg_input.json > ${OUTFILE}.ext.json && sed -i -e 's/physics/fardet-hd/g' ${OUTFILE}.ext.json

extractorExit=$?
if [ $extractorExit -eq 0 ] ; then
  # Success !
  echo "Extracted metadata"
else
  # Error -- exit immediately 
  jobscriptExit=1
  echo "Failed to extract md"
  exit $extractorExit
fi

sed -i 's/Reco2/reco2/g'  ${OUTFILE}.ext.json

echo "checking file======" $DID
echo $DID > all-input-dids.txt
python ${INPUT_DIR}/pdjson2metadata ${OUTFILE}.ext.json all-input-dids.txt usertests > ${OUTFILE}.temp.json
converterExit=$?


sed '/DUNE/ s/.*/\L&/'  ${OUTFILE}.temp.json >  ${OUTFILE}.json
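# (The sed above lowercases any line of the JSON containing "DUNE", using GNU sed's \L&.)

# Pull the parent's run and subrun numbers out of old_md.json with jq and substitute
# them for the -99999/-88888 placeholders in the new metadata.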

idrun=`jq -r '.. | objects | select(has("core.runs")) | .["core.runs"][0]' old_md.json`
idsubrun=`jq -r '.. | objects | select(has("core.runs_subruns")) | .["core.runs_subruns"][0]' old_md.json`

sed -i "s/-99999/$idrun/g" ${OUTFILE}.json
sed -i "s/-88888/$idsubrun/g" ${OUTFILE}.json

# cp ${OUTFILE}.json triggerAnaTree_${reco_name}_hist.root.json

if [ $converterExit -eq 0 ] ; then
  # Success !
  echo "MD conversion to MetaCat OK"
#  echo "$FILE" > justin-processed-pfns.txt
   echo "$FILE" >> temp_pfns.txt
else 
  jobscriptExit=1
  echo "Failed to convert md to MetacaCat"
  exit $converterExit
fi

 
ls

done < file.list
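
# justIN reads justin-processed-pfns.txt after the jobscript exits and marks the PFNs
# listed there as processed; they were accumulated in temp_pfns.txt so that nothing is
# marked processed unless the whole loop completes.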
cp temp_pfns.txt justin-processed-pfns.txt
exit 0
justIN time: 2025-11-05 18:35:42 UTC       justIN version: 01.05.01