# NOTE: the two lines below are web-page text captured with this jobscript, not
# shell code; they are commented out so the file parses. (They also sit above
# the shebang, which therefore only documents the intended interpreter.)
# Stats of processed input files as CSV or JSON, and of uploaded output files as CSV or JSON (up to 10,000 files included)
# Jobscript
#!/bin/bash
#:<<'EOF'
#
#To use this jobscript to process 5 files from the dataset fardet-hd__fd_mc_2023a_reco2__full-reconstructed__v09_81_00d02__standard_reco2_dune10kt_nu_1x2x6__prodgenie_nu_dune10kt_1x2x6__out1__validation
#data and put the output in the $USER namespace (MetaCat) and saves the output in /scratch
#Use this command to create the workflow:
#
#justin simple-workflow \
#--mql \
#"files from fardet-hd:fardet-hd__fd_mc_2023a__hit-reconstructed__v09_78_01d01__standard_reco1_dune10kt_1x2x6__prodgenie_nu_dune10kt_1x2x6__out1__v1_official limit 5 ordered"\
#--jobscript submit_ana.jobscript --rss-mb 4000 \
#--scope higuera --output-pattern '*_myreco2_*.root:$FNALURL/$USERF'
#
#The following optional environment variables can be set when creating the
#workflow/stage: FCL_FILE, NUM_EVENTS, DUNE_VERSION, DUNE_QUALIFIER
#
#EOF
# Configuration: fcl file and DUNE software version/qualifier to be used.
# Each variable honours an override from the workflow environment and falls
# back to the tarball/default values otherwise.
FCL_FILE=${FCL_FILE:-$INPUT_TAR_DIR_LOCAL/Lar_reco/work/detsim_dune10kt_1x2x6_notpcsigproc_tpcrawdecoder.fcl}
DUNE_VERSION=${DUNE_VERSION:-v09_92_00d00}
DUNE_QUALIFIER=${DUNE_QUALIFIER:-e26:prof}
# Only pass "-n <N>" to lar when a specific event count was requested;
# otherwise lar processes the whole input file.
if [ -n "$NUM_EVENTS" ] ; then
events_option="-n $NUM_EVENTS"
fi
# First get an unprocessed file from this stage.
# justin-get-file prints "did pfn rse" (space separated) on stdout, or
# nothing when the stage has no more unprocessed files.
# (Modernized: $(...) instead of backticks, expansions quoted — SC2006/SC2086.)
did_pfn_rse=$("$JUSTIN_PATH/justin-get-file")
if [ -z "$did_pfn_rse" ] ; then
echo "Nothing to process - exit jobscript"
exit 0
fi
# Keep a record of all input DIDs, for pdjson2meta file -> DID mapping
echo "$did_pfn_rse" | cut -f1 -d' ' >>all-input-dids.txt
# pfn (second field) is also needed when creating justin-processed-pfns.txt
pfn=$(echo "$did_pfn_rse" | cut -f2 -d' ')
echo "Input PFN = $pfn"
# Setup DUNE environment
# Sourcing the CVMFS setup script defines the ups `setup` function used below.
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
#export PRODUCTS="${INPUT_TAR_DIR_LOCAL}/Lar_reco/localProducts_larsoft_v09_91_02_e26_prof/:$PRODUCTS"
# Then we can set up our local products
#setup duneana "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
# NOTE(review): the glob assumes exactly one localProducts* directory exists in
# the unpacked tarball — confirm; multiple matches would pass extra arguments
# to `source`, and zero matches would source a literal non-existent path.
source ${INPUT_TAR_DIR_LOCAL}/Lar_reco/localProducts*/setup
setup dunesw "$DUNE_VERSION" -q "$DUNE_QUALIFIER"
# mrbslp sets up all local MRB products; run again after cd into $MRB_SOURCE
# (presumably so the source-area products are picked up — verify against mrb docs).
mrbslp
cd $MRB_SOURCE
mrbslp
# Construct the output file name from the input $pfn:
# take the last path component, drop everything from the first '.' onwards,
# and append a UTC timestamp so repeated runs do not collide.
now=$(date -u +"%Y-%m-%dT_%H%M%SZ")
# Last path component, e.g. /path/to/file.root -> file.root
Ffname=${pfn##*/}
# Basename up to the first '.', e.g. file.root -> file
fname=${Ffname%%.*}
outFile=${fname}_detsim_${now}.root
OUTDIR=/pnfs/dune/scratch/users/muditk/SN_test/
campaign="justIN.w${JUSTIN_WORKFLOW_ID}s${JUSTIN_STAGE_ID}"
# Here is where the LArSoft command is run.
# The scary LD_PRELOAD is confined to a subshell so it only affects lar and
# its children, not the rest of the jobscript.
(
export LD_PRELOAD=${XROOTD_LIB}/libXrdPosixPreload.so
echo "$LD_PRELOAD"
# $events_option is intentionally unquoted: it must word-split into "-n N".
lar -c $FCL_FILE $events_option -o "$outFile" "$pfn" > "${fname}_myreco2_${now}.log" 2>&1
)
# BUG FIX: capture the subshell's exit status immediately. Previously $? was
# read only after the tail/cat of the log file, so it reflected those commands
# (always 0) instead of lar, and failed jobs were reported as successes.
larExit=$?
echo "lar exit code $larExit"
echo '=== Start last 100 lines of lar log file ==='
tail -100 "${fname}_myreco2_${now}.log"
# (The former `cat` of the whole log here duplicated output and contradicted
# the "last 100 lines" markers; removed.)
echo '=== End last 100 lines of lar log file ==='
# Record success/failure of the lar run. On success the input PFN is written
# to justin-processed-pfns.txt so justIN marks the file as processed.
# BUG FIX: expansions are quoted — the old unquoted [ $larExit -eq 0 ] and
# [ -f $outFile ] degenerate into test syntax errors when a variable is empty.
if [ "$larExit" -eq 0 ] ; then
  # Success !
  echo "$pfn" > justin-processed-pfns.txt
  jobscriptExit=0
else
  # Oh :(
  jobscriptExit=1
fi
if [ -f "$outFile" ]; then
  # Copy our output file back to dCache scratch
  ifdh cp -D "$outFile" "$OUTDIR"
  # Check the exit status to see if the copyback actually worked.
  # Print a message (to stderr) if it did not.
  IFDH_RESULT=$?
  if [ "$IFDH_RESULT" -ne 0 ]; then
    echo "Error during output copyback. See output logs." >&2
    exit $IFDH_RESULT
  fi
fi
# Create a compressed tar file with all log files; '@' characters in the
# jobsub id are mapped to '_' (same global substitution the old sed did).
tar zcf "${JUSTIN_JOBSUB_ID//@/_}.logs.tgz" *.log
exit $jobscriptExit
# justIN time: 2026-02-05 01:19:20 UTC  justIN version: 01.06.00
# (web-page footer captured with the script; commented out so it is never
# executed if the file is parsed past the exit above)