Workflow 11255, Stage 1
| Priority | 50 |
| Processors | 1 |
| Wall seconds | 80000 |
| Image | /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest |
| RSS bytes | 5242880000 (5000 MiB) |
| Max distance for inputs | 30.0 |
| Enabled input RSEs | CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled output RSEs | CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled sites | BR_CBPF, CA_SFU, CA_Victoria, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Glasgow, UK_Imperial, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_BNL, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_NotreDame, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
| Scope | usertests |
| Events for this stage |
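As a quick sanity check of the resource requests above, using only the values shown in the table (shell arithmetic, assuming a POSIX-compatible shell):
echo $(( 5000 * 1024 * 1024 ))   # 5242880000 bytes, i.e. the 5000 MiB RSS request
echo $(( 80000 / 3600 ))         # the 80000 s wall-time limit is roughly 22 hours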
Output patterns
| | Destination | Pattern | Lifetime (seconds) | For next stage | RSE expression |
|---|---|---|---|---|---|
| 1 | Rucio usertests:avizcaya_ntuples_mc5_wirefilter-fnal-w11255s1p1 | *.root | 5184000 | False | |
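As a quick check of the retention implied by the lifetime above (a shell one-liner):
echo $(( 5184000 / 86400 ))   # 60 days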
Environment variables
| Name | Value |
|---|---|
| DUNE_VERSION | v10_10_04d00 |
| FCL_FILE | pdhd_ana_MC_nosce_noreco.fcl |
| NTUPLE_DIR | /cvmfs/fifeuser1.opensciencegrid.org/sw/dune/afdb969380b44ad1ec876b1b4be2b7e43ae124db |
| PROTODUNEANA_TAR | /cvmfs/fifeuser3.opensciencegrid.org/sw/dune/c81d594ab1e792f6918da802b695da60766e8755 |
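These variables are injected into the job environment and consumed by the jobscript below: FCL_FILE and NTUPLE_DIR are mandatory, while DUNE_VERSION and PROTODUNEANA_TAR override the script's defaults. A minimal sketch of reproducing the setup by hand, assuming the jobscript is saved locally under the hypothetical name jobscript.sh (note that the input-file allocation step still expects to run inside a justIN job):
export DUNE_VERSION=v10_10_04d00
export FCL_FILE=pdhd_ana_MC_nosce_noreco.fcl
export NTUPLE_DIR=/cvmfs/fifeuser1.opensciencegrid.org/sw/dune/afdb969380b44ad1ec876b1b4be2b7e43ae124db
export PROTODUNEANA_TAR=/cvmfs/fifeuser3.opensciencegrid.org/sw/dune/c81d594ab1e792f6918da802b695da60766e8755
bash jobscript.sh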
File states
| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
|---|---|---|---|---|---|---|---|
| 164 | 0 | 0 | 0 | 0 | 163 | 0 | 1 |
Job states
| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 258 | 0 | 0 | 0 | 0 | 251 | 0 | 0 | 0 | 5 | 2 | 0 |
RSEs used
| Name | Inputs | Outputs |
|---|---|---|
| DUNE_US_FNAL_DISK_STAGE | 142 | 83 |
| DUNE_US_BNL_SDCC | 27 | 75 |
| DUNE_CA_SFU | 0 | 3 |
Jobscript
#!/bin/bash
#
#These must be defined
if [ -z "$FCL_FILE" ]; then
echo "Fatal: must provide FCL_FILE env var"
exit 1
fi
if [ -z "$NTUPLE_DIR" ]; then
echo "Fatal: must provide NTUPLE_DIR env var"
exit 1
fi
stat ${NTUPLE_DIR}
if [ $? -ne 0 ]; then
echo "Failed to $NTUPLE_DIR. Exiting safely"
exit 0
fi
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup metacat
export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_prod/app
export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune
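# MetaCat is DUNE's metadata catalogue; the two URLs above select the
# production dune_meta_prod instance and its authentication endpoint.
# If local protoduneana/geant4reweight builds are supplied (as unpacked
# tarball directories on CVMFS), prepend them to the UPS PRODUCTS search
# path so they take precedence over the released versions.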
if [ -n "$PROTODUNEANA_TAR" ]; then
stat ${PROTODUNEANA_TAR}
if [ $? -ne 0 ]; then
echo "Failed to stat $PROTODUNEANA_TAR. Exiting safely"
exit 0
fi
export PRODUCTS=$PROTODUNEANA_TAR:$PRODUCTS
echo "Set protoduneana to $PROTODUNEANA_TAR"
fi
if [ -n "$GEANT4REWEIGHT_TAR" ]; then
stat ${GEANT4REWEIGHT_TAR}
if [ $? -ne 0 ]; then
echo "Failed to stat $GEANT4REWEIGHT_TAR. Exiting safely"
exit 0
fi
export PRODUCTS=$GEANT4REWEIGHT_TAR:$PRODUCTS
echo "Set protoduneana to $GEANT4REWEIGHT_TAR"
fi
#Setup recent lar software suite
DUNE_VERSION=${DUNE_VERSION:-v09_81_00d01}
setup dunesw \
"${DUNE_VERSION}" \
-q "${DUNE_QUALIFIER:-e26:prof}"
setup_exit=$?
if [ $setup_exit -ne 0 ]; then
echo "Failed to setup dunesw $DUNE_VERSION $DUNE_QUALIFIER"
exit $setup_exit
fi
echo "DUNESW loc:"
ups active | grep dunesw
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
echo "printing env"
env
echo "Justin specific env vars"
env | grep JUSTIN
echo "Will use justin-get-file"
#
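# Ask justIN for up to $NFILES (default 1) input files via the experiment's
# helper script; it writes the DIDs and PFNs of the allocated files to
# input_dids.list and input_pfns.list (behaviour inferred from the options).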
nfiles=${NFILES:-1}
python $NTUPLE_DIR/ntuple_prod_utils.py get_nfiles_justin \
-n $nfiles \
--dids input_dids.list \
--pfns input_pfns.list
#pfn_exit=$?
#if [ $pfn_exit -ne 0 ]; then
# echo "Error in get_nfiles_justin. Exiting"
# exit $pfn_exit
#fi
n_files_retrieved=`wc -l input_pfns.list | cut -f1 -d' '`
echo "Files retrieved: ${n_files_retrieved}"
if [ $n_files_retrieved -eq 0 ]; then
echo "No files retrieved. Exiting safely"
exit 0
fi
pfn_list=`cat input_pfns.list`
echo "PFN list:"
echo $pfn_list
fname="${pfn_list##*/}"
lname="${fname//.root/}"
echo "Just the file name:"
echo "$lname"
now=$(date -u +"%Y%m%dT%H%M%SZ")
##TODO -- edit this
#jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | awk -F '.' '{print $1}'`
#jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
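# Output name: <PREFIX>_<input basename>_<stage id>_<workflow id>_<UTC timestamp>.root,
# where the basename is taken from the (single) input PFN retrieved above.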
PREFIX=${PREFIX:-"ntuplesMC5"}
OUTFILE="${PREFIX}_${lname}_${JUSTIN_STAGE_ID}_${JUSTIN_WORKFLOW_ID}_${now}.root"
nevents=${NEVENTS:--1}
echo "Running ntuplesMC5"
touch ana.log
starttime=`date +"%s"`.0
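# Run LArSoft: -c selects the FCL configuration, -n the number of events
# (-1 = all), -T the TFileService/ntuple output file, -s the input file;
# stdout and stderr are captured in ana.log.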
lar -c ${FCL_FILE} \
-n ${nevents} \
-T ${OUTFILE} \
-s ${pfn_list} >ana.log 2>&1
larExit=$?
endtime=`date +"%s"`.0
if [ $larExit -ne 0 ]; then
echo "Error in ntuple production"
cat ana.log
exit $larExit
fi
echo "Ran successfully"
echo "Forming metadata"
### Get the metadata
#python $NTUPLE_DIR/ntuple_prod_utils.py \
# metadata \
# --root_file ${OUTFILE} \
# --dids input_dids.list \
# --version ${DUNE_VERSION} \
# --fcl_name ${FCL_FILE} \
# --tree_name ${TREE_NAME:-"pduneana/beamana"} \
# -o $OUTFILE.json
#--log_file md.log \
#mdExit=$?
#if [ $mdExit -ne 0 ]; then
# echo "Error in ntuple metdata production"
# cat md.log
# exit $mdExit
#fi
#echo "formed"
#cat ${OUTFILE}.json
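# Record the PFNs that were processed successfully; justIN reads
# justin-processed-pfns.txt to mark these input files as processed.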
echo "$pfn_list" > justin-processed-pfns.txt
exit 0