Workflow 10355, Stage 1
| Workflow | 10355 |
| Priority | 50 |
| Processors | 1 |
| Wall seconds | 80000 |
| Image | /cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest |
| RSS bytes | 2097152000 (2000 MiB) |
| Max distance for inputs | 30.0 |
| Enabled input RSEs | CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, MONTECARLO, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled output RSEs | CERN_PDUNE_EOS, DUNE_CA_SFU, DUNE_CERN_EOS, DUNE_ES_PIC, DUNE_FR_CCIN2P3_DISK, DUNE_IN_TIFR, DUNE_IT_INFN_CNAF, DUNE_UK_GLASGOW, DUNE_UK_LANCASTER_CEPH, DUNE_UK_MANCHESTER_CEPH, DUNE_US_BNL_SDCC, DUNE_US_FNAL_DISK_STAGE, FNAL_DCACHE, FNAL_DCACHE_STAGING, FNAL_DCACHE_TEST, NIKHEF, PRAGUE, QMUL, RAL-PP, RAL_ECHO, SURFSARA, T3_US_NERSC |
| Enabled sites | BR_CBPF, CA_SFU, CERN, CH_UNIBE-LHEP, CZ_FZU, ES_CIEMAT, ES_PIC, FR_CCIN2P3, IT_CNAF, NL_NIKHEF, NL_SURFsara, UK_Bristol, UK_Brunel, UK_Durham, UK_Edinburgh, UK_Lancaster, UK_Liverpool, UK_Manchester, UK_Oxford, UK_QMUL, UK_RAL-PPD, UK_RAL-Tier1, UK_Sheffield, US_Colorado, US_FNAL-FermiGrid, US_FNAL-T1, US_Michigan, US_PuertoRico, US_SU-ITS, US_Swan, US_UChicago, US_UConn-HPC, US_UCSD, US_Wisconsin |
| Scope | usertests |
| Events for this stage | |
Output patterns
| | Destination | Pattern | Lifetime | For next stage | RSE expression |
|---|---|---|---|---|---|
| 1 | https://fndcadoor.fnal.gov:2880/dune/scratch/users/lzambell/fnal/10355/1 | cbbot*.h5 | | | |
File states
| Total files | Finding | Unallocated | Allocated | Outputting | Processed | Not found | Failed |
|---|---|---|---|---|---|---|---|
| 91 | 0 | 91 | 0 | 0 | 0 | 0 | 0 |
Job states
| Total | Submitted | Started | Processing | Outputting | Finished | Notused | Aborted | Stalled | Jobscript error | Outputting failed | None processed |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 137 | 0 | 0 | 0 | 0 | 46 | 0 | 0 | 0 | 91 | 0 | 0 |
RSEs used
| Name | Inputs | Outputs |
|---|---|---|
| DUNE_US_BNL_SDCC | 91 | 0 |
Stats of processed input files and of uploaded output files are available as CSV or JSON (up to 10000 files included)
Jobscript
#!/bin/bash
#
export WORK=$HOME/workspace
source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh
setup python v3_9_15
setup cmake v3_27_4
#SETUP STUFF HERE
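# Default to a single processor if justIN did not export JUSTIN_PROCESSORS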
if [ -z "${JUSTIN_PROCESSORS}" ]; then
JUSTIN_PROCESSORS=1
fi
echo "Justin processors: ${JUSTIN_PROCESSORS}"
export TF_NUM_THREADS=${JUSTIN_PROCESSORS}
export OPENBLAS_NUM_THREADS=${JUSTIN_PROCESSORS}
export JULIA_NUM_THREADS=${JUSTIN_PROCESSORS}
export MKL_NUM_THREADS=${JUSTIN_PROCESSORS}
export NUMEXPR_NUM_THREADS=${JUSTIN_PROCESSORS}
export OMP_NUM_THREADS=${JUSTIN_PROCESSORS}
echo "Justin specific env vars"
env | grep JUSTIN
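# UTC timestamp and job/stage identifiers from the justIN environment (jobid has the '@...' suffix dropped and '.' replaced by '_')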
now=$(date -u +"%Y%m%dT%H%M%SZ")
jobid=`echo "${JUSTIN_JOBSUB_ID:-1}" | cut -f1 -d'@' | sed -e "s/\./_/"`
stageid=${JUSTIN_STAGE_ID:-1}
echo "Installing uv"
pip3 install --user uv
export PATH=$HOME/.local/bin/:$PATH
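# Swap the UPS python for the gcc 12 / HDF5 1.12.2 toolchain and point builds at the uv-managed CPython 3.12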
unsetup python
setup gcc v12_1_0
setup hdf5 v1_12_2b -q e26:prof
export HDF5_VERSION='1.12.2'
export CPATH=${CPATH}:${HDF5_DIR}/Linux64bit+3.10-2.17-e26-prof/include
export LIBRARY_PATH=${HDF5_LIB}:${LIBRARY_PATH}
export LD_LIBRARY_PATH=${HDF5_LIB}:${LD_LIBRARY_PATH}
export PYTHONPATH=${HOME}/.local/share/uv/python/cpython-3.12.12-linux-x86_64-gnu/lib/python3.12/
export PYTHON_ROOT=${HOME}/.local/share/uv/python/cpython-3.12.12-linux-x86_64-gnu/
export PYTHON_INCLUDE=${HOME}/.local/share/uv/python/cpython-3.12.12-linux-x86_64-gnu/include/python3.12/
export PYTHON_LIB=${HOME}/.local/share/uv/python/cpython-3.12.12-linux-x86_64-gnu/lib
echo "Will use justin-get-file"
DID_PFN_RSE=`$JUSTIN_PATH/justin-get-file`
if [ "${DID_PFN_RSE}" == "" ] ; then
echo "Could not get file"
exit 0
fi
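# Field 1 is the DID (scope:filename), field 2 is the PFN; keep just the filename part of the DID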
pfn=`echo ${DID_PFN_RSE} | cut -f2 -d' '`
did=`echo ${DID_PFN_RSE} | cut -f1 -d' '`
input_filename=`echo $did | cut -f2 -d':'`
echo "input file: $input_filename"
echo $DID_PFN_RSE
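# Extract run, subrun, server, flow and writer numbers from the filename; the 10# prefix forces base 10 so leading zeros are not read as octal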
if [[ $input_filename =~ run([0-9]+)_([0-9]+)_df-s([0-9]+)-d([0-9]+)_dw_([0-9]+)_ ]]; then
run_nb=$((10#${BASH_REMATCH[1]}))
sub_nb=$((10#${BASH_REMATCH[2]}))
serv_nb=$((10#${BASH_REMATCH[3]}))
flow_nb=$((10#${BASH_REMATCH[4]}))
writer_nb=$((10#${BASH_REMATCH[5]}))
fi
echo "file is: run $run_nb sub $sub_nb server $serv_nb flow $flow_nb writer $writer_nb"
#Setup ifdhc. Copy the file, then unsetup ifdhc
cd $HOME
setup ifdhc
ifdh cp -D $pfn ./
unsetup ifdhc
echo "Done"
echo $PYTHONPATH
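# Build libspatialindex 1.9.3 from source and install it under $PREFIX ($HOME/local)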
export PREFIX=$HOME/local
mkdir -p $PREFIX
cd $HOME
#mkdir -p $HOME/local/src && cd $HOME/local/src
wget https://github.com/libspatialindex/libspatialindex/releases/download/1.9.3/spatialindex-src-1.9.3.tar.gz
tar xzf spatialindex-src-1.9.3.tar.gz
cd spatialindex-src-1.9.3
#./configure --prefix=$HOME/local
mkdir build && cd build
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=$PREFIX
#cmake . -B $HOME/local
make -j$(nproc)
make install
ls $PREFIX/lib/libspatialindex*
# Make sure Python can find the library
export LD_LIBRARY_PATH=$PREFIX/lib:$LD_LIBRARY_PATH
cd $HOME
echo $LD_LIBRARY_PATH
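# Fetch lardon and run it on the local copy of the input file via uv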
git clone https://github.com/calcuttj/lardon.git
export LARDON_PATH=$HOME/lardon/lardon
export LARDON_RECO=$HOME/workspace
export LARDON_PLOT=$HOME
cd lardon
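# uv sync creates the project virtual environment with Python 3.11 and installs the locked dependencies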
uv sync --python 3.11
uv run lardon -det cbbot -run $run_nb -sub $sub_nb -serv $serv_nb -flow $flow_nb -writer $writer_nb -file "$HOME/$input_filename" -trk -out crp8
exitcode=$?
if [ $exitcode -ne 0 ]; then
echo "Error running. Exiting with ${exitcode}"
exit $exitcode
fi
echo "lsing"
ls -lhS
echo "disk usage"
du -sh .
#THIS TELLS JUSTIN THIS FILE WAS SUCCESSFULLY PROCESSED
echo "$pfn" > $WORK/justin-processed-pfns.txt