From 8d9bb41192d682eca72e79cc62b141c95d0e94d1 Mon Sep 17 00:00:00 2001 From: Jacob Calcutt Date: Thu, 6 Oct 2022 15:50:01 -0500 Subject: [PATCH 001/166] Init commit --- get_rse.py | 18 + rses.txt | 1080 +++++++++++++++++++++++++++++++++++++++++++++++++ run_lar.py | 294 ++++++++++++++ top_script.sh | 252 ++++++++++++ 4 files changed, 1644 insertions(+) create mode 100644 get_rse.py create mode 100644 rses.txt create mode 100644 run_lar.py create mode 100755 top_script.sh diff --git a/get_rse.py b/get_rse.py new file mode 100644 index 0000000..79270e9 --- /dev/null +++ b/get_rse.py @@ -0,0 +1,18 @@ +import subprocess +import sys +import os + +if 'GLIDEIN_DUNESite' not in os.environ.keys(): + glidein_site = 'US_FNAL' +else: + glidein_site = os.environ['GLIDEIN_DUNESite'] +def is_good_rse(rse): + return (('DUNE_CERN_EOS' not in rse) and + ('FNAL_DCACHE' not in rse) and + ('CERN_PDUNE_CASTOR' not in rse) and + ('CERN_PDUNE_EOS' not in rse)) +with open(sys.argv[1], 'r') as f: + rses = {i.split(',')[1]:int(i.split(',')[2].strip('\n')) for i in f.readlines() if (glidein_site in i and is_good_rse(i.split(',')[1]))} +#print(rses) + +print(min(rses, key=rses.get)) diff --git a/rses.txt b/rses.txt new file mode 100644 index 0000000..789af3b --- /dev/null +++ b/rses.txt @@ -0,0 +1,1080 @@ +BR_CBPF,CERN_PDUNE_EOS,90 +BR_CBPF,DUNE_CERN_EOS,90 +BR_CBPF,DUNE_ES_PIC,100 +BR_CBPF,DUNE_FR_CCIN2P3,100 +BR_CBPF,DUNE_FR_CCIN2P3_DISK,100 +BR_CBPF,DUNE_FR_CCIN2P3_XROOTD,100 +BR_CBPF,DUNE_US_BNL_SDCC,100 +BR_CBPF,EDINBURGH,100 +BR_CBPF,FNAL_DCACHE,90 +BR_CBPF,FNAL_DCACHE_PERSISTENT,90 +BR_CBPF,FNAL_DCACHE_STAGING,90 +BR_CBPF,FNAL_DCACHE_TEST,90 +BR_CBPF,IMPERIAL,100 +BR_CBPF,LANCASTER,100 +BR_CBPF,LIVERPOOL,100 +BR_CBPF,MANCHESTER,100 +BR_CBPF,MONTECARLO,100 +BR_CBPF,NIKHEF,100 +BR_CBPF,PRAGUE,100 +BR_CBPF,QMUL,100 +BR_CBPF,RAL-PP,100 +BR_CBPF,RAL_ECHO,100 +BR_CBPF,SCRATCH_DCACHE,90 +BR_CBPF,T3_US_NERSC,100 +BR_UNICAMP,CERN_PDUNE_EOS,90 +BR_UNICAMP,DUNE_CERN_EOS,90 
+BR_UNICAMP,DUNE_ES_PIC,100 +BR_UNICAMP,DUNE_FR_CCIN2P3,100 +BR_UNICAMP,DUNE_FR_CCIN2P3_DISK,100 +BR_UNICAMP,DUNE_FR_CCIN2P3_XROOTD,100 +BR_UNICAMP,DUNE_US_BNL_SDCC,100 +BR_UNICAMP,EDINBURGH,100 +BR_UNICAMP,FNAL_DCACHE,90 +BR_UNICAMP,FNAL_DCACHE_PERSISTENT,90 +BR_UNICAMP,FNAL_DCACHE_STAGING,90 +BR_UNICAMP,FNAL_DCACHE_TEST,90 +BR_UNICAMP,IMPERIAL,100 +BR_UNICAMP,LANCASTER,100 +BR_UNICAMP,LIVERPOOL,100 +BR_UNICAMP,MANCHESTER,100 +BR_UNICAMP,MONTECARLO,100 +BR_UNICAMP,NIKHEF,100 +BR_UNICAMP,PRAGUE,100 +BR_UNICAMP,QMUL,100 +BR_UNICAMP,RAL-PP,100 +BR_UNICAMP,RAL_ECHO,100 +BR_UNICAMP,SCRATCH_DCACHE,90 +BR_UNICAMP,T3_US_NERSC,100 +CA_Victoria,CERN_PDUNE_EOS,90 +CA_Victoria,DUNE_CERN_EOS,90 +CA_Victoria,DUNE_ES_PIC,100 +CA_Victoria,DUNE_FR_CCIN2P3,100 +CA_Victoria,DUNE_FR_CCIN2P3_DISK,100 +CA_Victoria,DUNE_FR_CCIN2P3_XROOTD,100 +CA_Victoria,DUNE_US_BNL_SDCC,20 +CA_Victoria,EDINBURGH,100 +CA_Victoria,FNAL_DCACHE,20 +CA_Victoria,FNAL_DCACHE_PERSISTENT,20 +CA_Victoria,FNAL_DCACHE_STAGING,20 +CA_Victoria,FNAL_DCACHE_TEST,20 +CA_Victoria,IMPERIAL,100 +CA_Victoria,LANCASTER,100 +CA_Victoria,LIVERPOOL,100 +CA_Victoria,MANCHESTER,100 +CA_Victoria,MONTECARLO,100 +CA_Victoria,NIKHEF,100 +CA_Victoria,PRAGUE,100 +CA_Victoria,QMUL,100 +CA_Victoria,RAL-PP,100 +CA_Victoria,RAL_ECHO,100 +CA_Victoria,SCRATCH_DCACHE,20 +CA_Victoria,T3_US_NERSC,20 +CERN,CERN_PDUNE_EOS,0 +CERN,DUNE_CERN_EOS,0 +CERN,DUNE_ES_PIC,30 +CERN,DUNE_FR_CCIN2P3,30 +CERN,DUNE_FR_CCIN2P3_DISK,30 +CERN,DUNE_FR_CCIN2P3_XROOTD,30 +CERN,DUNE_US_BNL_SDCC,100 +CERN,EDINBURGH,30 +CERN,FNAL_DCACHE,90 +CERN,FNAL_DCACHE_PERSISTENT,90 +CERN,FNAL_DCACHE_STAGING,90 +CERN,FNAL_DCACHE_TEST,90 +CERN,IMPERIAL,30 +CERN,LANCASTER,30 +CERN,LIVERPOOL,30 +CERN,MANCHESTER,30 +CERN,MONTECARLO,100 +CERN,NIKHEF,30 +CERN,PRAGUE,30 +CERN,QMUL,30 +CERN,RAL-PP,30 +CERN,RAL_ECHO,30 +CERN,SCRATCH_DCACHE,90 +CERN,T3_US_NERSC,100 +CH_UNIBE-LHEP,CERN_PDUNE_EOS,30 +CH_UNIBE-LHEP,DUNE_CERN_EOS,30 +CH_UNIBE-LHEP,DUNE_ES_PIC,30 
+CH_UNIBE-LHEP,DUNE_FR_CCIN2P3,30 +CH_UNIBE-LHEP,DUNE_FR_CCIN2P3_DISK,30 +CH_UNIBE-LHEP,DUNE_FR_CCIN2P3_XROOTD,30 +CH_UNIBE-LHEP,DUNE_US_BNL_SDCC,100 +CH_UNIBE-LHEP,EDINBURGH,30 +CH_UNIBE-LHEP,FNAL_DCACHE,90 +CH_UNIBE-LHEP,FNAL_DCACHE_PERSISTENT,90 +CH_UNIBE-LHEP,FNAL_DCACHE_STAGING,90 +CH_UNIBE-LHEP,FNAL_DCACHE_TEST,90 +CH_UNIBE-LHEP,IMPERIAL,30 +CH_UNIBE-LHEP,LANCASTER,30 +CH_UNIBE-LHEP,LIVERPOOL,30 +CH_UNIBE-LHEP,MANCHESTER,30 +CH_UNIBE-LHEP,MONTECARLO,100 +CH_UNIBE-LHEP,NIKHEF,30 +CH_UNIBE-LHEP,PRAGUE,30 +CH_UNIBE-LHEP,QMUL,30 +CH_UNIBE-LHEP,RAL-PP,30 +CH_UNIBE-LHEP,RAL_ECHO,30 +CH_UNIBE-LHEP,SCRATCH_DCACHE,90 +CH_UNIBE-LHEP,T3_US_NERSC,100 +CZ_FZU,CERN_PDUNE_EOS,30 +CZ_FZU,DUNE_CERN_EOS,30 +CZ_FZU,DUNE_ES_PIC,30 +CZ_FZU,DUNE_FR_CCIN2P3,30 +CZ_FZU,DUNE_FR_CCIN2P3_DISK,30 +CZ_FZU,DUNE_FR_CCIN2P3_XROOTD,30 +CZ_FZU,DUNE_US_BNL_SDCC,100 +CZ_FZU,EDINBURGH,30 +CZ_FZU,FNAL_DCACHE,90 +CZ_FZU,FNAL_DCACHE_PERSISTENT,90 +CZ_FZU,FNAL_DCACHE_STAGING,90 +CZ_FZU,FNAL_DCACHE_TEST,90 +CZ_FZU,IMPERIAL,30 +CZ_FZU,LANCASTER,30 +CZ_FZU,LIVERPOOL,30 +CZ_FZU,MANCHESTER,30 +CZ_FZU,MONTECARLO,100 +CZ_FZU,NIKHEF,30 +CZ_FZU,PRAGUE,0 +CZ_FZU,QMUL,30 +CZ_FZU,RAL-PP,30 +CZ_FZU,RAL_ECHO,30 +CZ_FZU,SCRATCH_DCACHE,90 +CZ_FZU,T3_US_NERSC,100 +ES_CIEMAT,CERN_PDUNE_EOS,30 +ES_CIEMAT,DUNE_CERN_EOS,30 +ES_CIEMAT,DUNE_ES_PIC,30 +ES_CIEMAT,DUNE_FR_CCIN2P3,30 +ES_CIEMAT,DUNE_FR_CCIN2P3_DISK,30 +ES_CIEMAT,DUNE_FR_CCIN2P3_XROOTD,30 +ES_CIEMAT,DUNE_US_BNL_SDCC,100 +ES_CIEMAT,EDINBURGH,30 +ES_CIEMAT,FNAL_DCACHE,90 +ES_CIEMAT,FNAL_DCACHE_PERSISTENT,90 +ES_CIEMAT,FNAL_DCACHE_STAGING,90 +ES_CIEMAT,FNAL_DCACHE_TEST,90 +ES_CIEMAT,IMPERIAL,30 +ES_CIEMAT,LANCASTER,30 +ES_CIEMAT,LIVERPOOL,30 +ES_CIEMAT,MANCHESTER,30 +ES_CIEMAT,MONTECARLO,100 +ES_CIEMAT,NIKHEF,30 +ES_CIEMAT,PRAGUE,30 +ES_CIEMAT,QMUL,30 +ES_CIEMAT,RAL-PP,30 +ES_CIEMAT,RAL_ECHO,30 +ES_CIEMAT,SCRATCH_DCACHE,90 +ES_CIEMAT,T3_US_NERSC,100 +ES_PIC,CERN_PDUNE_EOS,30 +ES_PIC,DUNE_CERN_EOS,30 +ES_PIC,DUNE_ES_PIC,0 +ES_PIC,DUNE_FR_CCIN2P3,30 
+ES_PIC,DUNE_FR_CCIN2P3_DISK,30 +ES_PIC,DUNE_FR_CCIN2P3_XROOTD,30 +ES_PIC,DUNE_US_BNL_SDCC,100 +ES_PIC,EDINBURGH,30 +ES_PIC,FNAL_DCACHE,90 +ES_PIC,FNAL_DCACHE_PERSISTENT,90 +ES_PIC,FNAL_DCACHE_STAGING,90 +ES_PIC,FNAL_DCACHE_TEST,90 +ES_PIC,IMPERIAL,30 +ES_PIC,LANCASTER,30 +ES_PIC,LIVERPOOL,30 +ES_PIC,MANCHESTER,30 +ES_PIC,MONTECARLO,100 +ES_PIC,NIKHEF,30 +ES_PIC,PRAGUE,30 +ES_PIC,QMUL,30 +ES_PIC,RAL-PP,30 +ES_PIC,RAL_ECHO,30 +ES_PIC,SCRATCH_DCACHE,90 +ES_PIC,T3_US_NERSC,100 +FR_CCIN2P3,CERN_PDUNE_EOS,30 +FR_CCIN2P3,DUNE_CERN_EOS,30 +FR_CCIN2P3,DUNE_ES_PIC,30 +FR_CCIN2P3,DUNE_FR_CCIN2P3,0 +FR_CCIN2P3,DUNE_FR_CCIN2P3_DISK,0 +FR_CCIN2P3,DUNE_FR_CCIN2P3_XROOTD,0 +FR_CCIN2P3,DUNE_US_BNL_SDCC,100 +FR_CCIN2P3,EDINBURGH,30 +FR_CCIN2P3,FNAL_DCACHE,90 +FR_CCIN2P3,FNAL_DCACHE_PERSISTENT,90 +FR_CCIN2P3,FNAL_DCACHE_STAGING,90 +FR_CCIN2P3,FNAL_DCACHE_TEST,90 +FR_CCIN2P3,IMPERIAL,30 +FR_CCIN2P3,LANCASTER,30 +FR_CCIN2P3,LIVERPOOL,30 +FR_CCIN2P3,MANCHESTER,30 +FR_CCIN2P3,MONTECARLO,100 +FR_CCIN2P3,NIKHEF,30 +FR_CCIN2P3,PRAGUE,30 +FR_CCIN2P3,QMUL,30 +FR_CCIN2P3,RAL-PP,30 +FR_CCIN2P3,RAL_ECHO,30 +FR_CCIN2P3,SCRATCH_DCACHE,90 +FR_CCIN2P3,T3_US_NERSC,100 +IN_TIFR,CERN_PDUNE_EOS,90 +IN_TIFR,DUNE_CERN_EOS,90 +IN_TIFR,DUNE_ES_PIC,100 +IN_TIFR,DUNE_FR_CCIN2P3,100 +IN_TIFR,DUNE_FR_CCIN2P3_DISK,100 +IN_TIFR,DUNE_FR_CCIN2P3_XROOTD,100 +IN_TIFR,DUNE_US_BNL_SDCC,100 +IN_TIFR,EDINBURGH,100 +IN_TIFR,FNAL_DCACHE,90 +IN_TIFR,FNAL_DCACHE_PERSISTENT,90 +IN_TIFR,FNAL_DCACHE_STAGING,90 +IN_TIFR,FNAL_DCACHE_TEST,90 +IN_TIFR,IMPERIAL,100 +IN_TIFR,LANCASTER,100 +IN_TIFR,LIVERPOOL,100 +IN_TIFR,MANCHESTER,100 +IN_TIFR,MONTECARLO,100 +IN_TIFR,NIKHEF,100 +IN_TIFR,PRAGUE,100 +IN_TIFR,QMUL,100 +IN_TIFR,RAL-PP,100 +IN_TIFR,RAL_ECHO,100 +IN_TIFR,SCRATCH_DCACHE,90 +IN_TIFR,T3_US_NERSC,100 +NL_NIKHEF,CERN_PDUNE_EOS,30 +NL_NIKHEF,DUNE_CERN_EOS,30 +NL_NIKHEF,DUNE_ES_PIC,30 +NL_NIKHEF,DUNE_FR_CCIN2P3,30 +NL_NIKHEF,DUNE_FR_CCIN2P3_DISK,30 +NL_NIKHEF,DUNE_FR_CCIN2P3_XROOTD,30 +NL_NIKHEF,DUNE_US_BNL_SDCC,100 
+NL_NIKHEF,EDINBURGH,30 +NL_NIKHEF,FNAL_DCACHE,90 +NL_NIKHEF,FNAL_DCACHE_PERSISTENT,90 +NL_NIKHEF,FNAL_DCACHE_STAGING,90 +NL_NIKHEF,FNAL_DCACHE_TEST,90 +NL_NIKHEF,IMPERIAL,30 +NL_NIKHEF,LANCASTER,30 +NL_NIKHEF,LIVERPOOL,30 +NL_NIKHEF,MANCHESTER,30 +NL_NIKHEF,MONTECARLO,100 +NL_NIKHEF,NIKHEF,0 +NL_NIKHEF,PRAGUE,30 +NL_NIKHEF,QMUL,30 +NL_NIKHEF,RAL-PP,30 +NL_NIKHEF,RAL_ECHO,30 +NL_NIKHEF,SCRATCH_DCACHE,90 +NL_NIKHEF,T3_US_NERSC,100 +NL_SURFsara,CERN_PDUNE_EOS,30 +NL_SURFsara,DUNE_CERN_EOS,30 +NL_SURFsara,DUNE_ES_PIC,30 +NL_SURFsara,DUNE_FR_CCIN2P3,30 +NL_SURFsara,DUNE_FR_CCIN2P3_DISK,30 +NL_SURFsara,DUNE_FR_CCIN2P3_XROOTD,30 +NL_SURFsara,DUNE_US_BNL_SDCC,100 +NL_SURFsara,EDINBURGH,30 +NL_SURFsara,FNAL_DCACHE,90 +NL_SURFsara,FNAL_DCACHE_PERSISTENT,90 +NL_SURFsara,FNAL_DCACHE_STAGING,90 +NL_SURFsara,FNAL_DCACHE_TEST,90 +NL_SURFsara,IMPERIAL,30 +NL_SURFsara,LANCASTER,30 +NL_SURFsara,LIVERPOOL,30 +NL_SURFsara,MANCHESTER,30 +NL_SURFsara,MONTECARLO,100 +NL_SURFsara,NIKHEF,30 +NL_SURFsara,PRAGUE,30 +NL_SURFsara,QMUL,30 +NL_SURFsara,RAL-PP,30 +NL_SURFsara,RAL_ECHO,30 +NL_SURFsara,SCRATCH_DCACHE,90 +NL_SURFsara,T3_US_NERSC,100 +RU_JINR,CERN_PDUNE_EOS,90 +RU_JINR,DUNE_CERN_EOS,90 +RU_JINR,DUNE_ES_PIC,100 +RU_JINR,DUNE_FR_CCIN2P3,100 +RU_JINR,DUNE_FR_CCIN2P3_DISK,100 +RU_JINR,DUNE_FR_CCIN2P3_XROOTD,100 +RU_JINR,DUNE_US_BNL_SDCC,100 +RU_JINR,EDINBURGH,100 +RU_JINR,FNAL_DCACHE,90 +RU_JINR,FNAL_DCACHE_PERSISTENT,90 +RU_JINR,FNAL_DCACHE_STAGING,90 +RU_JINR,FNAL_DCACHE_TEST,90 +RU_JINR,IMPERIAL,100 +RU_JINR,LANCASTER,100 +RU_JINR,LIVERPOOL,100 +RU_JINR,MANCHESTER,100 +RU_JINR,MONTECARLO,100 +RU_JINR,NIKHEF,100 +RU_JINR,PRAGUE,100 +RU_JINR,QMUL,100 +RU_JINR,RAL-PP,100 +RU_JINR,RAL_ECHO,100 +RU_JINR,SCRATCH_DCACHE,90 +RU_JINR,T3_US_NERSC,100 +UK_Bristol,CERN_PDUNE_EOS,30 +UK_Bristol,DUNE_CERN_EOS,30 +UK_Bristol,DUNE_ES_PIC,30 +UK_Bristol,DUNE_FR_CCIN2P3,30 +UK_Bristol,DUNE_FR_CCIN2P3_DISK,30 +UK_Bristol,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Bristol,DUNE_US_BNL_SDCC,100 
+UK_Bristol,EDINBURGH,20 +UK_Bristol,FNAL_DCACHE,90 +UK_Bristol,FNAL_DCACHE_PERSISTENT,90 +UK_Bristol,FNAL_DCACHE_STAGING,90 +UK_Bristol,FNAL_DCACHE_TEST,90 +UK_Bristol,IMPERIAL,20 +UK_Bristol,LANCASTER,20 +UK_Bristol,LIVERPOOL,20 +UK_Bristol,MANCHESTER,20 +UK_Bristol,MONTECARLO,100 +UK_Bristol,NIKHEF,30 +UK_Bristol,PRAGUE,30 +UK_Bristol,QMUL,20 +UK_Bristol,RAL-PP,20 +UK_Bristol,RAL_ECHO,20 +UK_Bristol,SCRATCH_DCACHE,90 +UK_Bristol,T3_US_NERSC,100 +UK_Brunel,CERN_PDUNE_EOS,30 +UK_Brunel,DUNE_CERN_EOS,30 +UK_Brunel,DUNE_ES_PIC,30 +UK_Brunel,DUNE_FR_CCIN2P3,30 +UK_Brunel,DUNE_FR_CCIN2P3_DISK,30 +UK_Brunel,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Brunel,DUNE_US_BNL_SDCC,100 +UK_Brunel,EDINBURGH,20 +UK_Brunel,FNAL_DCACHE,90 +UK_Brunel,FNAL_DCACHE_PERSISTENT,90 +UK_Brunel,FNAL_DCACHE_STAGING,90 +UK_Brunel,FNAL_DCACHE_TEST,90 +UK_Brunel,IMPERIAL,20 +UK_Brunel,LANCASTER,20 +UK_Brunel,LIVERPOOL,20 +UK_Brunel,MANCHESTER,20 +UK_Brunel,MONTECARLO,100 +UK_Brunel,NIKHEF,30 +UK_Brunel,PRAGUE,30 +UK_Brunel,QMUL,20 +UK_Brunel,RAL-PP,20 +UK_Brunel,RAL_ECHO,20 +UK_Brunel,SCRATCH_DCACHE,90 +UK_Brunel,T3_US_NERSC,100 +UK_Edinburgh,CERN_PDUNE_EOS,30 +UK_Edinburgh,DUNE_CERN_EOS,30 +UK_Edinburgh,DUNE_ES_PIC,30 +UK_Edinburgh,DUNE_FR_CCIN2P3,30 +UK_Edinburgh,DUNE_FR_CCIN2P3_DISK,30 +UK_Edinburgh,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Edinburgh,DUNE_US_BNL_SDCC,100 +UK_Edinburgh,EDINBURGH,0 +UK_Edinburgh,FNAL_DCACHE,90 +UK_Edinburgh,FNAL_DCACHE_PERSISTENT,90 +UK_Edinburgh,FNAL_DCACHE_STAGING,90 +UK_Edinburgh,FNAL_DCACHE_TEST,90 +UK_Edinburgh,IMPERIAL,20 +UK_Edinburgh,LANCASTER,20 +UK_Edinburgh,LIVERPOOL,20 +UK_Edinburgh,MANCHESTER,20 +UK_Edinburgh,MONTECARLO,100 +UK_Edinburgh,NIKHEF,30 +UK_Edinburgh,PRAGUE,30 +UK_Edinburgh,QMUL,20 +UK_Edinburgh,RAL-PP,20 +UK_Edinburgh,RAL_ECHO,20 +UK_Edinburgh,SCRATCH_DCACHE,90 +UK_Edinburgh,T3_US_NERSC,100 +UK_Imperial,CERN_PDUNE_EOS,30 +UK_Imperial,DUNE_CERN_EOS,30 +UK_Imperial,DUNE_ES_PIC,30 +UK_Imperial,DUNE_FR_CCIN2P3,30 +UK_Imperial,DUNE_FR_CCIN2P3_DISK,30 
+UK_Imperial,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Imperial,DUNE_US_BNL_SDCC,100 +UK_Imperial,EDINBURGH,20 +UK_Imperial,FNAL_DCACHE,90 +UK_Imperial,FNAL_DCACHE_PERSISTENT,90 +UK_Imperial,FNAL_DCACHE_STAGING,90 +UK_Imperial,FNAL_DCACHE_TEST,90 +UK_Imperial,IMPERIAL,0 +UK_Imperial,LANCASTER,20 +UK_Imperial,LIVERPOOL,20 +UK_Imperial,MANCHESTER,20 +UK_Imperial,MONTECARLO,100 +UK_Imperial,NIKHEF,30 +UK_Imperial,PRAGUE,30 +UK_Imperial,QMUL,20 +UK_Imperial,RAL-PP,20 +UK_Imperial,RAL_ECHO,20 +UK_Imperial,SCRATCH_DCACHE,90 +UK_Imperial,T3_US_NERSC,100 +UK_Lancaster,CERN_PDUNE_EOS,30 +UK_Lancaster,DUNE_CERN_EOS,30 +UK_Lancaster,DUNE_ES_PIC,30 +UK_Lancaster,DUNE_FR_CCIN2P3,30 +UK_Lancaster,DUNE_FR_CCIN2P3_DISK,30 +UK_Lancaster,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Lancaster,DUNE_US_BNL_SDCC,100 +UK_Lancaster,EDINBURGH,20 +UK_Lancaster,FNAL_DCACHE,90 +UK_Lancaster,FNAL_DCACHE_PERSISTENT,90 +UK_Lancaster,FNAL_DCACHE_STAGING,90 +UK_Lancaster,FNAL_DCACHE_TEST,90 +UK_Lancaster,IMPERIAL,20 +UK_Lancaster,LANCASTER,0 +UK_Lancaster,LIVERPOOL,20 +UK_Lancaster,MANCHESTER,20 +UK_Lancaster,MONTECARLO,100 +UK_Lancaster,NIKHEF,30 +UK_Lancaster,PRAGUE,30 +UK_Lancaster,QMUL,20 +UK_Lancaster,RAL-PP,20 +UK_Lancaster,RAL_ECHO,20 +UK_Lancaster,SCRATCH_DCACHE,90 +UK_Lancaster,T3_US_NERSC,100 +UK_Liverpool,CERN_PDUNE_EOS,30 +UK_Liverpool,DUNE_CERN_EOS,30 +UK_Liverpool,DUNE_ES_PIC,30 +UK_Liverpool,DUNE_FR_CCIN2P3,30 +UK_Liverpool,DUNE_FR_CCIN2P3_DISK,30 +UK_Liverpool,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Liverpool,DUNE_US_BNL_SDCC,100 +UK_Liverpool,EDINBURGH,20 +UK_Liverpool,FNAL_DCACHE,90 +UK_Liverpool,FNAL_DCACHE_PERSISTENT,90 +UK_Liverpool,FNAL_DCACHE_STAGING,90 +UK_Liverpool,FNAL_DCACHE_TEST,90 +UK_Liverpool,IMPERIAL,20 +UK_Liverpool,LANCASTER,20 +UK_Liverpool,LIVERPOOL,0 +UK_Liverpool,MANCHESTER,20 +UK_Liverpool,MONTECARLO,100 +UK_Liverpool,NIKHEF,30 +UK_Liverpool,PRAGUE,30 +UK_Liverpool,QMUL,20 +UK_Liverpool,RAL-PP,20 +UK_Liverpool,RAL_ECHO,20 +UK_Liverpool,SCRATCH_DCACHE,90 +UK_Liverpool,T3_US_NERSC,100 
+UK_Manchester,CERN_PDUNE_EOS,30 +UK_Manchester,DUNE_CERN_EOS,30 +UK_Manchester,DUNE_ES_PIC,30 +UK_Manchester,DUNE_FR_CCIN2P3,30 +UK_Manchester,DUNE_FR_CCIN2P3_DISK,30 +UK_Manchester,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Manchester,DUNE_US_BNL_SDCC,100 +UK_Manchester,EDINBURGH,20 +UK_Manchester,FNAL_DCACHE,90 +UK_Manchester,FNAL_DCACHE_PERSISTENT,90 +UK_Manchester,FNAL_DCACHE_STAGING,90 +UK_Manchester,FNAL_DCACHE_TEST,90 +UK_Manchester,IMPERIAL,20 +UK_Manchester,LANCASTER,20 +UK_Manchester,LIVERPOOL,20 +UK_Manchester,MANCHESTER,0 +UK_Manchester,MONTECARLO,100 +UK_Manchester,NIKHEF,30 +UK_Manchester,PRAGUE,30 +UK_Manchester,QMUL,20 +UK_Manchester,RAL-PP,20 +UK_Manchester,RAL_ECHO,20 +UK_Manchester,SCRATCH_DCACHE,90 +UK_Manchester,T3_US_NERSC,100 +UK_Oxford,CERN_PDUNE_EOS,30 +UK_Oxford,DUNE_CERN_EOS,30 +UK_Oxford,DUNE_ES_PIC,30 +UK_Oxford,DUNE_FR_CCIN2P3,30 +UK_Oxford,DUNE_FR_CCIN2P3_DISK,30 +UK_Oxford,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Oxford,DUNE_US_BNL_SDCC,100 +UK_Oxford,EDINBURGH,20 +UK_Oxford,FNAL_DCACHE,90 +UK_Oxford,FNAL_DCACHE_PERSISTENT,90 +UK_Oxford,FNAL_DCACHE_STAGING,90 +UK_Oxford,FNAL_DCACHE_TEST,90 +UK_Oxford,IMPERIAL,20 +UK_Oxford,LANCASTER,20 +UK_Oxford,LIVERPOOL,20 +UK_Oxford,MANCHESTER,20 +UK_Oxford,MONTECARLO,100 +UK_Oxford,NIKHEF,30 +UK_Oxford,PRAGUE,30 +UK_Oxford,QMUL,20 +UK_Oxford,RAL-PP,20 +UK_Oxford,RAL_ECHO,20 +UK_Oxford,SCRATCH_DCACHE,90 +UK_Oxford,T3_US_NERSC,100 +UK_QMUL,CERN_PDUNE_EOS,30 +UK_QMUL,DUNE_CERN_EOS,30 +UK_QMUL,DUNE_ES_PIC,30 +UK_QMUL,DUNE_FR_CCIN2P3,30 +UK_QMUL,DUNE_FR_CCIN2P3_DISK,30 +UK_QMUL,DUNE_FR_CCIN2P3_XROOTD,30 +UK_QMUL,DUNE_US_BNL_SDCC,100 +UK_QMUL,EDINBURGH,20 +UK_QMUL,FNAL_DCACHE,90 +UK_QMUL,FNAL_DCACHE_PERSISTENT,90 +UK_QMUL,FNAL_DCACHE_STAGING,90 +UK_QMUL,FNAL_DCACHE_TEST,90 +UK_QMUL,IMPERIAL,20 +UK_QMUL,LANCASTER,20 +UK_QMUL,LIVERPOOL,20 +UK_QMUL,MANCHESTER,20 +UK_QMUL,MONTECARLO,100 +UK_QMUL,NIKHEF,30 +UK_QMUL,PRAGUE,30 +UK_QMUL,QMUL,0 +UK_QMUL,RAL-PP,20 +UK_QMUL,RAL_ECHO,20 +UK_QMUL,SCRATCH_DCACHE,90 
+UK_QMUL,T3_US_NERSC,100 +UK_RAL-PPD,CERN_PDUNE_EOS,30 +UK_RAL-PPD,DUNE_CERN_EOS,30 +UK_RAL-PPD,DUNE_ES_PIC,30 +UK_RAL-PPD,DUNE_FR_CCIN2P3,30 +UK_RAL-PPD,DUNE_FR_CCIN2P3_DISK,30 +UK_RAL-PPD,DUNE_FR_CCIN2P3_XROOTD,30 +UK_RAL-PPD,DUNE_US_BNL_SDCC,100 +UK_RAL-PPD,EDINBURGH,20 +UK_RAL-PPD,FNAL_DCACHE,90 +UK_RAL-PPD,FNAL_DCACHE_PERSISTENT,90 +UK_RAL-PPD,FNAL_DCACHE_STAGING,90 +UK_RAL-PPD,FNAL_DCACHE_TEST,90 +UK_RAL-PPD,IMPERIAL,20 +UK_RAL-PPD,LANCASTER,20 +UK_RAL-PPD,LIVERPOOL,20 +UK_RAL-PPD,MANCHESTER,20 +UK_RAL-PPD,MONTECARLO,100 +UK_RAL-PPD,NIKHEF,30 +UK_RAL-PPD,PRAGUE,30 +UK_RAL-PPD,QMUL,20 +UK_RAL-PPD,RAL-PP,0 +UK_RAL-PPD,RAL_ECHO,10 +UK_RAL-PPD,SCRATCH_DCACHE,90 +UK_RAL-PPD,T3_US_NERSC,100 +UK_RAL-Tier1,CERN_PDUNE_EOS,30 +UK_RAL-Tier1,DUNE_CERN_EOS,30 +UK_RAL-Tier1,DUNE_ES_PIC,30 +UK_RAL-Tier1,DUNE_FR_CCIN2P3,30 +UK_RAL-Tier1,DUNE_FR_CCIN2P3_DISK,30 +UK_RAL-Tier1,DUNE_FR_CCIN2P3_XROOTD,30 +UK_RAL-Tier1,DUNE_US_BNL_SDCC,100 +UK_RAL-Tier1,EDINBURGH,20 +UK_RAL-Tier1,FNAL_DCACHE,90 +UK_RAL-Tier1,FNAL_DCACHE_PERSISTENT,90 +UK_RAL-Tier1,FNAL_DCACHE_STAGING,90 +UK_RAL-Tier1,FNAL_DCACHE_TEST,90 +UK_RAL-Tier1,IMPERIAL,20 +UK_RAL-Tier1,LANCASTER,20 +UK_RAL-Tier1,LIVERPOOL,20 +UK_RAL-Tier1,MANCHESTER,20 +UK_RAL-Tier1,MONTECARLO,100 +UK_RAL-Tier1,NIKHEF,30 +UK_RAL-Tier1,PRAGUE,30 +UK_RAL-Tier1,QMUL,20 +UK_RAL-Tier1,RAL-PP,10 +UK_RAL-Tier1,RAL_ECHO,0 +UK_RAL-Tier1,SCRATCH_DCACHE,90 +UK_RAL-Tier1,T3_US_NERSC,100 +UK_Sheffield,CERN_PDUNE_EOS,30 +UK_Sheffield,DUNE_CERN_EOS,30 +UK_Sheffield,DUNE_ES_PIC,30 +UK_Sheffield,DUNE_FR_CCIN2P3,30 +UK_Sheffield,DUNE_FR_CCIN2P3_DISK,30 +UK_Sheffield,DUNE_FR_CCIN2P3_XROOTD,30 +UK_Sheffield,DUNE_US_BNL_SDCC,100 +UK_Sheffield,EDINBURGH,20 +UK_Sheffield,FNAL_DCACHE,90 +UK_Sheffield,FNAL_DCACHE_PERSISTENT,90 +UK_Sheffield,FNAL_DCACHE_STAGING,90 +UK_Sheffield,FNAL_DCACHE_TEST,90 +UK_Sheffield,IMPERIAL,20 +UK_Sheffield,LANCASTER,20 +UK_Sheffield,LIVERPOOL,20 +UK_Sheffield,MANCHESTER,20 +UK_Sheffield,MONTECARLO,100 +UK_Sheffield,NIKHEF,30 
+UK_Sheffield,PRAGUE,30 +UK_Sheffield,QMUL,20 +UK_Sheffield,RAL-PP,20 +UK_Sheffield,RAL_ECHO,20 +UK_Sheffield,SCRATCH_DCACHE,90 +UK_Sheffield,T3_US_NERSC,100 +US_BNL,CERN_PDUNE_EOS,90 +US_BNL,DUNE_CERN_EOS,90 +US_BNL,DUNE_ES_PIC,100 +US_BNL,DUNE_FR_CCIN2P3,100 +US_BNL,DUNE_FR_CCIN2P3_DISK,100 +US_BNL,DUNE_FR_CCIN2P3_XROOTD,100 +US_BNL,DUNE_US_BNL_SDCC,0 +US_BNL,EDINBURGH,100 +US_BNL,FNAL_DCACHE,20 +US_BNL,FNAL_DCACHE_PERSISTENT,20 +US_BNL,FNAL_DCACHE_STAGING,20 +US_BNL,FNAL_DCACHE_TEST,20 +US_BNL,IMPERIAL,100 +US_BNL,LANCASTER,100 +US_BNL,LIVERPOOL,100 +US_BNL,MANCHESTER,100 +US_BNL,MONTECARLO,100 +US_BNL,NIKHEF,100 +US_BNL,PRAGUE,100 +US_BNL,QMUL,100 +US_BNL,RAL-PP,100 +US_BNL,RAL_ECHO,100 +US_BNL,SCRATCH_DCACHE,20 +US_BNL,T3_US_NERSC,20 +US_Caltech,CERN_PDUNE_EOS,90 +US_Caltech,DUNE_CERN_EOS,90 +US_Caltech,DUNE_ES_PIC,100 +US_Caltech,DUNE_FR_CCIN2P3,100 +US_Caltech,DUNE_FR_CCIN2P3_DISK,100 +US_Caltech,DUNE_FR_CCIN2P3_XROOTD,100 +US_Caltech,DUNE_US_BNL_SDCC,20 +US_Caltech,EDINBURGH,100 +US_Caltech,FNAL_DCACHE,20 +US_Caltech,FNAL_DCACHE_PERSISTENT,20 +US_Caltech,FNAL_DCACHE_STAGING,20 +US_Caltech,FNAL_DCACHE_TEST,20 +US_Caltech,IMPERIAL,100 +US_Caltech,LANCASTER,100 +US_Caltech,LIVERPOOL,100 +US_Caltech,MANCHESTER,100 +US_Caltech,MONTECARLO,100 +US_Caltech,NIKHEF,100 +US_Caltech,PRAGUE,100 +US_Caltech,QMUL,100 +US_Caltech,RAL-PP,100 +US_Caltech,RAL_ECHO,100 +US_Caltech,SCRATCH_DCACHE,20 +US_Caltech,T3_US_NERSC,20 +US_Clemson,CERN_PDUNE_EOS,90 +US_Clemson,DUNE_CERN_EOS,90 +US_Clemson,DUNE_ES_PIC,100 +US_Clemson,DUNE_FR_CCIN2P3,100 +US_Clemson,DUNE_FR_CCIN2P3_DISK,100 +US_Clemson,DUNE_FR_CCIN2P3_XROOTD,100 +US_Clemson,DUNE_US_BNL_SDCC,20 +US_Clemson,EDINBURGH,100 +US_Clemson,FNAL_DCACHE,20 +US_Clemson,FNAL_DCACHE_PERSISTENT,20 +US_Clemson,FNAL_DCACHE_STAGING,20 +US_Clemson,FNAL_DCACHE_TEST,20 +US_Clemson,IMPERIAL,100 +US_Clemson,LANCASTER,100 +US_Clemson,LIVERPOOL,100 +US_Clemson,MANCHESTER,100 +US_Clemson,MONTECARLO,100 +US_Clemson,NIKHEF,100 +US_Clemson,PRAGUE,100 
+US_Clemson,QMUL,100 +US_Clemson,RAL-PP,100 +US_Clemson,RAL_ECHO,100 +US_Clemson,SCRATCH_DCACHE,20 +US_Clemson,T3_US_NERSC,20 +US_Colorado,CERN_PDUNE_EOS,90 +US_Colorado,DUNE_CERN_EOS,90 +US_Colorado,DUNE_ES_PIC,100 +US_Colorado,DUNE_FR_CCIN2P3,100 +US_Colorado,DUNE_FR_CCIN2P3_DISK,100 +US_Colorado,DUNE_FR_CCIN2P3_XROOTD,100 +US_Colorado,DUNE_US_BNL_SDCC,20 +US_Colorado,EDINBURGH,100 +US_Colorado,FNAL_DCACHE,20 +US_Colorado,FNAL_DCACHE_PERSISTENT,20 +US_Colorado,FNAL_DCACHE_STAGING,20 +US_Colorado,FNAL_DCACHE_TEST,20 +US_Colorado,IMPERIAL,100 +US_Colorado,LANCASTER,100 +US_Colorado,LIVERPOOL,100 +US_Colorado,MANCHESTER,100 +US_Colorado,MONTECARLO,100 +US_Colorado,NIKHEF,100 +US_Colorado,PRAGUE,100 +US_Colorado,QMUL,100 +US_Colorado,RAL-PP,100 +US_Colorado,RAL_ECHO,100 +US_Colorado,SCRATCH_DCACHE,20 +US_Colorado,T3_US_NERSC,20 +US_Florida,CERN_PDUNE_EOS,90 +US_Florida,DUNE_CERN_EOS,90 +US_Florida,DUNE_ES_PIC,100 +US_Florida,DUNE_FR_CCIN2P3,100 +US_Florida,DUNE_FR_CCIN2P3_DISK,100 +US_Florida,DUNE_FR_CCIN2P3_XROOTD,100 +US_Florida,DUNE_US_BNL_SDCC,20 +US_Florida,EDINBURGH,100 +US_Florida,FNAL_DCACHE,20 +US_Florida,FNAL_DCACHE_PERSISTENT,20 +US_Florida,FNAL_DCACHE_STAGING,20 +US_Florida,FNAL_DCACHE_TEST,20 +US_Florida,IMPERIAL,100 +US_Florida,LANCASTER,100 +US_Florida,LIVERPOOL,100 +US_Florida,MANCHESTER,100 +US_Florida,MONTECARLO,100 +US_Florida,NIKHEF,100 +US_Florida,PRAGUE,100 +US_Florida,QMUL,100 +US_Florida,RAL-PP,100 +US_Florida,RAL_ECHO,100 +US_Florida,SCRATCH_DCACHE,20 +US_Florida,T3_US_NERSC,20 +US_FNAL,CERN_PDUNE_EOS,90 +US_FNAL,DUNE_CERN_EOS,90 +US_FNAL,DUNE_ES_PIC,100 +US_FNAL,DUNE_FR_CCIN2P3,100 +US_FNAL,DUNE_FR_CCIN2P3_DISK,100 +US_FNAL,DUNE_FR_CCIN2P3_XROOTD,100 +US_FNAL,DUNE_US_BNL_SDCC,20 +US_FNAL,EDINBURGH,100 +US_FNAL,FNAL_DCACHE,0 +US_FNAL,FNAL_DCACHE_PERSISTENT,0 +US_FNAL,FNAL_DCACHE_STAGING,0 +US_FNAL,FNAL_DCACHE_TEST,0 +US_FNAL,IMPERIAL,100 +US_FNAL,LANCASTER,100 +US_FNAL,LIVERPOOL,100 +US_FNAL,MANCHESTER,100 +US_FNAL,MONTECARLO,100 
+US_FNAL,NIKHEF,100 +US_FNAL,PRAGUE,100 +US_FNAL,QMUL,100 +US_FNAL,RAL-PP,100 +US_FNAL,RAL_ECHO,100 +US_FNAL,SCRATCH_DCACHE,0 +US_FNAL,T3_US_NERSC,20 +US_Lincoln,CERN_PDUNE_EOS,90 +US_Lincoln,DUNE_CERN_EOS,90 +US_Lincoln,DUNE_ES_PIC,100 +US_Lincoln,DUNE_FR_CCIN2P3,100 +US_Lincoln,DUNE_FR_CCIN2P3_DISK,100 +US_Lincoln,DUNE_FR_CCIN2P3_XROOTD,100 +US_Lincoln,DUNE_US_BNL_SDCC,20 +US_Lincoln,EDINBURGH,100 +US_Lincoln,FNAL_DCACHE,20 +US_Lincoln,FNAL_DCACHE_PERSISTENT,20 +US_Lincoln,FNAL_DCACHE_STAGING,20 +US_Lincoln,FNAL_DCACHE_TEST,20 +US_Lincoln,IMPERIAL,100 +US_Lincoln,LANCASTER,100 +US_Lincoln,LIVERPOOL,100 +US_Lincoln,MANCHESTER,100 +US_Lincoln,MONTECARLO,100 +US_Lincoln,NIKHEF,100 +US_Lincoln,PRAGUE,100 +US_Lincoln,QMUL,100 +US_Lincoln,RAL-PP,100 +US_Lincoln,RAL_ECHO,100 +US_Lincoln,SCRATCH_DCACHE,20 +US_Lincoln,T3_US_NERSC,20 +US_Michigan,CERN_PDUNE_EOS,90 +US_Michigan,DUNE_CERN_EOS,90 +US_Michigan,DUNE_ES_PIC,100 +US_Michigan,DUNE_FR_CCIN2P3,100 +US_Michigan,DUNE_FR_CCIN2P3_DISK,100 +US_Michigan,DUNE_FR_CCIN2P3_XROOTD,100 +US_Michigan,DUNE_US_BNL_SDCC,20 +US_Michigan,EDINBURGH,100 +US_Michigan,FNAL_DCACHE,20 +US_Michigan,FNAL_DCACHE_PERSISTENT,20 +US_Michigan,FNAL_DCACHE_STAGING,20 +US_Michigan,FNAL_DCACHE_TEST,20 +US_Michigan,IMPERIAL,100 +US_Michigan,LANCASTER,100 +US_Michigan,LIVERPOOL,100 +US_Michigan,MANCHESTER,100 +US_Michigan,MONTECARLO,100 +US_Michigan,NIKHEF,100 +US_Michigan,PRAGUE,100 +US_Michigan,QMUL,100 +US_Michigan,RAL-PP,100 +US_Michigan,RAL_ECHO,100 +US_Michigan,SCRATCH_DCACHE,20 +US_Michigan,T3_US_NERSC,20 +US_MIT,CERN_PDUNE_EOS,90 +US_MIT,DUNE_CERN_EOS,90 +US_MIT,DUNE_ES_PIC,100 +US_MIT,DUNE_FR_CCIN2P3,100 +US_MIT,DUNE_FR_CCIN2P3_DISK,100 +US_MIT,DUNE_FR_CCIN2P3_XROOTD,100 +US_MIT,DUNE_US_BNL_SDCC,20 +US_MIT,EDINBURGH,100 +US_MIT,FNAL_DCACHE,20 +US_MIT,FNAL_DCACHE_PERSISTENT,20 +US_MIT,FNAL_DCACHE_STAGING,20 +US_MIT,FNAL_DCACHE_TEST,20 +US_MIT,IMPERIAL,100 +US_MIT,LANCASTER,100 +US_MIT,LIVERPOOL,100 +US_MIT,MANCHESTER,100 +US_MIT,MONTECARLO,100 
+US_MIT,NIKHEF,100 +US_MIT,PRAGUE,100 +US_MIT,QMUL,100 +US_MIT,RAL-PP,100 +US_MIT,RAL_ECHO,100 +US_MIT,SCRATCH_DCACHE,20 +US_MIT,T3_US_NERSC,20 +US_MWT2,CERN_PDUNE_EOS,90 +US_MWT2,DUNE_CERN_EOS,90 +US_MWT2,DUNE_ES_PIC,100 +US_MWT2,DUNE_FR_CCIN2P3,100 +US_MWT2,DUNE_FR_CCIN2P3_DISK,100 +US_MWT2,DUNE_FR_CCIN2P3_XROOTD,100 +US_MWT2,DUNE_US_BNL_SDCC,20 +US_MWT2,EDINBURGH,100 +US_MWT2,FNAL_DCACHE,20 +US_MWT2,FNAL_DCACHE_PERSISTENT,20 +US_MWT2,FNAL_DCACHE_STAGING,20 +US_MWT2,FNAL_DCACHE_TEST,20 +US_MWT2,IMPERIAL,100 +US_MWT2,LANCASTER,100 +US_MWT2,LIVERPOOL,100 +US_MWT2,MANCHESTER,100 +US_MWT2,MONTECARLO,100 +US_MWT2,NIKHEF,100 +US_MWT2,PRAGUE,100 +US_MWT2,QMUL,100 +US_MWT2,RAL-PP,100 +US_MWT2,RAL_ECHO,100 +US_MWT2,SCRATCH_DCACHE,20 +US_MWT2,T3_US_NERSC,20 +US_Nebraska,CERN_PDUNE_EOS,90 +US_Nebraska,DUNE_CERN_EOS,90 +US_Nebraska,DUNE_ES_PIC,100 +US_Nebraska,DUNE_FR_CCIN2P3,100 +US_Nebraska,DUNE_FR_CCIN2P3_DISK,100 +US_Nebraska,DUNE_FR_CCIN2P3_XROOTD,100 +US_Nebraska,DUNE_US_BNL_SDCC,20 +US_Nebraska,EDINBURGH,100 +US_Nebraska,FNAL_DCACHE,20 +US_Nebraska,FNAL_DCACHE_PERSISTENT,20 +US_Nebraska,FNAL_DCACHE_STAGING,20 +US_Nebraska,FNAL_DCACHE_TEST,20 +US_Nebraska,IMPERIAL,100 +US_Nebraska,LANCASTER,100 +US_Nebraska,LIVERPOOL,100 +US_Nebraska,MANCHESTER,100 +US_Nebraska,MONTECARLO,100 +US_Nebraska,NIKHEF,100 +US_Nebraska,PRAGUE,100 +US_Nebraska,QMUL,100 +US_Nebraska,RAL-PP,100 +US_Nebraska,RAL_ECHO,100 +US_Nebraska,SCRATCH_DCACHE,20 +US_Nebraska,T3_US_NERSC,20 +US_NotreDame,CERN_PDUNE_EOS,90 +US_NotreDame,DUNE_CERN_EOS,90 +US_NotreDame,DUNE_ES_PIC,100 +US_NotreDame,DUNE_FR_CCIN2P3,100 +US_NotreDame,DUNE_FR_CCIN2P3_DISK,100 +US_NotreDame,DUNE_FR_CCIN2P3_XROOTD,100 +US_NotreDame,DUNE_US_BNL_SDCC,20 +US_NotreDame,EDINBURGH,100 +US_NotreDame,FNAL_DCACHE,20 +US_NotreDame,FNAL_DCACHE_PERSISTENT,20 +US_NotreDame,FNAL_DCACHE_STAGING,20 +US_NotreDame,FNAL_DCACHE_TEST,20 +US_NotreDame,IMPERIAL,100 +US_NotreDame,LANCASTER,100 +US_NotreDame,LIVERPOOL,100 +US_NotreDame,MANCHESTER,100 
+US_NotreDame,MONTECARLO,100 +US_NotreDame,NIKHEF,100 +US_NotreDame,PRAGUE,100 +US_NotreDame,QMUL,100 +US_NotreDame,RAL-PP,100 +US_NotreDame,RAL_ECHO,100 +US_NotreDame,SCRATCH_DCACHE,20 +US_NotreDame,T3_US_NERSC,20 +US_Omaha,CERN_PDUNE_EOS,90 +US_Omaha,DUNE_CERN_EOS,90 +US_Omaha,DUNE_ES_PIC,100 +US_Omaha,DUNE_FR_CCIN2P3,100 +US_Omaha,DUNE_FR_CCIN2P3_DISK,100 +US_Omaha,DUNE_FR_CCIN2P3_XROOTD,100 +US_Omaha,DUNE_US_BNL_SDCC,20 +US_Omaha,EDINBURGH,100 +US_Omaha,FNAL_DCACHE,20 +US_Omaha,FNAL_DCACHE_PERSISTENT,20 +US_Omaha,FNAL_DCACHE_STAGING,20 +US_Omaha,FNAL_DCACHE_TEST,20 +US_Omaha,IMPERIAL,100 +US_Omaha,LANCASTER,100 +US_Omaha,LIVERPOOL,100 +US_Omaha,MANCHESTER,100 +US_Omaha,MONTECARLO,100 +US_Omaha,NIKHEF,100 +US_Omaha,PRAGUE,100 +US_Omaha,QMUL,100 +US_Omaha,RAL-PP,100 +US_Omaha,RAL_ECHO,100 +US_Omaha,SCRATCH_DCACHE,20 +US_Omaha,T3_US_NERSC,20 +US_PuertoRico,CERN_PDUNE_EOS,90 +US_PuertoRico,DUNE_CERN_EOS,90 +US_PuertoRico,DUNE_ES_PIC,100 +US_PuertoRico,DUNE_FR_CCIN2P3,100 +US_PuertoRico,DUNE_FR_CCIN2P3_DISK,100 +US_PuertoRico,DUNE_FR_CCIN2P3_XROOTD,100 +US_PuertoRico,DUNE_US_BNL_SDCC,20 +US_PuertoRico,EDINBURGH,100 +US_PuertoRico,FNAL_DCACHE,20 +US_PuertoRico,FNAL_DCACHE_PERSISTENT,20 +US_PuertoRico,FNAL_DCACHE_STAGING,20 +US_PuertoRico,FNAL_DCACHE_TEST,20 +US_PuertoRico,IMPERIAL,100 +US_PuertoRico,LANCASTER,100 +US_PuertoRico,LIVERPOOL,100 +US_PuertoRico,MANCHESTER,100 +US_PuertoRico,MONTECARLO,100 +US_PuertoRico,NIKHEF,100 +US_PuertoRico,PRAGUE,100 +US_PuertoRico,QMUL,100 +US_PuertoRico,RAL-PP,100 +US_PuertoRico,RAL_ECHO,100 +US_PuertoRico,SCRATCH_DCACHE,20 +US_PuertoRico,T3_US_NERSC,20 +US_SU-ITS,CERN_PDUNE_EOS,90 +US_SU-ITS,DUNE_CERN_EOS,90 +US_SU-ITS,DUNE_ES_PIC,100 +US_SU-ITS,DUNE_FR_CCIN2P3,100 +US_SU-ITS,DUNE_FR_CCIN2P3_DISK,100 +US_SU-ITS,DUNE_FR_CCIN2P3_XROOTD,100 +US_SU-ITS,DUNE_US_BNL_SDCC,20 +US_SU-ITS,EDINBURGH,100 +US_SU-ITS,FNAL_DCACHE,20 +US_SU-ITS,FNAL_DCACHE_PERSISTENT,20 +US_SU-ITS,FNAL_DCACHE_STAGING,20 +US_SU-ITS,FNAL_DCACHE_TEST,20 
+US_SU-ITS,IMPERIAL,100 +US_SU-ITS,LANCASTER,100 +US_SU-ITS,LIVERPOOL,100 +US_SU-ITS,MANCHESTER,100 +US_SU-ITS,MONTECARLO,100 +US_SU-ITS,NIKHEF,100 +US_SU-ITS,PRAGUE,100 +US_SU-ITS,QMUL,100 +US_SU-ITS,RAL-PP,100 +US_SU-ITS,RAL_ECHO,100 +US_SU-ITS,SCRATCH_DCACHE,20 +US_SU-ITS,T3_US_NERSC,20 +US_SU-OG,CERN_PDUNE_EOS,90 +US_SU-OG,DUNE_CERN_EOS,90 +US_SU-OG,DUNE_ES_PIC,100 +US_SU-OG,DUNE_FR_CCIN2P3,100 +US_SU-OG,DUNE_FR_CCIN2P3_DISK,100 +US_SU-OG,DUNE_FR_CCIN2P3_XROOTD,100 +US_SU-OG,DUNE_US_BNL_SDCC,20 +US_SU-OG,EDINBURGH,100 +US_SU-OG,FNAL_DCACHE,20 +US_SU-OG,FNAL_DCACHE_PERSISTENT,20 +US_SU-OG,FNAL_DCACHE_STAGING,20 +US_SU-OG,FNAL_DCACHE_TEST,20 +US_SU-OG,IMPERIAL,100 +US_SU-OG,LANCASTER,100 +US_SU-OG,LIVERPOOL,100 +US_SU-OG,MANCHESTER,100 +US_SU-OG,MONTECARLO,100 +US_SU-OG,NIKHEF,100 +US_SU-OG,PRAGUE,100 +US_SU-OG,QMUL,100 +US_SU-OG,RAL-PP,100 +US_SU-OG,RAL_ECHO,100 +US_SU-OG,SCRATCH_DCACHE,20 +US_SU-OG,T3_US_NERSC,20 +US_UChicago,CERN_PDUNE_EOS,90 +US_UChicago,DUNE_CERN_EOS,90 +US_UChicago,DUNE_ES_PIC,100 +US_UChicago,DUNE_FR_CCIN2P3,100 +US_UChicago,DUNE_FR_CCIN2P3_DISK,100 +US_UChicago,DUNE_FR_CCIN2P3_XROOTD,100 +US_UChicago,DUNE_US_BNL_SDCC,20 +US_UChicago,EDINBURGH,100 +US_UChicago,FNAL_DCACHE,20 +US_UChicago,FNAL_DCACHE_PERSISTENT,20 +US_UChicago,FNAL_DCACHE_STAGING,20 +US_UChicago,FNAL_DCACHE_TEST,20 +US_UChicago,IMPERIAL,100 +US_UChicago,LANCASTER,100 +US_UChicago,LIVERPOOL,100 +US_UChicago,MANCHESTER,100 +US_UChicago,MONTECARLO,100 +US_UChicago,NIKHEF,100 +US_UChicago,PRAGUE,100 +US_UChicago,QMUL,100 +US_UChicago,RAL-PP,100 +US_UChicago,RAL_ECHO,100 +US_UChicago,SCRATCH_DCACHE,20 +US_UChicago,T3_US_NERSC,20 +US_UCSD,CERN_PDUNE_EOS,90 +US_UCSD,DUNE_CERN_EOS,90 +US_UCSD,DUNE_ES_PIC,100 +US_UCSD,DUNE_FR_CCIN2P3,100 +US_UCSD,DUNE_FR_CCIN2P3_DISK,100 +US_UCSD,DUNE_FR_CCIN2P3_XROOTD,100 +US_UCSD,DUNE_US_BNL_SDCC,20 +US_UCSD,EDINBURGH,100 +US_UCSD,FNAL_DCACHE,20 +US_UCSD,FNAL_DCACHE_PERSISTENT,20 +US_UCSD,FNAL_DCACHE_STAGING,20 +US_UCSD,FNAL_DCACHE_TEST,20 
+US_UCSD,IMPERIAL,100 +US_UCSD,LANCASTER,100 +US_UCSD,LIVERPOOL,100 +US_UCSD,MANCHESTER,100 +US_UCSD,MONTECARLO,100 +US_UCSD,NIKHEF,100 +US_UCSD,PRAGUE,100 +US_UCSD,QMUL,100 +US_UCSD,RAL-PP,100 +US_UCSD,RAL_ECHO,100 +US_UCSD,SCRATCH_DCACHE,20 +US_UCSD,T3_US_NERSC,20 +US_Wisconsin,CERN_PDUNE_EOS,90 +US_Wisconsin,DUNE_CERN_EOS,90 +US_Wisconsin,DUNE_ES_PIC,100 +US_Wisconsin,DUNE_FR_CCIN2P3,100 +US_Wisconsin,DUNE_FR_CCIN2P3_DISK,100 +US_Wisconsin,DUNE_FR_CCIN2P3_XROOTD,100 +US_Wisconsin,DUNE_US_BNL_SDCC,20 +US_Wisconsin,EDINBURGH,100 +US_Wisconsin,FNAL_DCACHE,20 +US_Wisconsin,FNAL_DCACHE_PERSISTENT,20 +US_Wisconsin,FNAL_DCACHE_STAGING,20 +US_Wisconsin,FNAL_DCACHE_TEST,20 +US_Wisconsin,IMPERIAL,100 +US_Wisconsin,LANCASTER,100 +US_Wisconsin,LIVERPOOL,100 +US_Wisconsin,MANCHESTER,100 +US_Wisconsin,MONTECARLO,100 +US_Wisconsin,NIKHEF,100 +US_Wisconsin,PRAGUE,100 +US_Wisconsin,QMUL,100 +US_Wisconsin,RAL-PP,100 +US_Wisconsin,RAL_ECHO,100 +US_Wisconsin,SCRATCH_DCACHE,20 +US_Wisconsin,T3_US_NERSC,20 +US_WSU,CERN_PDUNE_EOS,90 +US_WSU,DUNE_CERN_EOS,90 +US_WSU,DUNE_ES_PIC,100 +US_WSU,DUNE_FR_CCIN2P3,100 +US_WSU,DUNE_FR_CCIN2P3_DISK,100 +US_WSU,DUNE_FR_CCIN2P3_XROOTD,100 +US_WSU,DUNE_US_BNL_SDCC,20 +US_WSU,EDINBURGH,100 +US_WSU,FNAL_DCACHE,20 +US_WSU,FNAL_DCACHE_PERSISTENT,20 +US_WSU,FNAL_DCACHE_STAGING,20 +US_WSU,FNAL_DCACHE_TEST,20 +US_WSU,IMPERIAL,100 +US_WSU,LANCASTER,100 +US_WSU,LIVERPOOL,100 +US_WSU,MANCHESTER,100 +US_WSU,MONTECARLO,100 +US_WSU,NIKHEF,100 +US_WSU,PRAGUE,100 +US_WSU,QMUL,100 +US_WSU,RAL-PP,100 +US_WSU,RAL_ECHO,100 +US_WSU,SCRATCH_DCACHE,20 +US_WSU,T3_US_NERSC,20 diff --git a/run_lar.py b/run_lar.py new file mode 100644 index 0000000..1a89ae3 --- /dev/null +++ b/run_lar.py @@ -0,0 +1,294 @@ +from argparse import ArgumentParser as ap +import sys +import os +import subprocess +import time +import requests + +from data_dispatcher.api import DataDispatcherClient +from data_dispatcher.api import APIError + +def call_and_retry(func): + def inner1(*args, **kwargs): + 
nretries = 0 + while nretries < 5: + try: + func(*args, **kwargs) + break + except (requests.exceptions.ConnectionError, APIError) as err: + print('Caught', err.args) + print('Will wait and try again') + time.sleep(args[0].retry_time) + nretries += 1 + if nretries > 4: + print('Too many retries') + sys.exit(1) + return inner1 + + +class DDInterface: + def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5): + self.dataset = "" #dataset + self.limit = 1#limit + self.namespace = namespace + query_args = (self.dataset, self.namespace, self.limit) + self.query = '''files from %s where namespace="%s" limit %i'''%query_args + self.lar_limit = lar_limit + self.proj_id = -1 + self.proj_exists = False + self.proj_state = None + self.loaded_files = [] + self.loaded_file_uris = [] + self.loaded = False + self.dd_timeout = timeout + self.wait_time = wait_time + self.max_wait_attempts = wait_limit + self.hit_timeout = False + self.lar_return = -1 + self.lar_file_list = '' + self.n_waited = 0 + self.next_failed = False + self.next_replicas = [] + + self.retry_time = 120 + + #try: + # from data_dispatcher.api import DataDispatcherClient + # from data_dispatcher.api import APIError + # print('Loaded DataDispatcher') + #except: + # print('Failed DataDispatcher') + # pass + self.dd_client = DataDispatcherClient() + + def Login(self, username): + self.dd_client.login_x509(username, os.environ['X509_USER_PROXY']) + def SetLarLimit(self, limit): + self.lar_limit = limit + + def CreateProject(self): + query_files = mc_client.query(self.query) + proj_dict = self.dd_client.create_project(query_files, query=self.query) + self.proj_state = proj_dict['state'] + self.proj_id = proj_dict['project_id'] + self.proj_exists = True + print(proj_dict) + + def PrintFiles(self): + print('Printing files') + for j in self.loaded_files: + print(j['name']) + + @call_and_retry + def next_file(self): + self.next_output = self.dd_client.next_file(self.proj_id, + 
timeout=self.dd_timeout) + + def LoadFiles(self): + count = 0 + ##Should we take out the proj_state clause? + while (count < self.lar_limit and not self.next_failed and + self.proj_state == 'active'): + print('Attempting fetch %i/%i'%(count, self.lar_limit), self.next_failed) + self.Next() + if self.next_output == None: + ## this shouldn't happen, but if it does just exit the loop + break + elif self.next_output == True: + ##this means the fetch timed out. + + ##First --> check that there are files reserved by other jobs. + ## If there aren't, just exit the loop and try processing + ## any files (if any) we have + file_handles = self.dd_client.get_project(self.proj_id)['file_handles'] + total_reserved = sum([fh['state'] == 'reserved' for fh in file_handles]) + #if total_reserved == count: + # print('Equal number of reserved and loaded files. Ending loop') + # break + #elif total_reserved < count: + # ##This shouldn't happen... If it does, fail and make some noise + # sys.stderr.write("Something bad happened. N reserved in project < n reserved in this job: (%i/%i) \n"%(total_reserved, count)) + # sys.exit(1) + if count > 0: + print('data dispatcher next_file timed out. This job has at least one file reserved. Will continue.') + break + else: + ##We know we have externally-reserved files. + ##try waiting a user-defined amount of time + ##for a maximum number of attempts + ##-- if at max, go on to loop + if self.n_waited < self.max_wait_attempts: + print("data dispatcher next_file timed out. Waiting %i seconds before attempting again"%self.wait_time) + print("Wait attempts: %i/%i"%(self.n_waited, self.max_wait_attempts)) + time.sleep(self.wait_time) + self.n_waited += 1 + else: + print("Hit max wait limit. Ending attempts to load files") + break + elif self.next_output == False: + ##this means the project is done -- just exit the loop. + print("Project is done -- exiting file fetch loop") + break + else: + ##we successfully got a file (at least nominally). 
Check that it has replicas available. + ##If it doesn't, compromise it to a permament end + if len(self.next_replicas) > 0: + self.loaded_files.append(self.next_output) + count += 1 + ##also reset the number of times waited + self.n_waited = 0 + else: + print('Empty replicas -- marking as failed') + nretries = 0 + while nretries < 5: + try: + self.dd_client.file_failed(self.proj_id, '%s:%s'%(self.next_output['namespace'], self.next_output['name']), retry=False) + break + #except (requests.exceptions.ConnectionError, APIError) as err: + except (requests.exceptions.ConnectionError) as err: + print('Caught', err.args) + print('Will wait and try again') + time.sleep(self.retry_time) + nretries += 1 + if nretries > 4: + print('Too many retries') + sys.exit(1) + self.loaded = True + print("Loaded %i files. Moving on."%len(self.loaded_files)) + + def Next(self): + if self.proj_id < 0: + raise ValueError('DDLArInterface::Next -- Project ID is %i. Has a project been created?'%self.proj_id) + ## exists, state, etc. 
-- TODO + self.next_file() + ''' + nretries = 0 + while nretries < 5: + try: + self.next_output = self.dd_client.next_file(self.proj_id, timeout=self.dd_timeout) + break + except (requests.exceptions.ConnectionError, APIError) as err: + #except (requests.exceptions.ConnectionError) as err: + print('Caught', err.args) + print('Will wait and try again') + time.sleep(self.retry_time) + nretries += 1 + if nretries > 4: + print('Too many retries') + sys.exit(1) + ''' + if self.next_output == None: + self.next_failed = True + return + + if type(self.next_output) == dict: + self.next_replicas = list(self.next_output['replicas'].values()) + + + def MarkFiles(self, failed=False): + state = 'failed' if failed else 'done' + nretries = 0 + for j in self.loaded_files: + while nretries < 5: + try: + if failed: + print('Marking failed') + self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) + else: + print('Marking done') + self.dd_client.file_done(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) + + break + except (requests.exceptions.ConnectionError, APIError) as err: + #except (requests.exceptions.ConnectionError) as err: + print('Caught', err.args) + print('Will wait and try again') + time.sleep(self.retry_time) + nretries += 1 + if nretries > 4: + print('Too many retries') + sys.exit(1) + + def SaveFileDIDs(self): + lines = [] + for i in range(len(self.loaded_files)): + f = self.loaded_files[i] + if i < len(self.loaded_files)-1: + lines.append('{"did":"%s:%s"},\n'%(f['namespace'], f['name'])) + else: + lines.append('{"did":"%s:%s"}\n'%(f['namespace'], f['name'])) + + with open('loaded_files.txt', 'w') as f: + f.writelines(lines) + + def AttachProject(self, proj_id): + self.proj_id = proj_id + proj = self.dd_client.get_project(proj_id, with_files=False) + if proj == None: + self.proj_exists = False + else: + self.proj_exists = True + self.proj_state = proj['state'] + + def BuildFileListString(self): + for j in self.loaded_files: + replicas = 
list(j['replicas'].values()) + if len(replicas) > 0: + #Get the first replica + replica = replicas[0] + print('Replica:', replica) + uri = replica['url'] + if 'https://eospublic.cern.ch/e' in uri: uri = uri.replace('https://eospublic.cern.ch/e', 'xroot://eospublic.cern.ch//e') + self.lar_file_list += uri + self.lar_file_list += ' ' + else: + print('Empty replicas -- marking as failed') + + ##TODO -- pop entry + self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) + def RunLAr(self, fcl, n, nskip): + if len(self.loaded_files) == 0: + print('No files loaded with data dispatcher. Exiting gracefully') + return + + if 'CLUSTER' in os.environ and 'PROCESS' in os.environ: + cluster = os.environ['CLUSTER'] + process = os.environ['PROCESS'] + else: + cluster = '0' + process = '0' + proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o "dc4_hd_protodune_%%tc_%s_%s_reco.root"'%(fcl, self.lar_file_list, n, nskip, cluster, process), shell=True) + if proc.returncode != 0: + self.MarkFiles(True) + sys.exit(proc.returncode) + self.MarkFiles() + self.SaveFileDIDs() + + +if __name__ == '__main__': + + parser = ap() + parser.add_argument('--namespace', type=str) + parser.add_argument('--fcl', type=str) + parser.add_argument('--load_limit', type=int) + parser.add_argument('--user', type=str) + parser.add_argument('--project', type=int) + parser.add_argument('--timeout', type=int, default=120) + parser.add_argument('--wait_time', type=int, default=60) + parser.add_argument('--wait_limit', type=int, default=5) + parser.add_argument('-n', type=int, default=-1) + parser.add_argument('--nskip', type=int, default=0) + args = parser.parse_args() + + dd_interface = DDInterface(args.namespace, + args.load_limit, + timeout=args.timeout, + wait_time=args.wait_time, + wait_limit=args.wait_limit) + dd_interface.Login(args.user) + dd_interface.AttachProject(args.project) + dd_interface.LoadFiles() + dd_interface.BuildFileListString() + dd_interface.RunLAr(args.fcl, 
args.n, args.nskip) + + diff --git a/top_script.sh b/top_script.sh new file mode 100755 index 0000000..2e38ab6 --- /dev/null +++ b/top_script.sh @@ -0,0 +1,252 @@ +#!/bin/bash +source /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh + +export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +export DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app + + +POSITIONAL_ARGS=() +nskip=0 +output_rses=("DUNE_US_FNAL_DISK_STAGE") +get_min_rse=false +while [[ $# -gt 0 ]]; do + case $1 in + --namespace) + NAMESPACE=$2 + shift + shift + ;; + --fcl) + FCL=$2 + shift + shift + ;; + -n) + N=$2 + shift + shift + ;; + --load_limit) + LOADLIMIT=$2 + shift + shift + ;; + --user) + USER=$2 + shift + shift + ;; + --metacat_user) + METACATUSER=$2 + shift + shift + ;; + --project) + PROJECT=$2 + shift + shift + ;; + --timeout) + DDTIMEOUT=$2 + shift + shift + ;; + --wait_time) + WAITTIME=$2 + shift + shift + ;; + --wait_limit) + WAITLIMIT=$2 + shift + shift + ;; + --output) + OUTPUT=$2 + shift + shift + ;; + --output_dataset) + OUTPUTDATASET=$2 + shift + shift + ;; + --output_namespace) + OUTPUTNAMESPACE=$2 + shift + shift + ;; + --nskip) + nskip=$2 + shift + shift + ;; + --rse) + output_rses+=($2) + shift + shift + ;; + --min-rse) + get_min_rse=true + shift + ;; + *) + POSITIONAL_ARGS+=("$1") # save positional arg + shift # past argument + ;; + esac +done + +echo $NAMESPACE + +logname=dc4-${NAMESPACE}_${PROCESS}_${CLUSTER}_`date +%F_%H_%M_%S` + +export PYTHONPATH=${CONDOR_DIR_INPUT}:${PYTHONPATH} + +###Setting up dunesw/Data Dispatcher/MetaCat and running lar +( +setup dunesw v09_55_01d00 -q e20:prof; + +python -m venv venv +source venv/bin/activate +pip install metacat +pip install datadispatcher + +#source ${CONDOR_DIR_INPUT}/${MC_TAR}/canned_client_setup.sh +#source 
${CONDOR_DIR_INPUT}/${DD_TAR}/canned_client_setup.sh + +python -m run_lar \ + --namespace $NAMESPACE \ + --fcl $FCL \ + --project $PROJECT \ + --load_limit $LOADLIMIT \ + --user $USER \ + -n $N \ + #--nskip $nskip \ + #> ${logname}.out 2>${logname}.err +) + +returncode=$? +#echo "Return code: " $returncode >> ${logname}.out 2>>${logname}.err +echo "Return code: " $returncode + +echo "Site: $GLIDEIN_DUNESite" #>> ${logname}.out + +if [ $returncode -ne 0 ]; then + exit $returncode +fi + +if $get_min_rse; then + output_rses=(`python -m get_rse ${CONDOR_DIR_INPUT}/rses.txt`) +fi +echo "output rses" +for rse in ${output_rses[@]}; do + echo $rse +done +#if [ $returncode -ne "0" ]; then +# echo "exiting"; +# exit; +#fi + +###Setting up rucio, uploading to RSEs +( +setup rucio +echo "PINGING" +rucio ping +echo "DONE PINGING" +setup metacat + +export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +export DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app + +echo "Authenticating" #>> ${logname}.out 2>>${logname}.err +metacat auth login -m x509 ${METACATUSER} # >> ${logname}.out 2>>${logname}.err +echo "whoami:" #>> ${logname}.out 2>>${logname}.err +metacat auth whoami #>> ${logname}.out 2>>${logname}.err + + +parents=`cat loaded_files.txt` + +echo $OUTPUT #>> ${logname}.out 2>>${logname}.err +#output_files=`ls *$OUTPUT` +shopt -s nullglob +for i in *$OUTPUT; do + FILESIZE=`stat -c%s $i` + echo 'filesize ' ${FILESIZE} #>> ${logname}.out 2>>${logname}.err + cat << EOF > ${i}.json + [ + { + "size": ${FILESIZE}, + "namespace": "${OUTPUTNAMESPACE}", + "name": "${i}", + "metadata": { + "DUNE.campaign": "dc4", + "core.file_format": "root" + }, + "parents": [ + $parents + ] + } + ] +EOF + + metacat file declare -j ${i}.json $OUTPUTNAMESPACE:$OUTPUTDATASET-data #>> ${logname}.out 2>>${logname}.err + + for rse 
in ${output_rses[@]}; do + echo "Uploading to $rse" + rucio -a dunepro upload --summary --scope $OUTPUTNAMESPACE --rse $rse $i #>> ${logname}.out 2>>${logname}.err + echo $? + done + + #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_US_FNAL_DISK_STAGE $i #>> ${logname}.out 2>>${logname}.err + #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS $i #>> ${logname}.out 2>>${logname}.err + rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-data $OUTPUTNAMESPACE:$i #>> ${logname}.out 2>>${logname}.err +done + +# FILESIZE=`stat -c%s ${logname}.out` +# cat << EOF > ${logname}.out.json +# [ +# { +# "size": ${FILESIZE}, +# "namespace": "${OUTPUTNAMESPACE}", +# "name": "${logname}.out", +# "metadata": {} +# } +# ] +#EOF +# metacat file declare -j ${logname}.out.json $OUTPUTNAMESPACE:$OUTPUTDATASET-log +# for rse in ${output_rses[@]}; do +# echo "Uploading to $rse" +# rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse $rse ${logname}.out +# done +# +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_US_FNAL_DISK_STAGE ${logname}.out +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS ${logname}.out +# rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-log $OUTPUTNAMESPACE:${logname}.out +# +# +# FILESIZE=`stat -c%s ${logname}.err` +# cat << EOF > ${logname}.err.json +# [ +# { +# "size": ${FILESIZE}, +# "namespace": "${OUTPUTNAMESPACE}", +# "name": "${logname}.err", +# "metadata": {} +# } +# ] +#EOF +# metacat file declare -j ${logname}.err.json $OUTPUTNAMESPACE:$OUTPUTDATASET-log +# for rse in ${output_rses[@]}; do +# echo "Uploading to $rse" +# rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse $rse ${logname}.err +# done +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_US_FNAL_DISK_STAGE ${logname}.err +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS ${logname}.err +# rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-log 
$OUTPUTNAMESPACE:${logname}.err + +) From 97125cb65c924f788a38929b88d3681dd8bb5ce2 Mon Sep 17 00:00:00 2001 From: Jacob Calcutt Date: Thu, 6 Oct 2022 16:49:43 -0500 Subject: [PATCH 002/166] More wrappers --- run_lar.py | 40 +++++++++++++++++++++++++++++++++------- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/run_lar.py b/run_lar.py index 1a89ae3..698feee 100644 --- a/run_lar.py +++ b/run_lar.py @@ -83,7 +83,23 @@ def PrintFiles(self): def next_file(self): self.next_output = self.dd_client.next_file(self.proj_id, timeout=self.dd_timeout) - + @call_and_retry + def file_done(self, did): + #self.dd_client.file_done(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) + self.dd_client.file_done(self.proj_id, did) + + @call_and_retry + def file_failed(self, did, do_retry=True): + self.dd_client.file_failed( + self.proj_id, did, + #'%s:%s'%(self.next_output['namespace'], self.next_output['name']), + retry=do_retry) + + @call_and_retry + def get_project(self, proj_id): + proj = self.dd_client.get_project(proj_id, with_files=False) + return proj + def LoadFiles(self): count = 0 ##Should we take out the proj_state clause? @@ -139,7 +155,7 @@ def LoadFiles(self): self.n_waited = 0 else: print('Empty replicas -- marking as failed') - nretries = 0 + '''nretries = 0 while nretries < 5: try: self.dd_client.file_failed(self.proj_id, '%s:%s'%(self.next_output['namespace'], self.next_output['name']), retry=False) @@ -152,7 +168,10 @@ def LoadFiles(self): nretries += 1 if nretries > 4: print('Too many retries') - sys.exit(1) + sys.exit(1)''' + self.file_failed( + '%s:%s'%(self.next_output['namespace'], self.next_output['name']), + do_retry=False) self.loaded = True print("Loaded %i files. 
Moving on."%len(self.loaded_files)) @@ -187,9 +206,15 @@ def Next(self): def MarkFiles(self, failed=False): state = 'failed' if failed else 'done' - nretries = 0 + #nretries = 0 for j in self.loaded_files: - while nretries < 5: + if failed: + print('Marking failed') + self.file_failed('%s:%s'%(j['namespace'], j['name'])) + else: + print('Marking done') + self.file_done('%s:%s'%(j['namespace'], j['name'])) + '''while nretries < 5: try: if failed: print('Marking failed') @@ -207,7 +232,7 @@ def MarkFiles(self, failed=False): nretries += 1 if nretries > 4: print('Too many retries') - sys.exit(1) + sys.exit(1)''' def SaveFileDIDs(self): lines = [] @@ -223,7 +248,8 @@ def SaveFileDIDs(self): def AttachProject(self, proj_id): self.proj_id = proj_id - proj = self.dd_client.get_project(proj_id, with_files=False) + #proj = self.dd_client.get_project(proj_id, with_files=False) + proj = self.get_project(proj_id) if proj == None: self.proj_exists = False else: From bf40e226c8c470b4618da23db99a00bc2ce8852e Mon Sep 17 00:00:00 2001 From: Jacob Calcutt Date: Tue, 29 Nov 2022 12:54:14 -0600 Subject: [PATCH 003/166] Long overdue commit -- many changes --- run_lar.py | 119 ++++++++++++++++++---------------------- submit_dd_jobs.py | 137 ++++++++++++++++++++++++++++++++++++++++++++++ top_script.sh | 22 ++++++++ 3 files changed, 212 insertions(+), 66 deletions(-) create mode 100644 submit_dd_jobs.py diff --git a/run_lar.py b/run_lar.py index 698feee..cf8333c 100644 --- a/run_lar.py +++ b/run_lar.py @@ -3,6 +3,7 @@ import os import subprocess import time +import datetime import requests from data_dispatcher.api import DataDispatcherClient @@ -13,11 +14,12 @@ def inner1(*args, **kwargs): nretries = 0 while nretries < 5: try: + print(datetime.datetime.now()) func(*args, **kwargs) break except (requests.exceptions.ConnectionError, APIError) as err: print('Caught', err.args) - print('Will wait and try again') + print(f'Will wait {args[0].retry_time} seconds and try again') 
time.sleep(args[0].retry_time) nretries += 1 if nretries > 4: @@ -25,6 +27,24 @@ def inner1(*args, **kwargs): sys.exit(1) return inner1 +def call_and_retry_return(func): + def inner1(*args, **kwargs): + nretries = 0 + while nretries < 5: + try: + print(datetime.datetime.now()) + result = func(*args, **kwargs) + break + except (requests.exceptions.ConnectionError, APIError) as err: + print('Caught', err.args) + print(f'Will wait {args[0].retry_time} and try again') + time.sleep(args[0].retry_time) + nretries += 1 + if nretries > 4: + print('Too many retries') + sys.exit(1) + return result + return inner1 class DDInterface: def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5): @@ -33,6 +53,7 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.namespace = namespace query_args = (self.dataset, self.namespace, self.limit) self.query = '''files from %s where namespace="%s" limit %i'''%query_args + self.worker_timeout = 3600*5 self.lar_limit = lar_limit self.proj_id = -1 self.proj_exists = False @@ -50,7 +71,7 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.next_failed = False self.next_replicas = [] - self.retry_time = 120 + self.retry_time = 600 #try: # from data_dispatcher.api import DataDispatcherClient @@ -61,14 +82,20 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 # pass self.dd_client = DataDispatcherClient() + @call_and_retry def Login(self, username): self.dd_client.login_x509(username, os.environ['X509_USER_PROXY']) + #print(datetime.datetime.now()) + + def SetLarLimit(self, limit): self.lar_limit = limit def CreateProject(self): query_files = mc_client.query(self.query) - proj_dict = self.dd_client.create_project(query_files, query=self.query) + proj_dict = self.dd_client.create_project( + query_files, query=self.query, worker_timeout=self.worker_timeout) + print(datetime.datetime.now()) self.proj_state = 
proj_dict['state'] self.proj_id = proj_dict['project_id'] self.proj_exists = True @@ -81,12 +108,16 @@ def PrintFiles(self): @call_and_retry def next_file(self): - self.next_output = self.dd_client.next_file(self.proj_id, - timeout=self.dd_timeout) + self.next_output = self.dd_client.next_file( + self.proj_id, timeout=self.dd_timeout, + worker_id=os.environ['MYWORKERID']) + #print(datetime.datetime.now()) + @call_and_retry def file_done(self, did): #self.dd_client.file_done(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) self.dd_client.file_done(self.proj_id, did) + #print(datetime.datetime.now()) @call_and_retry def file_failed(self, did, do_retry=True): @@ -94,17 +125,19 @@ def file_failed(self, did, do_retry=True): self.proj_id, did, #'%s:%s'%(self.next_output['namespace'], self.next_output['name']), retry=do_retry) + #print(datetime.datetime.now()) - @call_and_retry + @call_and_retry_return def get_project(self, proj_id): proj = self.dd_client.get_project(proj_id, with_files=False) + #print(datetime.datetime.now()) return proj def LoadFiles(self): count = 0 ##Should we take out the proj_state clause? - while (count < self.lar_limit and not self.next_failed and - self.proj_state == 'active'): + while (count < self.lar_limit and not self.next_failed): #and + #self.proj_state == 'active'): print('Attempting fetch %i/%i'%(count, self.lar_limit), self.next_failed) self.Next() if self.next_output == None: @@ -116,8 +149,8 @@ def LoadFiles(self): ##First --> check that there are files reserved by other jobs. 
## If there aren't, just exit the loop and try processing ## any files (if any) we have - file_handles = self.dd_client.get_project(self.proj_id)['file_handles'] - total_reserved = sum([fh['state'] == 'reserved' for fh in file_handles]) + #file_handles = self.dd_client.get_project(self.proj_id)['file_handles'] + #total_reserved = sum([fh['state'] == 'reserved' for fh in file_handles]) #if total_reserved == count: # print('Equal number of reserved and loaded files. Ending loop') # break @@ -155,47 +188,18 @@ def LoadFiles(self): self.n_waited = 0 else: print('Empty replicas -- marking as failed') - '''nretries = 0 - while nretries < 5: - try: - self.dd_client.file_failed(self.proj_id, '%s:%s'%(self.next_output['namespace'], self.next_output['name']), retry=False) - break - #except (requests.exceptions.ConnectionError, APIError) as err: - except (requests.exceptions.ConnectionError) as err: - print('Caught', err.args) - print('Will wait and try again') - time.sleep(self.retry_time) - nretries += 1 - if nretries > 4: - print('Too many retries') - sys.exit(1)''' self.file_failed( '%s:%s'%(self.next_output['namespace'], self.next_output['name']), do_retry=False) self.loaded = True print("Loaded %i files. Moving on."%len(self.loaded_files)) + self.PrintFiles() def Next(self): if self.proj_id < 0: raise ValueError('DDLArInterface::Next -- Project ID is %i. Has a project been created?'%self.proj_id) ## exists, state, etc. 
-- TODO self.next_file() - ''' - nretries = 0 - while nretries < 5: - try: - self.next_output = self.dd_client.next_file(self.proj_id, timeout=self.dd_timeout) - break - except (requests.exceptions.ConnectionError, APIError) as err: - #except (requests.exceptions.ConnectionError) as err: - print('Caught', err.args) - print('Will wait and try again') - time.sleep(self.retry_time) - nretries += 1 - if nretries > 4: - print('Too many retries') - sys.exit(1) - ''' if self.next_output == None: self.next_failed = True return @@ -214,25 +218,6 @@ def MarkFiles(self, failed=False): else: print('Marking done') self.file_done('%s:%s'%(j['namespace'], j['name'])) - '''while nretries < 5: - try: - if failed: - print('Marking failed') - self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) - else: - print('Marking done') - self.dd_client.file_done(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) - - break - except (requests.exceptions.ConnectionError, APIError) as err: - #except (requests.exceptions.ConnectionError) as err: - print('Caught', err.args) - print('Will wait and try again') - time.sleep(self.retry_time) - nretries += 1 - if nretries > 4: - print('Too many retries') - sys.exit(1)''' def SaveFileDIDs(self): lines = [] @@ -249,12 +234,13 @@ def SaveFileDIDs(self): def AttachProject(self, proj_id): self.proj_id = proj_id #proj = self.dd_client.get_project(proj_id, with_files=False) - proj = self.get_project(proj_id) - if proj == None: - self.proj_exists = False - else: - self.proj_exists = True - self.proj_state = proj['state'] + #proj = self.get_project(proj_id) + #print(proj) + #if proj == None: + # self.proj_exists = False + #else: + # self.proj_exists = True + # self.proj_state = proj['state'] def BuildFileListString(self): for j in self.loaded_files: @@ -272,6 +258,7 @@ def BuildFileListString(self): ##TODO -- pop entry self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) + print(datetime.datetime.now()) def 
RunLAr(self, fcl, n, nskip): if len(self.loaded_files) == 0: print('No files loaded with data dispatcher. Exiting gracefully') @@ -300,7 +287,7 @@ def RunLAr(self, fcl, n, nskip): parser.add_argument('--user', type=str) parser.add_argument('--project', type=int) parser.add_argument('--timeout', type=int, default=120) - parser.add_argument('--wait_time', type=int, default=60) + parser.add_argument('--wait_time', type=int, default=120) parser.add_argument('--wait_limit', type=int, default=5) parser.add_argument('-n', type=int, default=-1) parser.add_argument('--nskip', type=int, default=0) diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py new file mode 100644 index 0000000..0573f48 --- /dev/null +++ b/submit_dd_jobs.py @@ -0,0 +1,137 @@ +from metacat.webapi import MetaCatClient +import sys +from time import sleep +import os +from data_dispatcher.api import DataDispatcherClient +from argparse import ArgumentParser as ap +import subprocess + + + + +if __name__ == '__main__': + parser = ap() + parser.add_argument('--project_only', action='store_true') + parser.add_argument('--dataset', type=str, default=None) + parser.add_argument('--namespace', type=str, default=None) + parser.add_argument('--query_limit', type=str, default=None) + parser.add_argument('--query_skip', type=str, default=None) + parser.add_argument('--njobs', type=int, default=1) + + parser.add_argument('--load_limit', type=int, default=None) + parser.add_argument('--fcl', type=str, default='evd_protoDUNE.fcl') + parser.add_argument('--nevents', type=int, default=-1) + parser.add_argument('--output_str', type=str, default='"*reco.root"') + parser.add_argument('--output_dataset', type=str, default='dd-interactive-tests') + parser.add_argument('--output_namespace', type=str, default='dc4-hd-protodune') + parser.add_argument('--metacat_user', type=str, default='calcuttj') + parser.add_argument('--blacklist', type=str, nargs='+') + parser.add_argument('--project', type=int, default=None) + 
parser.add_argument('--dry_run', action='store_true') + + args = parser.parse_args() + + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') + dd_client = DataDispatcherClient( + server_url='https://metacat.fnal.gov:9443/dune/dd/data', + auth_server_url='https://metacat.fnal.gov:8143/auth/dune') + dd_client.login_x509(os.environ['USER'], + os.environ['X509_USER_PROXY']) + + print(args.blacklist) + + if (not args.project) and args.dataset and args.namespace: + ##build up query + query = 'files from %s where namespace="%s" ordered'%(args.dataset, args.namespace) + if args.query_skip: query += ' skip %s'%args.query_skip + if args.query_limit: query += ' limit %s'%args.query_limit + print(query) + #query metacat + query_files = [i for i in mc_client.query(query)] + print(query_files) + + #check size + nfiles_in_dataset = len(query_files) + if nfiles_in_dataset == 0: + sys.stderr.write("Ignoring launch request on empty metacat query") + sys.stderr.write("Query: %s"%query) + sys.exit(1) + + #make project in data dispatcher + proj_dict = dd_client.create_project(query_files, query=query) + dd_proj_id = proj_dict['project_id'] + print('Project ID:', dd_proj_id) + + if args.project_only: + print('Only making project. 
Exiting now') + exit() + + elif args.project and not (args.dataset and args.namespace): + dd_proj_id = args.project + else: + sys.stderr.write("Need to provide project OR dataset & namespace\n") + sys.exit(1) + + if args.njobs > 10000: + njobs = [10000]*int(args.njobs/10000) + [args.njobs%10000] + else: + njobs = [args.njobs] + + print(njobs) + count = 0 + for nj in njobs: + cmd = 'fife_launch -c byhand.cfg ' \ + f'-Oglobal.load_limit={args.load_limit} ' \ + f'-Oglobal.project={dd_proj_id} ' \ + f'-Oglobal.nevents={args.nevents} ' \ + f'-Oglobal.output_str={args.output_str} ' \ + f'-Oglobal.output_dataset={args.output_dataset} ' \ + f'-Oglobal.output_namespace={args.output_namespace} ' \ + f'-Osubmit.N={nj} ' \ + f'-Oglobal.metacat_user={args.metacat_user} ' + + if args.blacklist: + cs_blacklist = ','.join(args.blacklist) + cmd += f'-Osubmit.blacklist={cs_blacklist} ' + + if args.dry_run: + cmd += '--dry_run ' + print(cmd) + #cmd2 = ('fife_launch -c byhand.cfg ' + # '-Oglobal.load_limit=%i ' + # '-Oglobal.project=%s ' + # '-Oglobal.nevents=%i ' + # '-Oglobal.output_str=%s ' + # '-Oglobal.output_dataset=%s ' + # '-Oglobal.output_namespace=%s ' + # '-Osubmit.N=%i ' + # '-Oglobal.metacat_user=%s ' + # )%(args.load_limit, dd_proj_id, args.nevents, + # args.output_str, args.output_dataset, args.output_namespace, + # nj, args.metacat_user) + + #print(cmd == cmd2) + #print(cmd) + #print(cmd2) + + subprocess.run(cmd, shell=True) + #subprocess.run(('fife_launch -c byhand.cfg ' + # '-Oglobal.load_limit=%i ' + # '-Oglobal.project=%s ' + # '-Oglobal.nevents=%i ' + # '-Oglobal.output_str=%s ' + # '-Oglobal.output_dataset=%s ' + # '-Oglobal.output_namespace=%s ' + # '-Osubmit.N=%i ' + # '-Oglobal.metacat_user=%s ' + # )%(args.load_limit, dd_proj_id, args.nevents, + # args.output_str, args.output_dataset, args.output_namespace, + # nj, args.metacat_user), + # shell=True) + + if count < len(njobs)-1: + print('Sleeping') + for i in range(60): + sleep(2) + print(f'{i*2}/120', 
end='\r') + count += 1 diff --git a/top_script.sh b/top_script.sh index 2e38ab6..a0a0349 100755 --- a/top_script.sh +++ b/top_script.sh @@ -117,6 +117,13 @@ pip install datadispatcher #source ${CONDOR_DIR_INPUT}/${MC_TAR}/canned_client_setup.sh #source ${CONDOR_DIR_INPUT}/${DD_TAR}/canned_client_setup.sh +sleeptime=$[ ( $RANDOM % 120 ) ] +echo "Will sleep for ${sleeptime} seconds" +sleep $sleeptime + +export MYWORKERID=`ddisp worker id -n` +echo "workerid: ${MYWORKERID}" + python -m run_lar \ --namespace $NAMESPACE \ --fcl $FCL \ @@ -165,8 +172,17 @@ export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app echo "Authenticating" #>> ${logname}.out 2>>${logname}.err metacat auth login -m x509 ${METACATUSER} # >> ${logname}.out 2>>${logname}.err +date + +auth_return=$? +if [ $auth_return -ne 0 ]; then + echo "could not declare to metacat" + exit $auth_return +fi + echo "whoami:" #>> ${logname}.out 2>>${logname}.err metacat auth whoami #>> ${logname}.out 2>>${logname}.err +date parents=`cat loaded_files.txt` @@ -195,6 +211,12 @@ for i in *$OUTPUT; do EOF metacat file declare -j ${i}.json $OUTPUTNAMESPACE:$OUTPUTDATASET-data #>> ${logname}.out 2>>${logname}.err + date + returncode=$? 
+ if [ $returncode -ne 0 ]; then + echo "could not declare to metacat" + exit $returncode + fi for rse in ${output_rses[@]}; do echo "Uploading to $rse" From c24e74dfcc979b1a5db56d8a79e19149f26db593 Mon Sep 17 00:00:00 2001 From: Jacob Calcutt Date: Wed, 30 Nov 2022 16:15:57 -0600 Subject: [PATCH 004/166] Quieting printing of query files in submit --- submit_dd_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index 0573f48..84afe42 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -48,7 +48,7 @@ print(query) #query metacat query_files = [i for i in mc_client.query(query)] - print(query_files) + #print(query_files) #check size nfiles_in_dataset = len(query_files) From d17b9b72f95884a5b94f13be14026c8fb7a86aa2 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Fri, 2 Dec 2022 13:18:08 -0800 Subject: [PATCH 005/166] Heidi's test code --- Loginator.py | 226 ++++++++++++++++++++++++++++++++++++++++++++ interactive_test.sh | 3 + run_interactive.py | 52 ++++++++++ run_lar.py | 20 +++- setup_hms.sh | 17 ++++ submit_dd_jobs.py | 29 ++++++ 6 files changed, 346 insertions(+), 1 deletion(-) create mode 100644 Loginator.py create mode 100644 interactive_test.sh create mode 100644 run_interactive.py create mode 100644 setup_hms.sh diff --git a/Loginator.py b/Loginator.py new file mode 100644 index 0000000..f7fe206 --- /dev/null +++ b/Loginator.py @@ -0,0 +1,226 @@ +"""! @brief Art logfile parser """ +## +# @mainpage Loginator.py +# +# @section description_main Description +# A program for parsing art logs to put information into DUNE job monitoring. 
+# +# +# Copyright (c) 2022 Heidi Schellman, Oregon State University +## +# @file Loginator.py + +import string,time,datetime,json,os,sys +import samweb_client +from metacat.webapi import MetaCatClient +import string,datetime#,dateutil +from datetime import date,timezone,datetime +#from dateutil import parser + + +DEBUG=False + +class Loginator: + + def __init__(self,logname): + if not os.path.exists(logname): + print ("no such file exists, quitting",logname) + sys.exit(1) + self.logname = logname + self.logfile = open(logname,'r') + self.outobject ={} + self.info = self.getinfo() + self.tags = ["Opened input file", "Closed input file","Peak resident set size usage (VmHWM)"] + self.template = { + "source_rse":None, # + "user":None, # (who's request is this) + "job_id":None, # (jobsubXXX03@fnal.gov) + "timestamp_for_start":None, # + "timestamp_for_end":None, # + "duration":None, # (difference between end and start) + "file_size":None, # + "application_family":None, # + "application_name":None, # + "application_version":None, # + "final_state":None, # (what happened?) + "cpu_site":None, # (e.g. FNAL":None, # RAL) + "project_name":None, #(wkf request_id?)" + "file_name":None, # (not including the metacat namespace) + "fid":None, # metacat fid + "data_tier":None, # (from metacat) + "data_stream":None, + "run_type":None, + "job_node":None, # (name within the site) + "job_site":None, # (name of the site) + "country":None, # (nationality of the site) + "campaign":None, # (DUNE campaign) + "delivery_method":None, #(stream/copy) + "workflow_method":None, + "access_method":None, #(samweb/dd) + "path":None, + "namespace":None, + "real_memory":None, + "project_id":None, + "delivery_method":None + } + +## return the first tag or None in a line + def findme(self,line): + for tag in self.tags: + if tag in line: + if DEBUG: print (tag,line) + return tag + return None + +## get system info for the full job + def getinfo(self): + info = {} + # get a bunch of system thingies. 
+ info["application_version"]=os.getenv("DUNESW_VERSION") + info["user"]=os.getenv("GRID_USER") + info["job_node"] = os.getenv("HOST") + info["job_site"] = os.getenv("GLIDEIN_DUNESite") + #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further + return info + +## read in the log file and parse it, add the info + def readme(self): + object = {} + for line in self.logfile: + tag = self.findme(line) + if DEBUG: print (tag,line) + if tag == None: + continue + if "file" in tag: + data = line.split(tag) + filefull = data[1].strip().replace('"','') + timestamp = data[0].strip() + filename = os.path.basename(filefull).strip() + filepath = os.path.dirname(filefull).strip() + dups = 0 + if "Opened" in tag: + if DEBUG: print ("filename was",filename,line) + if filename in object.keys(): + print (" I HAVE ALREADY SEEN ",filename) + object[filename] = self.template + object[filename]["timestamp_for_start"] = timestamp + start = timestamp + object[filename]["path"]=filepath + object[filename]["file_name"] = filename + if DEBUG: print ("filepath",filepath) + if "root" in filepath[0:10]: + if DEBUG: print ("I am root") + tmp = filepath.split("//") + object[filename]["source_rse"] = tmp[1] + object[filename]["deliver_method"] = "xroot" + for thing in self.info: + object[filename][thing] = self.info[thing] + object[filename]["final_state"] = "Opened" + if "Closed" in tag: + object[filename]["timestamp_for_end"] = timestamp + object[filename]["duration"]=self.duration(start,timestamp) + object[filename]["final_state"] = "Closed" + continue + if "size usage" in tag: + data = line.split(":") + for thing in object: + object[thing]["real_memory"]=data[1].strip() + self.outobject=object + + def addinfo(self,info): + for s in info: + if s in self.outobject: + print ("Loginator replacing",s, self.outobject[s],self.info[s]) + else: + for f in self.outobject: + self.outobject[f][s] = info[s] + if DEBUG: print ("adding",s,info[s]) + + def addsaminfo(self): + samweb = 
samweb_client.SAMWebClient(experiment='dune') + for f in self.outobject: + if DEBUG: print ("f ",f) + meta = samweb.getMetadata(f) + self.outobject[f]["namespace"]="samweb" + self.outobject[f]["access_method"]="samweb" + for item in ["data_tier","file_type","data_stream","group","file_size"]: + self.outobject[f][item]=meta[item] + for run in meta["runs"]: + self.outobject[f]["run_type"] = run[2] + break + + def addmetacatinfo(self,namespace): + os.environ["METACAT_SERVER_URL"]="https://metacat.fnal.gov:9443/dune_meta_demo/app" + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') + for f in self.outobject: + meta = mc_client.get_file(name=f,namespace=namespace) + if DEBUG: print ("metacat answer",f,meta.keys()) + self.outobject[f]["access_method"]="metacat" + for item in ["data_tier","file_type","data_stream","run_type"]: + if "core."+item in meta["metadata"].keys(): + self.outobject[f][item]=meta["metadata"]["core."+item] + else: + print ("no", item, "in ",list(meta["metadata"].keys())) + self.outobject[f]["file_size"]=meta["size"] + self.outobject[f]["fid"]=meta["fid"] + self.outobject[f]["namespace"]=namespace + + + + def metacatinfo(self,namespace,filename): + print ("do something here") + + + def writeme(self): + result = [] + for thing in self.outobject: + outname = thing+".process.json" + outfile = open(outname,'w') + json.dump(self.outobject[thing],outfile,indent=4) + outfile.close() + result.append(outname) + return result + + def human2number(self,stamp): + #15-Nov-2022 17:24:41 CST https://docs.python.org/3/library/time.html#time.strftime + format = "%d-%b-%Y %H:%M:%S" + # python no longer accepts time zones. 
We only want the different but need to correct for DT + thetime = datetime.strptime(stamp[:-4],format) + epoch = datetime.utcfromtimestamp(0) + if "DT" in stamp: + stamp += 3600 + return (thetime-epoch).total_seconds() + + def duration(self,start,end): + t0 = self.human2number(start) + t1 = self.human2number(end) + return t1-t0 + +def envScraper(): + env = os.environ + if "apple" in env["CLANGXX"]: + f = open("bigenv.txt") + env = {} + for a in f.readlines(): + line = a.split("=") + env[line[0]] = line[1] + digest = {} + for k in env.keys(): + if "SETUP_" in k: + it = env[k].split(" ") + digest[k] = {"Product":it[0],"Version":it[1]} + return digest + + +def test(): + parse = Loginator(sys.argv[1]) + print ("looking at",sys.argv[1]) + parse.readme() + parse.addinfo(parse.getinfo()) + # parse.addsaminfo() + parse.addmetacatinfo("pdsp_mc_reco") + parse.writeme() + + +if __name__ == '__main__': + test() diff --git a/interactive_test.sh b/interactive_test.sh new file mode 100644 index 0000000..062b686 --- /dev/null +++ b/interactive_test.sh @@ -0,0 +1,3 @@ +# run an interactive test HMS 12-2-2022 +python run_interactive.py --dataset dc4:dc4 --namespace dc4-hd-protodune --query_limit 10 --load_limit 1 --fcl eventdump.fcl --user schellma -n 25 + diff --git a/run_interactive.py b/run_interactive.py new file mode 100644 index 0000000..87be3a8 --- /dev/null +++ b/run_interactive.py @@ -0,0 +1,52 @@ +import submit_dd_jobs +from run_lar import DDInterface +import sys +import os +from argparse import ArgumentParser as ap + +if __name__ == '__main__': + + parser = ap() + parser.add_argument('--namespace', type=str) + parser.add_argument('--dataset', type=str) + parser.add_argument('--fcl', type=str) + parser.add_argument('--load_limit', type=int) + parser.add_argument('--query_limit', default=10) + parser.add_argument('--query_skip', default=None) + parser.add_argument('--user', type=str) + parser.add_argument('--project', default=None) + parser.add_argument('--timeout', 
type=int, default=120) + parser.add_argument('--wait_time', type=int, default=120) + parser.add_argument('--wait_limit', type=int, default=5) + parser.add_argument('-n', type=int, default=-1) + parser.add_argument('--nskip', type=int, default=0) + args = parser.parse_args() + + if (not args.project) and args.dataset and args.namespace: + dd_proj_id = submit_dd_jobs.create_project(namespace=args.namespace, + dataset=args.dataset, + query_limit=args.query_limit, + query_skip=args.query_skip) + + + elif args.project and not (args.dataset and args.namespace): + dd_proj_id = int(args.project) + else: + sys.stderr.write("Need to provide project OR dataset & namespace\n") + sys.exit(1) + + dd_interface = DDInterface(args.namespace, + args.load_limit, + timeout=args.timeout, + wait_time=args.wait_time, + wait_limit=args.wait_limit) + dd_interface.Login(args.user) + dd_interface.SetWorkerID() + print(os.environ['MYWORKERID']) + dd_interface.AttachProject(dd_proj_id) + dd_interface.LoadFiles() + dd_interface.BuildFileListString() + dd_interface.RunLAr(args.fcl, args.n, args.nskip) + + ##Loginator stuff here? 
+ diff --git a/run_lar.py b/run_lar.py index cf8333c..dad70c6 100644 --- a/run_lar.py +++ b/run_lar.py @@ -9,6 +9,8 @@ from data_dispatcher.api import DataDispatcherClient from data_dispatcher.api import APIError +import Loginator + def call_and_retry(func): def inner1(*args, **kwargs): nretries = 0 @@ -231,6 +233,9 @@ def SaveFileDIDs(self): with open('loaded_files.txt', 'w') as f: f.writelines(lines) + def SetWorkerID(self): + os.environ['MYWORKERID'] = self.dd_client.new_worker_id() + def AttachProject(self, proj_id): self.proj_id = proj_id #proj = self.dd_client.get_project(proj_id, with_files=False) @@ -270,7 +275,20 @@ def RunLAr(self, fcl, n, nskip): else: cluster = '0' process = '0' - proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o "dc4_hd_protodune_%%tc_%s_%s_reco.root"'%(fcl, self.lar_file_list, n, nskip, cluster, process), shell=True) + ## TODO -- make options for capturing output + fname = "dc4_hd_protodune_%s_%s_reco.root"%(cluster, process) + oname = fname.replace(".root",".out") + ename = fname.replace(".root",".err") + ofile = open(oname,'w') + efile = open(ename,'w') + proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) + ofile.close() + efile.close() + logparse = Loginator.Loginator(oname) + logparse.readme() + logparse.addinfo(logparse.getinfo()) + logparse.addmetacatinfo(self.namespace) #HMS assuming this is the input namespace. 
+ logparse.writeme() if proc.returncode != 0: self.MarkFiles(True) sys.exit(proc.returncode) diff --git a/setup_hms.sh b/setup_hms.sh new file mode 100644 index 0000000..bc6a6be --- /dev/null +++ b/setup_hms.sh @@ -0,0 +1,17 @@ +setup dunesw v09_54_00d00 -q e20:prof +export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +export DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app +export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +export PATH=/nashome/c/calcuttj/.local/bin/:$PATH +#export PYTHONPATH=/dune/app/users/calcuttj/data-dispatcher/data_dispatcher:$PYTHONPATH +#export PYTHONPATH=/dune/app/users/calcuttj/metacat3/metacat:$PYTHONPATH +export PYTHONPATH=/dune/app/users/calcuttj/dd_metacat_pip/venv/lib/python3.9/site-packages/:$PYTHONPATH +#export PYTHONPATH=/dune/app/users/calcuttj/dd_metacat_pip/venv/lib/python3.9/site-packages/:$PYTHONPATH + +#source /dune/app/users/calcuttj/metacat3/metacat_venv/bin/activate +source /dune/app/users/calcuttj/dd_metacat_pip/venv/bin/activate + +kx509 +source ~/proxy.sh +export X509_USER_PROXY=/tmp/x509up_u1327 diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index 84afe42..c318574 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -6,6 +6,35 @@ from argparse import ArgumentParser as ap import subprocess +def create_project(dataset, namespace, query_limit=None, query_skip=None): + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') + dd_client = DataDispatcherClient( + server_url='https://metacat.fnal.gov:9443/dune/dd/data', + auth_server_url='https://metacat.fnal.gov:8143/auth/dune') + dd_client.login_x509(os.environ['USER'], + os.environ['X509_USER_PROXY']) + + query = 'files from %s where namespace="%s" ordered'%(dataset, namespace) + if query_skip: query += ' skip %s'%query_skip + if query_limit: query += ' limit %s'%query_limit + print(query) + #query metacat + 
query_files = [i for i in mc_client.query(query)] + #print(query_files) + + #check size + nfiles_in_dataset = len(query_files) + if nfiles_in_dataset == 0: + sys.stderr.write("Ignoring launch request on empty metacat query") + sys.stderr.write("Query: %s"%query) + sys.exit(1) + + #make project in data dispatcher + proj_dict = dd_client.create_project(query_files, query=query) + dd_proj_id = proj_dict['project_id'] + print('Project ID:', dd_proj_id) + + return dd_proj_id From 331f872a2cfda4bb94dc77cae091ac6729d0bfbc Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Fri, 2 Dec 2022 13:43:34 -0800 Subject: [PATCH 006/166] fix bug in duplicates --- Loginator.py | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/Loginator.py b/Loginator.py index f7fe206..4b6dda2 100644 --- a/Loginator.py +++ b/Loginator.py @@ -86,6 +86,7 @@ def getinfo(self): ## read in the log file and parse it, add the info def readme(self): object = {} + memdata = None for line in self.logfile: tag = self.findme(line) if DEBUG: print (tag,line) @@ -99,32 +100,36 @@ def readme(self): filepath = os.path.dirname(filefull).strip() dups = 0 if "Opened" in tag: + localobject = {} if DEBUG: print ("filename was",filename,line) - if filename in object.keys(): - print (" I HAVE ALREADY SEEN ",filename) - object[filename] = self.template - object[filename]["timestamp_for_start"] = timestamp + + localobject = self.template.copy() + localobject["timestamp_for_start"] = timestamp start = timestamp - object[filename]["path"]=filepath - object[filename]["file_name"] = filename + localobject["path"]=filepath + localobject["file_name"] = filename if DEBUG: print ("filepath",filepath) if "root" in filepath[0:10]: if DEBUG: print ("I am root") tmp = filepath.split("//") - object[filename]["source_rse"] = tmp[1] - object[filename]["deliver_method"] = "xroot" + localobject["source_rse"] = tmp[1] + localobject["deliver_method"] = "xroot" for thing in self.info: - 
object[filename][thing] = self.info[thing] - object[filename]["final_state"] = "Opened" + localobject[thing] = self.info[thing] + localobject["final_state"] = "Opened" if "Closed" in tag: - object[filename]["timestamp_for_end"] = timestamp - object[filename]["duration"]=self.duration(start,timestamp) - object[filename]["final_state"] = "Closed" + localobject["timestamp_for_end"] = timestamp + localobject["duration"]=self.duration(start,timestamp) + localobject["final_state"] = "Closed" + object[filename] = localobject continue if "size usage" in tag: - data = line.split(":") - for thing in object: - object[thing]["real_memory"]=data[1].strip() + memdata = line.split(":") + #print ("mem",memdata,filename) + # add the memory info if available + for thing in object: + if memdata != None: object[thing]["real_memory"]=memdata[1].strip() + #print ("mem",object[thing]["real_memory"]) self.outobject=object def addinfo(self,info): From fb4bc76517380ec41e6799758e2c68b1a4724443 Mon Sep 17 00:00:00 2001 From: Heidi Schellman <33669005+hschellman@users.noreply.github.com> Date: Fri, 2 Dec 2022 14:46:34 -0800 Subject: [PATCH 007/166] Create README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..dc484d1 --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ +# DataChallengeWork-loginator + + +- `setup_hms.sh` # sets it up + +- `./interactive_test.sh` # runs a test + From 6a58e2a6588851c6f4a11acf7d90e5578d7c8fbe Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sat, 3 Dec 2022 13:05:14 -0800 Subject: [PATCH 008/166] changes to parse lar log and flag processed files --- Loginator.py | 72 ++++- interactive_test.sh | 2 +- rejoin.sh | 3 + run_interactive.py | 26 +- run_lar.py | 57 ++-- test.out | 744 ++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 865 insertions(+), 39 deletions(-) create mode 100644 rejoin.sh create mode 100644 test.out diff --git a/Loginator.py 
b/Loginator.py index 4b6dda2..c3d3659 100644 --- a/Loginator.py +++ b/Loginator.py @@ -11,7 +11,10 @@ # @file Loginator.py import string,time,datetime,json,os,sys -import samweb_client +if "SAM_EXPERIMENT" in os.environ: + import samweb_client + + from metacat.webapi import MetaCatClient import string,datetime#,dateutil from datetime import date,timezone,datetime @@ -29,8 +32,8 @@ def __init__(self,logname): self.logname = logname self.logfile = open(logname,'r') self.outobject ={} - self.info = self.getinfo() - self.tags = ["Opened input file", "Closed input file","Peak resident set size usage (VmHWM)"] + self.info = self.getsysinfo() + self.tags = ["Opened input file", "Closed input file","VmHWM"] self.template = { "source_rse":None, # "user":None, # (who's request is this) @@ -43,7 +46,6 @@ def __init__(self,logname): "application_name":None, # "application_version":None, # "final_state":None, # (what happened?) - "cpu_site":None, # (e.g. FNAL":None, # RAL) "project_name":None, #(wkf request_id?)" "file_name":None, # (not including the metacat namespace) "fid":None, # metacat fid @@ -73,11 +75,13 @@ def findme(self,line): return None ## get system info for the full job - def getinfo(self): + def getsysinfo(self): info = {} # get a bunch of system thingies. 
- info["application_version"]=os.getenv("DUNESW_VERSION") - info["user"]=os.getenv("GRID_USER") + if os.getenv("GRID_USER") != None: + info["user"]=os.getenv("GRID_USER") + else: + info["user"]=os.getenv("USER") info["job_node"] = os.getenv("HOST") info["job_site"] = os.getenv("GLIDEIN_DUNESite") #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further @@ -92,6 +96,8 @@ def readme(self): if DEBUG: print (tag,line) if tag == None: continue + if "VmHWM" == tag: + memdata = line.split("VmHWM = ")[1].strip() if "file" in tag: data = line.split(tag) filefull = data[1].strip().replace('"','') @@ -102,7 +108,7 @@ def readme(self): if "Opened" in tag: localobject = {} if DEBUG: print ("filename was",filename,line) - + localobject = self.template.copy() localobject["timestamp_for_start"] = timestamp start = timestamp @@ -123,12 +129,11 @@ def readme(self): localobject["final_state"] = "Closed" object[filename] = localobject continue - if "size usage" in tag: - memdata = line.split(":") + #print ("mem",memdata,filename) # add the memory info if available for thing in object: - if memdata != None: object[thing]["real_memory"]=memdata[1].strip() + if memdata != None: object[thing]["real_memory"]=memdata #print ("mem",object[thing]["real_memory"]) self.outobject=object @@ -154,12 +159,24 @@ def addsaminfo(self): self.outobject[f]["run_type"] = run[2] break - def addmetacatinfo(self,namespace): + def addmetacatinfo(self,defaultNamespace=None): os.environ["METACAT_SERVER_URL"]="https://metacat.fnal.gov:9443/dune_meta_demo/app" mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') for f in self.outobject: + if "namespace" in self.outobject[f] and self.outobject[f]["namespace"] != None: + namespace = self.outobject[f]["namespace"] + else: + print ("set default namespace for file",f,defaultNamespace) + namespace = defaultNamespace + if namespace == None: + print (" could not set namespace for",f) + continue + print (f,namespace) meta = 
mc_client.get_file(name=f,namespace=namespace) - if DEBUG: print ("metacat answer",f,meta.keys()) + print ("metacat answer",f,namespace) + if meta == None: + print ("no metadata for",f) + continue self.outobject[f]["access_method"]="metacat" for item in ["data_tier","file_type","data_stream","run_type"]: if "core."+item in meta["metadata"].keys(): @@ -170,10 +187,30 @@ def addmetacatinfo(self,namespace): self.outobject[f]["fid"]=meta["fid"] self.outobject[f]["namespace"]=namespace + def addreplicainfo(self,replicas,test=False): + notfound = [] + for f in self.outobject: + self.outobject[f]["rse"] = None + + for r in replicas: + found = False + for f in self.outobject: + if f == r["name"]: + print ("replica match",r) + found = True + if "rse" in r: + self.outobject[f]["rse"] = r["rse"] + if "namespace" in r: + self.outobject[f]["namespace"] = r["namespace"] + print (self.outobject[f]) + if not found: + notfound.append(r) + + return notfound - def metacatinfo(self,namespace,filename): - print ("do something here") +# def metacatinfo(self,namespace,filename): +# print ("do something here") def writeme(self): @@ -221,9 +258,10 @@ def test(): parse = Loginator(sys.argv[1]) print ("looking at",sys.argv[1]) parse.readme() - parse.addinfo(parse.getinfo()) + parse.addinfo(parse.getsysinfo()) # parse.addsaminfo() - parse.addmetacatinfo("pdsp_mc_reco") + parse.addreplicainfo([]) + parse.addmetacatinfo("dc4-hd-protodune") # argument is there for testing when you don't have replica list. 
parse.writeme() diff --git a/interactive_test.sh b/interactive_test.sh index 062b686..bd1e5f4 100644 --- a/interactive_test.sh +++ b/interactive_test.sh @@ -1,3 +1,3 @@ # run an interactive test HMS 12-2-2022 -python run_interactive.py --dataset dc4:dc4 --namespace dc4-hd-protodune --query_limit 10 --load_limit 1 --fcl eventdump.fcl --user schellma -n 25 +python run_interactive.py --dataset dc4:dc4 --namespace dc4-hd-protodune --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 diff --git a/rejoin.sh b/rejoin.sh new file mode 100644 index 0000000..10eb238 --- /dev/null +++ b/rejoin.sh @@ -0,0 +1,3 @@ +# run an interactive test HMS 12-2-2022 +python run_interactive.py --project=65 --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 40 + diff --git a/run_interactive.py b/run_interactive.py index 87be3a8..b2e94f1 100644 --- a/run_interactive.py +++ b/run_interactive.py @@ -1,5 +1,5 @@ import submit_dd_jobs -from run_lar import DDInterface +from run_lar import DDInterface import sys import os from argparse import ArgumentParser as ap @@ -8,6 +8,9 @@ parser = ap() parser.add_argument('--namespace', type=str) + parser.add_argument('--appFamily', type=str) + parser.add_argument('--appVersion', type=str) + parser.add_argument('--appName', type=str) parser.add_argument('--dataset', type=str) parser.add_argument('--fcl', type=str) parser.add_argument('--load_limit', type=int) @@ -22,6 +25,21 @@ parser.add_argument('--nskip', type=int, default=0) args = parser.parse_args() + if args.appName == None: + appName = args.fcl.replace(".fcl","") + else: + appName = args.appName + + if args.appVersion == None: + appVersion = os.getenv("DUNESW_VERSION") + else: + appVersion = args.appVersion + + if args.appFamily == None: + appFamily = "LArSoft" + else: + appFamily = args.appFamily + if (not args.project) and args.dataset and 
args.namespace: dd_proj_id = submit_dd_jobs.create_project(namespace=args.namespace, dataset=args.dataset, @@ -39,7 +57,10 @@ args.load_limit, timeout=args.timeout, wait_time=args.wait_time, - wait_limit=args.wait_limit) + wait_limit=args.wait_limit, + appFamily=appFamily, + appName=appName, + appVersion=appVersion) dd_interface.Login(args.user) dd_interface.SetWorkerID() print(os.environ['MYWORKERID']) @@ -49,4 +70,3 @@ dd_interface.RunLAr(args.fcl, args.n, args.nskip) ##Loginator stuff here? - diff --git a/run_lar.py b/run_lar.py index dad70c6..c014248 100644 --- a/run_lar.py +++ b/run_lar.py @@ -49,21 +49,23 @@ def inner1(*args, **kwargs): return inner1 class DDInterface: - def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5): + def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5, appFamily=None, appName=None, appVersion=None): self.dataset = "" #dataset self.limit = 1#limit self.namespace = namespace query_args = (self.dataset, self.namespace, self.limit) self.query = '''files from %s where namespace="%s" limit %i'''%query_args + print ("the query is:",self.query) self.worker_timeout = 3600*5 self.lar_limit = lar_limit self.proj_id = -1 self.proj_exists = False self.proj_state = None - self.loaded_files = [] + self.loaded_files = [] # list of files with all the replicas for worker to choose from + self.input_replicas = [] # list of the replicas actually sent to Lar self.loaded_file_uris = [] self.loaded = False - self.dd_timeout = timeout + self.dd_timeout = timeout self.wait_time = wait_time self.max_wait_attempts = wait_limit self.hit_timeout = False @@ -72,6 +74,9 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.n_waited = 0 self.next_failed = False self.next_replicas = [] + self.appFamily = appFamily + self.appName = appName + self.appVersion = appVersion self.retry_time = 600 @@ -132,6 +137,7 @@ def file_failed(self, did, do_retry=True): @call_and_retry_return 
def get_project(self, proj_id): proj = self.dd_client.get_project(proj_id, with_files=False) + print (proj) #print(datetime.datetime.now()) return proj @@ -144,7 +150,7 @@ def LoadFiles(self): self.Next() if self.next_output == None: ## this shouldn't happen, but if it does just exit the loop - break + break elif self.next_output == True: ##this means the fetch timed out. @@ -163,8 +169,8 @@ def LoadFiles(self): if count > 0: print('data dispatcher next_file timed out. This job has at least one file reserved. Will continue.') break - else: - ##We know we have externally-reserved files. + else: + ##We know we have externally-reserved files. ##try waiting a user-defined amount of time ##for a maximum number of attempts ##-- if at max, go on to loop @@ -176,7 +182,7 @@ def LoadFiles(self): else: print("Hit max wait limit. Ending attempts to load files") break - elif self.next_output == False: + elif self.next_output == False: ##this means the project is done -- just exit the loop. print("Project is done -- exiting file fetch loop") break @@ -226,9 +232,9 @@ def SaveFileDIDs(self): for i in range(len(self.loaded_files)): f = self.loaded_files[i] if i < len(self.loaded_files)-1: - lines.append('{"did":"%s:%s"},\n'%(f['namespace'], f['name'])) + lines.append('{"did":"%s:%s"},\n'%(f['namespace'], f['name'])) else: - lines.append('{"did":"%s:%s"}\n'%(f['namespace'], f['name'])) + lines.append('{"did":"%s:%s"}\n'%(f['namespace'], f['name'])) with open('loaded_files.txt', 'w') as f: f.writelines(lines) @@ -253,6 +259,7 @@ def BuildFileListString(self): if len(replicas) > 0: #Get the first replica replica = replicas[0] + self.input_replicas.append(replica) print('Replica:', replica) uri = replica['url'] if 'https://eospublic.cern.ch/e' in uri: uri = uri.replace('https://eospublic.cern.ch/e', 'xroot://eospublic.cern.ch//e') @@ -260,7 +267,7 @@ def BuildFileListString(self): self.lar_file_list += ' ' else: print('Empty replicas -- marking as failed') - + ##TODO -- pop entry 
self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) print(datetime.datetime.now()) @@ -276,18 +283,27 @@ def RunLAr(self, fcl, n, nskip): cluster = '0' process = '0' ## TODO -- make options for capturing output - fname = "dc4_hd_protodune_%s_%s_reco.root"%(cluster, process) - oname = fname.replace(".root",".out") - ename = fname.replace(".root",".err") + datetime.datetime.now().strftime("%Y%m%d%h%M%S%Z") + fname = "dc4_hd_protodune_%%tc_%s_%s_reco.root"%(cluster, process) + oname = fname.replace(".root",".out").replace("%tc",stamp) + ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(oname,'w') efile = open(ename,'w') proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) ofile.close() efile.close() + + # get log info, match with replicas logparse = Loginator.Loginator(oname) + # parse the log and find open./close/memory logparse.readme() - logparse.addinfo(logparse.getinfo()) - logparse.addmetacatinfo(self.namespace) #HMS assuming this is the input namespace. + #logparse.addinfo(logparse.getinfo()) + logparse.addinfo({"application_family":self.appFamily,"application_name":self.appName, + "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) + unused_replicas = logparse.addreplicainfo(self.input_replicas) + logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info + print ("replicas not used",unused_replicas) + # write out json files for processed files whether closed properly or not. Those never opened don't get logged. 
logparse.writeme() if proc.returncode != 0: self.MarkFiles(True) @@ -300,6 +316,9 @@ def RunLAr(self, fcl, n, nskip): parser = ap() parser.add_argument('--namespace', type=str) + parser.add_argument('--appFamily', type=str) + parser.add_argument('--appName', type=str) + parser.add_argument('--appVersion', type=str) parser.add_argument('--fcl', type=str) parser.add_argument('--load_limit', type=int) parser.add_argument('--user', type=str) @@ -315,11 +334,13 @@ def RunLAr(self, fcl, n, nskip): args.load_limit, timeout=args.timeout, wait_time=args.wait_time, - wait_limit=args.wait_limit) + wait_limit=args.wait_limit, + appFamily=args.appFamily, + appName=args.appName, + appVersion=args.appVersion + ) dd_interface.Login(args.user) dd_interface.AttachProject(args.project) dd_interface.LoadFiles() dd_interface.BuildFileListString() dd_interface.RunLAr(args.fcl, args.n, args.nskip) - - diff --git a/test.out b/test.out new file mode 100644 index 0000000..803e5e2 --- /dev/null +++ b/test.out @@ -0,0 +1,744 @@ +%MSG-i MF_INIT_OK: Early 02-Dec-2022 17:12:58 CST JobSetup +Messagelogger initialization complete. +%MSG +02-Dec-2022 17:13:00 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" +02-Dec-2022 17:13:04 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" +Begin processing the 1st record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:13:12 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... 
| .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 667349 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 667349 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... 
| art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 2nd record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:14:12 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 665036 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 665036 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 3rd record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:14:29 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 647744 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 647744 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ 
| ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 4th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:14:47 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 657006 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... 
| largeant....... | ..................... | art::Assns | 657006 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 5th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:15:06 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 605796 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 605796 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 6th record. run: 1 subRun: 0 event: 6 at 02-Dec-2022 17:15:23 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... 
| PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 728127 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 728127 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 7th record. run: 1 subRun: 0 event: 7 at 02-Dec-2022 17:15:42 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ 
| ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 732048 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 732048 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 8th record. run: 1 subRun: 0 event: 8 at 02-Dec-2022 17:16:01 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 698294 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 698294 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. 
| simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 9th record. run: 1 subRun: 0 event: 9 at 02-Dec-2022 17:16:18 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... 
| largeant....... | ..................... | std::vector........................................ | 589402 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 589402 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 10th record. run: 1 subRun: 0 event: 10 at 02-Dec-2022 17:16:33 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... 
| std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 608655 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 608655 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ 
| .....0 + +Total products (present, not present): 21 (19, 2). + +PRINCIPAL TYPE: Run +PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE +SinglesGen.. | generator.................. | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +SinglesGen.. | kr85....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- +G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- + +Total products (present, not present): 9 (9, 0). 
+ +02-Dec-2022 17:16:49 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" +02-Dec-2022 17:16:49 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" +02-Dec-2022 17:16:51 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" +Begin processing the 11th record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:16:58 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. 
| .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 674111 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 674111 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 12th record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:17:55 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... 
| ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 694266 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 694266 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... 
| .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 13th record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:18:10 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 726125 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 726125 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 14th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:18:26 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 648034 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 648034 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ 
| ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 15th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:18:40 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 670689 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... 
| largeant....... | ..................... | art::Assns | 670689 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 16th record. run: 1 subRun: 0 event: 6 at 02-Dec-2022 17:18:55 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 517823 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 517823 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 17th record. run: 1 subRun: 0 event: 7 at 02-Dec-2022 17:19:08 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... 
| PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 821401 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 821401 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 18th record. run: 1 subRun: 0 event: 8 at 02-Dec-2022 17:19:26 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ 
| ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 641515 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 641515 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 19th record. run: 1 subRun: 0 event: 9 at 02-Dec-2022 17:19:40 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 685039 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 685039 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. 
| simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 20th record. run: 1 subRun: 0 event: 10 at 02-Dec-2022 17:19:55 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... 
| largeant....... | ..................... | std::vector........................................ | 766016 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 766016 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +PRINCIPAL TYPE: Run +PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE +SinglesGen.. | generator.................. | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +SinglesGen.. | kr85....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. 
| ...- +SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- +G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- + +Total products (present, not present): 9 (9, 0). + +02-Dec-2022 17:20:12 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" +02-Dec-2022 17:20:12 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" +02-Dec-2022 17:20:13 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" +Begin processing the 21st record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:20:21 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ 
| .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 697439 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 697439 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). 
+ +Begin processing the 22nd record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:21:19 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 762927 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 762927 +G4.......... | IonAndScint.... | priorSCE............. 
| std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 23rd record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:21:37 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 645289 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 645289 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 24th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:21:52 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... 
| std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 664759 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 664759 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... 
| crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +Begin processing the 25th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:22:09 CST +PRINCIPAL TYPE: Event +PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE +SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 +SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 +SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 +SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | rns............ | ..................... | std::vector........................................ | .....2 +G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? +G4.......... | TriggerResults. 
| ..................... | art::TriggerResults.................................................. | .....1 +G4.......... | largeant....... | ..................... | std::vector........................................ | 639526 +G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 +G4.......... | largeant....... | ..................... | art::Assns | 639526 +G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? +Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 +Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 +Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 +Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 +Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 +Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 + +Total products (present, not present): 21 (19, 2). + +PRINCIPAL TYPE: Run +PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE +SinglesGen.. | generator.................. | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +SinglesGen.. | kr85....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. 
| ...- +SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. | ...- +SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- +G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- +Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- + +Total products (present, not present): 9 (9, 0). + +02-Dec-2022 17:22:25 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" + +TrigReport ---------- Event summary ------------- +TrigReport Events total = 25 passed = 25 failed = 0 + +TrigReport ---------- Modules in End-path ---------- +TrigReport Run Success Error Name +TrigReport 25 25 0 out1 + +TimeReport ---------- Time summary [sec] ------- +TimeReport CPU = 150.911095 Real = 557.432832 + +MemReport ---------- Memory summary [base-10 MB] ------ +MemReport VmPeak = 2466.87 VmHWM = 1856.9 + +Art has completed and will exit with status 0. 
From d730193da9f430600c1524968bcb070b50512626 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sat, 3 Dec 2022 16:39:09 -0800 Subject: [PATCH 009/166] try to run PDSP data --- other_test.sh | 3 +++ run_lar.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 other_test.sh diff --git a/other_test.sh b/other_test.sh new file mode 100644 index 0000000..c33acb0 --- /dev/null +++ b/other_test.sh @@ -0,0 +1,3 @@ +# run an interactive test HMS 12-2-2022 +python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 +#metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt diff --git a/run_lar.py b/run_lar.py index c014248..5eeca30 100644 --- a/run_lar.py +++ b/run_lar.py @@ -283,7 +283,7 @@ def RunLAr(self, fcl, n, nskip): cluster = '0' process = '0' ## TODO -- make options for capturing output - datetime.datetime.now().strftime("%Y%m%d%h%M%S%Z") + stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") fname = "dc4_hd_protodune_%%tc_%s_%s_reco.root"%(cluster, process) oname = fname.replace(".root",".out").replace("%tc",stamp) ename = fname.replace(".root",".err").replace("%tc",stamp) From 956abf58c594dba87e463e103033924d08c73865 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sun, 4 Dec 2022 07:21:39 -0800 Subject: [PATCH 010/166] remove the namespace requirement for inputs --- other_test.sh | 3 +++ run_lar.py | 11 +++++++---- submit_dd_jobs.py | 23 ++++++++++++----------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/other_test.sh b/other_test.sh index c33acb0..bc2d011 100644 --- a/other_test.sh +++ b/other_test.sh @@ -1,3 +1,6 @@ # run an interactive test HMS 12-2-2022 python run_interactive.py --dataset=schellma:run5141recentReco 
--namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 +python run_interactive.py --project=82 --query_limit 100 --load_limit 5 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 120 + + #metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt diff --git a/run_lar.py b/run_lar.py index 5eeca30..00a1db1 100644 --- a/run_lar.py +++ b/run_lar.py @@ -53,8 +53,10 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.dataset = "" #dataset self.limit = 1#limit self.namespace = namespace - query_args = (self.dataset, self.namespace, self.limit) - self.query = '''files from %s where namespace="%s" limit %i'''%query_args + #query_args = (self.dataset, self.namespace, self.limit) + #self.query = '''files from %s where namespace="%s" limit %i'''%query_args + query_args = (self.dataset, self.limit) + self.query = '''files from %s limit %i'''%query_args print ("the query is:",self.query) self.worker_timeout = 3600*5 self.lar_limit = lar_limit @@ -150,6 +152,7 @@ def LoadFiles(self): self.Next() if self.next_output == None: ## this shouldn't happen, but if it does just exit the loop + print ("next_output = None") break elif self.next_output == True: ##this means the fetch timed out. 
@@ -195,7 +198,7 @@ def LoadFiles(self): ##also reset the number of times waited self.n_waited = 0 else: - print('Empty replicas -- marking as failed') + print('Empty replicas -- marking as failed',self.next_output) self.file_failed( '%s:%s'%(self.next_output['namespace'], self.next_output['name']), do_retry=False) @@ -284,7 +287,7 @@ def RunLAr(self, fcl, n, nskip): process = '0' ## TODO -- make options for capturing output stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") - fname = "dc4_hd_protodune_%%tc_%s_%s_reco.root"%(cluster, process) + fname = "runLar_%%tc_%s_%s_reco.root"%(cluster, process) oname = fname.replace(".root",".out").replace("%tc",stamp) ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(oname,'w') diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index c318574..982f2dd 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -13,22 +13,23 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): auth_server_url='https://metacat.fnal.gov:8143/auth/dune') dd_client.login_x509(os.environ['USER'], os.environ['X509_USER_PROXY']) - - query = 'files from %s where namespace="%s" ordered'%(dataset, namespace) + + #query = 'files from %s where namespace="%s" ordered'%(dataset, namespace) + query = 'files from %s ordered'%(dataset) if query_skip: query += ' skip %s'%query_skip if query_limit: query += ' limit %s'%query_limit - print(query) + print("Start Project for :",query) #query metacat query_files = [i for i in mc_client.query(query)] #print(query_files) - + #check size nfiles_in_dataset = len(query_files) if nfiles_in_dataset == 0: sys.stderr.write("Ignoring launch request on empty metacat query") sys.stderr.write("Query: %s"%query) sys.exit(1) - + #make project in data dispatcher proj_dict = dd_client.create_project(query_files, query=query) dd_proj_id = proj_dict['project_id'] @@ -58,15 +59,15 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): 
parser.add_argument('--project', type=int, default=None) parser.add_argument('--dry_run', action='store_true') - args = parser.parse_args() - + args = parser.parse_args() + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') dd_client = DataDispatcherClient( server_url='https://metacat.fnal.gov:9443/dune/dd/data', auth_server_url='https://metacat.fnal.gov:8143/auth/dune') dd_client.login_x509(os.environ['USER'], os.environ['X509_USER_PROXY']) - + print(args.blacklist) if (not args.project) and args.dataset and args.namespace: @@ -78,14 +79,14 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): #query metacat query_files = [i for i in mc_client.query(query)] #print(query_files) - + #check size nfiles_in_dataset = len(query_files) if nfiles_in_dataset == 0: sys.stderr.write("Ignoring launch request on empty metacat query") sys.stderr.write("Query: %s"%query) sys.exit(1) - + #make project in data dispatcher proj_dict = dd_client.create_project(query_files, query=query) dd_proj_id = proj_dict['project_id'] @@ -105,7 +106,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): njobs = [10000]*int(args.njobs/10000) + [args.njobs%10000] else: njobs = [args.njobs] - + print(njobs) count = 0 for nj in njobs: From d7332b70f80b6d40f5d349a0c189bb591a40a9ff Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sun, 4 Dec 2022 11:08:01 -0800 Subject: [PATCH 011/166] more on logs --- other_test.sh | 5 ++-- run_interactive.py | 5 ++-- run_lar.py | 66 +++++++++++++++++++++++++++++++++++++++------- 3 files changed, 62 insertions(+), 14 deletions(-) diff --git a/other_test.sh b/other_test.sh index bc2d011..e8d3cd6 100644 --- a/other_test.sh +++ b/other_test.sh @@ -1,6 +1,5 @@ # run an interactive test HMS 12-2-2022 -python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana 
--appVersion=$PROTODUNEANA_VERSION -n 5 -python run_interactive.py --project=82 --query_limit 100 --load_limit 5 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 120 - +#python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 +python run_interactive.py --project=88 --query_limit 100 --load_limit 2 --fcl eventdump.fcl --user schellma -n 2 #metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt diff --git a/run_interactive.py b/run_interactive.py index b2e94f1..f3d2fcb 100644 --- a/run_interactive.py +++ b/run_interactive.py @@ -34,7 +34,7 @@ appVersion = os.getenv("DUNESW_VERSION") else: appVersion = args.appVersion - + if args.appFamily == None: appFamily = "LArSoft" else: @@ -65,8 +65,9 @@ dd_interface.SetWorkerID() print(os.environ['MYWORKERID']) dd_interface.AttachProject(dd_proj_id) + dd_interface.dump_project(dd_proj_id) dd_interface.LoadFiles() dd_interface.BuildFileListString() dd_interface.RunLAr(args.fcl, args.n, args.nskip) - + dd_interface.dump_project(dd_proj_id) ##Loginator stuff here? 
diff --git a/run_lar.py b/run_lar.py index 00a1db1..a7691c1 100644 --- a/run_lar.py +++ b/run_lar.py @@ -11,6 +11,13 @@ import Loginator +def NoneToString(thing): + if thing == None: + return "-" + +def makedid(namespace,name): + return "%s:%s"%(namespace,name) + def call_and_retry(func): def inner1(*args, **kwargs): nretries = 0 @@ -143,6 +150,27 @@ def get_project(self, proj_id): #print(datetime.datetime.now()) return proj + @call_and_retry_return + def dump_project(self, proj_id): + proj = self.dd_client.get_project(proj_id, with_files=True) + print ("dumping project",proj_id) + for k in proj: + if k == "file_handles": + continue + print (k,proj[k]) + for f in proj["file_handles"]: + reserved = f["reserved_since"] + if reserved == None: + reserved = "-" + else: + t = datetime.datetime.fromtimestamp(reserved,tz=datetime.timezone.utc) + reserved = t.isoformat()[0:19] + 'Z' + + print("%10s\t%d\t%21s\t%8s\t%s:%s"%(f["state"],f["attempts"],(reserved),NoneToString(f["worker_id"]),f["namespace"],f["name"])) + + #print(datetime.datetime.now()) + return proj + def LoadFiles(self): count = 0 ##Should we take out the proj_state clause? 
@@ -219,16 +247,30 @@ def Next(self): self.next_replicas = list(self.next_output['replicas'].values()) - def MarkFiles(self, failed=False): + def MarkFiles(self, failed=False,badlist=[]): state = 'failed' if failed else 'done' #nretries = 0 + for j in self.loaded_files: + did = makedid(j['namespace'], j['name']) + # mark all as failed if that is how things are set if failed: print('Marking failed') - self.file_failed('%s:%s'%(j['namespace'], j['name'])) + self.file_failed(did) + # ok, maybe some failed, else: - print('Marking done') - self.file_done('%s:%s'%(j['namespace'], j['name'])) + good = True + for b in badlist: + if did == b: + print ("this file was not used, mark as failed",did) + good = False + break + if good: + print('Marking done') + self.file_done(did) + else: + print('Marking failed') + self.file_failed(did) def SaveFileDIDs(self): lines = [] @@ -274,6 +316,7 @@ def BuildFileListString(self): ##TODO -- pop entry self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) print(datetime.datetime.now()) + def RunLAr(self, fcl, n, nskip): if len(self.loaded_files) == 0: print('No files loaded with data dispatcher. 
Exiting gracefully') @@ -287,7 +330,7 @@ def RunLAr(self, fcl, n, nskip): process = '0' ## TODO -- make options for capturing output stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") - fname = "runLar_%%tc_%s_%s_reco.root"%(cluster, process) + fname = "runLar_%s_%%tc_%s_%s_reco.root"%(self.proj_id, cluster, process) oname = fname.replace(".root",".out").replace("%tc",stamp) ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(oname,'w') @@ -304,17 +347,22 @@ def RunLAr(self, fcl, n, nskip): logparse.addinfo({"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) unused_replicas = logparse.addreplicainfo(self.input_replicas) + unused_replica_names = [] + for u in unused_replicas: + unused_replica_names.append(u["namespace"]+":"+u["name"]) logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info - print ("replicas not used",unused_replicas) + print ("replicas not used",unused_replica_names) # write out json files for processed files whether closed properly or not. Those never opened don't get logged. 
logparse.writeme() if proc.returncode != 0: self.MarkFiles(True) - sys.exit(proc.returncode) - self.MarkFiles() + print ("LAr returned", proc.returncode) + return proc.returncode + self.MarkFiles(False,unused_replica_names) self.SaveFileDIDs() + if __name__ == '__main__': parser = ap() @@ -346,4 +394,4 @@ def RunLAr(self, fcl, n, nskip): dd_interface.AttachProject(args.project) dd_interface.LoadFiles() dd_interface.BuildFileListString() - dd_interface.RunLAr(args.fcl, args.n, args.nskip) + code = dd_interface.RunLAr(args.fcl, args.n, args.nskip) From 3e485da88b6002655d4493b3dd2fdd0e05624754 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sun, 4 Dec 2022 14:08:11 -0800 Subject: [PATCH 012/166] try doing batch --- Loginator.py | 12 +++-- byhand.cfg | 117 ++++++++++++++++++++++++++++++++++++++++++++++ other_test.sh | 2 +- run_lar.py | 14 +++++- submit_dd_jobs.py | 2 +- submit_test.sh | 7 +++ 6 files changed, 146 insertions(+), 8 deletions(-) create mode 100644 byhand.cfg create mode 100755 submit_test.sh diff --git a/Loginator.py b/Loginator.py index c3d3659..04dec96 100644 --- a/Loginator.py +++ b/Loginator.py @@ -171,14 +171,14 @@ def addmetacatinfo(self,defaultNamespace=None): if namespace == None: print (" could not set namespace for",f) continue - print (f,namespace) + if DEBUG: print (f,namespace) meta = mc_client.get_file(name=f,namespace=namespace) - print ("metacat answer",f,namespace) + if DEBUG: print ("metacat answer",f,namespace) if meta == None: print ("no metadata for",f) continue self.outobject[f]["access_method"]="metacat" - for item in ["data_tier","file_type","data_stream","run_type"]: + for item in ["data_tier","file_type","data_stream","run_type","event_count"]: if "core."+item in meta["metadata"].keys(): self.outobject[f][item]=meta["metadata"]["core."+item] else: @@ -196,7 +196,7 @@ def addreplicainfo(self,replicas,test=False): found = False for f in self.outobject: if f == r["name"]: - print ("replica match",r) + if DEBUG: print ("replica 
match",r) found = True if "rse" in r: self.outobject[f]["rse"] = r["rse"] @@ -204,6 +204,7 @@ def addreplicainfo(self,replicas,test=False): self.outobject[f]["namespace"] = r["namespace"] print (self.outobject[f]) if not found: + print (r,"appears in replicas but not in Lar Log, need to mark as unused") notfound.append(r) return notfound @@ -227,7 +228,8 @@ def human2number(self,stamp): #15-Nov-2022 17:24:41 CST https://docs.python.org/3/library/time.html#time.strftime format = "%d-%b-%Y %H:%M:%S" # python no longer accepts time zones. We only want the different but need to correct for DT - thetime = datetime.strptime(stamp[:-4],format) + print ("human2number converting",stamp) + thetime = datetime.strptime(stamp[0:19],format) epoch = datetime.utcfromtimestamp(0) if "DT" in stamp: stamp += 3600 diff --git a/byhand.cfg b/byhand.cfg new file mode 100644 index 0000000..a5403ab --- /dev/null +++ b/byhand.cfg @@ -0,0 +1,117 @@ +[global] +group = dune +experiment = dune +wrapper = file:///${FIFE_UTILS_DIR}/libexec/fife_wrap +#wrapper = /dune/app/users/$USER/new_fife_utils/fife_utils/v3_5_0/NULL/libexec/fife_wrap +output_dir = /pnfs/dune/scratch/users/$USER/dd_mc_test/ +extra_dir = ./ +input_namespace = dc4-hd-protodune +fcl = protoDUNEHD_refactored_reco.fcl +load_limit = 1 +project = 317 +nevents = -1 +output_str = "*reco.root" +output_dataset = dd-interactive-test-data +output_namespace = dc4-interactive-tests +metacat_user = $USER +dd_tar=$USER_8_22_22_dd +mc_tar=$USER_8_15_22_mc + +[executable] +#name = ./top_script.sh +name = \\\${CONDOR_DIR_INPUT}/top_script.sh +arg_1 = --namespace +arg_2 = %(input_namespace)s +arg_3 = --fcl +arg_4 = %(fcl)s +arg_5 = --load_limit +arg_6 = %(load_limit)s +arg_7 = --user +arg_8 = $USER +arg_9 = --project +arg_10 = %(project)s +arg_11 = -n +arg_12 = %(nevents)s +arg_13 = --output +arg_14 = %(output_str)s +arg_15 = --output_dataset +arg_16 = %(output_dataset)s +arg_17 = --output_namespace +arg_18 = %(output_namespace)s +arg_19 = 
--metacat_user +arg_20 = %(metacat_user)s + +[stage] +lines_1 '+FERMIHTC_AutoRelease=True' +lines_2 '+FERMIHTC_GraceMemory=1024' +lines_3 '+FERMIHTC_GraceLifetime=3600' + +[env_pass] +IFDH_DEBUG=1 +IFDH_CP_MAXRETRIES=3 +XRD_CONNECTIONRETRY=32 +XRD_REQUESTTIMEOUT=14400 +XRD_REDIRECTLIMIT=255 +XRD_LOADBALANCERTTL=7200 +XRD_STREAMTIMEOUT=7200 +DD_TAR = %(dd_tar)s +MC_TAR = %(mc_tar)s +DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app + + +[submit] +G = %(group)s +OS = SL7 +memory = 1999MB +expected-lifetime = 4h +N = 1 +resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED +#resource-provides = usage_model=OPPORTUNISTIC,DEDICATED +c = "has_avx==True" +lines_1 = '+FERMIHTC_AutoRelease=True' +lines_2 = '+FERMIHTC_GraceMemory=2048' +lines_3 = '+FERMIHTC_GraceLifetime=3600' +lines_4 = '+SingularityImage=\"/cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest\"' +append_condor_requirements_1 = '(TARGET.HAS_Singularity==true)' +append_condor_requirements_2 = 'TARGET.HAS_CVMFS_fifeuser1_opensciencegrid_org==true' +append_condor_requirements_3 = 'TARGET.HAS_CVMFS_fifeuser2_opensciencegrid_org==true' +append_condor_requirements_4 = 'TARGET.HAS_CVMFS_fifeuser3_opensciencegrid_org==true' +append_condor_requirements_5 = 'TARGET.HAS_CVMFS_fifeuser4_opensciencegrid_org==true' +append_condor_requirements_6 = 'TARGET.HAS_CVMFS_dune_opensciencegrid_org==true' +append_condor_requirements_7 = 'TARGET.HAS_CVMFS_larsoft_opensciencegrid_org==true' +append_condor_requirements_8 = 'TARGET.CVMFS_dune_opensciencegrid_org_REVISION>=1105' +#f_0 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(dd_tar)s.tar +#f_1 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(mc_tar)s.tar +f_0 = dropbox:///dune/data/users/$USER/loginator.tar + +#[job_output] 
+#addoutput = *err +#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} +# +#[job_output_1] +#addoutput = *out +#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} + +#[data_dispatcher] +#dataset = dc4:dc4 +#namespace = dc4-hd-protodune +#query_limit = 9 +#load_limit = 3 +#wait_time = 45 +#wait_limit = 4 +#timeout = 50 + +[job_setup] +source_1 = /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh +#source_2 = ${CONDOR_DIR_INPUT}/%(mc_tar)s/canned_client_setup.sh +#source_3 = ${CONDOR_DIR_INPUT}/%(dd_tar)s/canned_client_setup.sh +#setup = dunesw v09_55_01d00 -q e20:prof +prescript_0 = echo lsing +prescript_1 = ls ${CONDOR_DIR_INPUT} +#prescript_1 = ls ${CONDOR_DIR_INPUT}/$USER_8*/ +#prescript_4 = echo $PATH +#prescript_5 = echo $PYTHONPATH +#prescript_6 = ls ${CONDOR_DIR_INPUT}/%(dd_tar)s/data_dispatcher diff --git a/other_test.sh b/other_test.sh index e8d3cd6..ec6675f 100644 --- a/other_test.sh +++ b/other_test.sh @@ -1,5 +1,5 @@ # run an interactive test HMS 12-2-2022 #python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 -python run_interactive.py --project=88 --query_limit 100 --load_limit 2 --fcl eventdump.fcl --user schellma -n 2 +python run_interactive.py --project=83 --query_limit 100 --load_limit 4 --fcl eventdump.fcl --user schellma -n 300 #metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt diff --git a/run_lar.py b/run_lar.py index a7691c1..db0e438 100644 --- a/run_lar.py +++ b/run_lar.py @@ -11,9 +11,12 @@ import Loginator +# make a string out of none for formatted Printing def NoneToString(thing): if thing == None: return "-" + else: + return thing def makedid(namespace,name): return "%s:%s"%(namespace,name) @@ -341,23 +344,32 
@@ def RunLAr(self, fcl, n, nskip): # get log info, match with replicas logparse = Loginator.Loginator(oname) + # parse the log and find open./close/memory logparse.readme() + #logparse.addinfo(logparse.getinfo()) - logparse.addinfo({"application_family":self.appFamily,"application_name":self.appName, + logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) + + #deal with un unused_replicas = logparse.addreplicainfo(self.input_replicas) unused_replica_names = [] for u in unused_replicas: unused_replica_names.append(u["namespace"]+":"+u["name"]) logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info print ("replicas not used",unused_replica_names) + # write out json files for processed files whether closed properly or not. Those never opened don't get logged. logparse.writeme() + + # make all files as bad if job crashed if proc.returncode != 0: self.MarkFiles(True) print ("LAr returned", proc.returncode) return proc.returncode + + # else go through files and mark the ones closed in the logfile as good self.MarkFiles(False,unused_replica_names) self.SaveFileDIDs() diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index 982f2dd..af12e03 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -54,7 +54,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): parser.add_argument('--output_str', type=str, default='"*reco.root"') parser.add_argument('--output_dataset', type=str, default='dd-interactive-tests') parser.add_argument('--output_namespace', type=str, default='dc4-hd-protodune') - parser.add_argument('--metacat_user', type=str, default='calcuttj') + parser.add_argument('--metacat_user', type=str, default='schellma') parser.add_argument('--blacklist', type=str, nargs='+') parser.add_argument('--project', type=int, 
default=None) parser.add_argument('--dry_run', action='store_true') diff --git a/submit_test.sh b/submit_test.sh new file mode 100755 index 0000000..6d05738 --- /dev/null +++ b/submit_test.sh @@ -0,0 +1,7 @@ +# submission tests +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=dump_evet.fcl --nevents=200 --load_limit=4 + +#--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] +# [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] +# [--output_str OUTPUT_STR] [--output_dataset OUTPUT_DATASET] [--output_namespace OUTPUT_NAMESPACE] +# [--metacat_user METACAT_USER] [--blacklist BLACKLIST [BLACKLIST ...]] [--project PROJECT] [--dry_run] From c7de6b78593aef6fe52a305b729f549c6e75f2d7 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sun, 4 Dec 2022 19:14:30 -0800 Subject: [PATCH 013/166] get batch running --- Loginator.py | 18 ++++- byhand.cfg | 4 +- gitadd.sh | 1 + tarme.sh | 1 + top_script.sh | 189 +++++++++++++++++++++++++++----------------------- 5 files changed, 123 insertions(+), 90 deletions(-) create mode 100644 gitadd.sh create mode 100644 tarme.sh diff --git a/Loginator.py b/Loginator.py index 04dec96..4b23709 100644 --- a/Loginator.py +++ b/Loginator.py @@ -33,7 +33,7 @@ def __init__(self,logname): self.logfile = open(logname,'r') self.outobject ={} self.info = self.getsysinfo() - self.tags = ["Opened input file", "Closed input file","VmHWM"] + self.tags = ["Opened input file", "Closed input file","VmHWM","CPU"] self.template = { "source_rse":None, # "user":None, # (who's request is this) @@ -62,7 +62,7 @@ def __init__(self,logname): "path":None, "namespace":None, "real_memory":None, - "project_id":None, + "project_id":0, "delivery_method":None } @@ -91,6 +91,8 @@ def getsysinfo(self): def readme(self): object = {} memdata = None + cpudata = None + walldata = None for line in self.logfile: tag = self.findme(line) if DEBUG: print 
(tag,line) @@ -98,6 +100,13 @@ def readme(self): continue if "VmHWM" == tag: memdata = line.split("VmHWM = ")[1].strip() + if "CPU" == tag: + timeline = line.strip().split(" ") + if len(timeline) < 7: + continue + cpudata = timeline[3] + walldata = timeline[6] + if "file" in tag: data = line.split(tag) filefull = data[1].strip().replace('"','') @@ -134,6 +143,8 @@ def readme(self): # add the memory info if available for thing in object: if memdata != None: object[thing]["real_memory"]=memdata + if walldata != None: object[thing]["job_wall_time"]=walldata + if cpudata != None: object[thing]["job_cpu_time"]=cpudata #print ("mem",object[thing]["real_memory"]) self.outobject=object @@ -184,6 +195,7 @@ def addmetacatinfo(self,defaultNamespace=None): else: print ("no", item, "in ",list(meta["metadata"].keys())) self.outobject[f]["file_size"]=meta["size"] + self.outobject[f]["campaign"]=meta["DUNE.campaign"] self.outobject[f]["fid"]=meta["fid"] self.outobject[f]["namespace"]=namespace @@ -217,7 +229,7 @@ def addreplicainfo(self,replicas,test=False): def writeme(self): result = [] for thing in self.outobject: - outname = thing+".process.json" + outname = "%s_%d_process.json" %(thing,self.outobject[thing]["project_id"]) outfile = open(outname,'w') json.dump(self.outobject[thing],outfile,indent=4) outfile.close() diff --git a/byhand.cfg b/byhand.cfg index a5403ab..8817a66 100644 --- a/byhand.cfg +++ b/byhand.cfg @@ -5,8 +5,8 @@ wrapper = file:///${FIFE_UTILS_DIR}/libexec/fife_wrap #wrapper = /dune/app/users/$USER/new_fife_utils/fife_utils/v3_5_0/NULL/libexec/fife_wrap output_dir = /pnfs/dune/scratch/users/$USER/dd_mc_test/ extra_dir = ./ -input_namespace = dc4-hd-protodune -fcl = protoDUNEHD_refactored_reco.fcl +input_namespace = pdsp_det_reco +fcl = eventdump.fcl load_limit = 1 project = 317 nevents = -1 diff --git a/gitadd.sh b/gitadd.sh new file mode 100644 index 0000000..6b81f6f --- /dev/null +++ b/gitadd.sh @@ -0,0 +1 @@ +git add *.sh *.cfg *.py diff --git a/tarme.sh 
b/tarme.sh new file mode 100644 index 0000000..2091aed --- /dev/null +++ b/tarme.sh @@ -0,0 +1 @@ +tar -cvf ../loginator.tar *.sh *.cfg *.py diff --git a/top_script.sh b/top_script.sh index a0a0349..a90ff1f 100755 --- a/top_script.sh +++ b/top_script.sh @@ -101,7 +101,7 @@ done echo $NAMESPACE -logname=dc4-${NAMESPACE}_${PROCESS}_${CLUSTER}_`date +%F_%H_%M_%S` +logname=loginator-${NAMESPACE}_${PROCESS}_${CLUSTER}_`date +%F_%H_%M_%S` export PYTHONPATH=${CONDOR_DIR_INPUT}:${PYTHONPATH} @@ -133,101 +133,120 @@ python -m run_lar \ -n $N \ #--nskip $nskip \ #> ${logname}.out 2>${logname}.err -) returncode=$? #echo "Return code: " $returncode >> ${logname}.out 2>>${logname}.err echo "Return code: " $returncode - +export SCRATCH_DIR=/pnfs/dune/scratch/users +#setup ifdh +ifdh ls ${SCRATCH_DIR}/${USER}/ddtest +#if [ $? -ne 0 &&- z "$IFDH_OPTION"]; then +# echo "Unable to read ${SCRATCH_DIR}/${USER}/ddtest make sure that you have created this directory and given it group write permission." +# exit 74 +#else + # directory already exists, so let's copy +echo $PWD +ls $PWD/*.json > files.txt +ls $PWD/*.out >> files.txt +ls $PWD/*.err >> files.txt +echo "copy the following files to "${SCRATCH_DIR}/${USER}/ddtest +cat files.txt +ifdh cp -D $IFDH_OPTION *.json ${SCRATCH_DIR}/${USER}/ddtest +ifdh cp -D $IFDH_OPTION *.out ${SCRATCH_DIR}/${USER}/ddtest +ifdh cp -D $IFDH_OPTION *.err ${SCRATCH_DIR}/${USER}/ddtest +#fi +) echo "Site: $GLIDEIN_DUNESite" #>> ${logname}.out if [ $returncode -ne 0 ]; then exit $returncode fi -if $get_min_rse; then - output_rses=(`python -m get_rse ${CONDOR_DIR_INPUT}/rses.txt`) -fi -echo "output rses" -for rse in ${output_rses[@]}; do - echo $rse -done -#if [ $returncode -ne "0" ]; then -# echo "exiting"; -# exit; -#fi - -###Setting up rucio, uploading to RSEs -( -setup rucio -echo "PINGING" -rucio ping -echo "DONE PINGING" -setup metacat - -export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data -export 
DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune -export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune -export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app - -echo "Authenticating" #>> ${logname}.out 2>>${logname}.err -metacat auth login -m x509 ${METACATUSER} # >> ${logname}.out 2>>${logname}.err -date - -auth_return=$? -if [ $auth_return -ne 0 ]; then - echo "could not declare to metacat" - exit $auth_return -fi - -echo "whoami:" #>> ${logname}.out 2>>${logname}.err -metacat auth whoami #>> ${logname}.out 2>>${logname}.err -date -parents=`cat loaded_files.txt` - -echo $OUTPUT #>> ${logname}.out 2>>${logname}.err -#output_files=`ls *$OUTPUT` -shopt -s nullglob -for i in *$OUTPUT; do - FILESIZE=`stat -c%s $i` - echo 'filesize ' ${FILESIZE} #>> ${logname}.out 2>>${logname}.err - cat << EOF > ${i}.json - [ - { - "size": ${FILESIZE}, - "namespace": "${OUTPUTNAMESPACE}", - "name": "${i}", - "metadata": { - "DUNE.campaign": "dc4", - "core.file_format": "root" - }, - "parents": [ - $parents - ] - } - ] -EOF - - metacat file declare -j ${i}.json $OUTPUTNAMESPACE:$OUTPUTDATASET-data #>> ${logname}.out 2>>${logname}.err - date - returncode=$? - if [ $returncode -ne 0 ]; then - echo "could not declare to metacat" - exit $returncode - fi - - for rse in ${output_rses[@]}; do - echo "Uploading to $rse" - rucio -a dunepro upload --summary --scope $OUTPUTNAMESPACE --rse $rse $i #>> ${logname}.out 2>>${logname}.err - echo $? 
- done - - #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_US_FNAL_DISK_STAGE $i #>> ${logname}.out 2>>${logname}.err - #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS $i #>> ${logname}.out 2>>${logname}.err - rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-data $OUTPUTNAMESPACE:$i #>> ${logname}.out 2>>${logname}.err -done +#if $get_min_rse; then +# output_rses=(`python -m get_rse ${CONDOR_DIR_INPUT}/rses.txt`) +#fi +#echo "output rses" +#for rse in ${output_rses[@]}; do +# echo $rse +#done +##if [ $returncode -ne "0" ]; then +## echo "exiting"; +## exit; +##fi +# +####Setting up rucio, uploading to RSEs +#( +#setup rucio +#echo "PINGING" +#rucio ping +#echo "DONE PINGING" +#setup metacat +# +#export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +#export DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +#export METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +#export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app +# +#echo "Authenticating" #>> ${logname}.out 2>>${logname}.err +#metacat auth login -m x509 ${METACATUSER} # >> ${logname}.out 2>>${logname}.err +#date +# +#auth_return=$? 
+#if [ $auth_return -ne 0 ]; then +# echo "could not declare to metacat" +# exit $auth_return +#fi +# +#echo "whoami:" #>> ${logname}.out 2>>${logname}.err +#metacat auth whoami #>> ${logname}.out 2>>${logname}.err +#date +# +# +#parents=`cat loaded_files.txt` +# +#echo $OUTPUT #>> ${logname}.out 2>>${logname}.err +##output_files=`ls *$OUTPUT` +#shopt -s nullglob +#for i in *$OUTPUT; do +# FILESIZE=`stat -c%s $i` +# echo 'filesize ' ${FILESIZE} #>> ${logname}.out 2>>${logname}.err +# cat << EOF > ${i}.json +# [ +# { +# "size": ${FILESIZE}, +# "namespace": "${OUTPUTNAMESPACE}", +# "name": "${i}", +# "metadata": { +# "DUNE.campaign": "dc4", +# "core.file_format": "root" +# }, +# "parents": [ +# $parents +# ] +# } +# ] +#EOF +# +# metacat file declare -j ${i}.json $OUTPUTNAMESPACE:$OUTPUTDATASET-data #>> ${logname}.out 2>>${logname}.err +# date +# returncode=$? +# if [ $returncode -ne 0 ]; then +# echo "could not declare to metacat" +# exit $returncode +# fi +# +# for rse in ${output_rses[@]}; do +# echo "Uploading to $rse" +# rucio -a dunepro upload --summary --scope $OUTPUTNAMESPACE --rse $rse $i #>> ${logname}.out 2>>${logname}.err +# echo $? 
+# done +# +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_US_FNAL_DISK_STAGE $i #>> ${logname}.out 2>>${logname}.err +# #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS $i #>> ${logname}.out 2>>${logname}.err +# rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-data $OUTPUTNAMESPACE:$i #>> ${logname}.out 2>>${logname}.err +#done # FILESIZE=`stat -c%s ${logname}.out` # cat << EOF > ${logname}.out.json @@ -271,4 +290,4 @@ done # #rucio -a dunepro upload --scope $OUTPUTNAMESPACE --rse DUNE_CERN_EOS ${logname}.err # rucio -a dunepro attach $OUTPUTNAMESPACE:$OUTPUTDATASET-log $OUTPUTNAMESPACE:${logname}.err -) +#) From 332d5509f41b3d793b89099e983b6f0867884c78 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 5 Dec 2022 11:08:01 -0800 Subject: [PATCH 014/166] start workign on env vars on grid --- Loginator.py | 28 ++++++++++++---------------- byhand.cfg | 2 +- run_lar.py | 3 ++- submit_dd_jobs.py | 2 +- submit_test.sh | 3 ++- 5 files changed, 18 insertions(+), 20 deletions(-) diff --git a/Loginator.py b/Loginator.py index 4b23709..0e61e21 100644 --- a/Loginator.py +++ b/Loginator.py @@ -65,6 +65,12 @@ def __init__(self,logname): "project_id":0, "delivery_method":None } + + def envPrinter(self): + env = os.environ + for k in env: + print ("env ",k,"=",env[k]) + ## return the first tag or None in a line def findme(self,line): @@ -86,6 +92,9 @@ def getsysinfo(self): info["job_site"] = os.getenv("GLIDEIN_DUNESite") #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further return info + + def addsysinfo(self): + self.addinfo(self.getsysinfo()) ## read in the log file and parse it, add the info def readme(self): @@ -195,7 +204,8 @@ def addmetacatinfo(self,defaultNamespace=None): else: print ("no", item, "in ",list(meta["metadata"].keys())) self.outobject[f]["file_size"]=meta["size"] - self.outobject[f]["campaign"]=meta["DUNE.campaign"] + if "DUNE.campaign" in meta["metadata"]: + 
self.outobject[f]["campaign"]=meta["metadata"]["DUNE.campaign"] self.outobject[f]["fid"]=meta["fid"] self.outobject[f]["namespace"]=namespace @@ -252,27 +262,13 @@ def duration(self,start,end): t1 = self.human2number(end) return t1-t0 -def envScraper(): - env = os.environ - if "apple" in env["CLANGXX"]: - f = open("bigenv.txt") - env = {} - for a in f.readlines(): - line = a.split("=") - env[line[0]] = line[1] - digest = {} - for k in env.keys(): - if "SETUP_" in k: - it = env[k].split(" ") - digest[k] = {"Product":it[0],"Version":it[1]} - return digest def test(): parse = Loginator(sys.argv[1]) print ("looking at",sys.argv[1]) parse.readme() - parse.addinfo(parse.getsysinfo()) + parse.addsysinfo() # parse.addsaminfo() parse.addreplicainfo([]) parse.addmetacatinfo("dc4-hd-protodune") # argument is there for testing when you don't have replica list. diff --git a/byhand.cfg b/byhand.cfg index 8817a66..42d7dca 100644 --- a/byhand.cfg +++ b/byhand.cfg @@ -65,7 +65,7 @@ METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app [submit] G = %(group)s OS = SL7 -memory = 1999MB +memory = 3999MB expected-lifetime = 4h N = 1 resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED diff --git a/run_lar.py b/run_lar.py index db0e438..428e09b 100644 --- a/run_lar.py +++ b/run_lar.py @@ -346,12 +346,13 @@ def RunLAr(self, fcl, n, nskip): logparse = Loginator.Loginator(oname) # parse the log and find open./close/memory + logparse.envPrinter() logparse.readme() #logparse.addinfo(logparse.getinfo()) logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) - + logparse.addsysinfo() #deal with un unused_replicas = logparse.addreplicainfo(self.input_replicas) unused_replica_names = [] diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index af12e03..b941994 100644 --- a/submit_dd_jobs.py 
+++ b/submit_dd_jobs.py @@ -49,7 +49,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): parser.add_argument('--njobs', type=int, default=1) parser.add_argument('--load_limit', type=int, default=None) - parser.add_argument('--fcl', type=str, default='evd_protoDUNE.fcl') + parser.add_argument('--fcl', type=str, default='eventdump.fcl') parser.add_argument('--nevents', type=int, default=-1) parser.add_argument('--output_str', type=str, default='"*reco.root"') parser.add_argument('--output_dataset', type=str, default='dd-interactive-tests') diff --git a/submit_test.sh b/submit_test.sh index 6d05738..b63e2ad 100755 --- a/submit_test.sh +++ b/submit_test.sh @@ -1,5 +1,6 @@ # submission tests -python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=dump_evet.fcl --nevents=200 --load_limit=4 +source tarme.sh # make the tar file up-to-date +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=200 --load_limit=4 #--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] # [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] From 68d51fd3c466d7fdf40c7f61b9eea8c620ed97e9 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 5 Dec 2022 13:13:40 -0800 Subject: [PATCH 015/166] try to add in batch stuff --- Loginator.py | 38 +++++++++++++++++++++++++++++--------- byhand.cfg | 2 +- run_lar.py | 4 ++-- submit_dd_jobs.py | 20 +++++++++++++++++++- submit_test.sh | 2 +- 5 files changed, 52 insertions(+), 14 deletions(-) diff --git a/Loginator.py b/Loginator.py index 0e61e21..a46eeed 100644 --- a/Loginator.py +++ b/Loginator.py @@ -33,7 +33,7 @@ def __init__(self,logname): self.logfile = open(logname,'r') self.outobject ={} self.info = self.getsysinfo() - self.tags = ["Opened input file", "Closed input file","VmHWM","CPU"] + self.tags = ["Opened input file", 
"Closed input file","VmHWM","CPU","Events total"] self.template = { "source_rse":None, # "user":None, # (who's request is this) @@ -52,6 +52,7 @@ def __init__(self,logname): "data_tier":None, # (from metacat) "data_stream":None, "run_type":None, + "file_format":None, "job_node":None, # (name within the site) "job_site":None, # (name of the site) "country":None, # (nationality of the site) @@ -61,9 +62,12 @@ def __init__(self,logname): "access_method":None, #(samweb/dd) "path":None, "namespace":None, - "real_memory":None, + "job_real_memory":None, "project_id":0, - "delivery_method":None + "job_wall_time":None, + "job_cpu_time":None, + "job_total_events":None + } def envPrinter(self): @@ -79,6 +83,14 @@ def findme(self,line): if DEBUG: print (tag,line) return tag return None + + def getSafe(self,dict,envname): + if envname in dict: + if DEBUG: print ("found ",envname) + return dict[envname] + else: + return None + ## get system info for the full job def getsysinfo(self): @@ -88,8 +100,10 @@ def getsysinfo(self): info["user"]=os.getenv("GRID_USER") else: info["user"]=os.getenv("USER") - info["job_node"] = os.getenv("HOST") - info["job_site"] = os.getenv("GLIDEIN_DUNESite") + info["job_id"] = self.getSafe(os.environ,"JOBSUBJOBID") + info["job_node"] = self.getSafe(os.environ,"NODE_NAME") + #info["job_node"] = os.getenv("HOST") + info["job_site"] = os.getenv("SITE_NAME") #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further return info @@ -102,6 +116,7 @@ def readme(self): memdata = None cpudata = None walldata = None + totalevents = None for line in self.logfile: tag = self.findme(line) if DEBUG: print (tag,line) @@ -115,7 +130,11 @@ def readme(self): continue cpudata = timeline[3] walldata = timeline[6] - + if "Events total" in tag: + eventline = line.strip().split(" ") + if len(eventline) < 11: + continue + totalevents = eventline[4] if "file" in tag: data = line.split(tag) filefull = data[1].strip().replace('"','') @@ -151,9 +170,10 @@ def 
readme(self): #print ("mem",memdata,filename) # add the memory info if available for thing in object: - if memdata != None: object[thing]["real_memory"]=memdata + if memdata != None: object[thing]["job_real_memory"]=memdata if walldata != None: object[thing]["job_wall_time"]=walldata if cpudata != None: object[thing]["job_cpu_time"]=cpudata + if totalevents != None: object[thing]["job_total_events"]=totalevents #print ("mem",object[thing]["real_memory"]) self.outobject=object @@ -173,7 +193,7 @@ def addsaminfo(self): meta = samweb.getMetadata(f) self.outobject[f]["namespace"]="samweb" self.outobject[f]["access_method"]="samweb" - for item in ["data_tier","file_type","data_stream","group","file_size"]: + for item in ["data_tier","file_type","data_stream","group","file_size","file_format"]: self.outobject[f][item]=meta[item] for run in meta["runs"]: self.outobject[f]["run_type"] = run[2] @@ -198,7 +218,7 @@ def addmetacatinfo(self,defaultNamespace=None): print ("no metadata for",f) continue self.outobject[f]["access_method"]="metacat" - for item in ["data_tier","file_type","data_stream","run_type","event_count"]: + for item in ["data_tier","file_type","data_stream","run_type","event_count","file_format"]: if "core."+item in meta["metadata"].keys(): self.outobject[f][item]=meta["metadata"]["core."+item] else: diff --git a/byhand.cfg b/byhand.cfg index 42d7dca..8817a66 100644 --- a/byhand.cfg +++ b/byhand.cfg @@ -65,7 +65,7 @@ METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app [submit] G = %(group)s OS = SL7 -memory = 3999MB +memory = 1999MB expected-lifetime = 4h N = 1 resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED diff --git a/run_lar.py b/run_lar.py index 428e09b..48ecc09 100644 --- a/run_lar.py +++ b/run_lar.py @@ -305,8 +305,8 @@ def BuildFileListString(self): for j in self.loaded_files: replicas = list(j['replicas'].values()) if len(replicas) > 0: - #Get the first replica - replica = replicas[0] + #Get the last replica + replica 
= replicas[len(replicas)-1] self.input_replicas.append(replica) print('Replica:', replica) uri = replica['url'] diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index b941994..ae486f9 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -58,9 +58,27 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): parser.add_argument('--blacklist', type=str, nargs='+') parser.add_argument('--project', type=int, default=None) parser.add_argument('--dry_run', action='store_true') - + parser.add_argument('--appFamily', type=str) + parser.add_argument('--appVersion', type=str) + parser.add_argument('--appName', type=str) + args = parser.parse_args() + if args.appName == None: + appName = args.fcl.replace(".fcl","") + else: + appName = args.appName + + if args.appVersion == None: + appVersion = os.getenv("DUNESW_VERSION") + else: + appVersion = args.appVersion + + if args.appFamily == None: + appFamily = "LArSoft" + else: + appFamily = args.appFamily + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') dd_client = DataDispatcherClient( server_url='https://metacat.fnal.gov:9443/dune/dd/data', diff --git a/submit_test.sh b/submit_test.sh index b63e2ad..a81d113 100755 --- a/submit_test.sh +++ b/submit_test.sh @@ -1,6 +1,6 @@ # submission tests source tarme.sh # make the tar file up-to-date -python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=200 --load_limit=4 +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=2 --load_limit=4 --appFamily=LArSoft --appVersion=${DUNESW_VERSION} #--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] # [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] From fa67398890669bdace35d145127db291a66dce75 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 5 Dec 2022 
16:23:18 -0800 Subject: [PATCH 016/166] work on site issue --- Loginator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Loginator.py b/Loginator.py index a46eeed..c957323 100644 --- a/Loginator.py +++ b/Loginator.py @@ -103,7 +103,7 @@ def getsysinfo(self): info["job_id"] = self.getSafe(os.environ,"JOBSUBJOBID") info["job_node"] = self.getSafe(os.environ,"NODE_NAME") #info["job_node"] = os.getenv("HOST") - info["job_site"] = os.getenv("SITE_NAME") + info["job_site"] = os.getenv("GLIDEIN_DUNESite") #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further return info From 7ea61246d0c0c666cad37edd966bd24910233088 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 5 Dec 2022 16:26:15 -0800 Subject: [PATCH 017/166] add example json --- example.json | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 example.json diff --git a/example.json b/example.json new file mode 100644 index 0000000..e03fa78 --- /dev/null +++ b/example.json @@ -0,0 +1,39 @@ +{ + "source_rse": "fndca1.fnal.gov:1094", + "user": "schellma", + "job_id": "63733911.0@jobsub02.fnal.gov", + "timestamp_for_start": "06-Dec-2022 00:15:40 UTC", + "timestamp_for_end": "06-Dec-2022 00:15:47 UTC", + "duration": 0.0, + "file_size": 1627448050, + "application_family": null, + "application_name": null, + "application_version": null, + "final_state": "Closed", + "project_name": null, + "file_name": "np04_raw_run005141_0017_dl7_reco1_18126401_0_20210318T102233Z.root", + "fid": "52474922", + "data_tier": "full-reconstructed", + "data_stream": "physics", + "run_type": "protodune-sp", + "file_format": "artroot", + "job_node": "schellma-63733911-0-fnpc9106.fnal.gov", + "job_site": null, + "country": null, + "campaign": "PDSPProd4", + "delivery_method": "dd", + "workflow_method": "dd", + "access_method": "metacat", + "path": 
"root://fndca1.fnal.gov:1094//pnfs/fnal.gov/usr/dune//tape_backed/dunepro/protodune-sp/full-reconstructed/2021/detector/physics/PDSPProd4/00/00/51/41", + "namespace": "pdsp_det_reco", + "job_real_memory": "843.837", + "project_id": 125, + "job_wall_time": "5.868609", + "job_cpu_time": "3.974521", + "job_total_events": "2", + "deliver_method": "xroot", + "dd_worker_id": "cb904d15", + "rse": "FNAL_DCACHE", + "file_type": "detector", + "event_count": 42 +} \ No newline at end of file From 6d442a7e57100aea56cc2d4b284563b7e0d07579 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Tue, 6 Dec 2022 14:02:46 -0800 Subject: [PATCH 018/166] try to abstract LArWrapper to work with WFS and sam --- LarWrapper.py | 0 Loginator.py | 76 +++++++++++++++++++++++++++++++++++---------------- run_lar.py | 25 +++++++++++++++++ 3 files changed, 77 insertions(+), 24 deletions(-) create mode 100644 LarWrapper.py diff --git a/LarWrapper.py b/LarWrapper.py new file mode 100644 index 0000000..e69de29 diff --git a/Loginator.py b/Loginator.py index c957323..ee2cc76 100644 --- a/Loginator.py +++ b/Loginator.py @@ -35,41 +35,45 @@ def __init__(self,logname): self.info = self.getsysinfo() self.tags = ["Opened input file", "Closed input file","VmHWM","CPU","Events total"] self.template = { - "source_rse":None, # + # job attributes "user":None, # (who's request is this) "job_id":None, # (jobsubXXX03@fnal.gov) + "job_node":None, # (name within the site) + "job_site":None, # (name of the site) + "country":None, # (nationality of the site) + "job_real_memory":None, + "job_wall_time":None, + "job_cpu_time":None, + "job_total_events":None, + # processing attributes + "project_id":0, + "delivery_method":None, #(stream/copy) + "workflow_method":None, + "access_method":None, #(samweb/dd) "timestamp_for_start":None, # "timestamp_for_end":None, # - "duration":None, # (difference between end and start) - "file_size":None, # "application_family":None, # "application_name":None, # "application_version":None, # 
"final_state":None, # (what happened?) "project_name":None, #(wkf request_id?)" + "duration":None, # (difference between end and start) + "path":None, + "rse":None, + # file attributes from metacat + "file_size":None, # + "file_type":None, # "file_name":None, # (not including the metacat namespace) "fid":None, # metacat fid "data_tier":None, # (from metacat) "data_stream":None, "run_type":None, "file_format":None, - "job_node":None, # (name within the site) - "job_site":None, # (name of the site) - "country":None, # (nationality of the site) - "campaign":None, # (DUNE campaign) - "delivery_method":None, #(stream/copy) - "workflow_method":None, - "access_method":None, #(samweb/dd) - "path":None, + "file_campaign":None, # (DUNE campaign) "namespace":None, - "job_real_memory":None, - "project_id":0, - "job_wall_time":None, - "job_cpu_time":None, - "job_total_events":None - + "event_count":None } - + def envPrinter(self): env = os.environ for k in env: @@ -83,14 +87,14 @@ def findme(self,line): if DEBUG: print (tag,line) return tag return None - + def getSafe(self,dict,envname): if envname in dict: if DEBUG: print ("found ",envname) return dict[envname] else: return None - + ## get system info for the full job def getsysinfo(self): @@ -106,7 +110,7 @@ def getsysinfo(self): info["job_site"] = os.getenv("GLIDEIN_DUNESite") #info["POMSINFO"] = os.getenv("poms_data") # need to parse this further return info - + def addsysinfo(self): self.addinfo(self.getsysinfo()) @@ -155,8 +159,8 @@ def readme(self): if "root" in filepath[0:10]: if DEBUG: print ("I am root") tmp = filepath.split("//") - localobject["source_rse"] = tmp[1] - localobject["deliver_method"] = "xroot" + localobject["rse"] = tmp[1] + localobject["delivery_method"] = "xroot" for thing in self.info: localobject[thing] = self.info[thing] localobject["final_state"] = "Opened" @@ -225,7 +229,7 @@ def addmetacatinfo(self,defaultNamespace=None): print ("no", item, "in ",list(meta["metadata"].keys())) 
self.outobject[f]["file_size"]=meta["size"] if "DUNE.campaign" in meta["metadata"]: - self.outobject[f]["campaign"]=meta["metadata"]["DUNE.campaign"] + self.outobject[f]["file_campaign"]=meta["metadata"]["DUNE.campaign"] self.outobject[f]["fid"]=meta["fid"] self.outobject[f]["namespace"]=namespace @@ -250,6 +254,30 @@ def addreplicainfo(self,replicas,test=False): notfound.append(r) return notfound + + def findmissingfiles(self,files): + notfound = [] + + for r in files: + found = False + if ":" in r: + s = r.split(":") + name = s[1] + namespace = s[0] + else: + name = r + namespace = "samweb" + + for f in self.outobject: + if f == name: + if DEBUG: print ("file match",r) + found = True + self.outobject[f]["namespace"] = namespace + if not found: + print (r,"appears in replicas but not in Lar Log, need to mark as unused") + notfound.append(r) + + return notfound # def metacatinfo(self,namespace,filename): diff --git a/run_lar.py b/run_lar.py index 48ecc09..2ed2f7d 100644 --- a/run_lar.py +++ b/run_lar.py @@ -57,7 +57,32 @@ def inner1(*args, **kwargs): sys.exit(1) return result return inner1 + +class LArWrapper: + def __init__(self,fcl=None,flist=[],n=None,nskip=None,o=None,appFamily=None, appName=None, appVersion=None, deliveryMethod=None, workflowMethod=None,projectId=None): + oname = fname.replace(".root",".out").replace("%tc",stamp) + ename = fname.replace(".root",".err").replace("%tc",stamp) + ofile = open(oname,'w') + efile = open(ename,'w') + proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, flist, n, nskip), shell=True, stdout=ofile,stderr=efile) + ofile.close() + efile.close() + # get log info, match with replicas + logparse = Loginator.Loginator(oname) + + # parse the log and find open./close/memory + logparse.envPrinter() + logparse.readme() + + #logparse.addinfo(logparse.getinfo()) + logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":appFamily,"application_name":appName, + 
"application_version":self.Version,"delivery_method":deliveryMethod,"workflow_method":workflowMethod,"project_id":projectId}) + logparse.addsysinfo() + #deal with un + unused_replicas = logparse.addreplicainfo(self.input_replicas) + return unused_replicas + class DDInterface: def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5, appFamily=None, appName=None, appVersion=None): self.dataset = "" #dataset From 24f727053e5573c09c3db2a9969e1b768caf4063 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Tue, 6 Dec 2022 15:58:23 -0800 Subject: [PATCH 019/166] separate Lar part into LArWrapper.py which should be sam compatible --- other_test.sh | 2 +- run_lar.py | 104 +++++++++++++++++++++++--------------------------- 2 files changed, 48 insertions(+), 58 deletions(-) diff --git a/other_test.sh b/other_test.sh index ec6675f..e552b7e 100644 --- a/other_test.sh +++ b/other_test.sh @@ -1,5 +1,5 @@ # run an interactive test HMS 12-2-2022 #python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 -python run_interactive.py --project=83 --query_limit 100 --load_limit 4 --fcl eventdump.fcl --user schellma -n 300 +python run_interactive.py --project=82 --query_limit 100 --load_limit 4 --fcl eventdump.fcl --user schellma -n 3 #metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt diff --git a/run_lar.py b/run_lar.py index 2ed2f7d..de9099a 100644 --- a/run_lar.py +++ b/run_lar.py @@ -9,8 +9,11 @@ from data_dispatcher.api import DataDispatcherClient from data_dispatcher.api import APIError +import LArWrapper import Loginator +TEST = True + # make a string out of none for formatted Printing def NoneToString(thing): if thing == None: @@ -58,30 +61,7 @@ def inner1(*args, 
**kwargs): return result return inner1 -class LArWrapper: - def __init__(self,fcl=None,flist=[],n=None,nskip=None,o=None,appFamily=None, appName=None, appVersion=None, deliveryMethod=None, workflowMethod=None,projectId=None): - oname = fname.replace(".root",".out").replace("%tc",stamp) - ename = fname.replace(".root",".err").replace("%tc",stamp) - ofile = open(oname,'w') - efile = open(ename,'w') - proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, flist, n, nskip), shell=True, stdout=ofile,stderr=efile) - ofile.close() - efile.close() - # get log info, match with replicas - logparse = Loginator.Loginator(oname) - - # parse the log and find open./close/memory - logparse.envPrinter() - logparse.readme() - - #logparse.addinfo(logparse.getinfo()) - logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":appFamily,"application_name":appName, - "application_version":self.Version,"delivery_method":deliveryMethod,"workflow_method":workflowMethod,"project_id":projectId}) - logparse.addsysinfo() - #deal with un - unused_replicas = logparse.addreplicainfo(self.input_replicas) - return unused_replicas class DDInterface: def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5, appFamily=None, appName=None, appVersion=None): @@ -356,47 +336,57 @@ def RunLAr(self, fcl, n, nskip): else: cluster = '0' process = '0' - ## TODO -- make options for capturing output - stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") - fname = "runLar_%s_%%tc_%s_%s_reco.root"%(self.proj_id, cluster, process) - oname = fname.replace(".root",".out").replace("%tc",stamp) - ename = fname.replace(".root",".err").replace("%tc",stamp) - ofile = open(oname,'w') - efile = open(ename,'w') - proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) - ofile.close() - efile.close() - - # get log info, match with replicas - logparse = 
Loginator.Loginator(oname) - - # parse the log and find open./close/memory - logparse.envPrinter() - logparse.readme() - - #logparse.addinfo(logparse.getinfo()) - logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, - "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) - logparse.addsysinfo() + + unused_files = [] + if TEST: # new interface that does not talk to dd + lar = LArWrapper.LArWrapper(fcl=fcl, replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, formatString="runLar_%s_%%tc_%s_%s_reco.root") + returncode = lar.DoLAr(cluster, process) + unused_files = lar.LArResults() + else: # old interace that has more detail exposed. + ## TODO -- make options for capturing output + stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") + fname = "runLar_%s_%%tc_%s_%s_reco.root"%(self.proj_id, cluster, process) + oname = fname.replace(".root",".out").replace("%tc",stamp) + ename = fname.replace(".root",".err").replace("%tc",stamp) + ofile = open(oname,'w') + efile = open(ename,'w') + proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) + returncode = proc.returncode + ofile.close() + efile.close() + + # get log info, match with replicas + logparse = Loginator.Loginator(oname) + + # parse the log and find open./close/memory + logparse.envPrinter() + logparse.readme() + + #logparse.addinfo(logparse.getinfo()) + logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, + "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) + logparse.addsysinfo() #deal with un - 
unused_replicas = logparse.addreplicainfo(self.input_replicas) - unused_replica_names = [] - for u in unused_replicas: - unused_replica_names.append(u["namespace"]+":"+u["name"]) - logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info - print ("replicas not used",unused_replica_names) + unused_replicas = logparse.addreplicainfo(self.input_replicas) + + + unused_files = [] + for u in unused_replicas: + unused_files.append(u["namespace"]+":"+u["name"]) + logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info + print ("replicas not used",unused_files) - # write out json files for processed files whether closed properly or not. Those never opened don't get logged. - logparse.writeme() + # write out json files for processed files whether closed properly or not. Those never opened don't get logged. + logparse.writeme() # make all files as bad if job crashed - if proc.returncode != 0: + if returncode != 0: self.MarkFiles(True) - print ("LAr returned", proc.returncode) - return proc.returncode + print ("LAr returned", returncode) + return returncode # else go through files and mark the ones closed in the logfile as good - self.MarkFiles(False,unused_replica_names) + self.MarkFiles(False,unused_files) self.SaveFileDIDs() From 249c46a4f78bc6d995e940fd7a7b3b40c6f6bb02 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Wed, 7 Dec 2022 07:10:25 -0800 Subject: [PATCH 020/166] LArWrapper works --- run_lar.py | 10 +++++----- submit_test.sh | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/run_lar.py b/run_lar.py index de9099a..066db9c 100644 --- a/run_lar.py +++ b/run_lar.py @@ -60,9 +60,9 @@ def inner1(*args, **kwargs): sys.exit(1) return result return inner1 - - + + class DDInterface: def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5, appFamily=None, appName=None, appVersion=None): self.dataset = "" #dataset @@ -336,7 +336,7 @@ def RunLAr(self, fcl, n, 
nskip): else: cluster = '0' process = '0' - + unused_files = [] if TEST: # new interface that does not talk to dd lar = LArWrapper.LArWrapper(fcl=fcl, replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, formatString="runLar_%s_%%tc_%s_%s_reco.root") @@ -368,8 +368,8 @@ def RunLAr(self, fcl, n, nskip): logparse.addsysinfo() #deal with un unused_replicas = logparse.addreplicainfo(self.input_replicas) - - + + unused_files = [] for u in unused_replicas: unused_files.append(u["namespace"]+":"+u["name"]) diff --git a/submit_test.sh b/submit_test.sh index a81d113..995b0fb 100755 --- a/submit_test.sh +++ b/submit_test.sh @@ -1,6 +1,6 @@ # submission tests source tarme.sh # make the tar file up-to-date -python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=2 --load_limit=4 --appFamily=LArSoft --appVersion=${DUNESW_VERSION} +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=400 --load_limit=4 --appFamily=LArSoft --appVersion=${DUNESW_VERSION} #--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] # [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] From 4da3962358d1eeedae349a1a747f0dbf15e80f31 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Wed, 7 Dec 2022 11:16:19 -0800 Subject: [PATCH 021/166] try to get git to recognize new files --- LarWrapper.py | 86 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/LarWrapper.py b/LarWrapper.py index e69de29..47bd93f 100644 --- a/LarWrapper.py +++ b/LarWrapper.py @@ -0,0 +1,86 @@ +"""! 
@brief Art logfile parser """ +## +# @mainpage LArWrapper.py +# +# @section description_main A wrapper for lar that parses logs and finds unprocessed files. +# +# Copyright (c) 2022 Heidi Schellman, Oregon State University +## +# @file LArWrapper.py + + +from argparse import ArgumentParser as ap +import sys +import os +import subprocess +import time +import datetime +import requests +import Loginator + +class LArWrapper: + def __init__(self,fcl=None,replicas=None,flist="",n=None,nskip=None,o=None,appFamily=None, appName=None, appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, formatString="runLar_%s_%%tc_%s_%s_reco.root"): + self.fcl = fcl + self.flist = flist + self.n = n + self.nskip = nskip + self.o = os + self.appFamily = appFamily + self.appVersion = appVersion + self.appName = appName + self.deliveryMethod = deliveryMethod + self.workflowMethod = workflowMethod + self.oname = None + self.ename = None + self.formatString = formatString + self.replicas = replicas + self.flist = flist + self.returncode = None + self.projectID = projectID + + def DoLAr(self,cluster=0,process=0): + stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") + fname = self.formatString%(self.projectID, cluster, process) + self.oname = fname.replace(".root",".out").replace("%tc",stamp) + self.ename = fname.replace(".root",".err").replace("%tc",stamp) + ofile = open(self.oname,'w') + efile = open(self.ename,'w') + cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) + print ("cmd = ",cmd) + proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) + self.returncode = proc.returncode + ofile.close() + efile.close() + return self.returncode + + def LArResults(self): + # get log info, match with replicas + logparse = Loginator.Loginator(self.oname) + + logparse.readme() # get info from the logfile + + info = {"application_family":self.appFamily,"application_name":self.appName, 
"application_version":self.appVersion,"delivery_method":self.deliveryMethod,"workflow_method":self.workflowMethod} + + if self.deliveryMethod == "dd": + + info["dd_worker_id"]=os.environ["MYWORKERID"] + info["project_id"]=self.projectID + unused_replicas = logparse.addreplicainfo(self.replicas) + unused_files = [] + for u in unused_replicas: + unused_files.append(u["namespace"]+":"+u["name"]) + logparse.addmetacatinfo() + print ("files not used",unused_files) + elif deliverMethod == "samweb": + unused_files = logparse.findmissingfiles(self.files) + logparse.addsaminfo() + + + logparse.addinfo(info) + logparse.addsysinfo() + + # write out json files for processed files whether closed properly or not. Those never opened don't get logged. + logparse.writeme() + return unused_files + + From 2c0c3f8cbbc91a41ee709a248f9edbbb3992f8f6 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Wed, 7 Dec 2022 13:08:31 -0800 Subject: [PATCH 022/166] get case right on LArWrapper.py --- LarWrapper.py => LArWrapper.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename LarWrapper.py => LArWrapper.py (100%) diff --git a/LarWrapper.py b/LArWrapper.py similarity index 100% rename from LarWrapper.py rename to LArWrapper.py From f60c018d62db5dcf25d3fecb92473e35f1223cf2 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Wed, 7 Dec 2022 19:51:24 -0800 Subject: [PATCH 023/166] start on samweb interface --- LArWrapper.py | 41 +++++++++++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/LArWrapper.py b/LArWrapper.py index 47bd93f..720006d 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -19,7 +19,9 @@ import Loginator class LArWrapper: - def __init__(self,fcl=None,replicas=None,flist="",n=None,nskip=None,o=None,appFamily=None, appName=None, appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, formatString="runLar_%s_%%tc_%s_%s_reco.root"): + def 
__init__(self,fcl=None,replicas=None,flist="",n=None,nskip=0,o=None,appFamily=None, appName=None, + appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, sam_web_uri=None,workerID=None, + formatString="runLar_%s_%%tc_%s_%s_reco.root"): self.fcl = fcl self.flist = flist self.n = n @@ -37,7 +39,9 @@ def __init__(self,fcl=None,replicas=None,flist="",n=None,nskip=None,o=None,appFa self.flist = flist self.returncode = None self.projectID = projectID - + self.sam_web_uri = sam_web_uri + self.workerID = workerID + def DoLAr(self,cluster=0,process=0): stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") fname = self.formatString%(self.projectID, cluster, process) @@ -45,24 +49,35 @@ def DoLAr(self,cluster=0,process=0): self.ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(self.oname,'w') efile = open(self.ename,'w') - cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) - print ("cmd = ",cmd) - proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) + if self.deliveryMethod == "dd": + cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) + print ("cmd = ",cmd) + proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) + elif self.deliveryMethod == "samweb": + lar_cmd = ("lar -c%s" % self.fcl) + (" -n%i"%self.n) + " -T temp.root" +\ + (" --sam-web-uri=%s"%self.sam_web_uri) + (" --sam-process-id=%s"%self.workerID) + \ + (" --sam-application-family=%s"%self.appFamily) + (" --sam-application-version=%s"%self.appVersion) + print (lar_cmd) + proc = subprocess.run(lar_cmd, stdout=ofile) + else: # assume it's something like interactive + cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) + print ("cmd = ",cmd) + proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) self.returncode = proc.returncode ofile.close() efile.close() return self.returncode - + def 
LArResults(self): # get log info, match with replicas logparse = Loginator.Loginator(self.oname) logparse.readme() # get info from the logfile - + info = {"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":self.deliveryMethod,"workflow_method":self.workflowMethod} - + if self.deliveryMethod == "dd": - + info["dd_worker_id"]=os.environ["MYWORKERID"] info["project_id"]=self.projectID unused_replicas = logparse.addreplicainfo(self.replicas) @@ -71,16 +86,14 @@ def LArResults(self): unused_files.append(u["namespace"]+":"+u["name"]) logparse.addmetacatinfo() print ("files not used",unused_files) - elif deliverMethod == "samweb": + elif deliveryMethod == "samweb": unused_files = logparse.findmissingfiles(self.files) logparse.addsaminfo() - - + + logparse.addinfo(info) logparse.addsysinfo() # write out json files for processed files whether closed properly or not. Those never opened don't get logged. logparse.writeme() return unused_files - - From 8f5b522a6719ec58e2df2c47258cc2b8d4a2a895 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Thu, 8 Dec 2022 13:45:00 -0800 Subject: [PATCH 024/166] work on a command line interface for LArWrapper --- LArWrapper.py | 66 ++++++++++-- Loginator.py | 4 +- dd_test.sh | 6 ++ gitadd.sh | 0 interactive_test.sh | 0 new_test.sh | 6 ++ other_test.sh | 0 rejoin.sh | 0 run_lar.py | 3 +- setup_hms.sh | 0 tarme.sh | 0 test.fcl | 243 ++++++++++++++++++++++++++++++++++++++++++++ wfs_test.sh | 0 13 files changed, 315 insertions(+), 13 deletions(-) create mode 100644 dd_test.sh mode change 100644 => 100755 gitadd.sh mode change 100644 => 100755 interactive_test.sh create mode 100644 new_test.sh mode change 100644 => 100755 other_test.sh mode change 100644 => 100755 rejoin.sh mode change 100644 => 100755 setup_hms.sh mode change 100644 => 100755 tarme.sh create mode 100644 test.fcl create mode 100644 wfs_test.sh diff --git a/LArWrapper.py b/LArWrapper.py index 
720006d..bac6924 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -19,14 +19,14 @@ import Loginator class LArWrapper: - def __init__(self,fcl=None,replicas=None,flist="",n=None,nskip=0,o=None,appFamily=None, appName=None, - appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, sam_web_uri=None,workerID=None, - formatString="runLar_%s_%%tc_%s_%s_reco.root"): + def __init__(self,fcl=None,replicas=None,flist="",o="temp.root",n=None,nskip=0,appFamily=None, appName=None, + appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, sam_web_uri=None,processID=None,processHASH=None, + formatString="runLar_%s_%%tc_%s_%s_%s.root", dataTier="sam-user", dataStream="test"): self.fcl = fcl self.flist = flist self.n = n self.nskip = nskip - self.o = os + self.o = o self.appFamily = appFamily self.appVersion = appVersion self.appName = appName @@ -40,22 +40,31 @@ def __init__(self,fcl=None,replicas=None,flist="",n=None,nskip=0,o=None,appFamil self.returncode = None self.projectID = projectID self.sam_web_uri = sam_web_uri - self.workerID = workerID + self.processID = processID + self.processHASH = processHASH + self.dataTier = dataTier + self.dataStream = dataStream + + if self.formatString == None: + formatString = "process_%s_%%tc_%s_%s_%s.root" def DoLAr(self,cluster=0,process=0): + print ("reading",self.flist) stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") - fname = self.formatString%(self.projectID, cluster, process) + print (self.formatString) + print (self.projectID, cluster, process, self.fcl.replace(".fcl","")) + fname = self.formatString%(self.projectID, cluster, process, self.fcl.replace(".fcl","")) self.oname = fname.replace(".root",".out").replace("%tc",stamp) self.ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(self.oname,'w') efile = open(self.ename,'w') if self.deliveryMethod == "dd": - cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) + cmd = 
'lar -c %s -s %s -n %i --nskip %i -o %s --sam-data-tier %s --sam-stream-name %s'%(self.fcl, self.flist, self.n, self.nskip, self.o, self.dataTier, self.dataStream) print ("cmd = ",cmd) proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) elif self.deliveryMethod == "samweb": lar_cmd = ("lar -c%s" % self.fcl) + (" -n%i"%self.n) + " -T temp.root" +\ - (" --sam-web-uri=%s"%self.sam_web_uri) + (" --sam-process-id=%s"%self.workerID) + \ + (" --sam-web-uri=%s"%self.sam_web_uri) + (" --sam-process-id=%s"%self.processID) + \ (" --sam-application-family=%s"%self.appFamily) + (" --sam-application-version=%s"%self.appVersion) print (lar_cmd) proc = subprocess.run(lar_cmd, stdout=ofile) @@ -78,9 +87,12 @@ def LArResults(self): if self.deliveryMethod == "dd": - info["dd_worker_id"]=os.environ["MYWORKERID"] + info["dd_worker_id"]=self.processHASH info["project_id"]=self.projectID - unused_replicas = logparse.addreplicainfo(self.replicas) + if self.replicas != None: + unused_replicas = logparse.addreplicainfo(self.replicas) + else: + unused_replicas = [] unused_files = [] for u in unused_replicas: unused_files.append(u["namespace"]+":"+u["name"]) @@ -97,3 +109,37 @@ def LArResults(self): # write out json files for processed files whether closed properly or not. Those never opened don't get logged. 
logparse.writeme() return unused_files + +if __name__ == "__main__": + parser = ap() + parser.add_argument('--delivery_method',required=True, type=str, help='["samweb","dd","wfs"]') + parser.add_argument('--workflow_method',type=str, help='["samweb","dd","wfs"]') + parser.add_argument('--processHASH',type=str, default="", help='string code generated by dd for worker') + parser.add_argument('--processID',type=int, default=0, help='processID generated by samweb') + parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') + parser.add_argument('--appFamily', type=str, help='samweb needs this') + parser.add_argument('--appVersion', type=str, help='samweb needs this') + parser.add_argument('--dataTier', type=str, help='data tier for output file if only one') + parser.add_argument('--dataStream', type=str, help='data stream for output file if only one') + parser.add_argument('-o', default="temp.root", type=str, help='output root file') + parser.add_argument('-c', required=True, type=str, help='name of fcl file') + parser.add_argument('--user', type=str, help='user name') + parser.add_argument('--projectID', type=int, default=0, help='integer that identifies the project, samweb/dd/wfs') + parser.add_argument('--timeout', type=int, default=120) + parser.add_argument('--wait_time', type=int, default=120) + parser.add_argument('--wait_limit', type=int, default=5) + parser.add_argument('-n', type=int, default=-1, help='number of events total to process') + parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') + parser.add_argument('--formatString', type = str, default='runLar_%s_%%tc_%s_%s_%s.root',help='format string used by LarWrapper for logs') + args = parser.parse_args() + + print (args) + + if args.processHASH == None and "MYWORKERID" in os.environ: + args.processHASH = os.environ("MYWORKERID") + lar = LArWrapper(fcl=args.c, n=args.n, nskip = args.nskip, appFamily=args.appFamily, + 
appVersion=os.getenv("DUNESW_VERSION"), deliveryMethod=args.delivery_method, workflowMethod=args.workflow_method, + processID = args.processID, processHASH = args.processHASH, projectID=args.projectID, sam_web_uri = args.sam_web_uri, formatString=args.formatString) + returncode = lar.DoLAr(0, args.processID) + unused_files = lar.LArResults() + sys.exit(returncode) diff --git a/Loginator.py b/Loginator.py index ee2cc76..08c95c2 100644 --- a/Loginator.py +++ b/Loginator.py @@ -33,7 +33,7 @@ def __init__(self,logname): self.logfile = open(logname,'r') self.outobject ={} self.info = self.getsysinfo() - self.tags = ["Opened input file", "Closed input file","VmHWM","CPU","Events total"] + self.tags = ["Opened input file", "Closed input file","MemReport VmPeak","CPU","Events total"] self.template = { # job attributes "user":None, # (who's request is this) @@ -126,7 +126,7 @@ def readme(self): if DEBUG: print (tag,line) if tag == None: continue - if "VmHWM" == tag: + if "MemReport VmPeak" == tag: memdata = line.split("VmHWM = ")[1].strip() if "CPU" == tag: timeline = line.strip().split(" ") diff --git a/dd_test.sh b/dd_test.sh new file mode 100644 index 0000000..487f5f6 --- /dev/null +++ b/dd_test.sh @@ -0,0 +1,6 @@ +export PROJECTID=82 +export PROCESSHASH="2e434568" +dd project create files from schellma:run5141recentReco limit 100 +python LArWrapper.py --delivery_method=dd --processHASH=$PROCESSHASH\ + --processID=0 -c dumpevent.fcl\ + --user=$USER --projectID=$PROJECTID diff --git a/gitadd.sh b/gitadd.sh old mode 100644 new mode 100755 diff --git a/interactive_test.sh b/interactive_test.sh old mode 100644 new mode 100755 diff --git a/new_test.sh b/new_test.sh new file mode 100644 index 0000000..1e8dc9a --- /dev/null +++ b/new_test.sh @@ -0,0 +1,6 @@ +# run an interactive test HMS 12-2-2022 +#python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma 
--appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 +python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 4 --fcl test.fcl --user schellma -n 3 + +#metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt + diff --git a/other_test.sh b/other_test.sh old mode 100644 new mode 100755 diff --git a/rejoin.sh b/rejoin.sh old mode 100644 new mode 100755 diff --git a/run_lar.py b/run_lar.py index 066db9c..1f5eebb 100644 --- a/run_lar.py +++ b/run_lar.py @@ -228,6 +228,7 @@ def LoadFiles(self): else: ##we successfully got a file (at least nominally). Check that it has replicas available. ##If it doesn't, compromise it to a permament end + print ("got a file",datetime.datetime.now()) if len(self.next_replicas) > 0: self.loaded_files.append(self.next_output) count += 1 @@ -339,7 +340,7 @@ def RunLAr(self, fcl, n, nskip): unused_files = [] if TEST: # new interface that does not talk to dd - lar = LArWrapper.LArWrapper(fcl=fcl, replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, formatString="runLar_%s_%%tc_%s_%s_reco.root") + lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, formatString="runLar_%s_%%tc_%s_%s_%s.root") returncode = lar.DoLAr(cluster, process) unused_files = lar.LArResults() else: # old interace that has more detail exposed. 
diff --git a/setup_hms.sh b/setup_hms.sh old mode 100644 new mode 100755 diff --git a/tarme.sh b/tarme.sh old mode 100644 new mode 100755 diff --git a/test.fcl b/test.fcl new file mode 100644 index 0000000..3d70283 --- /dev/null +++ b/test.fcl @@ -0,0 +1,243 @@ +#include "PDSPAnalyzer.fcl" +#include "services_dune.fcl" +#include "ProtoDUNETruthBeamFilter.fcl" +#include "protoDUNE_reco_mc_prolog.fcl" + + +process_name: PDSPAna + +services: +{ + # Load the service that manages root files for histograms. + TFileService: { fileName: "pduneana.root" } + TimeTracker: {} + MemoryTracker: {} + RandomNumberGenerator: {} #ART native random number generator + message: @local::dune_message_services_prod_debug + FileCatalogMetadata: @local::art_file_catalog_mc + @table::protodune_services + PdspChannelMapService: @local::pdspchannelmap + ChannelStatusService: @local::pdsp_channel_status + BackTrackerService: { + BackTracker: { + SimChannelModuleLabel: "tpcrawdecoder:simpleSC" + G4ModuleLabel: "largeant" + MinimumHitEnergyFraction: 1e-1 + } + } + FileCatalogMetadataDUNE: { } + TFileMetadataDUNE: @local::dune_tfile_metadata +} +services.message.destinations.LogStandardOut.threshold: "INFO" +services.SpaceCharge.EnableSimEfieldSCE: true +services.SpaceCharge.EnableSimSpatialSCE: true +services.SpaceCharge.EnableSimulationSCE: true +#services.Geometry: @local::protodunev6_geo +#services.TFileMetadataDUNE.JSONFileName: "%ifb_pdsp_ntuple_test.root.json" +services.TFileMetadataDUNE.dataTier: "root-tuple-virtual" + +source: +{ + module_type: RootInput + maxEvents: -1 + fileNames: ["input_file.root"] +} + +physics: +{ + + producers: { + @table::protoDUNE_reco_mc_stage1_producers + } + + filters: { + fTruth: @local::beam_filter + } + + fpath: [fTruth] + + analyzers: + { + pduneana: @local::pdspanalysis + } + + ana: [ pduneana ] + + #stream1: [ out1 ] + + end_paths: [ ana ] + +} + +outputs: {} +physics.filters.fTruth.PDG: [211, -13, 2212] +physics.analyzers.pduneana.SelectEvents: 
["fpath"] +physics.analyzers.pduneana.CalorimetryTagSCE: "pandoracalinoxyzt" +physics.analyzers.pduneana.CalorimetryTagNoSCE: "pandoracalonosce" ##For no-SCE sample +physics.analyzers.pduneana.Pandora2CaloSCE: "pandora2calinoxyzt" +#physics.analyzers.pduneana.HitTag: "hitpdune" +physics.analyzers.pduneana.Verbose: false +physics.analyzers.pduneana.DoReweight: false +#physics.analyzers.pduneana.ParameterSet: [ +# { +# Cut: "inel" +# Name: "fQE1" +# Range: [0., 500.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# { +# Cut: "inel" +# Name: "fQE2" +# Range: [500., 2000.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# +# { +# Cut: "abs" +# Name: "fAbs1" +# Range: [0., 500.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# { +# Cut: "abs" +# Name: "fAbs2" +# Range: [500., 2000.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# +# { +# Cut: "cex" +# Name: "fCex1" +# Range: [0., 400.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# { +# Cut: "cex" +# Name: "fCex2" +# Range: [400., 800.] +# Nominal: 1.0 +# Sigma: .2 +# }, +# { +# Cut: "cex" +# Name: "fCex3" +# Range: [800., 2000.] +# Nominal: 1.0 +# Sigma: .2 +# } +#] +physics.analyzers.pduneana.ParameterSet: [ + { + Cut: "inel" + Name: "fQE1" + Range: [0., 500.] + Nominal: 1.0 + Sigma: .2 + }, + { + Cut: "inel" + Name: "fQE2" + Range: [500., 2000.] + Nominal: 1.0 + Sigma: .2 + }, + + { + Cut: "abs" + Name: "fAbs1" + Range: [0., 500.] + Nominal: 1.0 + Sigma: .2 + }, + { + Cut: "abs" + Name: "fAbs2" + Range: [500., 2000.] + Nominal: 1.0 + Sigma: .2 + }, + + { + Cut: "cex" + Name: "fCex1" + Range: [0., 400.] + Nominal: 1.0 + Sigma: .2 + }, + { + Cut: "cex" + Name: "fCex2" + Range: [400., 800.] + Nominal: 1.0 + Sigma: .2 + }, + { + Cut: "cex" + Name: "fCex3" + Range: [800., 2000.] + Nominal: 1.0 + Sigma: .2 + }, + + #{ + # Cut: "cex" + # Name: "fCex4" + # Range: [0., 600.] + # Nominal: 1.0 + # Sigma: .2 + #}, + #{ + # Cut: "cex" + # Name: "fCex5" + # Range: [600., 2000.] + # Nominal: 1.0 + # Sigma: .2 + #}, + #{ + # Cut: "abs" + # Name: "fAbs3" + # Range: [0., 600.] 
+ # Nominal: 1.0 + # Sigma: .2 + #}, + #{ + # Cut: "abs" + # Name: "fAbs4" + # Range: [600., 2000.] + # Nominal: 1.0 + # Sigma: .2 + #}, + { + Cut: "reac" + Name: "fReac1" + Range: [0., 600.] + Nominal: 1.0 + Sigma: .2 + }, + { + Cut: "reac" + Name: "fReac2" + Range: [600., 2000.] + Nominal: 1.0 + Sigma: .2 + } #, +# { +# Cut: "reac" +# Name: "fReacFull" +# Range: [0., 2000.] +# Nominal: 1.0 +# Sigma: .2 +# } +] + +#physics.reco: [ emtrkmichelid ] +physics.analyzers.pduneana.DoProtReweight: false +physics.analyzers.pduneana.GetTrackMichel: false +physics.analyzers.pduneana.CalibrationParsSCE: @local::CalorimetryParameters_SPProd4_MC_SCE #_eLT +physics.analyzers.pduneana.CalibrationParsNoSCE: @local::CalorimetryParameters_SPProd4_MC_SCE #_eLT +services.ParticleInventoryService.ParticleInventory.EveIdCalculator: "EmEveIdCalculator" diff --git a/wfs_test.sh b/wfs_test.sh new file mode 100644 index 0000000..e69de29 From ca355419c4d3fd08dd8323f6fbeb8e48c463ff28 Mon Sep 17 00:00:00 2001 From: Heidi Schellman <33669005+hschellman@users.noreply.github.com> Date: Thu, 8 Dec 2022 13:59:02 -0800 Subject: [PATCH 025/166] Update README.md --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index dc484d1..92040ac 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,10 @@ # DataChallengeWork-loginator +- the main code are in LArWrapper.py and Loginator.py. LArWrapper actually wraps the call to LAr and should work with several access methods while Loginator produces an file-by-file summary for monitoring. -- `setup_hms.sh` # sets it up +- `setup_hms.sh` # sets it up, needs to be edited to make it work with your /tmp/..509.. file -- `./interactive_test.sh` # runs a test +- `./interactive_test.sh` # runs a test against the dd + +- ./LArWrapper.py should be command line compatible but hasn't been fully tested. 
From c0ce1dd283972d0590b2b0a493b6d77ae5e9c3bf Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Fri, 9 Dec 2022 15:45:25 -0800 Subject: [PATCH 026/166] fix the missing fcl file problem in batch --- LArWrapper.py | 6 ++- ddconfig.cfg | 123 ++++++++++++++++++++++++++++++++++++++++++++++ run_lar.py | 8 +-- submit_dd_jobs.py | 10 ++-- submit_test.sh | 2 +- tarme.sh | 2 +- top_script.sh | 58 ++++++++++++++++++---- 7 files changed, 190 insertions(+), 19 deletions(-) create mode 100644 ddconfig.cfg diff --git a/LArWrapper.py b/LArWrapper.py index bac6924..808296d 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -47,13 +47,15 @@ def __init__(self,fcl=None,replicas=None,flist="",o="temp.root",n=None,nskip=0,a if self.formatString == None: formatString = "process_%s_%%tc_%s_%s_%s.root" + def DoLAr(self,cluster=0,process=0): + print ("check fcl",self.fcl,os.path.exists(self.fcl)) print ("reading",self.flist) stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") print (self.formatString) - print (self.projectID, cluster, process, self.fcl.replace(".fcl","")) - fname = self.formatString%(self.projectID, cluster, process, self.fcl.replace(".fcl","")) + print (self.projectID, cluster, process, os.path.basename(self.fcl).replace(".fcl","")) + fname = self.formatString%(self.projectID, cluster, process, os.path.basename(self.fcl).replace(".fcl","")) self.oname = fname.replace(".root",".out").replace("%tc",stamp) self.ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(self.oname,'w') diff --git a/ddconfig.cfg b/ddconfig.cfg new file mode 100644 index 0000000..7c6557c --- /dev/null +++ b/ddconfig.cfg @@ -0,0 +1,123 @@ +[global] +group = dune +experiment = dune +wrapper = file:///${FIFE_UTILS_DIR}/libexec/fife_wrap +#wrapper = /dune/app/users/$USER/new_fife_utils/fife_utils/v3_5_0/NULL/libexec/fife_wrap +output_dir = /pnfs/dune/scratch/users/$USER/dd_mc_test/ +extra_dir = ./ +input_namespace = pdsp_det_reco +fcl = eventdump.fcl +load_limit = 1 +project = 
317 +nevents = -1 +output_str = "*reco.root" +output_dataset = dd-interactive-test-data +output_namespace = dc4-interactive-tests +metacat_user = $USER +dd_tar=$USER_8_22_22_dd +mc_tar=$USER_8_15_22_mc + +[executable] +#name = ./top_script.sh +name = \\\${CONDOR_DIR_INPUT}/top_script.sh +arg_1 = --namespace +arg_2 = %(input_namespace)s +arg_3 = --fcl +arg_4 = %(fcl)s +arg_5 = --load_limit +arg_6 = %(load_limit)s +arg_7 = --user +arg_8 = $USER +arg_9 = --project +arg_10 = %(project)s +arg_11 = -n +arg_12 = %(nevents)s +arg_13 = --output +arg_14 = %(output_str)s +arg_15 = --output_dataset +arg_16 = %(output_dataset)s +arg_17 = --output_namespace +arg_18 = %(output_namespace)s +arg_19 = --metacat_user +arg_20 = %(metacat_user)s +arg_21 = --appFamily +arg_22 = %(appFamily)s +arg_23 = --appName +arg_24 = %(appName)s +arg_25 = --appVersion +arg_26 = %(appVersion)s + +[stage] +lines_1 '+FERMIHTC_AutoRelease=True' +lines_2 '+FERMIHTC_GraceMemory=1024' +lines_3 '+FERMIHTC_GraceLifetime=3600' + +[env_pass] +IFDH_DEBUG=1 +IFDH_CP_MAXRETRIES=3 +XRD_CONNECTIONRETRY=32 +XRD_REQUESTTIMEOUT=14400 +XRD_REDIRECTLIMIT=255 +XRD_LOADBALANCERTTL=7200 +XRD_STREAMTIMEOUT=7200 +DD_TAR = %(dd_tar)s +MC_TAR = %(mc_tar)s +DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data +DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune +METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune +METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app + + +[submit] +G = %(group)s +OS = SL7 +memory = 1999MB +expected-lifetime = 4h +N = 1 +resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED +#resource-provides = usage_model=OPPORTUNISTIC,DEDICATED +c = "has_avx==True" +lines_1 = '+FERMIHTC_AutoRelease=True' +lines_2 = '+FERMIHTC_GraceMemory=2048' +lines_3 = '+FERMIHTC_GraceLifetime=3600' +lines_4 = '+SingularityImage=\"/cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest\"' +append_condor_requirements_1 = '(TARGET.HAS_Singularity==true)' 
+append_condor_requirements_2 = 'TARGET.HAS_CVMFS_fifeuser1_opensciencegrid_org==true' +append_condor_requirements_3 = 'TARGET.HAS_CVMFS_fifeuser2_opensciencegrid_org==true' +append_condor_requirements_4 = 'TARGET.HAS_CVMFS_fifeuser3_opensciencegrid_org==true' +append_condor_requirements_5 = 'TARGET.HAS_CVMFS_fifeuser4_opensciencegrid_org==true' +append_condor_requirements_6 = 'TARGET.HAS_CVMFS_dune_opensciencegrid_org==true' +append_condor_requirements_7 = 'TARGET.HAS_CVMFS_larsoft_opensciencegrid_org==true' +append_condor_requirements_8 = 'TARGET.CVMFS_dune_opensciencegrid_org_REVISION>=1105' +#f_0 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(dd_tar)s.tar +#f_1 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(mc_tar)s.tar +f_0 = dropbox:///dune/data/users/$USER/loginator.tar + +#[job_output] +#addoutput = *err +#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} +# +#[job_output_1] +#addoutput = *out +#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} + +#[data_dispatcher] +#dataset = dc4:dc4 +#namespace = dc4-hd-protodune +#query_limit = 9 +#load_limit = 3 +#wait_time = 45 +#wait_limit = 4 +#timeout = 50 + +[job_setup] +source_1 = /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh +#source_2 = ${CONDOR_DIR_INPUT}/%(mc_tar)s/canned_client_setup.sh +#source_3 = ${CONDOR_DIR_INPUT}/%(dd_tar)s/canned_client_setup.sh +#setup = dunesw v09_55_01d00 -q e20:prof +prescript_0 = echo lsing +prescript_1 = ls ${CONDOR_DIR_INPUT} +#prescript_1 = ls ${CONDOR_DIR_INPUT}/$USER_8*/ +#prescript_4 = echo $PATH +#prescript_5 = echo $PYTHONPATH +#prescript_6 = ls ${CONDOR_DIR_INPUT}/%(dd_tar)s/data_dispatcher diff --git a/run_lar.py b/run_lar.py index 1f5eebb..53426bb 100644 --- a/run_lar.py +++ b/run_lar.py @@ -96,6 +96,8 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.appVersion = appVersion self.retry_time = 600 + + if TEST: print ("DDInterface args:",args) #try: # from data_dispatcher.api 
import DataDispatcherClient @@ -326,7 +328,7 @@ def BuildFileListString(self): self.dd_client.file_failed(self.proj_id, '%s:%s'%(j['namespace'], j['name'])) print(datetime.datetime.now()) - def RunLAr(self, fcl, n, nskip): + def RunLAr(self, fcl=None, n=-1, nskip=0): if len(self.loaded_files) == 0: print('No files loaded with data dispatcher. Exiting gracefully') return @@ -337,7 +339,7 @@ def RunLAr(self, fcl, n, nskip): else: cluster = '0' process = '0' - + print ("RunLAr called with ",fcl,n,nskip) unused_files = [] if TEST: # new interface that does not talk to dd lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, formatString="runLar_%s_%%tc_%s_%s_%s.root") @@ -423,4 +425,4 @@ def RunLAr(self, fcl, n, nskip): dd_interface.AttachProject(args.project) dd_interface.LoadFiles() dd_interface.BuildFileListString() - code = dd_interface.RunLAr(args.fcl, args.n, args.nskip) + code = dd_interface.RunLAr(fcl=args.fcl, n=args.n, nskip=args.nskip) diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index ae486f9..c10f19d 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -128,7 +128,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): print(njobs) count = 0 for nj in njobs: - cmd = 'fife_launch -c byhand.cfg ' \ + cmd = 'fife_launch -c ddconfig.cfg ' \ f'-Oglobal.load_limit={args.load_limit} ' \ f'-Oglobal.project={dd_proj_id} ' \ f'-Oglobal.nevents={args.nevents} ' \ @@ -136,7 +136,11 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): f'-Oglobal.output_dataset={args.output_dataset} ' \ f'-Oglobal.output_namespace={args.output_namespace} ' \ f'-Osubmit.N={nj} ' \ - f'-Oglobal.metacat_user={args.metacat_user} ' + f'-Oglobal.metacat_user={args.metacat_user} '\ + f'-Oglobal.appFamily={args.appFamily} '\ 
+ f'-Oglobal.appName={args.appName} '\ + f'-Oglobal.appVersion={args.appVersion} '\ + f'-Oglobal.fcl={args.fcl} ' if args.blacklist: cs_blacklist = ','.join(args.blacklist) @@ -144,7 +148,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): if args.dry_run: cmd += '--dry_run ' - print(cmd) + print("submit command:",cmd) #cmd2 = ('fife_launch -c byhand.cfg ' # '-Oglobal.load_limit=%i ' # '-Oglobal.project=%s ' diff --git a/submit_test.sh b/submit_test.sh index 995b0fb..0bc924e 100755 --- a/submit_test.sh +++ b/submit_test.sh @@ -1,6 +1,6 @@ # submission tests source tarme.sh # make the tar file up-to-date -python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=100 --fcl=eventdump.fcl --nevents=400 --load_limit=4 --appFamily=LArSoft --appVersion=${DUNESW_VERSION} +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=10 --fcl=test.fcl --nevents=400 --load_limit=4 --appFamily=LArSoft --appName=pdsp_det_reco --appVersion=${DUNESW_VERSION} #--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] # [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] diff --git a/tarme.sh b/tarme.sh index 2091aed..d09f667 100755 --- a/tarme.sh +++ b/tarme.sh @@ -1 +1 @@ -tar -cvf ../loginator.tar *.sh *.cfg *.py +tar -cvf ../loginator.tar *.sh *.cfg *.py *.fcl diff --git a/top_script.sh b/top_script.sh index a90ff1f..b5d9e50 100755 --- a/top_script.sh +++ b/top_script.sh @@ -83,6 +83,21 @@ while [[ $# -gt 0 ]]; do shift shift ;; + --appFamily) + APPFAMILY=$2 + shift + shift + ;; + --appName) + APPNAME=$2 + shift + shift + ;; + --appVersion) + APPVERSION=$2 + shift + shift + ;; --rse) output_rses+=($2) shift @@ -124,6 +139,22 @@ sleep $sleeptime export MYWORKERID=`ddisp worker id -n` echo "workerid: ${MYWORKERID}" +echo "I am in directory" ${PWD} + +ls -lrt + +echo "the input directory" + +ls -lrt 
$CONDOR_DIR_INPUT + +export FHICL_FILE_PATH=${CONDOR_DIR_INPUT}:${FHICL_FILE_PATH} + +echo "try to really mean it about the FCL since putting it in the path doesn't seem to do it for me" + +cp ${CONDOR_DIR_INPUT}/$FCL . + +echo "I will now run run_lar with fcl file " $FCL + python -m run_lar \ --namespace $NAMESPACE \ --fcl $FCL \ @@ -131,6 +162,9 @@ python -m run_lar \ --load_limit $LOADLIMIT \ --user $USER \ -n $N \ + --appFamily $APPFAMILY \ + --appName $APPNAME \ + --appVersion $APPVERSION \ #--nskip $nskip \ #> ${logname}.out 2>${logname}.err @@ -139,21 +173,27 @@ returncode=$? echo "Return code: " $returncode export SCRATCH_DIR=/pnfs/dune/scratch/users #setup ifdh -ifdh ls ${SCRATCH_DIR}/${USER}/ddtest +export OUTDIR=${SCRATCH_DIR}/${USER}/ddtest/${PROJECT} + +ifdh mkdir_p ${OUTDIR} +export IFDH_DEBUG=0 #if [ $? -ne 0 &&- z "$IFDH_OPTION"]; then # echo "Unable to read ${SCRATCH_DIR}/${USER}/ddtest make sure that you have created this directory and given it group write permission." # exit 74 #else # directory already exists, so let's copy -echo $PWD -ls $PWD/*.json > files.txt -ls $PWD/*.out >> files.txt -ls $PWD/*.err >> files.txt -echo "copy the following files to "${SCRATCH_DIR}/${USER}/ddtest +ls -lrt +env > env.txt +echo ${OUTDIR} +ls > files.txt + +echo "copy the following files to "${OUTDIR} cat files.txt -ifdh cp -D $IFDH_OPTION *.json ${SCRATCH_DIR}/${USER}/ddtest -ifdh cp -D $IFDH_OPTION *.out ${SCRATCH_DIR}/${USER}/ddtest -ifdh cp -D $IFDH_OPTION *.err ${SCRATCH_DIR}/${USER}/ddtest +ifdh cp -D $IFDH_OPTION *.json ${OUTDIR} +ifdh cp -D $IFDH_OPTION *.txt ${OUTDIR} +ifdh cp -D $IFDH_OPTION *.out ${OUTDIR} +ifdh cp -D $IFDH_OPTION *.err ${OUTDIR} +ifdh cp -D $IFDH_OPTION *.fcl ${OUTDIR} #fi ) echo "Site: $GLIDEIN_DUNESite" #>> ${logname}.out From ac063668312577f0f131aa8b3cbe96ef9a3f9c97 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 12 Dec 2022 15:19:52 -0800 Subject: [PATCH 027/166] add in samweb tester --- LArWrapper.py | 21 ++--- 
Loginator.py | 10 +-- analysis/test2.py | 196 ++++++++++++++++++++++++++++++++++++++++++++++ samtest.py | 51 ++++++++++++ submit_many.sh | 8 ++ 5 files changed, 271 insertions(+), 15 deletions(-) create mode 100644 analysis/test2.py create mode 100644 samtest.py create mode 100755 submit_many.sh diff --git a/LArWrapper.py b/LArWrapper.py index 808296d..b63aa35 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -20,8 +20,8 @@ class LArWrapper: def __init__(self,fcl=None,replicas=None,flist="",o="temp.root",n=None,nskip=0,appFamily=None, appName=None, - appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, sam_web_uri=None,processID=None,processHASH=None, - formatString="runLar_%s_%%tc_%s_%s_%s.root", dataTier="sam-user", dataStream="test"): + appVersion=None, deliveryMethod=None, workflowMethod=None, projectID=None, sam_web_uri=None,processID=None,\ + processHASH=None,formatString="runLar_%s_%s_%%tc_%s_%s_%s.root", dataTier="sam-user", dataStream="test"): self.fcl = fcl self.flist = flist self.n = n @@ -46,8 +46,8 @@ def __init__(self,fcl=None,replicas=None,flist="",o="temp.root",n=None,nskip=0,a self.dataStream = dataStream if self.formatString == None: - formatString = "process_%s_%%tc_%s_%s_%s.root" - + formatString = "process_%s_%s_%%tc_%s_%s_%s.root" + def DoLAr(self,cluster=0,process=0): print ("check fcl",self.fcl,os.path.exists(self.fcl)) @@ -55,7 +55,7 @@ def DoLAr(self,cluster=0,process=0): stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") print (self.formatString) print (self.projectID, cluster, process, os.path.basename(self.fcl).replace(".fcl","")) - fname = self.formatString%(self.projectID, cluster, process, os.path.basename(self.fcl).replace(".fcl","")) + fname = self.formatString%(self.deliveryMethod,self.projectID, cluster, process, os.path.basename(self.fcl).replace(".fcl","")) self.oname = fname.replace(".root",".out").replace("%tc",stamp) self.ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = 
open(self.oname,'w') @@ -65,11 +65,12 @@ def DoLAr(self,cluster=0,process=0): print ("cmd = ",cmd) proc = subprocess.run(cmd, shell=True, stdout=ofile,stderr=efile) elif self.deliveryMethod == "samweb": - lar_cmd = ("lar -c%s" % self.fcl) + (" -n%i"%self.n) + " -T temp.root" +\ - (" --sam-web-uri=%s"%self.sam_web_uri) + (" --sam-process-id=%s"%self.processID) + \ - (" --sam-application-family=%s"%self.appFamily) + (" --sam-application-version=%s"%self.appVersion) + lar_cmd = 'lar -c %s -n %i --sam-web-uri=%s --sam-process-id=%s --sam-application-family=%s \ + --sam-application-version=%s --sam-data-tier %s --sam-stream-name %s'%(self.fcl,\ + self.n,self.sam_web_uri,self.processID,self.appFamily,self.appVersion,self.dataTier,self.dataStream) print (lar_cmd) - proc = subprocess.run(lar_cmd, stdout=ofile) + proc = subprocess.run(lar_cmd,shell=True, stdout=ofile,stderr=efile) + else: # assume it's something like interactive cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s'%(self.fcl, self.flist, self.n, self.nskip, fname) print ("cmd = ",cmd) @@ -100,7 +101,7 @@ def LArResults(self): unused_files.append(u["namespace"]+":"+u["name"]) logparse.addmetacatinfo() print ("files not used",unused_files) - elif deliveryMethod == "samweb": + elif self.deliveryMethod == "samweb": unused_files = logparse.findmissingfiles(self.files) logparse.addsaminfo() diff --git a/Loginator.py b/Loginator.py index 08c95c2..05d8fd0 100644 --- a/Loginator.py +++ b/Loginator.py @@ -47,9 +47,9 @@ def __init__(self,logname): "job_total_events":None, # processing attributes "project_id":0, - "delivery_method":None, #(stream/copy) + "delivery_method":None, #(samweb/dd/wfs) "workflow_method":None, - "access_method":None, #(samweb/dd) + "access_method":None, #(stream/copy) "timestamp_for_start":None, # "timestamp_for_end":None, # "application_family":None, # @@ -160,7 +160,7 @@ def readme(self): if DEBUG: print ("I am root") tmp = filepath.split("//") localobject["rse"] = tmp[1] - 
localobject["delivery_method"] = "xroot" + localobject["access_method"] = "xroot" for thing in self.info: localobject[thing] = self.info[thing] localobject["final_state"] = "Opened" @@ -196,7 +196,7 @@ def addsaminfo(self): if DEBUG: print ("f ",f) meta = samweb.getMetadata(f) self.outobject[f]["namespace"]="samweb" - self.outobject[f]["access_method"]="samweb" + self.outobject[f]["delivery_method"]="samweb" for item in ["data_tier","file_type","data_stream","group","file_size","file_format"]: self.outobject[f][item]=meta[item] for run in meta["runs"]: @@ -221,7 +221,7 @@ def addmetacatinfo(self,defaultNamespace=None): if meta == None: print ("no metadata for",f) continue - self.outobject[f]["access_method"]="metacat" + self.outobject[f]["delivery_method"]="dd" for item in ["data_tier","file_type","data_stream","run_type","event_count","file_format"]: if "core."+item in meta["metadata"].keys(): self.outobject[f][item]=meta["metadata"]["core."+item] diff --git a/analysis/test2.py b/analysis/test2.py new file mode 100644 index 0000000..7a847d6 --- /dev/null +++ b/analysis/test2.py @@ -0,0 +1,196 @@ +import ROOT +from PlotUtils import MnvH1D,MnvLatErrorBand,MnvVertErrorBand,MnvH2D +from ROOT import TFile, TH1D,gROOT,gStyle,TColor,TCanvas,TPad,TMatrixD,TH2D + +interesting = ["ppfx"] +#interesting = ["Muon Energy Rec."] +max =12 + +lownu = False +maxuniv = 1000 +gROOT.Reset(); +gStyle.SetOptStat(""); +gStyle.SetOptFit(111) +gStyle.SetLineWidth(2) +#gROOT.SetStyle("Plain"); +gStyle.SetLabelSize(0.04,"x"); +gStyle.SetLabelSize(0.04,"y"); +#gStyle.SetTitleFont(90); + +#gStyle.SetPalette(69) +gStyle.SetPalette(69) + +gStyle.SetPadColor(0); +gStyle.SetPadBorderMode(0); + +gStyle.SetCanvasColor(0); +gStyle.SetCanvasBorderMode(0); + +gStyle.SetFrameBorderMode(0); +gStyle.SetPadTickY(1) +gStyle.SetPadTickX(1) + + +gStyle.SetLegendFillColor(0); + + +t = TCanvas() +pad = t.GetPad(0) +pad.SetBottomMargin(0.15) +pad.SetLeftMargin(0.15) + +tone = 255. 
+#red = array('d',[ 180./tone, 190./tone, 209./tone, 223./tone, 204./tone, 228./tone, 205./tone, 152./tone, 91./tone]) +#green = array('d',[ 93./tone, 125./tone, 147./tone, 172./tone, 181./tone, 224./tone, 233./tone, 198./tone, 158./tone]) +#blue = array('d',[ 236./tone, 218./tone, 160./tone, 133./tone, 114./tone, 132./tone, 162./tone, 220./tone, 218./tone]) +#stops = array('d',[ 0.00, 0.05,0.1,0.2,0.3,0.4,0.7,0.8,1.0]) +#TColor.CreateGradientColorTable(9, stops, red, green, blue, 255, 0.5); + + +POTRAT = 8.923e19/1.59e20 +header = "/minerva/data/users/schellmh/" +tail = "/cross_sections/eroica/cross_sections_muonpz_muonpt_lowangleqelike_minerva.root"; +m111="bigrun_more_v35_mec1_phil1_rpa1_2017-08-29_2002_qelikelo" + +filename = header+m111+tail +file = TFile(filename,"readonly") + +file.Print() + +interesting = ["ppfx"] + +mc = {} +data = {} + +#data[0] = MnvH2D() +data[0] = file.Get("cross_sections_muonpt_muonpz_data") +#data.Print("ALL") +bigcovmx = TMatrixD(96,96) +bigcovmx = data[0].GetTotalErrorMatrix(True,False) + +mc[0] = MnvH2D(file.Get("cross_sections_muonpt_muonpz_mc")) + +mcfile = file +datafile = file + +vertnames = [] +universes = {} +universenames = [] +datahist={} +mchist = {} +covmx = {} +dataname = "cross_sections_muonpt_muonpz_data" +mcname = "cross_sections_muonpt_muonpz_mc" + +for bin in range(0,1): + entry = "%s"%(bin+1) + + mc[bin] = mcfile.Get(mcname) + mc[bin].Scale(POTRAT) + mc[bin].GetXaxis().SetRange(1,max) + data[bin] = datafile.Get(dataname) + data[bin].GetXaxis().SetRange(1,max) + covmx[bin] = data[bin].GetTotalErrorMatrix(True,False) + datahist[bin] = data[bin].GetCVHistoWithStatError().Clone() + + mc[bin].Print() + mchist[bin] = mc[bin].GetCVHistoWithStatError().Clone() + datahist[bin].Divide(mchist[bin]) + #data[bin].Print() + universes[bin]={} + names = mc[bin].GetVertErrorBandNames() + + for name in names: + print name + if name not in interesting: + continue + nhists = mc[bin].GetVertErrorBand(name).GetNHists() + for hists in 
range(0,nhists): + if hists > maxuniv: continue + rename = "%s_%d"%(name,hists) + #print "vert",rename + universes[bin][rename]=mc[bin].GetVertErrorBand(name).GetHist(hists) + universes[bin][rename].GetXaxis().SetRange(1,max) + universes[bin][rename].Divide(mchist[bin]) + universenames.append(rename) + names = mc[bin].GetLatErrorBandNames() + for name in names: + print name + if name not in interesting: + continue + + nhists = mc[bin].GetLatErrorBand(name).GetNHists() + for hists in range(0,nhists): + if hists > maxuniv: continue + rename = "%s_%d"%(name,hists) + #print "lat",rename + universes[bin][rename]=mc[bin].GetLatErrorBand(name).GetHist(hists) + universenames.append(rename) + universes[bin][rename].GetXaxis().SetRange(1,max) + universes[bin][rename].Divide(mchist[bin]) +# have used it to normalize can now make it 1 as well + mchist[bin].Divide(mchist[bin]) +#vertnames = mc[0].GetVertErrorBandNames() + + +#print data +datahist[0].SetTitle(mc[0].GetTitle()+";Beam Energy;data/central value mc") +datahist[0].SetMinimum(0.5) +datahist[0].SetMaximum(2.0) +datahist[0].Draw("PE") + +covmx[0].Print() + +#mchist[0].SetTitle(mc[0].GetTitle()+";Beam Energy;data/mc") +mchist[0].Draw("SAME HIST") +mchist[0].Print("ALL") + +universenames = universes[0].keys() +print universenames +i = 1 +for name in universes[0].keys(): + + print "try ",name + i = i+1 + if "ppfx" in name: + universes[0][name].SetMarkerColor(2) + if "Muon" in name: + universes[0][name].SetMarkerColor(3) + universes[0][name].SetMarkerStyle(22) + universes[0][name].Draw("SAME PEX0 L HIST") +# print universes[0][name].Print() + +datahist[0].Draw("SAME") +mchist[0].Draw("SAME HIST") + +save = "save" + +if save == "save": + outname = "flux_study_"+interesting[0] + if lownu: + outname = outname+"_lowNu" + outname = outname+".pdf" + outname = outname.replace(" ","_") + pad.Print(outname) + pad.Update() + pad.Draw() + ROOT.gROOT.SaveContext() + + + + + + + +#--------------- end of the body of the program ------- + 
+## wait for input to keep the GUI (which lives on a ROOT event dispatcher) alive +if __name__ == '__main__': + rep = '' + while not rep in [ 'q', 'Q' ]: + rep = raw_input( 'enter "q" to quit: ' ) + if 1 < len(rep): + rep = rep[0] + + + diff --git a/samtest.py b/samtest.py new file mode 100644 index 0000000..7768025 --- /dev/null +++ b/samtest.py @@ -0,0 +1,51 @@ +import os,sys +import samweb_client +import LArWrapper +samweb = samweb_client.SAMWebClient(experiment='dune') + +TEST = True +def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appName="test", appVersion=None, fcl="./eventdump.fcl",method="samweb"): + appVersion = os.environ["DUNESW_VERSION"] + projectname = samweb.makeProjectName(defname) + projectinfo = samweb.startProject(projectname, defname) + print (projectinfo) + projecturl = projectinfo["projectURL"] + print ("Project name is %s" % projectinfo["project"]) + print ("Project URL is %s" % projecturl) + info = samweb.projectSummary(projecturl) + print (info) + projectID = info["project_id"] + deliveryLocation = None # set this to a specific hostname if you want - default is the local hostname + + cpid = samweb.startProcess(projecturl, appFamily, appName, appVersion, deliveryLocation) + print ("Consumer process id %s" % cpid) + processurl = samweb.makeProcessUrl(projecturl, cpid) + try: + larW = LArWrapper.LArWrapper(fcl=fcl,appFamily=appFamily,appName=appName,\ + appVersion=appVersion,deliveryMethod="samweb",workflowMethod="interactive",\ + sam_web_uri=projecturl,processID=cpid,projectID=projectID,\ + dataTier="out1:sam-user",dataStream="out1:test",n=200) + retcode = larW.DoLAr(0,0) + list = larW.LArResults() + print ("return code",retcode) + except: + print ("LArWrapper failed, clean up") + +# while True: +# try: +# newfile = samweb.getNextFile(processurl)['url'] +# print "Got file %s" % newfile +# except samweb_client.NoMoreFiles: +# print "No more files available" +# break +# +# samweb.releaseFile(processurl, newfile) +# 
print "Released file %s" % newfile + + samweb.stopProject(projecturl) + print (samweb.projectSummaryText(projecturl)) + print ("Project ended") + +if __name__ == '__main__': + + testProject() diff --git a/submit_many.sh b/submit_many.sh new file mode 100755 index 0000000..1d4d7a1 --- /dev/null +++ b/submit_many.sh @@ -0,0 +1,8 @@ +# submission tests +source tarme.sh # make the tar file up-to-date +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=30 --fcl=test.fcl --njobs=50 --nevents=20 --load_limit=2 --appFamily=LArSoft --appName=test --appVersion=${DUNESW_VERSION} + +#--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] +# [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] +# [--output_str OUTPUT_STR] [--output_dataset OUTPUT_DATASET] [--output_namespace OUTPUT_NAMESPACE] +# [--metacat_user METACAT_USER] [--blacklist BLACKLIST [BLACKLIST ...]] [--project PROJECT] [--dry_run] From a32064b2c5852972344f6bd70c753e01fee5c92f Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Mon, 12 Dec 2022 18:42:39 -0800 Subject: [PATCH 028/166] get final sam stuff in --- LArWrapper.py | 18 ++++++++++++------ Loginator.py | 13 +++++++------ samtest.py | 2 +- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/LArWrapper.py b/LArWrapper.py index b63aa35..8313849 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -60,6 +60,7 @@ def DoLAr(self,cluster=0,process=0): self.ename = fname.replace(".root",".err").replace("%tc",stamp) ofile = open(self.oname,'w') efile = open(self.ename,'w') + if self.deliveryMethod == "dd": cmd = 'lar -c %s -s %s -n %i --nskip %i -o %s --sam-data-tier %s --sam-stream-name %s'%(self.fcl, self.flist, self.n, self.nskip, self.o, self.dataTier, self.dataStream) print ("cmd = ",cmd) @@ -86,12 +87,12 @@ def LArResults(self): logparse.readme() # get info from the logfile - info = 
{"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":self.deliveryMethod,"workflow_method":self.workflowMethod} - + info = {"application_family":self.appFamily,"application_name":self.appName, "application_version":self.appVersion,"delivery_method":self.deliveryMethod,"workflow_method":self.workflowMethod,"project_id":self.projectID} + print ("delivery",self.deliveryMethod) if self.deliveryMethod == "dd": info["dd_worker_id"]=self.processHASH - info["project_id"]=self.projectID + if self.replicas != None: unused_replicas = logparse.addreplicainfo(self.replicas) else: @@ -102,9 +103,13 @@ def LArResults(self): logparse.addmetacatinfo() print ("files not used",unused_files) elif self.deliveryMethod == "samweb": - unused_files = logparse.findmissingfiles(self.files) + #unused_files = logparse.findmissingfiles(self.files) + info["process_id"]=self.processID logparse.addsaminfo() + else: + print ("unknown delivery mechanism") + logparse.addinfo(info) logparse.addsysinfo() @@ -121,6 +126,7 @@ def LArResults(self): parser.add_argument('--processID',type=int, default=0, help='processID generated by samweb') parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') parser.add_argument('--appFamily', type=str, help='samweb needs this') + parser.add_argument('--appName', type=str, help='samweb needs this') parser.add_argument('--appVersion', type=str, help='samweb needs this') parser.add_argument('--dataTier', type=str, help='data tier for output file if only one') parser.add_argument('--dataStream', type=str, help='data stream for output file if only one') @@ -133,14 +139,14 @@ def LArResults(self): parser.add_argument('--wait_limit', type=int, default=5) parser.add_argument('-n', type=int, default=-1, help='number of events total to process') parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') - 
parser.add_argument('--formatString', type = str, default='runLar_%s_%%tc_%s_%s_%s.root',help='format string used by LarWrapper for logs') + parser.add_argument('--formatString', type = str, default='runLar_%s_%s_%%tc_%s_%s_%s.root',help='format string used by LarWrapper for logs') args = parser.parse_args() print (args) if args.processHASH == None and "MYWORKERID" in os.environ: args.processHASH = os.environ("MYWORKERID") - lar = LArWrapper(fcl=args.c, n=args.n, nskip = args.nskip, appFamily=args.appFamily, + lar = LArWrapper(fcl=args.c, n=args.n, nskip = args.nskip, appFamily=args.appFamily,appName=args.appName, appVersion=os.getenv("DUNESW_VERSION"), deliveryMethod=args.delivery_method, workflowMethod=args.workflow_method, processID = args.processID, processHASH = args.processHASH, projectID=args.projectID, sam_web_uri = args.sam_web_uri, formatString=args.formatString) returncode = lar.DoLAr(0, args.processID) diff --git a/Loginator.py b/Loginator.py index 05d8fd0..01620b5 100644 --- a/Loginator.py +++ b/Loginator.py @@ -21,7 +21,7 @@ #from dateutil import parser -DEBUG=False +DEBUG=True class Loginator: @@ -197,7 +197,7 @@ def addsaminfo(self): meta = samweb.getMetadata(f) self.outobject[f]["namespace"]="samweb" self.outobject[f]["delivery_method"]="samweb" - for item in ["data_tier","file_type","data_stream","group","file_size","file_format"]: + for item in ["event_count","data_tier","file_type","data_stream","file_size","file_format"]: self.outobject[f][item]=meta[item] for run in meta["runs"]: self.outobject[f]["run_type"] = run[2] @@ -254,10 +254,10 @@ def addreplicainfo(self,replicas,test=False): notfound.append(r) return notfound - + def findmissingfiles(self,files): notfound = [] - + for r in files: found = False if ":" in r: @@ -267,7 +267,7 @@ def findmissingfiles(self,files): else: name = r namespace = "samweb" - + for f in self.outobject: if f == name: if DEBUG: print ("file match",r) @@ -319,7 +319,8 @@ def test(): parse.addsysinfo() # 
parse.addsaminfo() parse.addreplicainfo([]) - parse.addmetacatinfo("dc4-hd-protodune") # argument is there for testing when you don't have replica list. + parse.addsaminfo() + #parse.addmetacatinfo("dc4-hd-protodune") # argument is there for testing when you don't have replica list. parse.writeme() diff --git a/samtest.py b/samtest.py index 7768025..8a1e01d 100644 --- a/samtest.py +++ b/samtest.py @@ -24,7 +24,7 @@ def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appNa larW = LArWrapper.LArWrapper(fcl=fcl,appFamily=appFamily,appName=appName,\ appVersion=appVersion,deliveryMethod="samweb",workflowMethod="interactive",\ sam_web_uri=projecturl,processID=cpid,projectID=projectID,\ - dataTier="out1:sam-user",dataStream="out1:test",n=200) + dataTier="out1:sam-user",dataStream="out1:test",n=120) retcode = larW.DoLAr(0,0) list = larW.LArResults() print ("return code",retcode) From 8679bb05cb84a79367997d78fa7e1fc496a23eb0 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Tue, 13 Dec 2022 19:27:51 -0800 Subject: [PATCH 029/166] make ddtest --- LArWrapper.py | 4 +-- Loginator.py | 2 ++ ddtest.py | 62 ++++++++++++++++++++++++++++++++++++++++++++ run_lar.py | 22 +++++++++++----- samtest.py | 43 +++++++++++++++++++----------- samtest2.py | 66 +++++++++++++++++++++++++++++++++++++++++++++++ setup_hms.sh | 4 +-- submit_dd_jobs.py | 24 ++++++++++------- submit_test.sh | 2 +- 9 files changed, 193 insertions(+), 36 deletions(-) create mode 100644 ddtest.py create mode 100644 samtest2.py diff --git a/LArWrapper.py b/LArWrapper.py index 8313849..05ad723 100644 --- a/LArWrapper.py +++ b/LArWrapper.py @@ -125,8 +125,8 @@ def LArResults(self): parser.add_argument('--processHASH',type=str, default="", help='string code generated by dd for worker') parser.add_argument('--processID',type=int, default=0, help='processID generated by samweb') parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') - parser.add_argument('--appFamily', 
type=str, help='samweb needs this') - parser.add_argument('--appName', type=str, help='samweb needs this') + parser.add_argument('--appFamily', default='test',type=str, help='samweb needs this') + parser.add_argument('--appName', default='test', type=str, help='samweb needs this') parser.add_argument('--appVersion', type=str, help='samweb needs this') parser.add_argument('--dataTier', type=str, help='data tier for output file if only one') parser.add_argument('--dataStream', type=str, help='data stream for output file if only one') diff --git a/Loginator.py b/Loginator.py index 01620b5..c99cf58 100644 --- a/Loginator.py +++ b/Loginator.py @@ -199,6 +199,8 @@ def addsaminfo(self): self.outobject[f]["delivery_method"]="samweb" for item in ["event_count","data_tier","file_type","data_stream","file_size","file_format"]: self.outobject[f][item]=meta[item] + if "DUNE.campaign" in meta: + self.outobject[f]["file_campaign"] = meta["DUNE.campaign"] for run in meta["runs"]: self.outobject[f]["run_type"] = run[2] break diff --git a/ddtest.py b/ddtest.py new file mode 100644 index 0000000..b4ab7e7 --- /dev/null +++ b/ddtest.py @@ -0,0 +1,62 @@ +import submit_dd_jobs +from run_lar import DDInterface +import sys +import os +from argparse import ArgumentParser as ap + +if __name__ == '__main__': + + parser = ap() + # dd args + parser.add_argument('--dataset', default='schellma:run5141recentReco',type=str) + parser.add_argument('--load_limit', default=4, type=int) + parser.add_argument('--query_limit', default=10) + parser.add_argument('--query_skip', default=0) + parser.add_argument('--projectID', default=None) + parser.add_argument('--timeout', type=int, default=120) + parser.add_argument('--wait_time', type=int, default=120) + parser.add_argument('--wait_limit', type=int, default=5) + # args shared with lar + #parser.add_argument('--defName', default="schellma-run5141-PDSPProd4", type=str, help='samweb dataset definition name') + 
parser.add_argument('--appFamily',default='test', type=str, help=' application family') + parser.add_argument('--appName', default='test',type=str, help=' application name') + parser.add_argument('--appVersion', default=os.getenv('DUNESW_VERSION'), type=str, help='application version') + parser.add_argument('--dataTier', default='out1:sam-user',type=str, help='data tier for output file') + parser.add_argument('--dataStream', default='out1:test',type=str, help='data stream for output file') + parser.add_argument('-o', default="temp.root", type=str, help='output event stream file') + parser.add_argument('-c', required=True, type=str, help='name of fcl file') + parser.add_argument('--user', default = os.getenv("USER"),type=str, help='user name') + parser.add_argument('-n', type=int, default=10, help='number of events total to process') + parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') + args = parser.parse_args() + + if (not args.projectID) and args.dataset: + dd_proj_id = submit_dd_jobs.create_project(dataset=args.dataset, namespace=None, + query_limit=args.query_limit, + query_skip=args.query_skip) + + + elif args.project and not (args.dataset and args.namespace): + dd_proj_id = int(args.project) + else: + sys.stderr.write("Need to provide project OR dataset & namespace\n") + sys.exit(1) + + dd_interface = DDInterface( lar_limit=args.load_limit, + timeout=args.timeout, + wait_time=args.wait_time, + wait_limit=args.wait_limit, + appFamily=args.appFamily, + appName=args.appName, + appVersion=args.appVersion, + workflowMethod="interactive") + dd_interface.Login(args.user) + dd_interface.SetWorkerID() + print(os.environ['MYWORKERID']) + dd_interface.AttachProject(dd_proj_id) + dd_interface.dump_project(dd_proj_id) + dd_interface.LoadFiles() + dd_interface.BuildFileListString() + dd_interface.RunLAr(args.c, args.n, args.nskip) + dd_interface.dump_project(dd_proj_id) + ##Loginator stuff here? 
diff --git a/run_lar.py b/run_lar.py index 53426bb..d7ae816 100644 --- a/run_lar.py +++ b/run_lar.py @@ -64,14 +64,19 @@ def inner1(*args, **kwargs): class DDInterface: - def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5, appFamily=None, appName=None, appVersion=None): + def __init__(self, namespace=None, lar_limit=0, timeout=120, wait_time=60, wait_limit=5,\ + appFamily=None, appName=None, appVersion=None, workflowMethod="dd"): self.dataset = "" #dataset self.limit = 1#limit self.namespace = namespace #query_args = (self.dataset, self.namespace, self.limit) #self.query = '''files from %s where namespace="%s" limit %i'''%query_args - query_args = (self.dataset, self.limit) - self.query = '''files from %s limit %i'''%query_args + if namespace == None: + query_args = (self.dataset, self.limit) + self.query = '''files from %s limit %i'''%query_args + else: + query_args = (self.dataset, self.namespace, self.limit) + self.query = '''files from %s where namespace="%s" limit %i'''%query_args # this is not a good idea print ("the query is:",self.query) self.worker_timeout = 3600*5 self.lar_limit = lar_limit @@ -94,10 +99,11 @@ def __init__(self, namespace, lar_limit, timeout=120, wait_time=60, wait_limit=5 self.appFamily = appFamily self.appName = appName self.appVersion = appVersion + self.deliveryMethod="dd" + self.workflowMethod=workflowMethod self.retry_time = 600 - - if TEST: print ("DDInterface args:",args) + #try: # from data_dispatcher.api import DataDispatcherClient @@ -342,7 +348,7 @@ def RunLAr(self, fcl=None, n=-1, nskip=0): print ("RunLAr called with ",fcl,n,nskip) unused_files = [] if TEST: # new interface that does not talk to dd - lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod="dd", projectID=self.proj_id, 
formatString="runLar_%s_%%tc_%s_%s_%s.root") + lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod=self.workflowMethod, projectID=self.proj_id, formatString="runLar_%s_%s_%%tc_%s_%s_%s.root") returncode = lar.DoLAr(cluster, process) unused_files = lar.LArResults() else: # old interace that has more detail exposed. @@ -401,6 +407,7 @@ def RunLAr(self, fcl=None, n=-1, nskip=0): parser.add_argument('--appFamily', type=str) parser.add_argument('--appName', type=str) parser.add_argument('--appVersion', type=str) + parser.add_argument('--workflowMethod', default='dd', type=str, help="set this to interactive if interactive") parser.add_argument('--fcl', type=str) parser.add_argument('--load_limit', type=int) parser.add_argument('--user', type=str) @@ -419,7 +426,8 @@ def RunLAr(self, fcl=None, n=-1, nskip=0): wait_limit=args.wait_limit, appFamily=args.appFamily, appName=args.appName, - appVersion=args.appVersion + appVersion=args.appVersion, + workflowMethod=args.workflowMethod ) dd_interface.Login(args.user) dd_interface.AttachProject(args.project) diff --git a/samtest.py b/samtest.py index 8a1e01d..66ee248 100644 --- a/samtest.py +++ b/samtest.py @@ -1,20 +1,23 @@ import os,sys import samweb_client import LArWrapper +from argparse import ArgumentParser as ap + samweb = samweb_client.SAMWebClient(experiment='dune') TEST = True -def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appName="test", appVersion=None, fcl="./eventdump.fcl",method="samweb"): +def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appName="test", appVersion=None, fcl="eventdump.fcl",method="samweb",n=-1,nskip=0,dataTier="out1:sam-user",dataStream="out1:test"): appVersion = os.environ["DUNESW_VERSION"] projectname = samweb.makeProjectName(defname) projectinfo = 
samweb.startProject(projectname, defname) - print (projectinfo) + #print (projectinfo) projecturl = projectinfo["projectURL"] print ("Project name is %s" % projectinfo["project"]) print ("Project URL is %s" % projecturl) info = samweb.projectSummary(projecturl) print (info) projectID = info["project_id"] + print ("Project ID is %s" % projectID) deliveryLocation = None # set this to a specific hostname if you want - default is the local hostname cpid = samweb.startProcess(projecturl, appFamily, appName, appVersion, deliveryLocation) @@ -24,28 +27,38 @@ def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appNa larW = LArWrapper.LArWrapper(fcl=fcl,appFamily=appFamily,appName=appName,\ appVersion=appVersion,deliveryMethod="samweb",workflowMethod="interactive",\ sam_web_uri=projecturl,processID=cpid,projectID=projectID,\ - dataTier="out1:sam-user",dataStream="out1:test",n=120) + dataTier=dataTier,dataStream=dataStream,n=n,nskip=nskip) + print ("before DoLAr") retcode = larW.DoLAr(0,0) list = larW.LArResults() print ("return code",retcode) except: print ("LArWrapper failed, clean up") - -# while True: -# try: -# newfile = samweb.getNextFile(processurl)['url'] -# print "Got file %s" % newfile -# except samweb_client.NoMoreFiles: -# print "No more files available" -# break -# -# samweb.releaseFile(processurl, newfile) -# print "Released file %s" % newfile + retcode = 99 samweb.stopProject(projecturl) print (samweb.projectSummaryText(projecturl)) print ("Project ended") + print ("LArWrapper returned",retcode) + return retcode if __name__ == '__main__': + parser = ap() + parser.add_argument('--defName', default="schellma-run5141-PDSPProd4", type=str, help='samweb dataset definition name') + parser.add_argument('--appFamily',default='test', type=str, help='samweb needs this') + parser.add_argument('--appName', default='test',type=str, help='samweb needs this') + parser.add_argument('--appVersion', default=os.getenv('DUNESW_VERSION'), type=str, 
help='samweb needs this') + parser.add_argument('--dataTier', default='out1:sam-user',type=str, help='data tier for output file') + parser.add_argument('--dataStream', default='out1:test',type=str, help='data stream for output file') + parser.add_argument('-o', default="temp.root", type=str, help='output event stream file') + parser.add_argument('-c', required=True, type=str, help='name of fcl file') + parser.add_argument('--user', default = os.getenv("USER"),type=str, help='user name') + parser.add_argument('-n', type=int, default=150, help='number of events total to process') + parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') + # if restarting need these + # parser.add_argument('--projectID', type=int, default=0, help='integer that identifies the project, samweb/dd/wfs') + # parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') + + args = parser.parse_args() - testProject() + testProject(defname=args.defName, appFamily=args.appFamily, appName=args.appName, appVersion=args.appVersion, fcl=args.c,method="samweb",n=args.n,nskip=args.nskip,dataTier=args.dataTier,dataStream=args.dataStream) diff --git a/samtest2.py b/samtest2.py new file mode 100644 index 0000000..f154614 --- /dev/null +++ b/samtest2.py @@ -0,0 +1,66 @@ +import os,sys +import samweb_client +import LArWrapper +from argparse import ArgumentParser as ap + +samweb = samweb_client.SAMWebClient(experiment='dune') + +TEST = True +def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appName="test", appVersion=None, fcl="eventdump.fcl",method="samweb",n=-1,nskip=0,dataTier="out1:sam-user",dataStream="out1:test"): + appVersion = os.environ["DUNESW_VERSION"] + projectname = samweb.makeProjectName(defname) + projectinfo = samweb.startProject(projectname, defname) + print (projectinfo) + projecturl = projectinfo["projectURL"] + print ("Project name is %s" % projectinfo["project"]) + print ("Project URL is %s" % 
projecturl) + info = samweb.projectSummary(projecturl) + print (info) + projectID = info["project_id"] + deliveryLocation = None # set this to a specific hostname if you want - default is the local hostname + + cpid = samweb.startProcess(projecturl, appFamily, appName, appVersion, deliveryLocation) + print ("Consumer process id %s" % cpid) + processurl = samweb.makeProcessUrl(projecturl, cpid) + try: + larW = LArWrapper.LArWrapper(fcl=fcl,appFamily=appFamily,appName=appName,\ + appVersion=appVersion,deliveryMethod="samweb",workflowMethod="interactive",\ + sam_web_uri=projecturl,processID=cpid,projectID=projectID,\ + dataTier=dataTier,dataStream=dataStream,n=n,nskip=nskip) + print ("before DoLAr") + retcode = larW.DoLAr(0,0) + list = larW.LArResults() + print ("return code",retcode) + except: + print ("LArWrapper failed, clean up") + retcode = 99 + + samweb.stopProject(projecturl) + print (samweb.projectSummaryText(projecturl)) + print ("Project ended") + print ("LArWrapper returned",retcode) + return retcode + +if __name__ == '__main__': + parser = ap() + parser.add_argument('--appFamily', type=str, help='samweb needs this') + parser.add_argument('--appName', type=str, help='samweb needs this') + parser.add_argument('--appVersion', type=str, help='samweb needs this') + parser.add_argument('--dataTier', type=str, help='data tier for output file') + parser.add_argument('--dataStream', type=str, help='data stream for output file') + parser.add_argument('-o', default="temp.root", type=str, help='output event stream file') + parser.add_argument('-c', required=True, type=str, help='name of fcl file') + parser.add_argument('--user', default = os.getenv("USER"),type=str, help='user name') + parser.add_argument('--projectID', type=int, default=0, help='integer that identifies the project, samweb/dd/wfs') + parser.add_argument('-n', type=int, default=-1, help='number of events total to process') + parser.add_argument('--nskip', type=int, default=0, help='number of events to 
skip before starting') + parser.add_argument('--processID',type=int, default=0, help='processID generated by samweb') + parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') + + args = parser.parse_args() + + print (args) + + + + testProject() diff --git a/setup_hms.sh b/setup_hms.sh index bc6a6be..7d678dc 100755 --- a/setup_hms.sh +++ b/setup_hms.sh @@ -1,4 +1,4 @@ -setup dunesw v09_54_00d00 -q e20:prof +#setup dunesw v09_54_00d00 -q e20:prof export DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data export DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune export METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app @@ -8,7 +8,7 @@ export PATH=/nashome/c/calcuttj/.local/bin/:$PATH #export PYTHONPATH=/dune/app/users/calcuttj/metacat3/metacat:$PYTHONPATH export PYTHONPATH=/dune/app/users/calcuttj/dd_metacat_pip/venv/lib/python3.9/site-packages/:$PYTHONPATH #export PYTHONPATH=/dune/app/users/calcuttj/dd_metacat_pip/venv/lib/python3.9/site-packages/:$PYTHONPATH - + #source /dune/app/users/calcuttj/metacat3/metacat_venv/bin/activate source /dune/app/users/calcuttj/dd_metacat_pip/venv/bin/activate diff --git a/submit_dd_jobs.py b/submit_dd_jobs.py index c10f19d..b731345 100644 --- a/submit_dd_jobs.py +++ b/submit_dd_jobs.py @@ -6,7 +6,7 @@ from argparse import ArgumentParser as ap import subprocess -def create_project(dataset, namespace, query_limit=None, query_skip=None): +def create_project(dataset=None, namespace = None, query_limit=None, query_skip=None): mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') dd_client = DataDispatcherClient( server_url='https://metacat.fnal.gov:9443/dune/dd/data', @@ -14,8 +14,11 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): dd_client.login_x509(os.environ['USER'], os.environ['X509_USER_PROXY']) - #query = 'files from %s where namespace="%s" ordered'%(dataset, namespace) - query = 'files from %s 
ordered'%(dataset) + if namespace != None: + query = 'files from %s where namespace="%s" ordered'%(dataset, namespace) + else: + query = 'files from %s ordered'%(dataset) + if query_skip: query += ' skip %s'%query_skip if query_limit: query += ' limit %s'%query_limit print("Start Project for :",query) @@ -61,7 +64,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): parser.add_argument('--appFamily', type=str) parser.add_argument('--appVersion', type=str) parser.add_argument('--appName', type=str) - + args = parser.parse_args() if args.appName == None: @@ -78,7 +81,7 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): appFamily = "LArSoft" else: appFamily = args.appFamily - + mc_client = MetaCatClient('https://metacat.fnal.gov:9443/dune_meta_demo/app') dd_client = DataDispatcherClient( server_url='https://metacat.fnal.gov:9443/dune/dd/data', @@ -88,9 +91,12 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): print(args.blacklist) - if (not args.project) and args.dataset and args.namespace: + if (not args.project) and args.dataset: ##build up query - query = 'files from %s where namespace="%s" ordered'%(args.dataset, args.namespace) + if args.namespace == None: + query = 'files from %s ordered'%(args.dataset) + else: + query = 'files from %s where namespace="%s" ordered'%(args.dataset, args.namespace) if args.query_skip: query += ' skip %s'%args.query_skip if args.query_limit: query += ' limit %s'%args.query_limit print(query) @@ -114,10 +120,10 @@ def create_project(dataset, namespace, query_limit=None, query_skip=None): print('Only making project. 
Exiting now') exit() - elif args.project and not (args.dataset and args.namespace): + elif args.project and not (args.dataset): dd_proj_id = args.project else: - sys.stderr.write("Need to provide project OR dataset & namespace\n") + sys.stderr.write("Need to provide project OR dataset & optional namespace\n") sys.exit(1) if args.njobs > 10000: diff --git a/submit_test.sh b/submit_test.sh index 0bc924e..6692fe6 100755 --- a/submit_test.sh +++ b/submit_test.sh @@ -1,6 +1,6 @@ # submission tests source tarme.sh # make the tar file up-to-date -python submit_dd_jobs.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit=10 --fcl=test.fcl --nevents=400 --load_limit=4 --appFamily=LArSoft --appName=pdsp_det_reco --appVersion=${DUNESW_VERSION} +python submit_dd_jobs.py --dataset=schellma:run5141recentReco --query_limit=10 --fcl=test.fcl --nevents=400 --load_limit=4 --appFamily=LArSoft --appName=pdsp_det_reco --appVersion=${DUNESW_VERSION} #--dataset DATASET] [--namespace NAMESPACE] [--query_limit QUERY_LIMIT] # [--query_skip QUERY_SKIP] [--njobs NJOBS] [--load_limit LOAD_LIMIT] [--fcl FCL] [--nevents NEVENTS] From 3ef31b9012ab755a6d99f3018badd207d195d78e Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Tue, 13 Dec 2022 19:28:33 -0800 Subject: [PATCH 030/166] make ddtest --- samtest2.py | 66 ----------------------------------------------------- 1 file changed, 66 deletions(-) delete mode 100644 samtest2.py diff --git a/samtest2.py b/samtest2.py deleted file mode 100644 index f154614..0000000 --- a/samtest2.py +++ /dev/null @@ -1,66 +0,0 @@ -import os,sys -import samweb_client -import LArWrapper -from argparse import ArgumentParser as ap - -samweb = samweb_client.SAMWebClient(experiment='dune') - -TEST = True -def testProject(defname="schellma-run5141-PDSPProd4", appFamily="samtest", appName="test", appVersion=None, fcl="eventdump.fcl",method="samweb",n=-1,nskip=0,dataTier="out1:sam-user",dataStream="out1:test"): - appVersion = 
os.environ["DUNESW_VERSION"] - projectname = samweb.makeProjectName(defname) - projectinfo = samweb.startProject(projectname, defname) - print (projectinfo) - projecturl = projectinfo["projectURL"] - print ("Project name is %s" % projectinfo["project"]) - print ("Project URL is %s" % projecturl) - info = samweb.projectSummary(projecturl) - print (info) - projectID = info["project_id"] - deliveryLocation = None # set this to a specific hostname if you want - default is the local hostname - - cpid = samweb.startProcess(projecturl, appFamily, appName, appVersion, deliveryLocation) - print ("Consumer process id %s" % cpid) - processurl = samweb.makeProcessUrl(projecturl, cpid) - try: - larW = LArWrapper.LArWrapper(fcl=fcl,appFamily=appFamily,appName=appName,\ - appVersion=appVersion,deliveryMethod="samweb",workflowMethod="interactive",\ - sam_web_uri=projecturl,processID=cpid,projectID=projectID,\ - dataTier=dataTier,dataStream=dataStream,n=n,nskip=nskip) - print ("before DoLAr") - retcode = larW.DoLAr(0,0) - list = larW.LArResults() - print ("return code",retcode) - except: - print ("LArWrapper failed, clean up") - retcode = 99 - - samweb.stopProject(projecturl) - print (samweb.projectSummaryText(projecturl)) - print ("Project ended") - print ("LArWrapper returned",retcode) - return retcode - -if __name__ == '__main__': - parser = ap() - parser.add_argument('--appFamily', type=str, help='samweb needs this') - parser.add_argument('--appName', type=str, help='samweb needs this') - parser.add_argument('--appVersion', type=str, help='samweb needs this') - parser.add_argument('--dataTier', type=str, help='data tier for output file') - parser.add_argument('--dataStream', type=str, help='data stream for output file') - parser.add_argument('-o', default="temp.root", type=str, help='output event stream file') - parser.add_argument('-c', required=True, type=str, help='name of fcl file') - parser.add_argument('--user', default = os.getenv("USER"),type=str, help='user name') - 
parser.add_argument('--projectID', type=int, default=0, help='integer that identifies the project, samweb/dd/wfs') - parser.add_argument('-n', type=int, default=-1, help='number of events total to process') - parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') - parser.add_argument('--processID',type=int, default=0, help='processID generated by samweb') - parser.add_argument('--sam_web_uri',type=str, help='samweb url for the project') - - args = parser.parse_args() - - print (args) - - - - testProject() From 41fc2732cc1e8c5b35d9dcfae86377f34778eb4a Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Tue, 13 Dec 2022 19:44:18 -0800 Subject: [PATCH 031/166] fix dataset bug --- run_lar.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/run_lar.py b/run_lar.py index d7ae816..a37934a 100644 --- a/run_lar.py +++ b/run_lar.py @@ -64,9 +64,9 @@ def inner1(*args, **kwargs): class DDInterface: - def __init__(self, namespace=None, lar_limit=0, timeout=120, wait_time=60, wait_limit=5,\ + def __init__(self, dataset=None, namespace=None, lar_limit=0, timeout=120, wait_time=60, wait_limit=5,\ appFamily=None, appName=None, appVersion=None, workflowMethod="dd"): - self.dataset = "" #dataset + self.dataset = DATASET self.limit = 1#limit self.namespace = namespace #query_args = (self.dataset, self.namespace, self.limit) From 4d41f4d2dedec0b205e69141936314705b6df5b5 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Wed, 14 Dec 2022 12:55:35 -0800 Subject: [PATCH 032/166] remove some prints --- Loginator.py | 22 +++++++++++++++------- run_lar.py | 6 +++--- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/Loginator.py b/Loginator.py index c99cf58..69a383d 100644 --- a/Loginator.py +++ b/Loginator.py @@ -21,7 +21,7 @@ #from dateutil import parser -DEBUG=True +DEBUG=False class Loginator: @@ -184,13 +184,18 @@ def readme(self): def addinfo(self,info): for s in info: if s in self.outobject: - print 
("Loginator replacing",s, self.outobject[s],self.info[s]) + print ("Loginator.addinfo replacing",s, self.outobject[s],self.info[s]) else: for f in self.outobject: self.outobject[f][s] = info[s] if DEBUG: print ("adding",s,info[s]) def addsaminfo(self): + if "SAM_EXPERIMENT" in os.environ: + import samweb_client + else: + print ("You need to set up samweb to get sam info") + sys.exit(0) samweb = samweb_client.SAMWebClient(experiment='dune') for f in self.outobject: if DEBUG: print ("f ",f) @@ -228,7 +233,7 @@ def addmetacatinfo(self,defaultNamespace=None): if "core."+item in meta["metadata"].keys(): self.outobject[f][item]=meta["metadata"]["core."+item] else: - print ("no", item, "in ",list(meta["metadata"].keys())) + print ("addmetacatinfo: no", item, "in ",list(meta["metadata"].keys())) self.outobject[f]["file_size"]=meta["size"] if "DUNE.campaign" in meta["metadata"]: self.outobject[f]["file_campaign"]=meta["metadata"]["DUNE.campaign"] @@ -250,7 +255,7 @@ def addreplicainfo(self,replicas,test=False): self.outobject[f]["rse"] = r["rse"] if "namespace" in r: self.outobject[f]["namespace"] = r["namespace"] - print (self.outobject[f]) + if DEBUG: print (self.outobject[f]) if not found: print (r,"appears in replicas but not in Lar Log, need to mark as unused") notfound.append(r) @@ -300,7 +305,7 @@ def human2number(self,stamp): #15-Nov-2022 17:24:41 CST https://docs.python.org/3/library/time.html#time.strftime format = "%d-%b-%Y %H:%M:%S" # python no longer accepts time zones. 
We only want the different but need to correct for DT - print ("human2number converting",stamp) + #print ("human2number converting",stamp) thetime = datetime.strptime(stamp[0:19],format) epoch = datetime.utcfromtimestamp(0) if "DT" in stamp: @@ -316,12 +321,15 @@ def duration(self,start,end): def test(): parse = Loginator(sys.argv[1]) - print ("looking at",sys.argv[1]) + #print ("looking at",sys.argv[1]) parse.readme() parse.addsysinfo() # parse.addsaminfo() parse.addreplicainfo([]) - parse.addsaminfo() + if "SAM_EXPERIMENT" in os.environ: + parse.addsaminfo() + else: + parse.addmetacatinfo("pdsp_det_reco") #parse.addmetacatinfo("dc4-hd-protodune") # argument is there for testing when you don't have replica list. parse.writeme() diff --git a/run_lar.py b/run_lar.py index a37934a..0642e7f 100644 --- a/run_lar.py +++ b/run_lar.py @@ -66,7 +66,7 @@ def inner1(*args, **kwargs): class DDInterface: def __init__(self, dataset=None, namespace=None, lar_limit=0, timeout=120, wait_time=60, wait_limit=5,\ appFamily=None, appName=None, appVersion=None, workflowMethod="dd"): - self.dataset = DATASET + self.dataset = dataset self.limit = 1#limit self.namespace = namespace #query_args = (self.dataset, self.namespace, self.limit) @@ -77,7 +77,7 @@ def __init__(self, dataset=None, namespace=None, lar_limit=0, timeout=120, wait_ else: query_args = (self.dataset, self.namespace, self.limit) self.query = '''files from %s where namespace="%s" limit %i'''%query_args # this is not a good idea - print ("the query is:",self.query) + print ("DDInterface: the query is:",self.query) self.worker_timeout = 3600*5 self.lar_limit = lar_limit self.proj_id = -1 @@ -131,7 +131,7 @@ def CreateProject(self): self.proj_state = proj_dict['state'] self.proj_id = proj_dict['project_id'] self.proj_exists = True - print(proj_dict) + #print(proj_dict) def PrintFiles(self): print('Printing files') From 3c549a50b5506f431415cb1da6dfc8318cdf8aa1 Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Thu, 15 Dec 
2022 07:44:25 -0800 Subject: [PATCH 033/166] move code into subdirs --- ddconfig.cfg => batch/ddconfig.cfg | 0 rses.txt => batch/rses.txt | 0 top_script.sh => batch/top_script.sh | 13 +- byhand.cfg | 117 --- dd_test.sh | 6 - example.json | 39 - test.fcl => fcl/test.fcl | 0 new_test.sh | 6 - LArWrapper.py => python/LArWrapper.py | 0 Loginator.py => python/Loginator.py | 0 ddtest.py => python/ddtest.py | 3 +- .../run_interactive.py | 0 run_lar.py => python/run_lar.py | 160 ++-- samtest.py => python/samtest.py | 0 submit_dd_jobs.py => python/submit_dd_jobs.py | 0 tarme.sh | 2 +- test.out | 744 ------------------ .../interactive_test.sh | 0 other_test.sh => tests/other_test.sh | 0 rejoin.sh => tests/rejoin.sh | 0 submit_many.sh => tests/submit_many.sh | 0 submit_test.sh => tests/submit_test.sh | 0 wfs_test.sh | 0 23 files changed, 100 insertions(+), 990 deletions(-) rename ddconfig.cfg => batch/ddconfig.cfg (100%) rename rses.txt => batch/rses.txt (100%) rename top_script.sh => batch/top_script.sh (95%) delete mode 100644 byhand.cfg delete mode 100644 dd_test.sh delete mode 100644 example.json rename test.fcl => fcl/test.fcl (100%) delete mode 100644 new_test.sh rename LArWrapper.py => python/LArWrapper.py (100%) rename Loginator.py => python/Loginator.py (100%) rename ddtest.py => python/ddtest.py (95%) rename run_interactive.py => python/run_interactive.py (100%) rename run_lar.py => python/run_lar.py (71%) rename samtest.py => python/samtest.py (100%) rename submit_dd_jobs.py => python/submit_dd_jobs.py (100%) delete mode 100644 test.out rename interactive_test.sh => tests/interactive_test.sh (100%) rename other_test.sh => tests/other_test.sh (100%) rename rejoin.sh => tests/rejoin.sh (100%) rename submit_many.sh => tests/submit_many.sh (100%) rename submit_test.sh => tests/submit_test.sh (100%) delete mode 100644 wfs_test.sh diff --git a/ddconfig.cfg b/batch/ddconfig.cfg similarity index 100% rename from ddconfig.cfg rename to batch/ddconfig.cfg diff --git 
a/rses.txt b/batch/rses.txt similarity index 100% rename from rses.txt rename to batch/rses.txt diff --git a/top_script.sh b/batch/top_script.sh similarity index 95% rename from top_script.sh rename to batch/top_script.sh index b5d9e50..2e25783 100755 --- a/top_script.sh +++ b/batch/top_script.sh @@ -118,7 +118,7 @@ echo $NAMESPACE logname=loginator-${NAMESPACE}_${PROCESS}_${CLUSTER}_`date +%F_%H_%M_%S` -export PYTHONPATH=${CONDOR_DIR_INPUT}:${PYTHONPATH} +export PYTHONPATH=${CONDOR_DIR_INPUT}/python:${PYTHONPATH} ###Setting up dunesw/Data Dispatcher/MetaCat and running lar ( @@ -145,13 +145,13 @@ ls -lrt echo "the input directory" -ls -lrt $CONDOR_DIR_INPUT +ls -lrtR $CONDOR_DIR_INPUT -export FHICL_FILE_PATH=${CONDOR_DIR_INPUT}:${FHICL_FILE_PATH} +export FHICL_FILE_PATH=${CONDOR_DIR_INPUT}/fcl:${FHICL_FILE_PATH} -echo "try to really mean it about the FCL since putting it in the path doesn't seem to do it for me" +#echo "try to really mean it about the FCL since putting it in the path doesn't seem to do it for me" -cp ${CONDOR_DIR_INPUT}/$FCL . +#cp ${CONDOR_DIR_INPUT}/fcl/$FCL . echo "I will now run run_lar with fcl file " $FCL @@ -165,7 +165,8 @@ python -m run_lar \ --appFamily $APPFAMILY \ --appName $APPNAME \ --appVersion $APPVERSION \ - #--nskip $nskip \ + --nskip $nskip \ + --workflowmethod batch #> ${logname}.out 2>${logname}.err returncode=$? 
diff --git a/byhand.cfg b/byhand.cfg deleted file mode 100644 index 8817a66..0000000 --- a/byhand.cfg +++ /dev/null @@ -1,117 +0,0 @@ -[global] -group = dune -experiment = dune -wrapper = file:///${FIFE_UTILS_DIR}/libexec/fife_wrap -#wrapper = /dune/app/users/$USER/new_fife_utils/fife_utils/v3_5_0/NULL/libexec/fife_wrap -output_dir = /pnfs/dune/scratch/users/$USER/dd_mc_test/ -extra_dir = ./ -input_namespace = pdsp_det_reco -fcl = eventdump.fcl -load_limit = 1 -project = 317 -nevents = -1 -output_str = "*reco.root" -output_dataset = dd-interactive-test-data -output_namespace = dc4-interactive-tests -metacat_user = $USER -dd_tar=$USER_8_22_22_dd -mc_tar=$USER_8_15_22_mc - -[executable] -#name = ./top_script.sh -name = \\\${CONDOR_DIR_INPUT}/top_script.sh -arg_1 = --namespace -arg_2 = %(input_namespace)s -arg_3 = --fcl -arg_4 = %(fcl)s -arg_5 = --load_limit -arg_6 = %(load_limit)s -arg_7 = --user -arg_8 = $USER -arg_9 = --project -arg_10 = %(project)s -arg_11 = -n -arg_12 = %(nevents)s -arg_13 = --output -arg_14 = %(output_str)s -arg_15 = --output_dataset -arg_16 = %(output_dataset)s -arg_17 = --output_namespace -arg_18 = %(output_namespace)s -arg_19 = --metacat_user -arg_20 = %(metacat_user)s - -[stage] -lines_1 '+FERMIHTC_AutoRelease=True' -lines_2 '+FERMIHTC_GraceMemory=1024' -lines_3 '+FERMIHTC_GraceLifetime=3600' - -[env_pass] -IFDH_DEBUG=1 -IFDH_CP_MAXRETRIES=3 -XRD_CONNECTIONRETRY=32 -XRD_REQUESTTIMEOUT=14400 -XRD_REDIRECTLIMIT=255 -XRD_LOADBALANCERTTL=7200 -XRD_STREAMTIMEOUT=7200 -DD_TAR = %(dd_tar)s -MC_TAR = %(mc_tar)s -DATA_DISPATCHER_URL=https://metacat.fnal.gov:9443/dune/dd/data -DATA_DISPATCHER_AUTH_URL=https://metacat.fnal.gov:8143/auth/dune -METACAT_AUTH_SERVER_URL=https://metacat.fnal.gov:8143/auth/dune -METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app - - -[submit] -G = %(group)s -OS = SL7 -memory = 1999MB -expected-lifetime = 4h -N = 1 -resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED -#resource-provides = 
usage_model=OPPORTUNISTIC,DEDICATED -c = "has_avx==True" -lines_1 = '+FERMIHTC_AutoRelease=True' -lines_2 = '+FERMIHTC_GraceMemory=2048' -lines_3 = '+FERMIHTC_GraceLifetime=3600' -lines_4 = '+SingularityImage=\"/cvmfs/singularity.opensciencegrid.org/fermilab/fnal-wn-sl7:latest\"' -append_condor_requirements_1 = '(TARGET.HAS_Singularity==true)' -append_condor_requirements_2 = 'TARGET.HAS_CVMFS_fifeuser1_opensciencegrid_org==true' -append_condor_requirements_3 = 'TARGET.HAS_CVMFS_fifeuser2_opensciencegrid_org==true' -append_condor_requirements_4 = 'TARGET.HAS_CVMFS_fifeuser3_opensciencegrid_org==true' -append_condor_requirements_5 = 'TARGET.HAS_CVMFS_fifeuser4_opensciencegrid_org==true' -append_condor_requirements_6 = 'TARGET.HAS_CVMFS_dune_opensciencegrid_org==true' -append_condor_requirements_7 = 'TARGET.HAS_CVMFS_larsoft_opensciencegrid_org==true' -append_condor_requirements_8 = 'TARGET.CVMFS_dune_opensciencegrid_org_REVISION>=1105' -#f_0 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(dd_tar)s.tar -#f_1 = dropbox:///dune/data/users/$USER/dd_metacat_canned/%(mc_tar)s.tar -f_0 = dropbox:///dune/data/users/$USER/loginator.tar - -#[job_output] -#addoutput = *err -#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} -# -#[job_output_1] -#addoutput = *out -#dest = %(output_dir)s/%(extra_dir)s/\\\${CLUSTER}_\\\${PROCESS} - -#[data_dispatcher] -#dataset = dc4:dc4 -#namespace = dc4-hd-protodune -#query_limit = 9 -#load_limit = 3 -#wait_time = 45 -#wait_limit = 4 -#timeout = 50 - -[job_setup] -source_1 = /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh -#source_2 = ${CONDOR_DIR_INPUT}/%(mc_tar)s/canned_client_setup.sh -#source_3 = ${CONDOR_DIR_INPUT}/%(dd_tar)s/canned_client_setup.sh -#setup = dunesw v09_55_01d00 -q e20:prof -prescript_0 = echo lsing -prescript_1 = ls ${CONDOR_DIR_INPUT} -#prescript_1 = ls ${CONDOR_DIR_INPUT}/$USER_8*/ -#prescript_4 = echo $PATH -#prescript_5 = echo $PYTHONPATH -#prescript_6 = ls 
${CONDOR_DIR_INPUT}/%(dd_tar)s/data_dispatcher diff --git a/dd_test.sh b/dd_test.sh deleted file mode 100644 index 487f5f6..0000000 --- a/dd_test.sh +++ /dev/null @@ -1,6 +0,0 @@ -export PROJECTID=82 -export PROCESSHASH="2e434568" -dd project create files from schellma:run5141recentReco limit 100 -python LArWrapper.py --delivery_method=dd --processHASH=$PROCESSHASH\ - --processID=0 -c dumpevent.fcl\ - --user=$USER --projectID=$PROJECTID diff --git a/example.json b/example.json deleted file mode 100644 index e03fa78..0000000 --- a/example.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "source_rse": "fndca1.fnal.gov:1094", - "user": "schellma", - "job_id": "63733911.0@jobsub02.fnal.gov", - "timestamp_for_start": "06-Dec-2022 00:15:40 UTC", - "timestamp_for_end": "06-Dec-2022 00:15:47 UTC", - "duration": 0.0, - "file_size": 1627448050, - "application_family": null, - "application_name": null, - "application_version": null, - "final_state": "Closed", - "project_name": null, - "file_name": "np04_raw_run005141_0017_dl7_reco1_18126401_0_20210318T102233Z.root", - "fid": "52474922", - "data_tier": "full-reconstructed", - "data_stream": "physics", - "run_type": "protodune-sp", - "file_format": "artroot", - "job_node": "schellma-63733911-0-fnpc9106.fnal.gov", - "job_site": null, - "country": null, - "campaign": "PDSPProd4", - "delivery_method": "dd", - "workflow_method": "dd", - "access_method": "metacat", - "path": "root://fndca1.fnal.gov:1094//pnfs/fnal.gov/usr/dune//tape_backed/dunepro/protodune-sp/full-reconstructed/2021/detector/physics/PDSPProd4/00/00/51/41", - "namespace": "pdsp_det_reco", - "job_real_memory": "843.837", - "project_id": 125, - "job_wall_time": "5.868609", - "job_cpu_time": "3.974521", - "job_total_events": "2", - "deliver_method": "xroot", - "dd_worker_id": "cb904d15", - "rse": "FNAL_DCACHE", - "file_type": "detector", - "event_count": 42 -} \ No newline at end of file diff --git a/test.fcl b/fcl/test.fcl similarity index 100% rename from test.fcl rename to 
fcl/test.fcl diff --git a/new_test.sh b/new_test.sh deleted file mode 100644 index 1e8dc9a..0000000 --- a/new_test.sh +++ /dev/null @@ -1,6 +0,0 @@ -# run an interactive test HMS 12-2-2022 -#python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 3 --fcl eventdump.fcl --user schellma --appFamily=protoduneana --appVersion=$PROTODUNEANA_VERSION -n 5 -python run_interactive.py --dataset=schellma:run5141recentReco --namespace=pdsp_det_reco --query_limit 100 --load_limit 4 --fcl test.fcl --user schellma -n 3 - -#metacat query -i "files from schellma:protodune-sp-physics-generic where (namespace=pdsp_det_reco and core.data_tier='full-reconstructed' and core.runs[any] in (5141))" > newid.txt - diff --git a/LArWrapper.py b/python/LArWrapper.py similarity index 100% rename from LArWrapper.py rename to python/LArWrapper.py diff --git a/Loginator.py b/python/Loginator.py similarity index 100% rename from Loginator.py rename to python/Loginator.py diff --git a/ddtest.py b/python/ddtest.py similarity index 95% rename from ddtest.py rename to python/ddtest.py index b4ab7e7..bc58f21 100644 --- a/ddtest.py +++ b/python/ddtest.py @@ -16,8 +16,9 @@ parser.add_argument('--timeout', type=int, default=120) parser.add_argument('--wait_time', type=int, default=120) parser.add_argument('--wait_limit', type=int, default=5) + parser.add_argument('--workFlowMethod', type=int, default="batch", help= 'workflow method [interactive,batch,wfs]') # args shared with lar - #parser.add_argument('--defName', default="schellma-run5141-PDSPProd4", type=str, help='samweb dataset definition name') + parser.add_argument('--appFamily',default='test', type=str, help=' application family') parser.add_argument('--appName', default='test',type=str, help=' application name') parser.add_argument('--appVersion', default=os.getenv('DUNESW_VERSION'), type=str, help='application version') diff --git a/run_interactive.py b/python/run_interactive.py 
similarity index 100% rename from run_interactive.py rename to python/run_interactive.py diff --git a/run_lar.py b/python/run_lar.py similarity index 71% rename from run_lar.py rename to python/run_lar.py index 0642e7f..0515b4f 100644 --- a/run_lar.py +++ b/python/run_lar.py @@ -12,7 +12,7 @@ import LArWrapper import Loginator -TEST = True + # make a string out of none for formatted Printing def NoneToString(thing): @@ -166,7 +166,7 @@ def get_project(self, proj_id): #print(datetime.datetime.now()) return proj - @call_and_retry_return + def dump_project(self, proj_id): proj = self.dd_client.get_project(proj_id, with_files=True) print ("dumping project",proj_id) @@ -184,8 +184,6 @@ def dump_project(self, proj_id): print("%10s\t%d\t%21s\t%8s\t%s:%s"%(f["state"],f["attempts"],(reserved),NoneToString(f["worker_id"]),f["namespace"],f["name"])) - #print(datetime.datetime.now()) - return proj def LoadFiles(self): count = 0 @@ -347,46 +345,46 @@ def RunLAr(self, fcl=None, n=-1, nskip=0): process = '0' print ("RunLAr called with ",fcl,n,nskip) unused_files = [] - if TEST: # new interface that does not talk to dd - lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod=self.workflowMethod, projectID=self.proj_id, formatString="runLar_%s_%s_%%tc_%s_%s_%s.root") - returncode = lar.DoLAr(cluster, process) - unused_files = lar.LArResults() - else: # old interace that has more detail exposed. 
- ## TODO -- make options for capturing output - stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") - fname = "runLar_%s_%%tc_%s_%s_reco.root"%(self.proj_id, cluster, process) - oname = fname.replace(".root",".out").replace("%tc",stamp) - ename = fname.replace(".root",".err").replace("%tc",stamp) - ofile = open(oname,'w') - efile = open(ename,'w') - proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) - returncode = proc.returncode - ofile.close() - efile.close() - - # get log info, match with replicas - logparse = Loginator.Loginator(oname) - - # parse the log and find open./close/memory - logparse.envPrinter() - logparse.readme() - - #logparse.addinfo(logparse.getinfo()) - logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, - "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) - logparse.addsysinfo() - #deal with un - unused_replicas = logparse.addreplicainfo(self.input_replicas) - - - unused_files = [] - for u in unused_replicas: - unused_files.append(u["namespace"]+":"+u["name"]) - logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info - print ("replicas not used",unused_files) - - # write out json files for processed files whether closed properly or not. Those never opened don't get logged. 
- logparse.writeme() + # new interface that does not talk to dd + lar = LArWrapper.LArWrapper(fcl=fcl, o="temp.root", replicas=self.input_replicas, flist=self.lar_file_list, n=n, nskip=nskip, appFamily=self.appFamily, appName=self.appName, appVersion=self.appVersion, deliveryMethod="dd", workflowMethod=self.workflowMethod, projectID=self.proj_id, formatString="runLar_%s_%s_%%tc_%s_%s_%s.root") + returncode = lar.DoLAr(cluster, process) + unused_files = lar.LArResults() +# else: # old interace that has more detail exposed. +# ## TODO -- make options for capturing output +# stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%Z") +# fname = "runLar_%s_%%tc_%s_%s_reco.root"%(self.proj_id, cluster, process) +# oname = fname.replace(".root",".out").replace("%tc",stamp) +# ename = fname.replace(".root",".err").replace("%tc",stamp) +# ofile = open(oname,'w') +# efile = open(ename,'w') +# proc = subprocess.run('lar -c %s -s %s -n %i --nskip %i -o fname'%(fcl, self.lar_file_list, n, nskip), shell=True, stdout=ofile,stderr=efile) +# returncode = proc.returncode +# ofile.close() +# efile.close() +# +# # get log info, match with replicas +# logparse = Loginator.Loginator(oname) +# +# # parse the log and find open./close/memory +# logparse.envPrinter() +# logparse.readme() +# +# #logparse.addinfo(logparse.getinfo()) +# logparse.addinfo({"dd_worker_id":os.environ["MYWORKERID"],"application_family":self.appFamily,"application_name":self.appName, +# "application_version":self.appVersion,"delivery_method":"dd","workflow_method":"dd","project_id":self.proj_id}) +# logparse.addsysinfo() +# #deal with un +# unused_replicas = logparse.addreplicainfo(self.input_replicas) +# +# +# unused_files = [] +# for u in unused_replicas: +# unused_files.append(u["namespace"]+":"+u["name"]) +# logparse.addmetacatinfo(self.namespace) # only uses namespace if can't get from replica info +# print ("replicas not used",unused_files) +# +# # write out json files for processed files whether closed 
properly or not. Those never opened don't get logged. +# logparse.writeme() # make all files as bad if job crashed if returncode != 0: @@ -402,35 +400,57 @@ def RunLAr(self, fcl=None, n=-1, nskip=0): if __name__ == '__main__': - parser = ap() - parser.add_argument('--namespace', type=str) - parser.add_argument('--appFamily', type=str) - parser.add_argument('--appName', type=str) - parser.add_argument('--appVersion', type=str) - parser.add_argument('--workflowMethod', default='dd', type=str, help="set this to interactive if interactive") - parser.add_argument('--fcl', type=str) - parser.add_argument('--load_limit', type=int) - parser.add_argument('--user', type=str) - parser.add_argument('--project', type=int) - parser.add_argument('--timeout', type=int, default=120) - parser.add_argument('--wait_time', type=int, default=120) - parser.add_argument('--wait_limit', type=int, default=5) - parser.add_argument('-n', type=int, default=-1) - parser.add_argument('--nskip', type=int, default=0) - args = parser.parse_args() - - dd_interface = DDInterface(args.namespace, - args.load_limit, + parser = ap() + # dd args + parser.add_argument('--dataset', default='schellma:run5141recentReco',type=str) + parser.add_argument('--load_limit', default=4, type=int) + parser.add_argument('--query_limit', default=10) + parser.add_argument('--query_skip', default=0) + parser.add_argument('--projectID', default=None) + parser.add_argument('--timeout', type=int, default=120) + parser.add_argument('--wait_time', type=int, default=120) + parser.add_argument('--wait_limit', type=int, default=5) + parser.add_argument('--workFlowMethod', type=int, default="batch", help= 'workflow method [interactive,batch,wfs]') + # args shared with lar + + parser.add_argument('--appFamily',default='test', type=str, help=' application family') + parser.add_argument('--appName', default='test',type=str, help=' application name') + parser.add_argument('--appVersion', default=os.getenv('DUNESW_VERSION'), type=str, 
help='application version') + parser.add_argument('--dataTier', default='out1:sam-user',type=str, help='data tier for output file') + parser.add_argument('--dataStream', default='out1:test',type=str, help='data stream for output file') + parser.add_argument('-o', default="temp.root", type=str, help='output event stream file') + parser.add_argument('-c', required=True, type=str, help='name of fcl file') + parser.add_argument('--user', default = os.getenv("USER"),type=str, help='user name') + parser.add_argument('-n', type=int, default=10, help='number of events total to process') + parser.add_argument('--nskip', type=int, default=0, help='number of events to skip before starting') + args = parser.parse_args() + + if (not args.projectID) and args.dataset: + dd_proj_id = submit_dd_jobs.create_project(dataset=args.dataset, namespace=None, + query_limit=args.query_limit, + query_skip=args.query_skip) + + + elif args.project and not (args.dataset and args.namespace): + dd_proj_id = int(args.project) + else: + sys.stderr.write("Need to provide project OR dataset & namespace\n") + sys.exit(1) + + dd_interface = DDInterface( lar_limit=args.load_limit, timeout=args.timeout, wait_time=args.wait_time, wait_limit=args.wait_limit, appFamily=args.appFamily, appName=args.appName, appVersion=args.appVersion, - workflowMethod=args.workflowMethod - ) - dd_interface.Login(args.user) - dd_interface.AttachProject(args.project) - dd_interface.LoadFiles() - dd_interface.BuildFileListString() - code = dd_interface.RunLAr(fcl=args.fcl, n=args.n, nskip=args.nskip) + workflowMethod="interactive") + dd_interface.Login(args.user) + dd_interface.SetWorkerID() + print(os.environ['MYWORKERID']) + dd_interface.AttachProject(dd_proj_id) + #dd_interface.dump_project(dd_proj_id) + dd_interface.LoadFiles() + dd_interface.BuildFileListString() + dd_interface.RunLAr(args.c, args.n, args.nskip) + dd_interface.dump_project(dd_proj_id) diff --git a/samtest.py b/python/samtest.py similarity index 100% rename 
from samtest.py rename to python/samtest.py diff --git a/submit_dd_jobs.py b/python/submit_dd_jobs.py similarity index 100% rename from submit_dd_jobs.py rename to python/submit_dd_jobs.py diff --git a/tarme.sh b/tarme.sh index d09f667..e9195b9 100755 --- a/tarme.sh +++ b/tarme.sh @@ -1 +1 @@ -tar -cvf ../loginator.tar *.sh *.cfg *.py *.fcl +tar -cvf ../loginator.tar tests/*.sh batch/* python/*.py *.fcl diff --git a/test.out b/test.out deleted file mode 100644 index 803e5e2..0000000 --- a/test.out +++ /dev/null @@ -1,744 +0,0 @@ -%MSG-i MF_INIT_OK: Early 02-Dec-2022 17:12:58 CST JobSetup -Messagelogger initialization complete. -%MSG -02-Dec-2022 17:13:00 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" -02-Dec-2022 17:13:04 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" -Begin processing the 1st record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:13:12 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ 
| .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 667349 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 667349 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). 
- -Begin processing the 2nd record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:14:12 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 665036 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 665036 -G4.......... | IonAndScint.... | priorSCE............. 
| std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 3rd record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:14:29 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 647744 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 647744 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 4th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:14:47 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... 
| std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 657006 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 657006 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... 
| crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 5th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:15:06 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. 
| ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 605796 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 605796 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 6th record. run: 1 subRun: 0 event: 6 at 02-Dec-2022 17:15:23 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... 
| std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 728127 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 728127 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ 
| ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 7th record. run: 1 subRun: 0 event: 7 at 02-Dec-2022 17:15:42 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 732048 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... 
| largeant....... | ..................... | art::Assns | 732048 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 8th record. run: 1 subRun: 0 event: 8 at 02-Dec-2022 17:16:01 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... 
| std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 698294 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 698294 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 9th record. run: 1 subRun: 0 event: 9 at 02-Dec-2022 17:16:18 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... 
| PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 589402 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 589402 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 10th record. run: 1 subRun: 0 event: 10 at 02-Dec-2022 17:16:33 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ 
| ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 608655 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 608655 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -PRINCIPAL TYPE: Run -PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE -SinglesGen.. | generator.................. | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -SinglesGen.. | kr85....................... | ..................... 
| sumdata::RunData.................. | ...- -SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- -G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- - -Total products (present, not present): 9 (9, 0). - -02-Dec-2022 17:16:49 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/4d/37/dc4_np04hd_507091350_bf2fe11d-a96c-4d7f-9f9a-996da762514b-gen_protodunehd_1GeV_56895272_0_g4_detsim_a.root" -02-Dec-2022 17:16:49 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" -02-Dec-2022 17:16:51 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" -Begin processing the 11th record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:16:58 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... 
| std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 674111 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 674111 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. 
| simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 12th record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:17:55 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... 
| largeant....... | ..................... | std::vector........................................ | 694266 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 694266 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 13th record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:18:10 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... 
| std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 726125 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 726125 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ 
| .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 14th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:18:26 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 648034 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 648034 -G4.......... 
| IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 15th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:18:40 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. 
| TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 670689 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 670689 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 16th record. run: 1 subRun: 0 event: 6 at 02-Dec-2022 17:18:55 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... 
| ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 517823 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 517823 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ 
| .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 17th record. run: 1 subRun: 0 event: 7 at 02-Dec-2022 17:19:08 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? 
-G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 821401 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 821401 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 18th record. run: 1 subRun: 0 event: 8 at 02-Dec-2022 17:19:26 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... 
| ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 641515 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 641515 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... 
| rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 19th record. run: 1 subRun: 0 event: 9 at 02-Dec-2022 17:19:40 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 685039 -G4.......... | largeant....... | ..................... | std::map >......................................... 
| .....1 -G4.......... | largeant....... | ..................... | art::Assns | 685039 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 20th record. run: 1 subRun: 0 event: 10 at 02-Dec-2022 17:19:55 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. 
| cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 766016 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 766016 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -PRINCIPAL TYPE: Run -PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE -SinglesGen.. | generator.................. 
| ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -SinglesGen.. | kr85....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- -G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- - -Total products (present, not present): 9 (9, 0). - -02-Dec-2022 17:20:12 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/7a/ec/dc4_np04hd_507091050_08b230ae-be8a-40a5-95e4-e3f67620bb77-gen_protodunehd_1GeV_56895181_0_g4_detsim_c.root" -02-Dec-2022 17:20:12 CST Initiating request to open input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" -02-Dec-2022 17:20:13 CST Opened input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" -Begin processing the 21st record. run: 1 subRun: 0 event: 1 at 02-Dec-2022 17:20:21 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... 
| PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 697439 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 697439 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... 
| art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 22nd record. run: 1 subRun: 0 event: 2 at 02-Dec-2022 17:21:19 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ 
| ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 762927 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 762927 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 23rd record. run: 1 subRun: 0 event: 3 at 02-Dec-2022 17:21:37 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... 
| std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 645289 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 645289 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. 
| simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 24th record. run: 1 subRun: 0 event: 4 at 02-Dec-2022 17:21:52 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... | std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... 
| largeant....... | ..................... | std::vector........................................ | 664759 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 664759 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ | .....0 - -Total products (present, not present): 21 (19, 2). - -Begin processing the 25th record. run: 1 subRun: 0 event: 5 at 02-Dec-2022 17:22:09 CST -PRINCIPAL TYPE: Event -PROCESS NAME | MODULE LABEL... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE.................................................... | ..SIZE -SinglesGen.. | generator...... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | kr85........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | ar42........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | rn222.......... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | generator...... | ..................... 
| std::vector................................ | .....1 -SinglesGen.. | ar39........... | ..................... | std::vector........................................... | .....1 -SinglesGen.. | cosmicgenerator | ..................... | std::vector........................................... | .....1 -SinglesGen.. | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | rns............ | ..................... | std::vector........................................ | .....2 -G4.......... | IonAndScint.... | ..................... | std::vector................................... | .....? -G4.......... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -G4.......... | largeant....... | ..................... | std::vector........................................ | 639526 -G4.......... | largeant....... | ..................... | std::map >......................................... | .....1 -G4.......... | largeant....... | ..................... | art::Assns | 639526 -G4.......... | IonAndScint.... | priorSCE............. | std::vector................................... | .....? -Detsim...... | TriggerResults. | ..................... | art::TriggerResults.................................................. | .....1 -Detsim...... | crt............ | ..................... | std::vector............................................ | .....0 -Detsim...... | crt............ | ..................... | art::Assns....................... | .....0 -Detsim...... | tpcrawdecoder.. | simpleSC............. | std::vector......................................... | .10240 -Detsim...... | tpcrawdecoder.. | daq.................. | std::vector........................................... | .10240 -Detsim...... | rns............ | ..................... | std::vector........................................ 
| .....0 - -Total products (present, not present): 21 (19, 2). - -PRINCIPAL TYPE: Run -PROCESS NAME | MODULE LABEL............... | PRODUCT INSTANCE NAME | DATA PRODUCT TYPE................. | SIZE -SinglesGen.. | generator.................. | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -SinglesGen.. | kr85....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | ar42....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | ar39....................... | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | cosmicgenerator............ | ..................... | sumdata::RunData.................. | ...- -SinglesGen.. | rn222...................... | ..................... | sumdata::RunData.................. | ...- -G4.......... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- -Detsim...... | GeometryConfigurationWriter | ..................... | sumdata::GeometryConfigurationInfo | ...- - -Total products (present, not present): 9 (9, 0). - -02-Dec-2022 17:22:25 CST Closed input file "xroot://eospublic.cern.ch//eos/experiment/neutplatform/protodune/dune/dc4-hd-protodune/db/04/dc4_np04hd_507090250_90d65b7c-be9f-4140-87fc-38747801eb35-gen_protodunehd_1GeV_56895201_0_g4_detsim_a.root" - -TrigReport ---------- Event summary ------------- -TrigReport Events total = 25 passed = 25 failed = 0 - -TrigReport ---------- Modules in End-path ---------- -TrigReport Run Success Error Name -TrigReport 25 25 0 out1 - -TimeReport ---------- Time summary [sec] ------- -TimeReport CPU = 150.911095 Real = 557.432832 - -MemReport ---------- Memory summary [base-10 MB] ------ -MemReport VmPeak = 2466.87 VmHWM = 1856.9 - -Art has completed and will exit with status 0. 
diff --git a/interactive_test.sh b/tests/interactive_test.sh similarity index 100% rename from interactive_test.sh rename to tests/interactive_test.sh diff --git a/other_test.sh b/tests/other_test.sh similarity index 100% rename from other_test.sh rename to tests/other_test.sh diff --git a/rejoin.sh b/tests/rejoin.sh similarity index 100% rename from rejoin.sh rename to tests/rejoin.sh diff --git a/submit_many.sh b/tests/submit_many.sh similarity index 100% rename from submit_many.sh rename to tests/submit_many.sh diff --git a/submit_test.sh b/tests/submit_test.sh similarity index 100% rename from submit_test.sh rename to tests/submit_test.sh diff --git a/wfs_test.sh b/wfs_test.sh deleted file mode 100644 index e69de29..0000000 From 6ef837fb9ac0475b3289d59649698a8037e5c3ae Mon Sep 17 00:00:00 2001 From: Heidi Schellman Date: Sun, 18 Dec 2022 19:15:09 -0800 Subject: [PATCH 034/166] cleanup and documentation --- batch/ddconfig.cfg | 13 +- copyme.sh | 7 + doxygen.cfg | 2639 +++++++++++++++++++++++++++++++ python/LArWrapper.py | 70 +- python/Loginator.py | 75 +- python/ddtest.py | 8 +- get_rse.py => python/get_rse.py | 0 python/run_interactive.py | 73 - python/run_lar.py | 64 +- python/submit_dd_jobs.py | 53 +- setup_hms.sh | 5 +- tarme.sh | 3 +- 12 files changed, 2841 insertions(+), 169 deletions(-) create mode 100755 copyme.sh create mode 100644 doxygen.cfg rename get_rse.py => python/get_rse.py (100%) delete mode 100644 python/run_interactive.py diff --git a/batch/ddconfig.cfg b/batch/ddconfig.cfg index 7c6557c..d19c8b6 100644 --- a/batch/ddconfig.cfg +++ b/batch/ddconfig.cfg @@ -3,7 +3,7 @@ group = dune experiment = dune wrapper = file:///${FIFE_UTILS_DIR}/libexec/fife_wrap #wrapper = /dune/app/users/$USER/new_fife_utils/fife_utils/v3_5_0/NULL/libexec/fife_wrap -output_dir = /pnfs/dune/scratch/users/$USER/dd_mc_test/ +output_dir = /pnfs/dune/scratch/users/$USER/ddtest/ extra_dir = ./ input_namespace = pdsp_det_reco fcl = eventdump.fcl @@ -19,7 +19,7 @@ 
mc_tar=$USER_8_15_22_mc [executable] #name = ./top_script.sh -name = \\\${CONDOR_DIR_INPUT}/top_script.sh +name = \\\${CONDOR_DIR_INPUT}/loginator/batch/top_script.sh arg_1 = --namespace arg_2 = %(input_namespace)s arg_3 = --fcl @@ -71,11 +71,12 @@ METACAT_SERVER_URL=https://metacat.fnal.gov:9443/dune_meta_demo/app [submit] G = %(group)s OS = SL7 -memory = 1999MB +memory = 2047MB +disk = 10GB expected-lifetime = 4h N = 1 +#resource-provides = usage_model=OFFSITE,OPPORTUNISTIC resource-provides = usage_model=OFFSITE,OPPORTUNISTIC,DEDICATED -#resource-provides = usage_model=OPPORTUNISTIC,DEDICATED c = "has_avx==True" lines_1 = '+FERMIHTC_AutoRelease=True' lines_2 = '+FERMIHTC_GraceMemory=2048' @@ -115,8 +116,8 @@ source_1 = /cvmfs/dune.opensciencegrid.org/products/dune/setup_dune.sh #source_2 = ${CONDOR_DIR_INPUT}/%(mc_tar)s/canned_client_setup.sh #source_3 = ${CONDOR_DIR_INPUT}/%(dd_tar)s/canned_client_setup.sh #setup = dunesw v09_55_01d00 -q e20:prof -prescript_0 = echo lsing -prescript_1 = ls ${CONDOR_DIR_INPUT} +prescript_0 = echo "Starting" +prescript_1 = ls -lrtR ${CONDOR_DIR_INPUT}/loginator/* ; echo "that was tarball" #prescript_1 = ls ${CONDOR_DIR_INPUT}/$USER_8*/ #prescript_4 = echo $PATH #prescript_5 = echo $PYTHONPATH diff --git a/copyme.sh b/copyme.sh new file mode 100755 index 0000000..95cac49 --- /dev/null +++ b/copyme.sh @@ -0,0 +1,7 @@ +export DWORK=dunegpvm03.fnal.gov:/dune/data/users/schellma/DataChallengeWork-loginator/ +scp *.sh $DWORK +scp tests/*.sh $DWORK/tests/. +scp batch/* $DWORK/batch/. +scp python/*.py $DWORK/python/. +scp fcl/*.fcl $DWORK/fcl/. + diff --git a/doxygen.cfg b/doxygen.cfg new file mode 100644 index 0000000..10ccaef --- /dev/null +++ b/doxygen.cfg @@ -0,0 +1,2639 @@ +# Doxyfile 1.9.2 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. 
+# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "data delivery logging" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. 
Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = docs + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = YES + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. 
+# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. 
+ +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful if your file system doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. 
+ +JAVADOC_AUTOBRIEF = NO + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. +# The default value is: YES. + +PYTHON_DOCSTRING = YES + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. 
If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:^^" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". Note that you cannot put \n's in the value part of an alias +# to insert newlines (in the resulting output). You can put ^^ in the value part +# of an alias to insert a newline as if a physical newline was in the original +# file. When you need a literal { or } or , in the value part of an alias you +# have to escape them by means of a backslash (\), this can lead to conflicts +# with the commands \{ and \} for these it is advised to use the version @{ and +# @} or use a double escape (\\{ and \\}) + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. 
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
+
+OPTIMIZE_FOR_FORTRAN = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
+# sources only. Doxygen will then generate output that is more tailored for that
+# language. For instance, namespaces will be presented as modules, types will be
+# separated into more groups, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_SLICE = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, JavaScript,
+# Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice,
+# VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
+# FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser
+# tries to guess whether the code is fixed or free formatted code, this is the
+# default for Fortran type files). For instance to make doxygen treat .inc files
+# as Fortran files (default is PHP), and .f files as C (default is Fortran),
+# use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen. When specifying no_extension you should add
+# * to the FILE_PATTERNS.
+# +# Note see also the list of default file extension mappings. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 5 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. 
+ +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. 
+ +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. 
The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which effectively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +#HMS +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If this flag is set to YES, the name of an unnamed parameter in a declaration +# will be determined by the corresponding definition. By default unnamed +# parameters remain unnamed in the output. +# The default value is: YES. + +RESOLVE_UNNAMED_PARAMS = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. 
If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# With the correct setting of option CASE_SENSE_NAMES doxygen will better be +# able to match the capabilities of the underlying filesystem. In case the +# filesystem is case sensitive (i.e. it supports files in the same directory +# whose names only differ in casing), the option must be set to YES to properly +# deal with such files in case they appear in the input. 
For filesystems that
+# are not case sensitive the option should be set to NO to properly deal with
+# output files written for symbols that only differ in casing, such as for two
+# classes, one named CLASS and the other named Class, and to also support
+# references to files without having to specify the exact matching casing. On
+# Windows (including Cygwin) and MacOS, users should typically set this option
+# to NO, whereas on Linux or other Unix flavors it should typically be set to
+# YES.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES = NO
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_HEADERFILE tag is set to YES then the documentation for a class
+# will show which file needs to be included to use the class.
+# The default value is: YES.
+
+SHOW_HEADERFILE = YES
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+ +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. 
+# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. 
If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. 
To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. See also section "Changing the +# layout of pages" for information. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. 
If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as documenting some parameters in +# a documented function twice, or documenting parameters that don't exist or +# using markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete +# function parameter documentation. If set to NO, doxygen will accept that some +# parameters have no documentation without warning. +# The default value is: YES. + +WARN_IF_INCOMPLETE_DOC = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong parameter +# documentation, but not about the absence of documentation. If EXTRACT_ALL is +# set to YES then this flag will automatically be disabled. See also +# WARN_IF_INCOMPLETE_DOC +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS +# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but +# at the end of the doxygen process doxygen will return with a non-zero status. +# Possible values are: NO, YES and FAIL_ON_WARNINGS. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. 
Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: +# https://www.gnu.org/software/libiconv/) for the list of possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# Note the list of default checked file patterns might differ from the list of +# default file extension mappings. 
+# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, +# *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C +# comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.c \ + *.cc \ + *.cxx \ + *.cpp \ + *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ + *.h \ + *.hh \ + *.hxx \ + *.hpp \ + *.h++ \ + *.l \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ + *.inc \ + *.m \ + *.markdown \ + *.md \ + *.mm \ + *.dox \ + *.py \ + *.pyw \ + *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f18 \ + *.f \ + *.for \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf \ + *.ice + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
+
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output.
If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. 
+ +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = NO + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. 
Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES.
+ +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. 
+# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a color-wheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use gray-scales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: +# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To +# create a documentation set, doxygen will generate a Makefile in the HTML +# output directory. Running make will produce the docset in that directory and +# running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. 
A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# on Windows. In the beginning of 2021 Microsoft took the original page, with +# a.o. the download links, offline the HTML help workshop was already many years +# in maintenance mode). You can download the HTML help workshop from the web +# archives at Installation executable (see: +# http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo +# ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe). 
+# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. 
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. 
For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location (absolute path +# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to +# run qhelpgenerator on the generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. 
When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine tune the look of the index (see "Fine-tuning the output"). As an +# example, the default style sheet generated by doxygen has an example that +# shows how to put an image at the root of the tree instead of the PROJECT_NAME. +# Since the tree basically has the same information as the tab index, you could +# consider setting DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +GENERATE_TREEVIEW = NO + +# When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the +# FULL_SIDEBAR option determines if the side bar is limited to only the treeview +# area (value NO) or if it should extend to the full height of the window (value +# YES). Setting this to YES gives a layout similar to +# https://docs.readthedocs.io with more room for contents, but less room for the +# project logo, title, and description. If either GENERATE_TREEVIEW or +# DISABLE_INDEX is set to NO, this option has no effect. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FULL_SIDEBAR = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions.
+# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# https://www.mathjax.org) which uses client side JavaScript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +USE_MATHJAX = NO + +# With MATHJAX_VERSION it is possible to specify the MathJax version to be used. +# Note that the different versions of MathJax have different requirements with +# regards to the different settings, so it is possible that also other MathJax +# settings have to be changed when switching between the different MathJax +# versions. +# Possible values are: MathJax_2 and MathJax_3. +# The default value is: MathJax_2. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_VERSION = MathJax_2 + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. For more details about the output format see MathJax +# version 2 (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3 +# (see: +# http://docs.mathjax.org/en/latest/web/components/output.html). +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility. This is the name for Mathjax version 2, for MathJax version 3 +# this will be translated into chtml), NativeMML (i.e. MathML. Only supported +# for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This +# is the name for Mathjax version 3, for MathJax version 2 this will be +# translated into HTML-CSS) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax.
However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. The default value is: +# - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 +# - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# for MathJax version 2 (see +# https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# For example for MathJax version 3 (see +# http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): +# MATHJAX_EXTENSIONS = ams +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /