From 67317a90bc6ce81be2933fc554f84c2791705fc4 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Wed, 28 Feb 2024 05:51:31 +0100 Subject: [PATCH 01/14] Remove unnecessary printout --- DataQuality/DCSCalculator2/python/subdetectors/mmg.py | 4 +--- DataQuality/DCSCalculator2/python/subdetectors/stg.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/DataQuality/DCSCalculator2/python/subdetectors/mmg.py b/DataQuality/DCSCalculator2/python/subdetectors/mmg.py index 1221cab3c62f..2568bea6b1e7 100644 --- a/DataQuality/DCSCalculator2/python/subdetectors/mmg.py +++ b/DataQuality/DCSCalculator2/python/subdetectors/mmg.py @@ -4,9 +4,7 @@ from ..lib import DCSC_DefectTranslate_Subdetector, DCSC_Variable from DQUtils import Databases from DQUtils.channel_mapping import get_channel_ids_names folder, database = "/MMG/DCS/HV", "COOLOFL_DCS/CONDBR2" -print(folder) -print(database) -print(Databases.get_folder(folder, database)) + ids, names, _ = get_channel_ids_names(Databases.get_folder(folder, database)) diff --git a/DataQuality/DCSCalculator2/python/subdetectors/stg.py b/DataQuality/DCSCalculator2/python/subdetectors/stg.py index cc3c44772154..e838bc2ef1f6 100644 --- a/DataQuality/DCSCalculator2/python/subdetectors/stg.py +++ b/DataQuality/DCSCalculator2/python/subdetectors/stg.py @@ -5,9 +5,7 @@ from DQUtils import Databases from DQUtils.channel_mapping import get_channel_ids_names folder, database = "/STG/DCS/HV", "COOLOFL_DCS/CONDBR2" -print(folder) -print(database) -print(Databases.get_folder(folder, database)) + ids, names, _ = get_channel_ids_names(Databases.get_folder(folder, database)) STGBA, STGBC, STGEA, STGEC = 1, 2, 3, 4 -- GitLab From 8d5afd7c9592efc9bc1de5703501afb6e3dffc7e Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:27:18 +0100 Subject: [PATCH 02/14] small flake8 fixes --- DataQuality/DQUtils/tests/test_coracool.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) 
diff --git a/DataQuality/DQUtils/tests/test_coracool.py b/DataQuality/DQUtils/tests/test_coracool.py index 33d8e9af5310..79d82f198a04 100755 --- a/DataQuality/DQUtils/tests/test_coracool.py +++ b/DataQuality/DQUtils/tests/test_coracool.py @@ -1,11 +1,10 @@ #! /usr/bin/env python -# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration import gc from collections import namedtuple -from pprint import pprint from DQUtils.db import Databases, fetch_iovs from DQUtils.quick_retrieve import browse_coracool @@ -34,7 +33,7 @@ def fetch_lb_timestamps(since, until): def test_coracool(): - if not "<coracool>" in folder.description(): + if "<coracool>" not in folder.description(): print(f"{folder.fullPath()} is not a coracool folder") return @@ -76,7 +75,7 @@ def test_refcounting(): gc.collect() try: raise RuntimeError - except: pass + except Exception: pass print("Objects alive before call:", len(gc.get_objects())) -- GitLab From 8809e55bda790eef9d11a9627a0ac4c187769199 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:27:46 +0100 Subject: [PATCH 03/14] Fix beamspot tag --- DataQuality/DCSCalculator2/python/subdetectors/idbs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DataQuality/DCSCalculator2/python/subdetectors/idbs.py b/DataQuality/DCSCalculator2/python/subdetectors/idbs.py index f37b401e3a1d..5fa0419ab407 100644 --- a/DataQuality/DCSCalculator2/python/subdetectors/idbs.py +++ b/DataQuality/DCSCalculator2/python/subdetectors/idbs.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration from DCSCalculator2.variable import CodeIOV from DCSCalculator2.lib import DCSC_Subdetector, DCSC_Global_Variable @@ -10,7 +10,7 @@ class IDBS_Beampos(DCSC_Global_Variable): input_db = "COOLOFL_INDET/CONDBR2" 
timewise_folder = False - fetch_args = dict(tag="IndetBeampos-ES1-UPD2") + fetch_args = dict(tag="IndetBeampos-RUN3-ES1-UPD2-02") STATUSMAP = { 59 : GREEN, -- GitLab From 52ba3a3580ddd3d5419efd6724787024cd1f5887 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:28:28 +0100 Subject: [PATCH 04/14] Small flake8 fix --- DataQuality/DCSCalculator2/python/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DataQuality/DCSCalculator2/python/main.py b/DataQuality/DCSCalculator2/python/main.py index df5f6910b348..677b6039b71c 100644 --- a/DataQuality/DCSCalculator2/python/main.py +++ b/DataQuality/DCSCalculator2/python/main.py @@ -133,7 +133,7 @@ def go(iov, systems, db, indb, timewise=False): if db != "None": with timer("write result (%i iovs)" % len(result_iovs)): log.debug("Writing result (%i iovs)", len(result_iovs)) - defect_iovs = list(filter(lambda iov: isinstance(iov, DefectIOV), result_iovs)) + defect_iovs = list(filter(lambda iov: isinstance(iov, DefectIOV), result_iovs)) # type: ignore if len(defect_iovs) > 0: ddb = DefectsDB(db, read_only=False, create=True) defect_names = set(i.channel for i in defect_iovs) -- GitLab From 42abf070053054b2a94a0a18bb74e97771542032 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:35:33 +0100 Subject: [PATCH 05/14] Add small script to generate sqlite file for DCSC testing --- .../share/create_dcsc_inputs_sqlite.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100755 DataQuality/DCSCalculator2/share/create_dcsc_inputs_sqlite.py diff --git a/DataQuality/DCSCalculator2/share/create_dcsc_inputs_sqlite.py b/DataQuality/DCSCalculator2/share/create_dcsc_inputs_sqlite.py new file mode 100755 index 000000000000..95c183e578db --- /dev/null +++ b/DataQuality/DCSCalculator2/share/create_dcsc_inputs_sqlite.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS 
collaboration + +# This is a script that can be used to copy a number of COOL folders to a sqlite file for DCS Calculator testing + +FOLDERS = { + 'COOLONL_TDAQ/CONDBR2': ['/TDAQ/RunCtrl/DataTakingMode', '/TDAQ/OLC/LHC/SCANDATA',], + "COOLONL_SCT/CONDBR2": ['/SCT/DAQ/Configuration/Module',], + "COOLOFL_TILE/CONDBR2": ['/TILE/OFL02/STATUS/ADC',], + "COOLOFL_INDET/CONDBR2": ['/Indet/Beampos',], + 'COOLONL_INDET/CONDBR2': ['/Indet/Onl/Beampos',], + 'COOLOFL_TRIGGER/CONDBR2': ['/TRIGGER/OFLLUMI/LumiAccounting',], + 'COOLONL_TRIGGER/CONDBR2': ['/TRIGGER/LUMI/LBLB',], + + 'COOLOFL_DCS/CONDBR2': [ + '/AFP/DCS/STATION', + '/AFP/DCS/SIT/LV', + '/AFP/DCS/SIT/HV', + '/AFP/DCS/SIT/HV_VOLTAGE_SET', + '/AFP/DCS/TOF_TDC_CURRENT', + '/AFP/DCS/TOF', + '/AFP/DCS/TOF_PMT_VOLTAGE_SET', + '/LAR/DCS/FSM', + '/MDT/DCS/HV', + '/MDT/DCS/LV', + '/MDT/DCS/JTAG', + '/PIXEL/DCS/FSMSTATUS', + '/PIXEL/DCS/FSMSTATE', + '/RPC/DCS/DQTOWERS_3', + '/SCT/DCS/HV', + '/SCT/DCS/CHANSTAT', + '/TDQ/DCS/WIENER/LVL1', + '/TILE/DCS/STATES', + '/TGC/DCS/PSHVCHSTATE', + '/TRT/DCS/HV/BARREL', + '/TRT/DCS/HV/ENDCAPA', + '/TRT/DCS/HV/ENDCAPC', + '/EXT/DCS/MAGNETS/SENSORDATA', + '/STG/DCS/HV', + '/MMG/DCS/HV' + ], +} + +import subprocess +RUN = 456685 +TARGET = 'sqlite://;schema=junk.db;dbname=CONDBR2' +for db, folders in FOLDERS.items(): + arg = ['AtlCoolCopy', db, TARGET, '-r', f'{RUN}', + '-bs', '10240', '-gt', '-create'] + for folder in folders: + arg += ['-f', folder] + subprocess.run(arg) \ No newline at end of file -- GitLab From 01f8efb06858fb8c743304dfdc54dfe4088ba1b3 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:38:21 +0100 Subject: [PATCH 06/14] Fixes for sqlalchemy 2.0 --- DataQuality/DQUtils/python/oracle.py | 41 +++++++++++++++++----------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/DataQuality/DQUtils/python/oracle.py b/DataQuality/DQUtils/python/oracle.py index dc7162a838a8..197f36a9d6cc 100644 --- 
a/DataQuality/DQUtils/python/oracle.py +++ b/DataQuality/DQUtils/python/oracle.py @@ -1,9 +1,10 @@ #! /usr/bin/env python -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration from sqlalchemy import (select, create_engine, MetaData, Table, Column, String, Integer) +from sqlalchemy.orm import sessionmaker from sqlalchemy.sql import and_ from DQUtils.sugar import IOVSet @@ -62,7 +63,8 @@ def make_oracle_connection(connection_string): username, password = get_authentication(connection_string) conn_str = "oracle://%s:%s@%s" % (username, password, host) engine = create_engine(conn_str, pool_recycle=10*60) - metadata = MetaData(bind=engine) + metadata = MetaData() + metadata.reflect(engine) return engine, metadata #conn_str = "oracle://%s:%s@ATLAS_COOLPROD" % get_authentication() @@ -73,6 +75,7 @@ def make_oracle_connection(connection_string): engine, metadata = make_oracle_connection("oracle://ATLAS_COOLPROD" "/ATLAS_COOLONL_GLOBAL") +Session = sessionmaker(engine) run_table = Table("ATLAS_RUN_NUMBER.RUNNUMBER", metadata, Column("NAME", String), @@ -101,8 +104,9 @@ def fetch_recent_runs(how_recent=ONE_WEEK, ascending=False): this_recent = strftime("%Y%m%dT%H%M%S", gmtime(time()-how_recent)) condition = and_(t.c.STARTAT >= this_recent, t.c.PARTITIONNAME == "ATLAS") - rows = select([run_table]).where(condition).order_by(ordering) - return rows.execute().fetchall() + rows = select(run_table).where(condition).order_by(ordering) + with Session() as session: + return session.execute(rows).fetchall() def fetch_runs_since(first_run=140000, ascending=False): """ @@ -113,24 +117,27 @@ def fetch_runs_since(first_run=140000, ascending=False): ordering = t.c.RUNNUMBER.asc() if ascending else t.c.RUNNUMBER.desc() condition = and_(t.c.RUNNUMBER > first_run, t.c.PARTITIONNAME == "ATLAS") - rows = select([run_table]).where(condition).order_by(ordering) - return rows.execute().fetchall() + 
rows = select(run_table).where(condition).order_by(ordering) + with Session() as session: + return session.execute(rows).fetchall() def make_atlas_partition_query(): - return (select([run_table.c.RUNNUMBER]) + return (select(run_table.c.RUNNUMBER) .where(run_table.c.PARTITIONNAME == "ATLAS") .order_by(run_table.c.RUNNUMBER)) def fetch_last_n_atlas_runs(n=10): - rows = (select([run_table.c.RUNNUMBER]) + rows = (select(run_table.c.RUNNUMBER) .where(run_table.c.PARTITIONNAME == "ATLAS") .order_by(run_table.c.RUNNUMBER.desc()).limit(n)) - return [row.RUNNUMBER for row in reversed(rows.execute().fetchall())] + with Session() as session: + return [row.RUNNUMBER for row in reversed(session.execute(rows).fetchall())] def fetch_atlas_runs(): rows = make_atlas_partition_query() - return rows.execute().fetchall() + with Session() as session: + return session.execute(rows).fetchall() def atlas_runs_set(): return set(x.RUNNUMBER for x in fetch_atlas_runs()) @@ -138,9 +145,10 @@ def atlas_runs_set(): def atlas_runs_between(first, last): rows = make_atlas_partition_query() - rows = rows.where(first <= run_table.c.RUNNUMBER <= last) + rows = rows.where(run_table.c.RUNNUMBER.between(first, last)) - return [row.RUNNUMBER for row in rows.execute().fetchall()] + with Session() as session: + return [row.RUNNUMBER for row in session.execute(rows).fetchall()] def filter_atlas_runs(iovs): @@ -148,10 +156,11 @@ def filter_atlas_runs(iovs): first, last = min(iov_runs), max(iov_runs) rows = make_atlas_partition_query() - rows = rows.where(first <= run_table.c.RUNNUMBER <= last) + rows = rows.where(run_table.c.RUNNUMBER.between(first, last)) - atlas_runs = set(row.RUNNUMBER for row in rows.execute().fetchall()) - keep_runs = atlas_runs.intersection(iov_runs) + with Session() as session: + atlas_runs = set(row.RUNNUMBER for row in session.execute(rows).fetchall()) + keep_runs = atlas_runs.intersection(iov_runs) - return IOVSet(iov for iov in iovs if iov.since.run in keep_runs) + return 
IOVSet(iov for iov in iovs if iov.since.run in keep_runs) -- GitLab From e517e8ed4d7ab7696c9e45a0bdbe9a1d9d1aef86 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:39:36 +0100 Subject: [PATCH 07/14] Add tests for oracle run number access --- DataQuality/DQUtils/CMakeLists.txt | 4 ++-- DataQuality/DQUtils/tests/test_oracle.py | 25 ++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) create mode 100755 DataQuality/DQUtils/tests/test_oracle.py diff --git a/DataQuality/DQUtils/CMakeLists.txt b/DataQuality/DQUtils/CMakeLists.txt index d274065386a0..3e6308e8c0ec 100644 --- a/DataQuality/DQUtils/CMakeLists.txt +++ b/DataQuality/DQUtils/CMakeLists.txt @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( DQUtils ) @@ -37,6 +37,6 @@ atlas_install_python_modules( python/*.py python/ext python/sugar atlas_install_scripts( share/count_filled_lbs.py share/dqu_grldiff.py share/dqu_lumi.py share/dqu_grl_extract_period.py share/dqu_dump_periods.py ) -foreach( _test coracool events iovset_pickle read iovtype ) +foreach( _test coracool events iovset_pickle read iovtype oracle ) atlas_add_test( ${_test} SCRIPT tests/test_${_test}.py POST_EXEC_SCRIPT nopost.sh ) endforeach() diff --git a/DataQuality/DQUtils/tests/test_oracle.py b/DataQuality/DQUtils/tests/test_oracle.py new file mode 100755 index 000000000000..39a796f4a1e4 --- /dev/null +++ b/DataQuality/DQUtils/tests/test_oracle.py @@ -0,0 +1,25 @@ +#! 
/usr/bin/env python + +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration + +def test_fetch_runs(): + from DQUtils import oracle + from DQUtils.sugar import RunLumi, RANGEIOV_VAL + + # we just want to check this doesn't fail + oracle.fetch_recent_runs() + + # these we check for consistency + lastruns = set(oracle.fetch_last_n_atlas_runs(10)) + assert len(lastruns) == 10 + minrun, maxrun = min(lastruns), max(lastruns) + selectedruns = {_[1] for _ in oracle.fetch_runs_since(minrun-1)} + assert selectedruns == lastruns, f'{selectedruns}, {lastruns}' + selectedruns = set(oracle.atlas_runs_between(minrun, maxrun)) + assert lastruns == selectedruns + fakeiovs = [RANGEIOV_VAL(RunLumi(_, 1), RunLumi(_, 0xffffffff)) + for _ in range(minrun, maxrun+1)] + assert oracle.filter_atlas_runs(fakeiovs).runs == lastruns + +if __name__ == '__main__': + test_fetch_runs() \ No newline at end of file -- GitLab From 1c4331175bf394297f9a234592ce9f1cca11ce3f Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 03:39:47 +0100 Subject: [PATCH 08/14] Flake8 fix --- DataQuality/DQUtils/python/db.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/DataQuality/DQUtils/python/db.py b/DataQuality/DQUtils/python/db.py index ada40a76ebe5..e99d03820336 100755 --- a/DataQuality/DQUtils/python/db.py +++ b/DataQuality/DQUtils/python/db.py @@ -1,6 +1,6 @@ #! 
/usr/bin/env python -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration from __future__ import with_statement @@ -411,9 +411,6 @@ class Databases(object): from PyCool import cool dbService = cool.DatabaseSvcFactory.databaseService() connection = dbService.createDatabase(res_db_string) - except Exception: - log.error(sys.exc_info()[0]) - raise return connection @classmethod -- GitLab From b8887255a4b740c92b883ce81fdce696937c81fa Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 21:34:46 +0100 Subject: [PATCH 09/14] Remove deprecated code --- .../python/subdetectors/__init__.py | 4 +- .../python/subdetectors/idbs.py | 38 ---- .../python/subdetectors/pixels.py | 166 ------------------ 3 files changed, 1 insertion(+), 207 deletions(-) delete mode 100644 DataQuality/DCSCalculator2/python/subdetectors/idbs.py delete mode 100644 DataQuality/DCSCalculator2/python/subdetectors/pixels.py diff --git a/DataQuality/DCSCalculator2/python/subdetectors/__init__.py b/DataQuality/DCSCalculator2/python/subdetectors/__init__.py index 191fd0b6d136..f3ff96a51b12 100644 --- a/DataQuality/DCSCalculator2/python/subdetectors/__init__.py +++ b/DataQuality/DCSCalculator2/python/subdetectors/__init__.py @@ -4,7 +4,6 @@ from .afp import AFP from .lar import LAr from .lucid import Lucid from .mdt import MDT -from .pixels import Pixels from .rpc import RPC from .sct import SCT from .tdq import TDQ @@ -14,11 +13,10 @@ from .tgc import TGC from .stg import STG from .mmg import MMG # Non-detector flags -from .idbs import IDBS from .magnets import Magnets from .global_system import Global from .trig import Trigger -ALL_SYSTEMS = [AFP, LAr, Lucid, MDT, Pixels, RPC, SCT, TDQ, Tile, TGC, TRT, IDBS, Magnets, Global, Trigger, STG, MMG] +ALL_SYSTEMS = [AFP, LAr, Lucid, MDT, RPC, SCT, TDQ, Tile, TGC, TRT, Magnets, Global, Trigger, STG, MMG] SYS_NAMES = ", ".join(map(lambda 
x: x.__name__, ALL_SYSTEMS)) SYSTEM_MAP = dict((x.__name__, x) for x in ALL_SYSTEMS) diff --git a/DataQuality/DCSCalculator2/python/subdetectors/idbs.py b/DataQuality/DCSCalculator2/python/subdetectors/idbs.py deleted file mode 100644 index 5fa0419ab407..000000000000 --- a/DataQuality/DCSCalculator2/python/subdetectors/idbs.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration - -from DCSCalculator2.variable import CodeIOV -from DCSCalculator2.lib import DCSC_Subdetector, DCSC_Global_Variable -from DCSCalculator2.consts import GREY, YELLOW, GREEN - -IDBS = 150 - -class IDBS_Beampos(DCSC_Global_Variable): - - input_db = "COOLOFL_INDET/CONDBR2" - timewise_folder = False - fetch_args = dict(tag="IndetBeampos-RUN3-ES1-UPD2-02") - - STATUSMAP = { - 59 : GREEN, - 83 : YELLOW, - 0 : GREY, - } - - def make_good_iov(self, iov): - """ - Logic to choose the colour code the calculator should write - """ - state = self.STATUSMAP.get(iov.status, GREY) - return CodeIOV(iov.since, iov.until, iov.channel, state) - -class IDBS(DCSC_Subdetector): - - folder_base = "/Indet" - - mapping = { - IDBS: [0], - } - - variables = [ - IDBS_Beampos("Beampos", None), - ] diff --git a/DataQuality/DCSCalculator2/python/subdetectors/pixels.py b/DataQuality/DCSCalculator2/python/subdetectors/pixels.py deleted file mode 100644 index e748b054ed5b..000000000000 --- a/DataQuality/DCSCalculator2/python/subdetectors/pixels.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration - -from DCSCalculator2.lib import DCSC_Variable, GoodIOV, OUT_OF_CONFIG, DCSC_DefectTranslate_Subdetector - -from DQUtils.events import process_iovs_mc -from DQUtils.sugar import IOVSet - -import logging -logger = logging.getLogger("DCSCalculator2.subdetectors.pixels") - -WHITE, BLACK, GREY, RED, YELLOW, GREEN = None, -1, 0, 1, 2, 3 - -class DCSC_Variable_Pixels_Config(DCSC_Variable): - - is_config_variable = True - 
timewise_folder = False - input_db = "COOLOFL_DCSOFL/CONDBR2" - fetch_args = dict(what=[]) - - def make_good_iovs(self, iovs): - """ - The absence of an IoV means that channel is out of config. - """ - - seen_channels = set() - result = IOVSet() - for since, until, channel, (state,) in process_iovs_mc(iovs): - seen_channels.add(channel) - if state._is_empty: - # There is an IoV hole for this channel. - result.add(since, until, channel, OUT_OF_CONFIG) - - # Need to deal with channels which were not seen at all for the query - # range, since they will not appear in the above loop - all_channels = self.subdetector.input_channel_set - - for missing_channel in (all_channels - seen_channels): - result.add(0, 2**63-1, missing_channel, OUT_OF_CONFIG) - - return result.solidify(GoodIOV) - -class Pixels(DCSC_DefectTranslate_Subdetector): - - def __init__(self, *args, **kwargs): - super(Pixels, self).__init__(*args, **kwargs) - self.translators = [Pixels.pix_color_to_defect_translator] - - @staticmethod - def pix_color_to_defect_translator(flag_iovs): - from DCSCalculator2.consts import GREEN - from DCSCalculator2.variable import DefectIOV - from DQUtils import process_iovs - rv = [] - - defect_mapping = { - 101: "PIXEL_BARREL_STANDBY", - 102: "PIXEL_LAYER0_STANDBY", - 103: "PIXEL_IBL_STANDBY", - 104: "PIXEL_ENDCAPA_STANDBY", - 105: "PIXEL_ENDCAPC_STANDBY" - } - - for since, until, states in process_iovs(*flag_iovs.by_channel.values()): - #print states - for state in states: - if state.Code != GREEN: - badfrac = 'Standby module fraction: ' + str( state.deadFrac ) - rv.append(DefectIOV(since=since, until=until, - channel=defect_mapping[state.channel], - present=True, comment = badfrac)) - return rv - - input_db = "COOLOFL_DCS/CONDBR2" - folder_base = "/PIXEL/DCS" - - cid_barrel, cid_blayer, cid_ibl = 101, 102, 103 - cid_endcapa, cid_endcapb = 104, 105 - - mapping = { - 101: list(range( 722, 1892)), - 102: list(range( 436, 722)), - 103: list(range( 156, 436)), - 104: 
list(range(1892, 2036)), - 105: list(range( 12, 156)), - } - - variables = [ - DCSC_Variable("FSMSTATUS", lambda iov: iov.FSM_status in ("OK", "WARNING")), - DCSC_Variable("FSMSTATE", lambda iov: iov.FSM_state == "READY"), - ] - - # Note barrel and blayer use different deadfraction_caution. Implemented in - # calculate_iov below. - dead_fraction_caution = 0.2 - dead_fraction_caution_barrel = 0.05 - dead_fraction_bad = 0.9 - - assert dead_fraction_caution_barrel <= dead_fraction_caution, ( - "logic needs changing in calculate_iov before removing this assert") - - def start(self): - self.bad_modules = IOVSet() - - def tally_additional_info(self, since, until, output_channel, - states, state_iovs): - """ - Figure out which modules are OOC when - """ - ooc_modules = self.get_ids_which_are(output_channel, states, OUT_OF_CONFIG) - ooc_modules = tuple(sorted(ooc_modules)) - self.bad_modules.add(since, until, output_channel, ooc_modules) - - def done(self): - if logger.isEnabledFor(logging.DEBUG): - logger.debug("The following ranges indicate bad modules:") - #from pprint import pprint - #pprint(list(self.bad_modules)) - - def calculate_dead_fraction(self, since, until, output_channel, states, - state_iovs): - """ - Compute the dead fraction differently for the barrel and blayer. - - This function is written with the assumption that - `dead_fraction_caution_barrel` is smaller than `dead_fraction_caution` - because the new logic is only run if the code is better than Yellow. - """ - cdf = super(Pixels, self).calculate_dead_fraction - result = cdf(since, until, output_channel, states, state_iovs) - - self.tally_additional_info(since, until, output_channel, states, state_iovs) - - code, dead_fraction, thrust, n_config, n_working = result - - if (output_channel not in [self.cid_barrel, self.cid_blayer] or - code <= YELLOW): - # Result is already correct - return result - - # Need to check whether code needs modifying. 
- if self.dead_fraction_caution_barrel < dead_fraction < self.dead_fraction_bad: - code = YELLOW - - return code, dead_fraction, thrust, n_config, n_working - - def initialize_name_mapping(self): - from DQUtils.db import Databases, get_channel_ids_names - - f = Databases.get_folder("/TDAQ/EnabledResources/ATLAS/PIXEL/Modules", - "COOLONL_TDAQ") - cids, cnames, cmap = get_channel_ids_names(f) - self.name_mapping = dict(zip(cids, cnames)) - self.name_mapping_initialized = True - - # TODO: is this ok?? - def get_name_for_input_channel(self, input_channel): - """ - Transform an input channelid into a name - """ - #if not getattr(self, "name_mapping_initialized", False): - # self.initialize_name_mapping() - - #if input_channel in self.name_mapping: - # return (input_channel, self.name_mapping[input_channel]) - - return input_channel -- GitLab From 8be2c317226629d6d220aa5fcaecb45d086d45d1 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 21:59:13 +0100 Subject: [PATCH 10/14] Code cleanup --- .../share/run/GetCompletedRuns.py | 14 --- .../share/run/GetCompletedRuns.sh | 101 ------------------ .../DCSCalculator2/share/run/TestRunning.sh | 34 ------ 3 files changed, 149 deletions(-) delete mode 100755 DataQuality/DCSCalculator2/share/run/GetCompletedRuns.py delete mode 100755 DataQuality/DCSCalculator2/share/run/GetCompletedRuns.sh delete mode 100755 DataQuality/DCSCalculator2/share/run/TestRunning.sh diff --git a/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.py b/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.py deleted file mode 100755 index b78db2be66d5..000000000000 --- a/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.py +++ /dev/null @@ -1,14 +0,0 @@ - -import xmlrpclib - -from optparse import OptionParser - -parser = OptionParser() -(options, args) = parser.parse_args() - -lastRun = args[0] - -s=xmlrpclib.Server('http://atlasdqm.cern.ch:8080') -runList = s.get_completed_runs({'low_run':lastRun}) - -print 
runList diff --git a/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.sh b/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.sh deleted file mode 100755 index fea06ccb4389..000000000000 --- a/DataQuality/DCSCalculator2/share/run/GetCompletedRuns.sh +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env bash - -cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc - -# Takes as input the highest run number which was used in the last round. -# Don't run over this one, but do run over all others -# (if they are recorded here then the run should have finished). - -runsSince=`cat lastRunNumber` -echo "lastRunNumber = $runsSince" - -python GetCompletedRuns.py $runsSince > runLog - -output=`cat runLog` -echo "GetCompletedRuns.py output = $output" - -perl -pi -e "s/\[//g" runLog -perl -pi -e "s/\]//g" runLog -perl -pi -e "s/,\s/\n/g" runLog - -output2=`cat runLog` -echo "Tidied up output = $output2" - -# Put the list of run numbers obtained into reverse order so that the first run number in the list is the latest. -sort -ru runLog > reverseOrdered -output3=`cat reverseOrdered` -echo "Puting into reverse order. New output = $output3" -rm -f runLog -echo "Removed runLog" - -# Remove the old runList -rm -f runList -echo "Removed the old runList" - -# Save the latest run number for the next round to keep track of where we are. -awk NR==1 reverseOrdered > newLastRunNumber -output6=`cat newLastRunNumber` -echo "New lastRunNumber = $output6" - -# Check the size of the last run number to make sure it's not empty -# (size should be == 7 if correct format). -SIZE1=`du -b newLastRunNumber | awk -F ' ' '{print $1}'` -echo "Size of newLastRunNumber = $SIZE1" -if [ $SIZE1 -le 6 ] -then - echo "newLastRunNumber had size = $SIZE1" - echo "newLastRunNumber = $newLastRunNumber" - echo "runsSince = $runsSince" - -else - # Now that we know that newLastRunNumber exists, use this to replace lastRunNumber - echo "A new lastRunNumber exists so removing the old one." 
- rm -f lastRunNumber - cp newLastRunNumber lastRunNumber - output4=`cat lastRunNumber` - echo "New lastRunNumber = $output4" - - # Now put back in chronological order - echo "Now put back in chronological order" - sort reverseOrdered > ordered - #rm -f runList - - # Remove the oldest run on the list because this should have been run beforehand. - lastOne=`cat lastRunNumber` - echo "lastOne = $lastOne" - echo "runsSince = $runsSince" - if [ $lastOne = $runsSince ] - then - echo "No new runs have finished since you last checked" - else - echo "Checking to make sure that the previous lastRunNumber was returned by GetCompletedRuns.py" - grep $runsSince ordered > check - output5=`cat check` - echo "Result of grep $runsSince ordered (check) = $output5" - - SIZE2=`du -b check | awk -F ' ' '{print $1}'` - echo "Size of check = $SIZE2" - - if [ $SIZE2 -le 6 ] - then - echo "The old lastRunNumber (runsSince = $runsSince) was not returned by GetCompletedRuns.py." - echo "Including all returned runs in runList." - cp ordered runList - else - echo "The old lastRunNumber (runsSince = $runsSince) was returned by GetCompletedRuns.py." - echo "Excluding this run from runList." - sed 1d ordered > runList - fi - allRuns=`cat runList` - echo "$allRuns" - cat runList > logRunNumbers/runsToRun_upTo$lastOne - rm -f check - fi - rm -f ordered -fi - -echo "Removing reverseOrdered and newLastRunNumber." -rm -f reverseOrdered -rm -f newLastRunNumber -echo "Done." -#exit diff --git a/DataQuality/DCSCalculator2/share/run/TestRunning.sh b/DataQuality/DCSCalculator2/share/run/TestRunning.sh deleted file mode 100755 index da58c3c127a0..000000000000 --- a/DataQuality/DCSCalculator2/share/run/TestRunning.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/zsh - -if [ -e "/afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc/runList" ] -then - #Set up the environment and get COOL authorisations - echo "Setting up Athena 15.5.2.5 Tier0 release." 
- source ~/cmthome/setup.sh -tag=32,opt,15.5.2.5,gcc34,oneTest,AtlasTier0 - - echo "Getting COOL authentications." - export CORAL_AUTH_PATH=$HOME/private - - echo "Going to run directory" - cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/DCSCalculator/i686-slc4-gcc34-opt - - #Get list of runs to loop over by sorting temporary run list and removing any duplicates. - list=`cat /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc/runList` - for run in $list - do - - echo "-----------------------------------------------------> NEW RUN!" - echo "Run number = $run" - echo "Running.........." - - done - -else - echo "No new runs to process" - -fi - -echo "Finished" - -exit - -- GitLab From f4cfe90e54218e10cdb4e47a811f35633136b6a1 Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 23:10:38 +0100 Subject: [PATCH 11/14] Containerization stuff --- .../DCSCalculator2/share/container/Dockerfile | 14 ++++++++++++++ .../share/container/docker_build.sh | 18 ++++++++++++++++++ .../DCSCalculator2/share/container/execute.sh | 10 ++++++++++ 3 files changed, 42 insertions(+) create mode 100644 DataQuality/DCSCalculator2/share/container/Dockerfile create mode 100755 DataQuality/DCSCalculator2/share/container/docker_build.sh create mode 100755 DataQuality/DCSCalculator2/share/container/execute.sh diff --git a/DataQuality/DCSCalculator2/share/container/Dockerfile b/DataQuality/DCSCalculator2/share/container/Dockerfile new file mode 100644 index 000000000000..a7550556d4f0 --- /dev/null +++ b/DataQuality/DCSCalculator2/share/container/Dockerfile @@ -0,0 +1,14 @@ +FROM gitlab-registry.cern.ch/linuxsupport/alma9-base + +RUN yum -y update && yum install https://linuxsoft.cern.ch/wlcg/el9/aarch64/wlcg-repo-1.0.0-1.el9.noarch.rpm -y \ + && yum -y install HEP_OSlibs glibc-langpack-en cmake make g++ uuid python3 libcap pkg-config valgrind unzip nano krb5-libs \ + expat-devel openldap-devel krb5-devel libnsl2-devel pam-devel keyutils-libs-devel lksctp-tools-devel which 
\ + && yum clean all + +WORKDIR /RunDCSCalc +RUN mkdir logfiles + +COPY run container/execute.sh ./ +COPY build ./build + +CMD ["./execute.sh"] \ No newline at end of file diff --git a/DataQuality/DCSCalculator2/share/container/docker_build.sh b/DataQuality/DCSCalculator2/share/container/docker_build.sh new file mode 100755 index 000000000000..25a110103fbf --- /dev/null +++ b/DataQuality/DCSCalculator2/share/container/docker_build.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +echo $0 +SCRIPTDIR=$(dirname $(realpath $0)) +mkdir docker_build +cd docker_build +SHAREDIR=$SCRIPTDIR/.. +cp -dpr $SHAREDIR/run . +cp -dpr $SHAREDIR/container . +mkdir build +if [ -n "$WorkDir_DIR" ] ; then + cp -dpLr $WorkDir_DIR build +fi + +docker build --push -t registry.cern.ch/atlas-dqm-core/dcscalculator:latest -f container/Dockerfile . + +cd .. +rm -rf docker_build \ No newline at end of file diff --git a/DataQuality/DCSCalculator2/share/container/execute.sh b/DataQuality/DCSCalculator2/share/container/execute.sh new file mode 100755 index 000000000000..59d686327f9f --- /dev/null +++ b/DataQuality/DCSCalculator2/share/container/execute.sh @@ -0,0 +1,10 @@ +#!/usr/bin/bash + +INTERVAL=${INTERVAL:-30m} + +while true +do + ./GetNextRunList.py + ./RunDCSCalculator.sh + sleep $INTERVAL +done \ No newline at end of file -- GitLab From 8a6c80ec489c4892ac2451d76bbd310890b4998a Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 23:11:11 +0100 Subject: [PATCH 12/14] Allow communication from non-AFS nodes --- DataQuality/DataQualityUtils/python/stompconfig.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/DataQuality/DataQualityUtils/python/stompconfig.py b/DataQuality/DataQualityUtils/python/stompconfig.py index c8f389fbd993..c2d893a49498 100644 --- a/DataQuality/DataQualityUtils/python/stompconfig.py +++ b/DataQuality/DataQualityUtils/python/stompconfig.py @@ -1,8 +1,9 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS 
collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration def config(): - with open('/afs/cern.ch/user/a/atlasdqm/atlas/mqinfo') as f: + import os + with open(os.environ.get('DQU_MQINFO', '/afs/cern.ch/user/a/atlasdqm/atlas/mqinfo')) as f: return {'username': 'atlasdqm', 'passcode': f.read().strip()} raise RuntimeError('Unable to read STOMP connection info') -- GitLab From 5016ddc634fb70d3113672f11452db265710d30c Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 23:12:21 +0100 Subject: [PATCH 13/14] Update DCS Calculator test --- DataQuality/DCSCalculator2/CMakeLists.txt | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/DataQuality/DCSCalculator2/CMakeLists.txt b/DataQuality/DCSCalculator2/CMakeLists.txt index 6328802a543a..9f64f901610d 100644 --- a/DataQuality/DCSCalculator2/CMakeLists.txt +++ b/DataQuality/DCSCalculator2/CMakeLists.txt @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( DCSCalculator2 ) @@ -13,7 +13,9 @@ atlas_install_scripts( share/*.py ) # Test: does DCS Calculator work? 
atlas_add_test( DCSCRun - SCRIPT dcsc.py -r348885 -d 'sqlite://$<SEMICOLON>schema=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/AthDataQuality/dcscalc_input_348885.db$<SEMICOLON>dbname=CONDBR2' - POST_EXEC_SCRIPT nopost.sh - PROPERTIES TIMEOUT 600 + SCRIPT dcsc.py -r456685 -d 'sqlite://$<SEMICOLON>schema=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/AthDataQuality/dcscalc_input_456685.db$<SEMICOLON>dbname=CONDBR2' + POST_EXEC_SCRIPT noerror.sh + PROPERTIES TIMEOUT 600 + LOG_SELECT_PATTERN "DCS Calculator failed" + ENVIRONMENT "PBEAST_SERVER_HTTPS_PROXY=atlasgw.cern.ch:3128" ) -- GitLab From 1f2f7b8e3252ebfcf9d179fbcfd16536d9efeddb Mon Sep 17 00:00:00 2001 From: Peter Onyisi <ponyisi@utexas.edu> Date: Thu, 29 Feb 2024 23:12:44 +0100 Subject: [PATCH 14/14] Update scripts for actual DCS Calculator execution --- .../DCSCalculator2/share/run/ExecuteDCSC2.sh | 47 +++----- .../share/run/GetNextRunList.py | 43 +++++++ .../share/run/RunDCSCalculator.sh | 109 ++++++++---------- 3 files changed, 109 insertions(+), 90 deletions(-) create mode 100755 DataQuality/DCSCalculator2/share/run/GetNextRunList.py diff --git a/DataQuality/DCSCalculator2/share/run/ExecuteDCSC2.sh b/DataQuality/DCSCalculator2/share/run/ExecuteDCSC2.sh index e118836860f3..4b1ef98ba6c0 100755 --- a/DataQuality/DCSCalculator2/share/run/ExecuteDCSC2.sh +++ b/DataQuality/DCSCalculator2/share/run/ExecuteDCSC2.sh @@ -1,10 +1,11 @@ #! 
/usr/bin/env bash +# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration -DCSC2_SYSTEMS="-sMDT -sTile -sTGC -sRPC -sTDQ -sMagnets -sGlobal -sTRT -sSCT -sLAr -sLucid -sTrigger -sAFP" -#DCSC2_SYSTEMS="-sMDT -sTile -sTGC -sRPC -sTDQ -sCSC -sMagnets -sGlobal -sPixels -sTRT -sLAr -sLucid" #outdated -#DEST_DB=COOLOFL_GLOBAL/COMP200 #outdated -DEST_DB=COOLOFL_GLOBAL/CONDBR2 +DCSC2_SYSTEMS=${DCSC_SYSTEMS:-"-sMDT -sTile -sTGC -sRPC -sTDQ -sMagnets -sGlobal -sTRT -sSCT -sLAr -sLucid -sTrigger -sAFP -sMMG -sSTG"} +DCSC2_ARGS=${DCSC_ARGS:-""} +# the bottom should be overridden by environment variables +DEST_DB=${DCSC_DEST_DB:-"sqlite://;schema=test.db;dbname=CONDBR2"} RUN=$1 shift @@ -16,35 +17,17 @@ fi echo "Running for $RUN" -export AtlasSetup=/afs/cern.ch/atlas/software/dist/AtlasSetup -pushd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/prodarea > /dev/null -source $AtlasSetup/scripts/asetup.sh 24.0.0,Athena -# Parse the major and minor of the Python version -PyVersion=$(python --version | sed -E 's/Python ([0-9]+\.[0-9]+)\.[0-9]+/\1/') -# Add the 'tdaq-' prefix to the TDAQ version if not present -TdaqVersion=$TDAQ_VERSION -if [[ ! 
$TdaqVersion = tdaq-* ]]; then - TdaqVersion=tdaq-$TdaqVersion +export AtlasSetup=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase/x86_64/AtlasSetup/current/AtlasSetup +source $AtlasSetup/scripts/asetup.sh ${ATLAS_RELEASE:-24.0.25},Athena +if [ -n "${DCSC_BUILD_DIR}" ]; then + source ${DCSC_BUILD_DIR}/*/setup.sh fi -# Add auth-get-sso-cookie to the path - required by pBeast's ServerProxy -AUTH_GET_SSO_COOKIE="$(ls -d $LCG_RELEASE_BASE/auth_get_sso_cookie/*/$BINARY_TAG | sort -rV | head -n1)" -export PATH="$AUTH_GET_SSO_COOKIE/bin:$PATH" -# Add LCG packages at the end of the Python path - auth-get-sso-cookie dependencies -for package in $LCG_RELEASE_BASE/*/*/$BINARY_TAG/lib/python$PyVersion/site-packages; do - export PYTHONPATH="$PYTHONPATH:$package" -done -# Add TDAQ external Python packages to the Python path - pBeast and auth-get-sso-cookie dependencies -export PYTHONPATH="$PYTHONPATH:$TDAQ_RELEASE_BASE/tdaq/$TdaqVersion/installed/external/$BINARY_TAG/lib/python$PyVersion/site-packages" -source /afs/cern.ch/user/a/atlasdqm/DQCalculators/DCSCalc/prodarea/build/$BINARY_TAG/setup.sh - -export CORAL_AUTH_PATH=/afs/cern.ch/user/a/atlasdqm/private -export CORAL_DBLOOKUP_PATH=/afs/cern.ch/user/a/atlasdqm/private + +export CORAL_AUTH_PATH=${DCSC_AUTH_PATH:-/afs/cern.ch/user/a/atlasdqm/private} +export CORAL_DBLOOKUP_PATH=${DCSC_AUTH_PATH:-/afs/cern.ch/user/a/atlasdqm/private} +echo "Authentication from" $CORAL_AUTH_PATH +unset FRONTIER_SERVER #export FRONTIER_LOG_LEVEL=debug -export PBEAST_SERVER='https://pc-atlas-www.cern.ch' -export PBEAST_SERVER_SSO_SETUP_TYPE=AutoUpdateKerberos export PBEAST_SERVER_HTTPS_PROXY='atlasgw.cern.ch:3128' -export REQUESTS_CA_BUNDLE=/etc/pki/tls/certs/ca-bundle.crt -#dcsc.py -h -dcsc.py $@ $DCSC2_SYSTEMS -r$RUN -o$DEST_DB --email-on-failure -#dcsc.py $@ $DCSC2_SYSTEMS -r$RUN -o$DEST_DB +dcsc.py $@ $DCSC2_SYSTEMS $DCSC2_ARGS -r$RUN -o$DEST_DB --email-on-failure diff --git a/DataQuality/DCSCalculator2/share/run/GetNextRunList.py 
b/DataQuality/DCSCalculator2/share/run/GetNextRunList.py new file mode 100755 index 000000000000..a06ccb9acb58 --- /dev/null +++ b/DataQuality/DCSCalculator2/share/run/GetNextRunList.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python3 + +import os.path, os +SCRIPTDIR = os.path.dirname(os.path.realpath(__file__)) +DATADIR = os.environ.get('DCSC_DATADIR', SCRIPTDIR) + +from time import strftime + +from xmlrpc.client import ServerProxy + +def get_runs_since(run_number: int) -> list[int]: + """ + Get runs that occurred since `run_number`, excluding it. + """ + s = ServerProxy('http://atlasdqm.cern.ch:8080') + run_list: list[int] = s.get_completed_runs({'low_run': run_number}) # type: ignore + if run_number in run_list: + run_list.remove(run_number) + return run_list + +def main(): + """ + Read the lastRunNumber file, find runs which happened since it. + Then create a runList file containing these runs, and overwrite lastRunNumber. + """ + try: + last_run = int(open(os.path.join(DATADIR, "lastRunNumber"), 'r+').read()) + except Exception: + last_run = 450000 + new_runs = get_runs_since(last_run) + + open(os.path.join(DATADIR, "runList"), "w").write("\n".join(map(str, new_runs))) + + if not new_runs: + print(strftime("%d/%m/%Y %H:%M:%S"), "Nothing to do") + return + + last_run = new_runs[-1] + open(os.path.join(DATADIR, "lastRunNumber"), "w").write(str(last_run)+"\n") + print(strftime("%d/%m/%Y %H:%M:%S"), "Wrote new runList. 
Last run is", last_run, new_runs) + +if __name__ == "__main__": + main() diff --git a/DataQuality/DCSCalculator2/share/run/RunDCSCalculator.sh b/DataQuality/DCSCalculator2/share/run/RunDCSCalculator.sh index bd7b6eed58c3..e59d07f0e5d6 100755 --- a/DataQuality/DCSCalculator2/share/run/RunDCSCalculator.sh +++ b/DataQuality/DCSCalculator2/share/run/RunDCSCalculator.sh @@ -1,75 +1,68 @@ -#!/usr/bin/env bash +#!/bin/bash -# Systems using the new calculator: -# MDT, Tile -LOG_PATH=/afs/cern.ch/user/a/atlasdqm/DQCalculators/DCSCalc/RunDCSCalc/logfiles +# This script runs as a cron job in order to process new runs with the DCSCalculator2 +# It will normally process the runs listed in ./runList which is filled by ./GetNextRunList.py +# If however, you wish to run the calculator manually on a set of runs (for fixes, etc.), +# you may provide the list of runs in a text file as argument to this script. +# The text file must have one run number per line -if [ -e "/afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc/runList" ] +prodPath=${DCSC_DATADIR:-/afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc} +basePath=$(dirname "$0") +scriptLogPath=${DCSC_SCRIPTLOGDIR:-$prodPath/logfiles} +logPath=${DCSC_RUNLOGDIR:-$basePath/logfiles} + +TMP=${TMPDIR:-/tmp} + +if [[ $# -ne 0 ]]; then + runList=$1 +else + runList=$prodPath/runList +fi + +logPath=$prodPath/logfiles + +if [ -e $runList ] then + echo "Using run list $runList" + echo "Getting COOL authentications." - export CORAL_AUTH_PATH=$HOME/private + export CORAL_AUTH_PATH=${CORAL_AUTH_PATH:-$HOME/private} - echo "Going to run directory" - #cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/DCSCalculator/i686-slc4-gcc34-opt - - #Get list of runs to loop over by sorting temporary run list and removing any duplicates. 
- #list=`cat /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc/runList | tr '\n' ' '` - list=`cat /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/RunDCSCalc/runList` + # Get list of runs to loop over from runList (any chance of duplicates?) + list=`cat $runList` for run in $list do - echo "-----------------------------------------------------> NEW RUN!" - cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/DCSCalculator/i686-slc4-gcc34-opt + echo "-----------------------------------------------------> NEW RUN!" echo "Run number = $run" - echo "Running.........." - - # Original DCSCalculator - #/afs/cern.ch/user/a/atlasdqm/DQCalculators/DCSCalc/RunDCSCalc/ExecuteDCSC1.sh $run - - # New DCSCalculator2 - /afs/cern.ch/user/a/atlasdqm/DQCalculators/DCSCalc/RunDCSCalc/ExecuteDCSC2.sh $run &> $LOG_PATH/dcsc2_$run + echo "Running calculator..." + + # DCSCalculator2 + $basePath/ExecuteDCSC2.sh $run &> $logPath/dcsc2_$run - # Append the recently processed run numbers to the end of a file which keeps track of the last 50 runs which were processed. - cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/logfiles - echo "$run" - echo "$run" >> processedRuns_DCSOFL.log - sort -u processedRuns_DCSOFL.log > finishedRuns - rm -f processedRuns_DCSOFL.log - lines=`wc -l finishedRuns | awk '{print $1}'` - if [ $lines -gt 50 ] - then - start=`expr $lines - 50` - sed "1,$start d" finishedRuns > processedRuns_DCSOFL.log - else - cp finishedRuns processedRuns_DCSOFL.log - fi - rm -f finishedRuns - + # Append the recently processed run numbers to the end of a file which + # keeps track of the last 200 runs which were processed. 
+ runsLog=$scriptLogPath/processedRuns_DCSOFL.log + echo "$run" >> $runsLog + sort -u $runsLog > $TMP/finishedRuns + rm -f $runsLog + lines=`wc -l $TMP/finishedRuns | awk '{print $1}'` + if [ $lines -gt 200 ] + then + start=`expr $lines - 200` + sed "1,$start d" $TMP/finishedRuns > $runsLog + else + cp $TMP/finishedRuns $runsLog + fi + rm -f $TMP/finishedRuns + chmod g+w $runsLog + done - # Append the recently processed run numbers to the end of a file which keeps track of the last 50 runs which were processed. - #cd /afs/cern.ch/user/a/atlasdqm/ws/DCSCalc/logfiles - #echo "$list" - #echo "$list" >> processedRuns_DCSOFL.log - #sort -u processedRuns_DCSOFL.log > finishedRuns - #rm -f processedRuns_DCSOFL.log - #lines=`wc -l finishedRuns | awk '{print $1}'` - #if [ $lines -gt 50 ] - #then - #start=`expr $lines - 50` - #sed "1,$start d" finishedRuns > processedRuns_DCSOFL.log - #else - #cp finishedRuns processedRuns_DCSOFL.log - #fi - #rm -f finishedRuns - else - echo "No new runs to process" + echo "runList file doesn't exist" fi -echo "Finished" - -#exit - +echo "Finished" \ No newline at end of file -- GitLab