Commit 749ade61 authored by Simon Mazenoux

Merge branch 'dev' into 'master'

Merge dev into master

See merge request !381
parents cb7f9558 966f101f
Pipeline #8328134 failed in 7 minutes and 17 seconds
Showing 175,700 additions and 175,334 deletions
......@@ -35,12 +35,7 @@ If nothing is returned, proceed with the installation of Kerberos.
sudo yum install krb5-workstation krb5-libs krb5-auth-dialog
```
If you have had to install Kerberos, or if Kerberos was already installed on your personal machine, you probably need to set the configuration file for it. To do so, we can copy the configuration file from lxplus (please make sure you have access to lxplus by checking the [CERN resources portal](https://resources.web.cern.ch/resources/Manage/Linux/Default.aspx)):
```console
sudo mv /etc/krb5.conf /etc/krb5-old.conf
sudo scp <your-cern-id>@lxplus7.cern.ch:/etc/krb5.conf /etc/krb5.conf
```
If you have had to install Kerberos, or if Kerberos was already installed on your personal machine, you probably need to set the configuration file for it. To do this, please refer to the documentation at <https://linux.web.cern.ch/docs/kerberos-access/#client-configuration-kerberos>.
Once this is done, you can create your Kerberos ticket using kinit:
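The actual kinit invocation is collapsed in this hunk; a typical call, assuming your CERN account lives in the default CERN.CH realm, looks like:
```console
kinit <your-cern-id>@CERN.CH
klist
```
`klist` prints the ticket cache so you can confirm that a valid ticket was obtained.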
......
......@@ -252,7 +252,7 @@ class QdsRbQuery(QdsQuery):
# ToDo: It would be nice to add this query to the library
pm_request = (
"http://pm-rest.cern.ch/v2/" # NOSONAR
"http://pm-rest.cern.ch/v2/"
+ "/pmdata?system=QPS&className="
+ metadata_qds["className"]
+ "&source="
......
......@@ -186,3 +186,8 @@ class AnalysisManager:
# from the context and depending on the data returns a True or False
return all(a.get_analysis_output() for a in self._registered_analysis.values())
def get_registered_analyses_identifiers(self) -> list[str]:
"""Returns the list of identifiers of the registered analysis objects."""
return list(self._registered_analysis.keys())
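For readers skimming the diff, here is a minimal self-contained sketch of what the new accessor does; the class below is a stand-in, not the real AnalysisManager:
```python
# Stand-in sketch: registered analyses live in a dict keyed by identifier,
# and the new accessor simply returns the insertion-ordered keys.
class _AnalysisManagerSketch:
    def __init__(self) -> None:
        self._registered_analysis: dict[str, object] = {}

    def get_registered_analyses_identifiers(self) -> list[str]:
        """Returns the list of identifiers of the registered analysis objects."""
        return list(self._registered_analysis.keys())


manager = _AnalysisManagerSketch()
manager._registered_analysis["id1"] = object()
manager._registered_analysis["id2"] = object()
assert manager.get_registered_analyses_identifiers() == ["id1", "id2"]
```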
......@@ -22,7 +22,8 @@ from lhcsmapi.utils import unpack_single_element, vectorize
CMW = "CMW"
WINCCOA = "WINCCOA"
_ONE_DAY_NS = 86400000000000
_PM_ENDPOINT = "http://pm-rest.cern.ch/v2/" # NOSONAR
_ONE_YEAR_NS = 365 * _ONE_DAY_NS
_PM_ENDPOINT = "http://pm-rest.cern.ch/v2/"
_RD1_CIRCUITS = ["RD1.L2", "RD1.L8", "RD1.R2", "RD1.R8"]
logger = logging.getLogger()
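The new default can be sanity-checked by hand; this sketch reproduces the arithmetic behind `_ONE_YEAR_NS` and the value that shows up in the updated tests further down:
```python
# Reproduces the constant arithmetic from the hunk above.
_ONE_DAY_NS = 86_400_000_000_000            # 86 400 seconds in nanoseconds
_ONE_YEAR_NS = 365 * _ONE_DAY_NS

assert _ONE_YEAR_NS == 31_536_000_000_000_000      # value asserted in the updated tests
assert 30 * _ONE_DAY_NS == 2_592_000_000_000_000   # the previous one-month default
```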
......@@ -66,7 +67,7 @@ def query_nxcals_by_variables(
spark: SparkSession,
query_parameters: resolver.VariableQueryParams,
include_latest_data_point_prior_to_start: bool = False,
max_latest_data_point_search_period: int = 2_592_000_000_000_000, # 1 month
max_latest_data_point_search_period: int = _ONE_YEAR_NS, # inherited from CALS
) -> pd.DataFrame | list[pd.DataFrame]:
"""Queries NXCALS by variables and returning pandas dataframes (one for each variable)
Deprecated: Use query_cmw_by_variables / query_winccoa_by_variables instead
......@@ -231,7 +232,7 @@ def query_cmw_by_variables(
duration: int | tuple[int, str],
variables: Sequence[str],
include_latest_data_point_prior_to_start: bool = ...,
max_latest_data_point_search_period: int = ..., # 1 month
max_latest_data_point_search_period: int = ...,
) -> list[pd.DataFrame]: ...
......@@ -242,7 +243,7 @@ def query_cmw_by_variables(
duration: int | tuple[int, str],
variables: str | Sequence[str],
include_latest_data_point_prior_to_start: bool = False,
max_latest_data_point_search_period: int = 2_592_000_000_000_000, # 1 month
max_latest_data_point_search_period: int = _ONE_YEAR_NS, # inherited from CALS
) -> pd.DataFrame | list[pd.DataFrame]:
"""Method querying NXCALS by variables in CMW system and returning one pandas dataframe per variable
......@@ -302,7 +303,7 @@ def query_winccoa_by_variables(
duration: int | tuple[int, str],
variables: str,
include_latest_data_point_prior_to_start: bool = ...,
max_latest_data_point_search_period: int = ..., # 1 month
max_latest_data_point_search_period: int = ...,
) -> pd.DataFrame: ...
......@@ -313,7 +314,7 @@ def query_winccoa_by_variables(
duration: int | tuple[int, str],
variables: list[str],
include_latest_data_point_prior_to_start: bool = ...,
max_latest_data_point_search_period: int = ..., # 1 month
max_latest_data_point_search_period: int = ...,
) -> list[pd.DataFrame]: ...
......@@ -324,7 +325,7 @@ def query_winccoa_by_variables(
duration: int | tuple[int, str],
variables: str | Sequence[str],
include_latest_data_point_prior_to_start: bool = False,
max_latest_data_point_search_period: int = 2_592_000_000_000_000, # 1 month
max_latest_data_point_search_period: int = _ONE_YEAR_NS, # inherited from CALS
) -> pd.DataFrame | list[pd.DataFrame]:
"""Method querying NXCALS by variables in WINCCOA system and returning one pandas dataframe per variable
......@@ -716,7 +717,7 @@ def query_pm_data_headers(
duration: duration in nanoseconds or Tuple of number and unit i.e. 250000000000 or (250, 's')
Returns:
Pandas dataframe with event data retrieved from PM
Pandas dataframe with event data retrieved from PM sorted by timestamp and source
Examples:
To query PM events for FGC:
......@@ -777,7 +778,7 @@ def query_pm_data_headers(
if remainder > 0:
start_time_temp = timestamp + _ONE_DAY_NS * quotient
fetch_events(start_time_temp, remainder)
return result.reset_index(drop=True)
return result.sort_values(by=["timestamp", "source"]).reset_index(drop=True)
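The added `sort_values` call makes the row order of the returned headers deterministic; a small pandas sketch of the same idiom, using sample data rather than real PM headers:
```python
import pandas as pd

# Two events share a timestamp, so "source" breaks the tie and the frame
# comes back in the same order regardless of how the chunks were fetched.
result = pd.DataFrame(
    {"source": ["C34L8", "A34L8"], "timestamp": [1426258747672000000, 1426258747672000000]}
)
result = result.sort_values(by=["timestamp", "source"]).reset_index(drop=True)
assert list(result["source"]) == ["A34L8", "C34L8"]
```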
def _query_nxcals_by_variables(
......@@ -1048,10 +1049,11 @@ def query_hwc_powering_test_parameters(
if duration < 0:
raise ValueError(f"Duration cannot be negative, got {duration}")
pc_names = signal_metadata.get_fgc_names(circuit_name, start_time)
for pc_name in pc_names:
old_pc_names = signal_metadata.get_fgc_names(circuit_name, start_time)
new_pc_names = signal_metadata.get_fgc_names(circuit_name, datetime.now())
for old_pc_name, new_pc_name in zip(old_pc_names, new_pc_names):
query = (
f"http://mpe-systems-pro-gpn:60242/v1/systemInformation/powerConverters/{pc_name}/" # NOSONAR (http is ok)
f"http://mpe-systems-pro-gpn:60242/v1/systemInformation/powerConverters/{new_pc_name}/"
f"params?startTimeInNanos={start_time}&endTimeInNanos={start_time + duration}"
)
logger.debug(f"Querying HWC powering test parameters using the following query: {query}")
......@@ -1059,10 +1061,10 @@ def query_hwc_powering_test_parameters(
if response.status_code == 200:
d = json.loads(response.text)
if d:
res[pc_name] = d
res[old_pc_name] = d
else:
logger.warning(
f"Querying HWC powering test parameters returned no data for the the pc {pc_name} "
f"Querying HWC powering test parameters returned no data for the the pc {new_pc_name} "
f"(query: {query})"
)
else:
......
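The rewritten loop queries the REST service with the FGC name that is valid today while keying the result by the name that was valid at `start_time`; a schematic illustration with placeholder names (not real metadata):
```python
# Placeholder names: pretend the FGC was renamed between start_time and today.
old_pc_names = ["RP.EXAMPLE.OLD"]   # hypothetical name valid at start_time
new_pc_names = ["RP.EXAMPLE.NEW"]   # hypothetical name valid now

res: dict[str, dict] = {}
for old_pc_name, new_pc_name in zip(old_pc_names, new_pc_names):
    # The HTTP call uses new_pc_name; here a canned payload stands in for it.
    payload = {"I_PNO": {"unit": "A", "value": 4050.0}}
    res[old_pc_name] = payload      # result keyed by the historical name
```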
This diff is collapsed.
......@@ -68,6 +68,20 @@ class CircuitType(str, enum.Enum):
return str.__str__(self)
class GenericCircuitType(str, enum.Enum):
A60 = "60A"
A600 = "600A"
A80_120 = "80-120A"
IPD = "IPD"
IPQ = "IPQ"
IT = "IT"
RB = "RB"
RQ = "RQ"
def __str__(self) -> str:
return str.__str__(self)
_cache: Dict[str, Dict[str, Union[Dict, str]]] = {}
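Because `GenericCircuitType` mixes in `str` (like `CircuitType` above it), each member compares equal to its plain string value and `str()` returns that value, which is what the tests added later in this merge request rely on; a stand-in sketch:
```python
import enum

class _GenericCircuitTypeSketch(str, enum.Enum):   # stand-in mirroring the new enum
    A60 = "60A"
    RB = "RB"

    def __str__(self) -> str:
        return str.__str__(self)


assert _GenericCircuitTypeSketch.A60 == "60A"      # str mixin: compares equal to the value
assert str(_GenericCircuitTypeSketch.RB) == "RB"   # __str__ returns the bare value
```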
......@@ -158,7 +172,7 @@ def get_circuit_types() -> List[str]:
def get_circuit_names(
circuit_type: Union[str, List[str]], timestamp: Optional[Union[int, str, datetime]] = None
circuit_type: Optional[Union[str, List[str]]] = None, timestamp: Optional[Union[int, str, datetime]] = None
) -> List[str]:
"""
Args:
......@@ -168,6 +182,9 @@ def get_circuit_names(
Returns:
A list of circuit names for given circuit type(s) valid at the given point in time.
"""
if circuit_type is None:
circuit_type = get_circuit_types()
circuit_types = utils.vectorize(circuit_type)
circuit_names: List[str] = []
for circuit_type_ in circuit_types:
......@@ -491,6 +508,29 @@ def get_circuit_type_for_circuit_name(circuit_name: str) -> str:
raise KeyError(f"Circuit name {circuit_name} does not map to internal metadata.")
def get_generic_circuit_type_for_circuit_name(circuit_name: str) -> GenericCircuitType:
"""Returns the generic circuit type for a given circuit name, and raises an error if the circuit name is not found."""
if circuit_name not in get_circuit_names():
raise ValueError(f"Circuit name {circuit_name} not present in the metadata.")
circuit_type_mapping = {
is_60a: GenericCircuitType.A60,
is_80_120a: GenericCircuitType.A80_120,
is_600a: GenericCircuitType.A600,
is_inner_triplet: GenericCircuitType.IT,
is_ipd: GenericCircuitType.IPD,
is_ipq: GenericCircuitType.IPQ,
is_main_dipole: GenericCircuitType.RB,
is_main_quadrupole: GenericCircuitType.RQ,
}
for check_func, circuit_type in circuit_type_mapping.items():
if check_func(circuit_name):
return circuit_type
raise NotImplementedError(f"Circuit name '{circuit_name}' does not map to a generic circuit type.")
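The dispatch table above evaluates the `is_*` predicates in declaration order and returns on the first match; a short usage sketch based on the circuit names exercised by the new tests at the bottom of this merge request:
```python
from lhcsmapi.metadata import signal_metadata
from lhcsmapi.metadata.signal_metadata import GenericCircuitType

# Circuit names taken from the new parametrized test.
assert signal_metadata.get_generic_circuit_type_for_circuit_name("RB.A12") == GenericCircuitType.RB
assert signal_metadata.get_generic_circuit_type_for_circuit_name("RQX.R1") == GenericCircuitType.IT
# Unknown names fail fast, before any predicate runs:
# signal_metadata.get_generic_circuit_type_for_circuit_name("ABC")  -> ValueError
```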
def get_family_name_for_600A(circuit_name: str) -> str:
"""Parses name of the 600A circuit to get its family name:
- RCD-RCO for RCD, RCO circuits
......@@ -611,7 +651,7 @@ def _get_rq_magnet_names_without_u_diode(circuit_name: str) -> List[str]:
mask_magnets_with_nan = (magnet_to_qps_crate_df["Circuit"] == circuit_name) & (
magnet_to_qps_crate_df["Crate U_DIODE_RQx"].isna()
)
return magnet_to_qps_crate_df[mask_magnets_with_nan]["Magnet"].values
return magnet_to_qps_crate_df[mask_magnets_with_nan]["Magnet"].values.tolist()
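The added `.tolist()` converts the NumPy array returned by `.values` into a plain `list[str]`, matching the function's `List[str]` annotation; a minimal pandas illustration with sample magnet names:
```python
import pandas as pd

df = pd.DataFrame({"Magnet": ["MAGNET.1", "MAGNET.2"]})   # sample values, not real metadata

as_array = df["Magnet"].values           # numpy.ndarray
as_list = df["Magnet"].values.tolist()   # plain list of str, matching List[str]

assert isinstance(as_list, list) and all(isinstance(m, str) for m in as_list)
```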
def get_signal_name(
......@@ -735,14 +775,11 @@ def has_ee(circuit_name: str) -> bool:
Raises:
ValueError: If the circuit name is not present in the metadata
"""
try:
circuit_type = get_circuit_type_for_circuit_name(circuit_name)
except KeyError as e:
raise ValueError(f"Circuit name {circuit_name} not present in the metadata.") from e
circuit_type = get_generic_circuit_type_for_circuit_name(circuit_name)
if circuit_type in (CircuitType.RB, CircuitType.RQ):
if circuit_type in (GenericCircuitType.RB, GenericCircuitType.RQ):
return True
if circuit_type == CircuitType.A600:
if circuit_type == GenericCircuitType.A600:
with open(pathlib.Path(__file__).parent / "circuit" / "circuit_types_location_acctesting.csv") as f:
for row in csv.DictReader(f):
if row["CIRCUIT_NAME"] == circuit_name:
......@@ -859,11 +896,9 @@ def _get_renaming_date_of_fgc_pm_class_names(circuit_name: str) -> int:
Raises:
ValueError: If the circuit name is not present in the metadata.
"""
try:
circuit_type = get_circuit_type_for_circuit_name(circuit_name)
except KeyError as e:
raise ValueError(f"Circuit name {circuit_name} not present in the metadata.") from e
if circuit_type == CircuitType.A60:
circuit_type = get_generic_circuit_type_for_circuit_name(circuit_name)
if circuit_type == GenericCircuitType.A60:
return 1483228800000000000 # 2017-01-01 00:00:00
else:
return 1514764800000000000 # 2018-01-01 00:00:00
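The two hard-coded values are UTC epoch timestamps in nanoseconds; they can be cross-checked against the dates in the comments like this:
```python
from datetime import datetime, timezone

# Cross-check of the nanosecond epoch constants used above.
assert datetime.fromtimestamp(1483228800000000000 / 1e9, tz=timezone.utc) == datetime(2017, 1, 1, tzinfo=timezone.utc)
assert datetime.fromtimestamp(1514764800000000000 / 1e9, tz=timezone.utc) == datetime(2018, 1, 1, tzinfo=timezone.utc)
```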
......@@ -1081,6 +1116,16 @@ def get_hwc_summary(path: str | pathlib.Path | None = None) -> pd.DataFrame:
return pd.read_csv(path, low_memory=False).sort_values("executionStartTime").reset_index(drop=True)
def get_cells_for_circuit(circuit_name: str) -> set[str]:
"""Returns the cell names for a given RB or RQ circuit name."""
circuit_type = get_generic_circuit_type_for_circuit_name(circuit_name)
if circuit_type not in (GenericCircuitType.RB, GenericCircuitType.RQ):
raise ValueError(f"{circuit_name} is not a main dipole or main quadrupole circuit.")
df = pd.read_csv(pathlib.Path(__file__).parent / "magnet" / f"{circuit_type}_MagnetCellQpscrateSector.csv")
return set(df[df["Circuit"] == circuit_name]["Cell"].tolist())
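A short usage sketch of the new helper; the cell names come from the test data added at the end of this merge request:
```python
from lhcsmapi.metadata import signal_metadata

cells = signal_metadata.get_cells_for_circuit("RB.A12")
assert "A8R1" in cells and "A8L2" in cells   # cells listed in the new RB.A12 test case
# Circuits that are neither RB nor RQ are rejected:
# signal_metadata.get_cells_for_circuit("RD1.L2")  -> ValueError
```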
def _update_wildcards_in_signal_name_per_circuit_name(
circuit_type: str, circuit_name: str, database: str, system: str, signal: str, timestamp
) -> Union[str, List[str]]:
......
......@@ -63,7 +63,7 @@ class PmDbRequest(object):
"""
if pm_rest_api_path is None:
pm_rest_api_path = "http://pm-rest.cern.ch/v2/" # NOSONAR
pm_rest_api_path = "http://pm-rest.cern.ch/v2/"
# check if keys are present
key_difference = kwargs.keys() - PmDbRequest._keyword_to_value_type.keys()
......
......@@ -29,6 +29,7 @@ dependencies = [
"nxcals>=1.0.29",
"pandas>=1.5",
"plotly",
"pyarrow",
"pyspark",
"requests",
"scikit-learn",
......
# SONAR_TOKEN and SONAR_HOST_URL are defined as CI variables
# Required metadata
sonar.projectKey=lhc-sm-api
sonar.projectName=lhc-sm-api
# Comma-separated paths to directories with sources (required)
sonar.sources=lhcsmapi
sonar.python.coverage.reportPaths=cov.xml
# Language
sonar.language=py
# Encoding of the source files
sonar.sourceEncoding=UTF-8
# Failing if the quality gate is not passed
sonar.qualitygate.wait=true
......@@ -20,7 +20,7 @@ def test_query_winccoa_by_variables_no_data(caplog: pytest.LogCaptureFixture):
assert caplog.messages == [
"Querying NXCALS returned no results for the variable RQ5.L8:CMD_ABORT_PIC: "
"system: WINCCOA, start_time: 0, duration: 100, include_latest_data_point_prior_to_start: False, "
"max_latest_data_point_search_period: 2,592,000,000,000,000"
"max_latest_data_point_search_period: 31,536,000,000,000,000"
]
......@@ -253,6 +253,84 @@ def test_query_hwc_powering_test_parameters_valid_case(start_time, duration):
}
def test_query_hwc_powering_test_parameters_renamed_fgc():
# arrange
circuit_name = "RQ6.R1"
start_time = 1492706598921000000
end_time = 1492707382516000000
# act
result = query.query_hwc_powering_test_parameters(circuit_name, start_time, end_time - start_time)
# assert
assert result == {
"RPHGB.RR17.RQ6.R1B1": {
"ACC_PNO": {"unit": "A/s²", "value": 2.0},
"DIDT_ASYN": {"unit": "A/s", "value": 4.0},
"DIDT_PNO": {"unit": "A/s", "value": 12.931},
"I_5TEV": {"unit": "A", "value": 2300.0},
"I_DELTA": {"unit": "A", "value": 50.0},
"I_EARTH_MAX": {"unit": "A", "value": 0.01},
"I_EARTH_PCC_MAX": {"unit": "A", "value": 0.01},
"I_ERR_MAX": {"unit": "A", "value": 0.05},
"I_ERR_PCC_MAX": {"unit": "A", "value": 0.04},
"I_HARDWARE": {"unit": "A", "value": 4650.0},
"I_INJECTION": {"unit": "A", "value": 206.0},
"I_INTERM_1": {"unit": "A", "value": 1300.0},
"I_INTERM_1_MID": {"unit": "A", "value": 750.0},
"I_INTERM_2": {"unit": "A", "value": 2300.0},
"I_INTERM_3": {"unit": "A", "value": 1900.0},
"I_INTERM_4": {"unit": "A", "value": 2300.0},
"I_PCC": {"unit": "A", "value": 150.0},
"I_PHASE_1": {"unit": "A", "value": 700.0},
"I_PNO": {"unit": "A", "value": 4050.0},
"I_PNO_MID": {"unit": "A", "value": 1250.0},
"I_PNO_TRAINING": {"unit": "A", "value": 4100.0},
"I_SM_INT_1": {"unit": "A", "value": 1200.0},
"I_SM_INT_2": {"unit": "A", "value": 1700.0},
"L_TOT": {"unit": "H", "value": 0.021},
"R_TOT_MEASURED": {"unit": "Ω", "value": 0.000264},
"TIME_PCC": {"unit": "s", "value": 30.0},
"TIME_PNO_LEADS": {"unit": "s", "value": 3600.0},
"TIME_TOP": {"unit": "s", "value": 300.0},
"TIME_ZERO": {"unit": "s", "value": 60.0},
"U_IND": {"unit": "V", "value": 0.271},
},
"RPHGB.RR17.RQ6.R1B2": {
"ACC_PNO": {"unit": "A/s²", "value": 2.0},
"DIDT_ASYN": {"unit": "A/s", "value": 4.0},
"DIDT_PNO": {"unit": "A/s", "value": 12.931},
"I_5TEV": {"unit": "A", "value": 2300.0},
"I_DELTA": {"unit": "A", "value": 50.0},
"I_EARTH_MAX": {"unit": "A", "value": 0.01},
"I_EARTH_PCC_MAX": {"unit": "A", "value": 0.01},
"I_ERR_MAX": {"unit": "A", "value": 0.05},
"I_ERR_PCC_MAX": {"unit": "A", "value": 0.04},
"I_HARDWARE": {"unit": "A", "value": 4650.0},
"I_INJECTION": {"unit": "A", "value": 206.0},
"I_INTERM_1": {"unit": "A", "value": 1300.0},
"I_INTERM_1_MID": {"unit": "A", "value": 750.0},
"I_INTERM_2": {"unit": "A", "value": 2300.0},
"I_INTERM_3": {"unit": "A", "value": 1900.0},
"I_INTERM_4": {"unit": "A", "value": 2300.0},
"I_PCC": {"unit": "A", "value": 150.0},
"I_PHASE_1": {"unit": "A", "value": 700.0},
"I_PNO": {"unit": "A", "value": 4050.0},
"I_PNO_MID": {"unit": "A", "value": 1250.0},
"I_PNO_TRAINING": {"unit": "A", "value": 4100.0},
"I_SM_INT_1": {"unit": "A", "value": 1200.0},
"I_SM_INT_2": {"unit": "A", "value": 1700.0},
"L_TOT": {"unit": "H", "value": 0.021},
"R_TOT_MEASURED": {"unit": "Ω", "value": 0.000237},
"TIME_PCC": {"unit": "s", "value": 30.0},
"TIME_PNO_LEADS": {"unit": "s", "value": 3600.0},
"TIME_TOP": {"unit": "s", "value": 300.0},
"TIME_ZERO": {"unit": "s", "value": 60.0},
"U_IND": {"unit": "V", "value": 0.271},
},
}, result
def test_query_hwc_powering_test_parameters_invalid_query(caplog: pytest.LogCaptureFixture):
# arrange
circuit_name = "RD4.R4"
......
......@@ -93,6 +93,7 @@ class AnalysisManagerTest(unittest.TestCase):
filtered_logs["id1"], "Starting analysis\nUser input received\nError encountered\nAnalysis complete"
)
self.assertEqual(filtered_logs["id2"], "Recalculating")
self.assertEqual(self.analysis_manager.get_registered_analyses_identifiers(), ["id1", "id2"])
def test_get_filtered_logs_multiple_filters_result_only_one_analysis(self):
# act
......@@ -100,6 +101,7 @@ class AnalysisManagerTest(unittest.TestCase):
# assert
self.assertEqual(filtered_logs["id2"], "Starting analysis")
self.assertEqual(filtered_logs["id1"], "")
self.assertEqual(self.analysis_manager.get_registered_analyses_identifiers(), ["id1", "id2"])
def test_get_filtered_logs_multiple_filters_no_results(self):
# act
......@@ -107,3 +109,4 @@ class AnalysisManagerTest(unittest.TestCase):
# assert
self.assertEqual(filtered_logs["id1"], "")
self.assertEqual(filtered_logs["id2"], "")
self.assertEqual(self.analysis_manager.get_registered_analyses_identifiers(), ["id1", "id2"])
......@@ -317,8 +317,8 @@ def test_query_pm_data_headers(mock_get):
10: "B15R4",
11: "A15R4",
12: "B34L8",
13: "C34L8",
14: "A34L8",
13: "A34L8",
14: "C34L8",
15: "C33L8",
16: "C34R7",
17: "A34R7",
......@@ -344,8 +344,8 @@ def test_query_pm_data_headers(mock_get):
10: 1426251337747000000,
11: 1426251388741000000,
12: 1426258716281000000,
13: 1426258747672000000,
14: 1426258747370000000,
13: 1426258747370000000,
14: 1426258747672000000,
15: 1426258835955000000,
16: 1426258853947000000,
17: 1426258854113000000,
......@@ -408,8 +408,8 @@ def test_query_builder_pm_duration_in_seconds_event_query(mock_get):
10: "B15R4",
11: "A15R4",
12: "B34L8",
13: "C34L8",
14: "A34L8",
13: "A34L8",
14: "C34L8",
15: "C33L8",
16: "C34R7",
17: "A34R7",
......@@ -435,8 +435,8 @@ def test_query_builder_pm_duration_in_seconds_event_query(mock_get):
10: 1426251337747000000,
11: 1426251388741000000,
12: 1426258716281000000,
13: 1426258747672000000,
14: 1426258747370000000,
13: 1426258747370000000,
14: 1426258747672000000,
15: 1426258835955000000,
16: 1426258853947000000,
17: 1426258854113000000,
......@@ -542,8 +542,8 @@ def test_query_pm_events_with_parameters(mock_get):
10: "B15R4",
11: "A15R4",
12: "B34L8",
13: "C34L8",
14: "A34L8",
13: "A34L8",
14: "C34L8",
15: "C33L8",
16: "C34R7",
17: "A34R7",
......@@ -569,8 +569,8 @@ def test_query_pm_events_with_parameters(mock_get):
10: 1426251337747000000,
11: 1426251388741000000,
12: 1426258716281000000,
13: 1426258747672000000,
14: 1426258747370000000,
13: 1426258747370000000,
14: 1426258747672000000,
15: 1426258835955000000,
16: 1426258853947000000,
17: 1426258854113000000,
......
......@@ -4,6 +4,7 @@ import warnings
import pytest
from lhcsmapi.metadata import signal_metadata
from lhcsmapi.metadata.signal_metadata import GenericCircuitType
def test_get_metadata():
......@@ -1437,6 +1438,32 @@ def test_get_circuit_type_for_circuit_name_error():
assert "'Circuit name ABC does not map to internal metadata.'" == str(exception.value)
@pytest.mark.parametrize(
("circuit_name", "expected_circuit_type"),
[
("RCBH11.L1B2", GenericCircuitType.A60),
("RCBCH10.L1B1", GenericCircuitType.A80_120),
("RCBXH1.L1", GenericCircuitType.A600),
("RQX.R1", GenericCircuitType.IT),
("RD1.L2", GenericCircuitType.IPD),
("RD2.L1", GenericCircuitType.IPD),
("RQ4.L2", GenericCircuitType.IPQ),
("RQ4.L1", GenericCircuitType.IPQ),
("RQ5.L1", GenericCircuitType.IPQ),
("RB.A12", GenericCircuitType.RB),
("RQD.A12", GenericCircuitType.RQ),
],
)
def test_get_generic_circuit_type_for_circuit_name(circuit_name: str, expected_circuit_type: GenericCircuitType):
assert signal_metadata.get_generic_circuit_type_for_circuit_name(circuit_name) == expected_circuit_type
assert signal_metadata.get_generic_circuit_type_for_circuit_name(circuit_name) == expected_circuit_type.value
def test_get_generic_circuit_type_for_circuit_name_invalid_circuit():
with pytest.raises(ValueError, match=re.escape("Circuit name ABC not present in the metadata.")):
signal_metadata.get_generic_circuit_type_for_circuit_name("ABC")
_NXCALS_SYSTEMS = [("RB", "QDS", "U_QS0", "CMW"), ("RB", ["LEADS_EVEN", "LEADS_ODD"], ["U_RES", "U_HTS"], "CMW")]
......@@ -1535,11 +1562,9 @@ def test_has_ee_valid_circuit_name():
def test_has_ee_invalid_circuit_name():
with pytest.raises(ValueError) as e:
with pytest.raises(ValueError, match=re.escape("Circuit name ABC not present in the metadata.")):
signal_metadata.has_ee("ABC")
assert str(e.value) == "Circuit name ABC not present in the metadata."
def test_has_crowbar_valid_circuit_name():
assert signal_metadata.has_crowbar("RQS.L1B2")
......@@ -1575,15 +1600,15 @@ def test_get_fgc_pm_class_name(circuit_name: str, timestamp: str, expected: str)
def test_get_fgc_pm_class_name_invalid_origin():
with pytest.raises(ValueError) as exception:
with pytest.raises(
ValueError, match=re.escape("The origin of the FGC dump could be only 'self' or 'ext' - provided origin.")
):
signal_metadata.get_fgc_pm_class_name("RCBH11.L1B2", "2016-07-01 00:00:00", "origin")
assert str(exception.value) == "The origin of the FGC dump could be only 'self' or 'ext' - provided origin."
def test_get_fgc_pm_class_name_invalid_circuit_name():
with pytest.raises(ValueError) as exception:
with pytest.raises(ValueError, match=re.escape("Circuit name ABC not present in the metadata.")):
signal_metadata.get_fgc_pm_class_name("ABC", "2016-07-01 00:00:00", "self")
assert str(exception.value) == "Circuit name ABC not present in the metadata."
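Several tests in this file are migrated from capturing the exception and comparing `str(e.value)` to pytest's `match=` argument; since `match` is treated as a regular expression, `re.escape` keeps the literal dots in the message from matching arbitrary characters. A self-contained sketch of the idiom:
```python
import re
import pytest

def _raise() -> None:
    raise ValueError("Circuit name ABC not present in the metadata.")

# match= is searched as a regex against str(exception), hence re.escape().
with pytest.raises(ValueError, match=re.escape("Circuit name ABC not present in the metadata.")):
    _raise()
```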
def test_is_ipq():
......@@ -1704,4 +1729,230 @@ def test_get_hwc_summary():
"HWC_EDSL_AUTOMATED",
"MCS_COLLIMATION",
"HWC_SIGMON_AUTOMATED",
"automatedAnalysisResult",
]
@pytest.mark.parametrize(
("circuit_name", "expected"),
[
(
"RB.A12",
{
"A8R1",
"B8R1",
"A9R1",
"B9R1",
"A10R1",
"B10R1",
"A11R1",
"B11R1",
"A12R1",
"B12R1",
"C12R1",
"A13R1",
"B13R1",
"C13R1",
"A14R1",
"B14R1",
"C14R1",
"A15R1",
"B15R1",
"C15R1",
"A16R1",
"B16R1",
"C16R1",
"A17R1",
"B17R1",
"C17R1",
"A18R1",
"B18R1",
"C18R1",
"A19R1",
"B19R1",
"C19R1",
"A20R1",
"B20R1",
"C20R1",
"A21R1",
"B21R1",
"C21R1",
"A22R1",
"B22R1",
"C22R1",
"A23R1",
"B23R1",
"C23R1",
"A24R1",
"B24R1",
"C24R1",
"A25R1",
"B25R1",
"C25R1",
"A26R1",
"B26R1",
"C26R1",
"A27R1",
"B27R1",
"C27R1",
"A28R1",
"B28R1",
"C28R1",
"A29R1",
"B29R1",
"C29R1",
"A30R1",
"B30R1",
"C30R1",
"A31R1",
"B31R1",
"C31R1",
"A32R1",
"B32R1",
"C32R1",
"A33R1",
"B33R1",
"C33R1",
"A34R1",
"B34R1",
"C34R1",
"C34L2",
"B34L2",
"A34L2",
"C33L2",
"B33L2",
"A33L2",
"C32L2",
"B32L2",
"A32L2",
"C31L2",
"B31L2",
"A31L2",
"C30L2",
"B30L2",
"A30L2",
"C29L2",
"B29L2",
"A29L2",
"C28L2",
"B28L2",
"A28L2",
"C27L2",
"B27L2",
"A27L2",
"C26L2",
"B26L2",
"A26L2",
"C25L2",
"B25L2",
"A25L2",
"C24L2",
"B24L2",
"A24L2",
"C23L2",
"B23L2",
"A23L2",
"C22L2",
"B22L2",
"A22L2",
"C21L2",
"B21L2",
"A21L2",
"C20L2",
"B20L2",
"A20L2",
"C19L2",
"B19L2",
"A19L2",
"C18L2",
"B18L2",
"A18L2",
"C17L2",
"B17L2",
"A17L2",
"C16L2",
"B16L2",
"A16L2",
"C15L2",
"B15L2",
"A15L2",
"C14L2",
"B14L2",
"A14L2",
"C13L2",
"B13L2",
"A13L2",
"C12L2",
"B12L2",
"A12L2",
"B11L2",
"A11L2",
"B10L2",
"A10L2",
"B9L2",
"A9L2",
"B8L2",
"A8L2",
},
),
(
"RQD.A12",
{
"22L2",
"27L2",
"13L2",
"13R1",
"25L2",
"24R1",
"19L2",
"19R1",
"29R1",
"33L2",
"16L2",
"12L2",
"14L2",
"14R1",
"20R1",
"11R1",
"28R1",
"27R1",
"11L2",
"31L2",
"15R1",
"29L2",
"30R1",
"20L2",
"16R1",
"21R1",
"23L2",
"26L2",
"12R1",
"18R1",
"31R1",
"34R1",
"18L2",
"25R1",
"30L2",
"24L2",
"21L2",
"22R1",
"26R1",
"33R1",
"15L2",
"28L2",
"32L2",
"32R1",
"17L2",
"17R1",
"23R1",
},
),
],
)
def test_get_cells_for_circuit(circuit_name: str, expected: set[str]):
assert signal_metadata.get_cells_for_circuit(circuit_name) == expected
def test_get_cells_for_circuit_invalid_circuit_type():
with pytest.raises(ValueError):
signal_metadata.get_cells_for_circuit("RD1.L2")
......@@ -596,7 +596,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RQD.A12_U_RES.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 2592000000000000
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 31536000000000000
)
def test_metadata_nxcals_feature_query_rqfa12_u_res(self):
......@@ -611,7 +611,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RQF.A12_U_RES.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 2592000000000000
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 31536000000000000
)
def test_metadata_nxcals_feature_query_rqda12_rqfa12_u_res(self):
......@@ -626,7 +626,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RQD.A12_RQF.A12_U_RES.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 2592000000000000
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 31536000000000000
)
def test_metadata_nxcals_feature_query_rb_u_res(self):
......@@ -641,7 +641,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RB_U_RES.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 2592000000000000
"spark", "CMW", 1544569200000000000, 86400000000000, expected, False, 31536000000000000
)
def test_metadata_nxcals_feature_query_rb_u_hds(self):
......@@ -656,7 +656,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RB_U_HDS.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1634542889913000000, 60000000000, expected, False, 2592000000000000
"spark", "CMW", 1634542889913000000, 60000000000, expected, False, 31536000000000000
)
def test_metadata_nxcals_feature_query_rb_u_diode_rb(self):
......@@ -671,7 +671,7 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RB_U_DIODE.csv")["signal"].values)
query.assert_called_once_with(
"spark", "CMW", 1634542889913000000, 60000000000, expected, False, 2592000000000000
"spark", "CMW", 1634542889913000000, 60000000000, expected, False, 31536000000000000
)
def test_query_parameters_nxcals_feature_query_rqd_u_res(self):
......@@ -686,5 +686,5 @@ class TestQueryBuilder(unittest.TestCase):
expected = list(read_csv("query_builder/nxcals_fq/RQ_U_RES.csv")["signal"].values)
query.assert_called_once_with(
"spark", "WINCCOA", 1493054985679000000, 1289130000000, expected, False, 2592000000000000
"spark", "WINCCOA", 1493054985679000000, 1289130000000, expected, False, 31536000000000000
)