diff --git a/scripts/configdb_stripsRunkeys.py b/scripts/configdb_stripsRunkeys.py
index 93fb73a09b76bab0f734d1a142593adf157714a9..aea71648a13e1972c7fecedb70ce56f49862dcde 100644
--- a/scripts/configdb_stripsRunkeys.py
+++ b/scripts/configdb_stripsRunkeys.py
@@ -37,6 +37,22 @@ from pprint import pprint
 
 DEBUG_DUPLICATE_PAYLOADS = False
 
+def node_repr(node, depth=1, indent=0):
+    if 'reuse_id' in node:
+        return f'{" "*indent}reuse_id={node["reuse_id"]}\n'
+
+    string = f'{" "*indent}type={node["type"]}\n'
+    string += f'{" "*indent}id={node.get("id")}\n'
+    string += f'{" "*indent}name={node.get("name")}\n'
+    string += f'{" "*indent}payloads={json.dumps(node["payloads"], indent=2, sort_keys=True)}\n'
+
+    if depth>0:
+        string += f'{" "*indent}children:\n'
+        for ch in node['children']:
+            string += node_repr(ch, depth-1, indent+2)
+
+    return string
+
 def get_one(a_list: list, assert_one=True):
     if assert_one and len(a_list) != 1:
         raise AssertionError(f'Got a list of len != 1: {a_list}')
@@ -174,12 +190,10 @@ def get_nested_field(TYPE_W_FIELDS, node, name, default=None):
 def set_nested_field(TYPE_W_FIELDS, node, **data_fields):
     # dict() .get('key'[, defaultvalue or None])
     if len(get_node_payloads(node, TYPE_W_FIELDS)) == 0:
-        payload_data = {}
-        node.add_payload(TYPE_W_FIELDS, payload_data, meta=True)
+        node.add_payload(TYPE_W_FIELDS, {}, meta=True)
 
-    else:
-        payload_data = get_node_payload_data(node, TYPE_W_FIELDS)
-        assert isinstance(payload_data, dict), f"Something is wrong: '{TYPE_W_FIELDS}' payload must be meta i.e. dict"
+    payload_data = get_node_payload_data(node, TYPE_W_FIELDS)
+    assert isinstance(payload_data, dict), f"Something is wrong: '{TYPE_W_FIELDS}' payload must be meta i.e. dict"
 
     #payload[name] = value
     payload_data.update(data_fields)
@@ -277,8 +291,17 @@ class Payload(dict):
 
             #    raise AssertionError(f"Meta payloads can only be json dictionaries/objects or strings with dictionaries")
 
-            # here the payload must be a dictionary/json object
             assert isinstance(payload_data, dict), f"Meta payloads can only be dictionaries, got: {type(payload_data)} {payload_data}"
+            # it must be a dictionary
+            # but!
+            # since we also get it from the jsons from configdb
+            # it must be compatible to json
+            # i.e. no tuples, only lists
+            # let's just convert it back and forth
+            inp_data = payload_data
+            payload_data = json.loads(json.dumps(inp_data))
+
+            # here the payload must be a dictionary/json object
             self['meta'] = True
 
         else:
@@ -585,7 +608,8 @@ class RunkeyHCC(RunkeyNode):
             'config': config,
             **more_payloads
         }
-        all_payloads = [Payload("config", payload_dict, name="HCC"+str(itsdaq_number), meta=True)]
+        self['name'] = "HCC"+str(itsdaq_number)
+        all_payloads = [Payload("config", payload_dict, name=self['name'], meta=True)]
         # Transitioning to using only 1 meta payload for all the data ^^^
 
         '''
@@ -621,9 +645,9 @@ class RunkeyHCC(RunkeyNode):
 
        >>> hcc = RunkeyHCC(str(1), str(0), "Y", ["0", "14"], "5", enable="True", locked="False")
        >>> hcc
-       {'type': 'HCC', 'payloads': [{'type': 'config', 'meta': True, 'data': {'itsdaq_number': '1', 'module_number': '0', 'hybrid_name': 'Y', 'rx_elink': ['0', '14'], 'tx_elink': '5', 'config': '', 'enable': 'True', 'locked': 'False'}, 'name': 'HCC1'}, {'type': 'enable', 'data': 'True'}, {'type': 'locked', 'data': 'False'}], 'children': [], 'id': '...'}
+       {'name': 'HCC1', 'type': 'HCC', 'payloads': [{'type': 'config', 'meta': True, 'data': {'itsdaq_number': '1', 'module_number': '0', 'hybrid_name': 'Y', 'rx_elink': ['0', '14'], 'tx_elink': '5', 'config': '', 'enable': 'True', 'locked': 'False'}, 'name': 'HCC1'}, {'type': 'enable', 'data': 'True'}, {'type': 'locked', 'data': 'False'}], 'children': [], 'id': '...'}
        >>> hcc.copy()
-       {'type': 'HCC', 'payloads': [{'type': 'enable', 'data': 'True'}, {'type': 'locked', 'data': 'False'}, {'reuse_id': '...'}], 'children': [], 'id': '...'}
+       {'type': 'HCC', 'payloads': [{'type': 'enable', 'data': 'True'}, {'type': 'locked', 'data': 'False'}, {'reuse_id': '...'}], 'children': [], 'id': '...', 'name': 'HCC1'}
        '''
 
        pls = self['payloads']
@@ -636,6 +660,9 @@ class RunkeyHCC(RunkeyNode):
            #new_node['payloads'].append({'id': cfg_payload['id']})
            new_node['payloads'].append(cfg_payload.make_reference())
 
+        if 'name' in self:
+            new_node['name'] = self['name']
+
        return new_node
 
 
diff --git a/scripts/test_upload_a_stave.py b/scripts/test_upload_a_stave.py
index 5416e223f67b50c86e250bb45c5181cbc37eee34..824cabc6b4db42d44223a7ef7a5e3ed99a04449d 100644
--- a/scripts/test_upload_a_stave.py
+++ b/scripts/test_upload_a_stave.py
@@ -4,7 +4,8 @@ import pyconfigdb
 import functions_configDB as fxt
 import logging
 import datetime
-from configdb_stripsRunkeys import get_subnode_of_type, get_node_payloads_data
+import json
+from configdb_stripsRunkeys import get_subnode_of_type, get_node_with_payload, get_node_payload, node_repr, RunkeyNode
 
 from configdb_operations import upload_stave
 
@@ -57,13 +58,133 @@ assert any(ch['type'] == "Staves" for ch in tagTree['objects'][0]['children'])
 runkey_root = tagTree['objects'][0]
 staves_subtree = get_subnode_of_type(runkey_root, "Staves")
 
-stave_names = []
-for ch in staves_subtree['children']:
-    assert ch['type'] == "Stave"
-    st_names = get_node_payloads_data(ch, 'staveName')
-    assert len(st_names) == 1
-    stave_names.append(st_names[0])
+downloaded_stave = get_node_with_payload(staves_subtree['children'], 'staveName', stave_name)
+logging.info(f'Found our stave {stave_name} in the downloaded runkey: {downloaded_stave}')
+assert downloaded_stave is not None
 
-logging.info(f'Found the following staves in the {runkey_ref} runkey: {stave_names}')
-logging.info(f'And our stave {stave_name} is there: {stave_name in stave_names}')
-assert stave_name in stave_names
\ No newline at end of file
+downloaded_stave = RunkeyNode(downloaded_stave['type'], downloaded_stave['payloads'], downloaded_stave['children'])
+
+# confirm that the downloaded stave is the same as the original node
+def eq_types_payloads(node_a, node_b, dont_log=False):
+    if dont_log:
+        log_info = logging.debug
+    else:
+        log_info = logging.info
+
+    if 'reuse_id' in node_a:
+        # TODO: the IDs change on upload to the database
+        # so, you cannot compare them
+        # no idea how to handle this nicely.
+        # Maybe RunkeyNode should contain a dictionary of node IDs?
+        #return node_a['reuse_id'] in (node_b.get('reuse_id'), node_b.get('id'))
+        # let's just skip it and return true
+        return True
+
+    if 'type' not in node_a:
+        logging.warning(f'got a node without type: {node_a}')
+        return False
+
+    if 'type' not in node_b:
+        logging.warning(f'got a node without type: {node_b}')
+        return False
+
+    #if node_a['type'] == node_b['type'] == 'HCC':
+    if node_a['type'] == 'HCC':
+        log_info = logging.info
+
+    # compare types
+    if node_a['type'] != node_b['type']:
+        log_info(f'node types are different: {node_a["type"]} vs {node_b["type"]}')
+        return False
+
+    # compare payloads
+    for pload in node_a['payloads']:
+        if pload['type'] == 'id':
+            continue
+
+        node_b_pload = get_node_payload(node_b, pload['type'])
+        atyp, btyp = pload['type'], node_b_pload['type']
+        adata, bdata = pload['data'], node_b_pload['data']
+
+        eq_payload = (atyp==btyp) and (adata==bdata)
+        if isinstance(adata, dict) and isinstance(bdata, dict):
+            log_info(f'adata and bdata sizes: {len(adata.items())} {len(bdata.items())}')
+
+            for key, val in adata.items():
+                if key not in bdata:
+                    log_info(f'key {key} not found in bdata')
+
+                elif val != bdata[key]:
+                    log_info(f'key {key} values are different:\n{val}\nvs\n{bdata[key]}')
+
+        if not eq_payload:
+            log_info(f'node payloads are different: {atyp == btyp} and {adata == bdata}, {type(adata)} and {type(bdata)}')
+            log_info(f'node payloads are different:\n{atyp}:\n{json.dumps(adata, sort_keys=True, indent=2)}')
+            log_info(f'vs:\n{btyp}:\n{json.dumps(bdata, sort_keys=True, indent=2)}')
+            return False
+
+    return True
+
+def eq_nodes(node_a, node_b):
+    eq_node_data = eq_types_payloads(node_a, node_b, dont_log=False)
+    if not eq_node_data:
+        logging.info(f'nodes are different on data')
+        return False
+
+    if 'reuse_id' in node_a:
+        # again, match all
+        return True
+
+    # compare children?
+    if len(node_a['children']) != len(node_b['children']):
+        logging.info(f'nodes are different in number of children: {len(node_a["children"])} != {len(node_b["children"])}')
+        return False
+
+    #logging.info(f'node type {node_a["type"]} has {[ch['type'] for ch in node_a["children"]]} child nodes')
+    #logging.info(f'comparing it to type {node_b["type"]} has {[ch['type'] for ch in node_b["children"]]} child nodes')
+    eq_children = []
+    for ch in node_a['children']:
+        # matched child node in b:
+        equal_ch_b = None
+
+        # try to match by name
+        ch_match = [n for n in node_b['children'] if 'name' in ch and n.get('name') == ch['name']]
+        if ch_match:
+            assert len(ch_match) == 1
+            logging.info(f'found a match by name: {ch["name"]}')
+            if eq_types_payloads(ch, ch_match[0], dont_log=False):
+                equal_ch_b = ch_match[0]
+
+        else:
+            logging.warning(f'did not match by name: {ch.get("name")}')
+            for ch_b in node_b['children']:
+                matched = False
+
+                if ch.get('type') == 'HCC':
+                    logging.warning('node_a is HCC type')
+                    matched = eq_types_payloads(ch, ch_b, dont_log=False)
+
+                else:
+                    logging.warning(f'node_a is not HCC type: {ch.get("type")} reuse_id: {"reuse_id" in ch}')
+                    matched = eq_types_payloads(ch, ch_b, dont_log=True)
+
+                if matched:
+                    equal_ch_b = ch_b
+                    break
+
+        eq_children.append((equal_ch_b is not None, (ch, equal_ch_b)))
+
+    not_matched = [(ch_a, ch_b) for match, (ch_a, ch_b) in eq_children if not match]
+    if not all(match for match, _ in eq_children):
+        logging.info(f'nodes:\n{node_repr(node_a)}\nvs\n{node_repr(node_b)}')
+        logging.info(f'did not match by types and payloads')
+        logging.info(f'nodes number of children: {len(node_a["children"])} vs {len(node_b["children"])}')
+        logging.info(f'nodes are different in children: {[match for match, _ in eq_children]}')
+
+        for ch_a, ch_b in not_matched:
+            logging.info(f'did not match child:\n{ch_a}\nvs\n{ch_b}')
+        return False
+
+    return all(eq_nodes(ch_a, ch_b) for _, (ch_a, ch_b) in eq_children)
+
+assert eq_nodes(stave_node, downloaded_stave)