diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 89f4d038ef6c5a8f7a446414136f8d709ebdc60c..01b3f76412ca94e8ef2bd047da285432c9f497aa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -53,6 +53,7 @@ after_script:
   dependencies:
     - run_test
     - run_doc
+    - run_build
 
 #
 # Job: run PyLint
@@ -73,7 +74,7 @@ run_pylint:
   artifacts:
     paths:
       - ./pylint/
-# 
+#
 # Job: install packages and run tests
 #
 run_test:
diff --git a/DEVELOPERS.md b/DEVELOPERS.md
index d9351142ef3dc99f6f703ca68321f1764c0a9f97..86e9146694821f244393b1f4cfa872261280584c 100644
--- a/DEVELOPERS.md
+++ b/DEVELOPERS.md
@@ -34,6 +34,12 @@ deactivate # return from virtual environment to the regular one
 
 ## <a name="tests"> Running Tests
 
+### Checking whether your code is up to the project standard
+Use [*pylint*][pylint] to check the quality of your code. The standard linter configuration is used.
+Compare the linter ratings before and after your modifications and do not commit code that reduces the code quality.
+
+### Testing the code with Jupyter notebooks
+
 Tests alongside with some supporting documentation are provided in `jupyter` [notebook][./doc/notebooks/test_unpacker.ipynb].
 `Jupyter` will be installed as a dependency with the `gdh` package. You can add your own test notebooks when required.
 In order to run the notebook locally simply run
@@ -75,7 +81,7 @@ Additional dependencies to install:
 The documentation can be generated using the following command:
 
 ```bash
-sphinx-build -b html doc build/doc
+sphinx-build -b html doc doc/_build
 ```
 
-Generated files are placed in `build/doc`.
+Generated files are placed in `doc/_build`.
@@ -88,3 +94,4 @@ This section will be completed later with precise documentation style guide, for
 [doxygen]:http://www.doxygen.nl/manual/starting.html
 [sphinx]:https://www.sphinx-doc.org/en/master/usage/quickstart.html
 [pandoc]:https://pandoc.org/installing.html
+[pylint]:https://www.pylint.org/
diff --git a/gdh/formats/amc.py b/gdh/formats/amc.py
index 42eeabace8e1819c8fd8a23ae75e5224196995d6..9d5d89d5385a2cd7f5c9adbfdffe92b2817cd07e 100644
--- a/gdh/formats/amc.py
+++ b/gdh/formats/amc.py
@@ -1,18 +1,26 @@
+# pylint: disable=missing-module-docstring
+# This module contains only one class which is properly documented
+
 import cbitstruct as bs
 from gdh.formats.generic_block import GenericBlock
 from gdh.formats.geb import GEB
 
 class AMC(GenericBlock):
     """
-    GEM AMC block. 
-    See detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
+    GEM AMC block.
+    See detailed data format `description. <https://docs.google.com/
+    spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
 
-    .. todo :: 
+    .. todo ::
        provide detailed description here
     """
+
+    # pylint: disable=too-many-instance-attributes
+
     def __init__(self):
         """
-        Default constructor. Creates child ``GEB()`` and set self block id to 'AMC'
+        Default constructor. Creates child ``GEB()``
+        and set self block id to 'AMC'
         """
         super().__init__()
         self.slot = 0
@@ -21,10 +29,11 @@ class AMC(GenericBlock):
 
     def setup(self):
         """
-        Define self header and trailer fields. 
+        Define self header and trailer fields.
 
         .. note ::
-            See data format description and method implementation for details since the format is data-driven.
+            See data format description and method implementation for
+            details since the format is data-driven.
             Variable format options are implemented as tuple.
""" # first 72 bits of 192-long header @@ -36,7 +45,7 @@ class AMC(GenericBlock): self.header_2_start_fields = {'FV' :'u4', 'RUN TYPE':'u4'} - + # 24 bits optional run parameters self.run_parameters = ({'RP:PAD' :'u24'},#Transition {'RP:PAD' :'u11', #Physics @@ -58,7 +67,7 @@ class AMC(GenericBlock): 'RP:CAL DAC' :'u8', 'RP:PULSE STRETCH':'u3', 'RP:THR ARM DAC' :'u10'}) - + # last 96 bits of header self.header_2_end_fields = {'OR N' :'u16', 'BOARD ID':'u16'} @@ -69,7 +78,7 @@ class AMC(GenericBlock): 'PAYLOAD VAR' :'u3', 'PAYLOAD TYPE':'u4', 'TTS' :'u4'} - + # 128 bit trailer self.trailer_1_fields = {'LINK TO' :'u24', 'OOS' :'u1', @@ -86,31 +95,44 @@ class AMC(GenericBlock): 'PAD3' :'u4', 'DATA LGTH':'u20'} - def assemble_header_2(self, rp): + def assemble_header_2(self, run_parameter): """ Assemble second header of the AMC block based on the run type """ tmp = {} tmp.clear() tmp.update(self.header_2_start_fields) - tmp.update(rp) + tmp.update(run_parameter) tmp.update(self.header_2_end_fields) - #self.logger.debug("Assembled header 2 is: %s"%tmp) + self.logger.debug("Assembled header 2 is: %s", tmp) return tmp def compile(self): """ - Compile self header and trailer fields (three 64-bit word header and two 64-bit word trailer) + Compile self header and trailer fields + (three 64-bit word header and two 64-bit word trailer) """ - self.header_1_compiled = bs.compile(''.join(list(self.header_1_fields.values())), list(self.header_1_fields.keys())) - self.header_2_start_compiled = bs.compile(''.join(list(self.header_2_start_fields.values())), list(self.header_2_start_fields.keys())) + self.header_1_compiled = bs.compile( + ''.join(list(self.header_1_fields.values())), + list(self.header_1_fields.keys())) + self.header_2_start_compiled = bs.compile( + ''.join(list(self.header_2_start_fields.values())), + list(self.header_2_start_fields.keys())) # assemble 5 options of header 2 and compile header_2_array = [self.assemble_header_2(rp) for rp in self.run_parameters] - #self.logger.debug("Header 2 array: %s"%header_2_array) - self.header_2_array_compiled = tuple(bs.compile(''.join(list(header2.values())), list(header2.keys())) for header2 in header_2_array) - self.header_3_compiled = bs.compile(''.join(list(self.header_3_fields.values())), list(self.header_3_fields.keys())) - self.trailer_1_compiled = bs.compile(''.join(list(self.trailer_1_fields.values())), list(self.trailer_1_fields.keys())) - self.trailer_2_compiled = bs.compile(''.join(list(self.trailer_2_fields.values())), list(self.trailer_2_fields.keys())) + self.logger.debug("Header 2 array: %s", header_2_array) + self.header_2_array_compiled = tuple(bs.compile( + ''.join(list(header2.values())), + list(header2.keys())) for header2 in header_2_array) + self.header_3_compiled = bs.compile( + ''.join(list(self.header_3_fields.values())), + list(self.header_3_fields.keys())) + self.trailer_1_compiled = bs.compile( + ''.join(list(self.trailer_1_fields.values())), + list(self.trailer_1_fields.keys())) + self.trailer_2_compiled = bs.compile( + ''.join(list(self.trailer_2_fields.values())), + list(self.trailer_2_fields.keys())) def unpack(self, stream): """ @@ -119,32 +141,37 @@ class AMC(GenericBlock): * AMC headers (building second header on the fly) * GEB blocks. 
Use "DAV_LIST" to determine the active optical links * AMC trailers - + :param IndexedNDArray stream: An array of 64-bits data words :return dict: Dictionary in form ``{Column name tuple: Value}`` """ - event = {self.update_key(k, 'HEADER'): v - for k, v in self.unpack_word(stream, self.header_1_compiled).items()} + event = {self.update_key(k, 'HEADER'): v + for k, v in + self.unpack_word(stream, self.header_1_compiled).items()} temp_dict = self.unpack_word(stream, self.header_2_start_compiled) stream.pos = stream.pos - 1 run_type = temp_dict['RUN TYPE'] - #self.logger.debug("Run type: %d" %run_type) - event.update({self.update_key(k, 'HEADER'): v - for k, v in self.unpack_word(stream, self.header_2_array_compiled[run_type]).items()}) - event.update({self.update_key(k, 'HEADER'): v - for k, v in self.unpack_word(stream, self.header_3_compiled).items()}) + self.logger.debug("Run type: %d", run_type) + event.update({self.update_key(k, 'HEADER'): v + for k, v in + self.unpack_word(stream, self.header_2_array_compiled[run_type]).items()}) + event.update({self.update_key(k, 'HEADER'): v + for k, v in + self.unpack_word(stream, self.header_3_compiled).items()}) # Retrieve active optical links and retreieve payloads dav_list = event[('DAV LIST', self.block_id, 'HEADER', self.slot, self.link, self.pos)] for link in range(24): - if ((dav_list >> link) & 0x1): + if (dav_list >> link) & 0x1: self.geb.slot = self.slot self.geb.link = link event.update(self.geb.unpack(stream)) - #self.logger.debug("all GEBs payload reading done. Stream position %d" %(stream.pos)) - event.update({self.update_key(k, 'TRAILER'): v - for k, v in self.unpack_word(stream, self.trailer_1_compiled).items()}) - event.update({self.update_key(k, 'TRAILER'): v - for k, v in self.unpack_word(stream, self.trailer_2_compiled).items()}) - #self.logger.debug("AMC:%d payload reading done. Stream position %d" %(self.slot, stream.pos)) + self.logger.debug("all GEBs payload reading done. Stream position %d", stream.pos) + event.update({self.update_key(k, 'TRAILER'): v + for k, v in + self.unpack_word(stream, self.trailer_1_compiled).items()}) + event.update({self.update_key(k, 'TRAILER'): v + for k, v in + self.unpack_word(stream, self.trailer_2_compiled).items()}) + self.logger.debug("AMC:%d payload reading done. Stream position %d", self.slot, stream.pos) return event diff --git a/gdh/formats/amc13.py b/gdh/formats/amc13.py index 804c637f9f2a805b795cf38ecffa1caba76be9a3..125d1e9d45182866dd06a14a0945d0fbcd0e6b1d 100644 --- a/gdh/formats/amc13.py +++ b/gdh/formats/amc13.py @@ -1,3 +1,6 @@ +# pylint: disable=missing-module-docstring +# This module contains only one class which is properly documented + import cbitstruct as bs from gdh.formats.generic_block import GenericBlock from gdh.formats.amc import AMC @@ -5,14 +8,19 @@ from gdh.formats.amc import AMC class AMC13(GenericBlock): """ CMS AMC13 data format block. - See detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ + See detailed data format `description. <https://docs.google.com/ + spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ - .. todo :: + .. todo :: provide detailed description here """ + + # pylint: disable=too-many-instance-attributes + def __init__(self): """ - Default constructor. Creates child ``AMC()`` and set self block id to 'AMC13' + Default constructor. 
+        Creates child ``AMC()`` and set self block id to 'AMC13'
         """
         super().__init__()
         self.amc = AMC()
@@ -20,7 +28,8 @@
 
     def setup(self):
         """
-        Define self header and trailer fields (two 64-bit word header and one 64-bit word trailer)
+        Define self header and trailer fields
+        (two 64-bit word header and one 64-bit word trailer)
         """
         self.header_fields = {'uFOV'    :'u4',
                               'CAL TYPE':'u4',
@@ -52,11 +61,18 @@
 
     def compile(self):
         """
-        Compile self header and trailer fields (two 64-bit word header and one 64-bit word trailer)
+        Compile self header and trailer fields
+        (two 64-bit word header and one 64-bit word trailer)
         """
-        self.header_compiled = bs.compile(''.join(list(self.header_fields.values())), list(self.header_fields.keys()))
-        self.amc_header_compiled = bs.compile(''.join(list(self.amc_header_fields.values())), list(self.amc_header_fields.keys()))
-        self.trailer_compiled = bs.compile(''.join(list(self.trailer_fields.values())), list(self.trailer_fields.keys()))
+        self.header_compiled = bs.compile(
+            ''.join(list(self.header_fields.values())),
+            list(self.header_fields.keys()))
+        self.amc_header_compiled = bs.compile(
+            ''.join(list(self.amc_header_fields.values())),
+            list(self.amc_header_fields.keys()))
+        self.trailer_compiled = bs.compile(
+            ''.join(list(self.trailer_fields.values())),
+            list(self.trailer_fields.keys()))
 
     def unpack(self, stream):
         """
@@ -66,24 +82,41 @@
         * AMC13::AMC header fields (read their number from AMC13 header)
         * AMC blocks
         * AMC13trailer
- 
+
         :param IndexedNDArray stream: An array of 64-bits data words
         :return dict: Dictionary in form ``{Column name tuple: Value}``
         """
-        event = {self.update_key(k, 'HEADER'): v for k, v in self.unpack_word(stream, self.header_compiled).items()}
+        event = {self.update_key(k, 'HEADER'): v
+                 for k, v in
+                 self.unpack_word(stream, self.header_compiled).items()}
         # Loop over AMC13:H:N AMC headers
-        for i in range(event[('N AMC', self.block_id, 'HEADER', self.slot, self.link, self.pos)]):
+        for _ in range(event[('N AMC',
+                              self.block_id,
+                              'HEADER',
+                              self.slot,
+                              self.link,
+                              self.pos)]):
             # NOTE AMC number ranges from 0 to (N AMC-1). Shall add "+1"?
             temp_amc_dict = self.unpack_word(stream, self.amc_header_compiled)
-            temp_amc_dict_2 = {f'{k[:4]}{temp_amc_dict["AMC:$:AMC N"]}{k[5:]}': v for k, v in temp_amc_dict.items()}
-            event.update({self.update_key(k): v for k, v in temp_amc_dict_2.items()})
+            temp_amc_dict_2 = {f'{k[:4]}{temp_amc_dict["AMC:$:AMC N"]}{k[5:]}': v
+                               for k, v in temp_amc_dict.items()}
+            event.update({self.update_key(k): v
+                          for k, v in temp_amc_dict_2.items()})
 
-        for i in range(event[('N AMC', self.block_id, 'HEADER', self.slot, self.link, self.pos)]):
+        for _ in range(event[('N AMC',
+                              self.block_id,
+                              'HEADER',
+                              self.slot,
+                              self.link,
+                              self.pos)]):
             self.amc.slot = temp_amc_dict['AMC:$:AMC N']
             event.update(self.amc.unpack(stream))
-        #self.logger.debug("all AMCs payload reading done. Stream position %d" %(stream.pos))
-        event.update({self.update_key(k,'TRAILER'): v for k, v in self.unpack_word(stream, self.trailer_compiled).items()})
-        #self.logger.debug("AMC13 payload reading done. Stream position %d" %(stream.pos))
+        self.logger.debug("all AMCs payload reading done. Stream position %d",
+                          stream.pos)
+        event.update({self.update_key(k, 'TRAILER'): v
+                      for k, v in
+                      self.unpack_word(stream, self.trailer_compiled).items()})
+        self.logger.debug("AMC13 payload reading done. Stream position %d",
Stream position %d", + stream.pos) return event - diff --git a/gdh/formats/cdf.py b/gdh/formats/cdf.py index 4155dca06aac3940f566443fc22e001fc47b1b59..dc19a3aa11c7ed24c32362c3ed7a9ad520537cf9 100644 --- a/gdh/formats/cdf.py +++ b/gdh/formats/cdf.py @@ -1,15 +1,18 @@ +# pylint: disable=missing-module-docstring +# This module contains only one class which is properly documented + import cbitstruct as bs from gdh.formats.generic_block import GenericBlock from gdh.formats.amc13 import AMC13 -import pandas as pd class CDF(GenericBlock): """ - CMS Common Data Format block. - See detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ + CMS Common Data Format block. + See detailed data format `description. <https://docs.google.com/ + spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ - .. todo :: + .. todo :: provide detailed description here """ def __init__(self): @@ -51,8 +54,12 @@ class CDF(GenericBlock): """ Compile self header and trailer fields (one 64-bit word each) """ - self.header_compiled = bs.compile(''.join(list(self.header_fields.values())), list(self.header_fields.keys())) - self.trailer_compiled = bs.compile(''.join(list(self.trailer_fields.values())), list(self.trailer_fields.keys())) + self.header_compiled = bs.compile( + ''.join(list(self.header_fields.values())), + list(self.header_fields.keys())) + self.trailer_compiled = bs.compile( + ''.join(list(self.trailer_fields.values())), + list(self.trailer_fields.keys())) def unpack(self, stream): """ @@ -61,13 +68,17 @@ class CDF(GenericBlock): * CDF header * AMC13 block * CDF trailer - + :param IndexedNDArray stream: An array of 64-bits data words :return dict: Dictionary in form ``{Column name tuple: Value}`` """ - event = {self.update_key(k,'HEADER'): v for k, v in self.unpack_word(stream, self.header_compiled).items()} + event = {self.update_key(k, 'HEADER'): v + for k, v in + self.unpack_word(stream, self.header_compiled).items()} event.update(self.amc13.unpack(stream)) - event.update({self.update_key(k,'TRAILER'): v for k, v in self.unpack_word(stream, self.trailer_compiled).items()}) - #self.logger.debug("event reading done. Stream position %d" %(stream.pos)) + event.update({self.update_key(k, 'TRAILER'): v + for k, v in + self.unpack_word(stream, self.trailer_compiled).items()}) + self.logger.debug("event reading done. Stream position %d", stream.pos) return event diff --git a/gdh/formats/geb.py b/gdh/formats/geb.py index 1787f37929dcbab652358f6910fd3dda5d8f1a8d..09d3b83998e0b5268739a9df674859229092b904 100644 --- a/gdh/formats/geb.py +++ b/gdh/formats/geb.py @@ -1,15 +1,22 @@ +# pylint: disable=missing-module-docstring +# This module contains only one class which is properly documented + import cbitstruct as bs from gdh.formats.generic_block import GenericBlock from gdh.formats.vfat import VFAT class GEB(GenericBlock): """ - GEM GEB block. - See detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ + GEM GEB block. + See detailed data format `description. <https://docs.google.com/ + spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_ - .. todo :: + .. todo :: provide detailed description here """ + + # pylint: disable=too-many-instance-attributes + def __init__(self): """ Default constructor. 
         Creates child ``VFAT()`` and set self block id to 'GEB'
@@ -22,7 +29,7 @@
         self.block_id = 'GEB'
         self.payload_type = 0 # TODO implement zero suppression payload types
         self.vfat = VFAT()
- 
+
 
     def setup(self):
         """
         Define self header and trailer fields (one 64-bit word each)
@@ -57,8 +64,12 @@
         """
         Compile self header and trailer fields (one 64-bit word each)
         """
-        self.header_compiled = bs.compile(''.join(list(self.header_fields.values())), list(self.header_fields.keys()))
-        self.trailer_compiled = bs.compile(''.join(list(self.trailer_fields.values())), list(self.trailer_fields.keys()))
+        self.header_compiled = bs.compile(
+            ''.join(list(self.header_fields.values())),
+            list(self.header_fields.keys()))
+        self.trailer_compiled = bs.compile(
+            ''.join(list(self.trailer_fields.values())),
+            list(self.trailer_fields.keys()))
 
     def unpack(self, stream):
         """
@@ -69,22 +80,27 @@
         * GEB trailer
 
         .. note ::
-            Currently only lossless data is supported. In this case, the size of VFAT payload is always 192 bits each. Number of VFAT blocks is then determined as number of VFAT words // 3.
- 
+            Currently only lossless data is supported.
+            In this case, the size of VFAT payload is always 192 bits each.
+            Number of VFAT blocks is then determined as number of VFAT words // 3.
+
         :param IndexedNDArray stream: An array of 64-bits data words
         :return dict: Dictionary in form ``{Column name tuple: Value}``
         """
         tmp = self.unpack_word(stream, self.header_compiled)
-        event = {self.update_key(k, 'HEADER'): v
-                 for k, v in tmp.items()}
-        for vfat_pos in range(tmp['N VFAT WORDS']//3):
+        event = {self.update_key(k, 'HEADER'): v
+                 for k, v in tmp.items()}
+        for _ in range(tmp['N VFAT WORDS']//3):
             self.vfat.slot = self.slot
             self.vfat.link = self.link
             self.vfat.payload_type = self.payload_type
             event.update(self.vfat.unpack(stream))
-        #self.logger.debug("all VFATs payload reading done. Stream position %d" %(stream.pos))
-        event.update({self.update_key(k, 'TRAILER'): v
-                      for k, v in self.unpack_word(stream, self.trailer_compiled).items()})
-        #self.logger.debug("GEB:%d:%d payload reading done. Stream position %d" %(self.slot, self.link, stream.pos))
+        self.logger.debug("all VFATs payload reading done. Stream position %d",
+                          stream.pos)
+        event.update({self.update_key(k, 'TRAILER'): v
+                      for k, v in
+                      self.unpack_word(stream, self.trailer_compiled).items()})
+        self.logger.debug("GEB:%d:%d payload reading done. Stream position %d",
+                          self.slot, self.link, stream.pos)
         return event
diff --git a/gdh/formats/generic_block.py b/gdh/formats/generic_block.py
index b7d593814b6a7dcdbf313627e74f9c15d59e91a2..dc222b84b4ddbd237137cc009060e0c7eff119e0 100644
--- a/gdh/formats/generic_block.py
+++ b/gdh/formats/generic_block.py
@@ -1,11 +1,14 @@
-import cbitstruct as bs
-import logging
+# pylint: disable=missing-module-docstring
+# This module contains only one class which is properly documented
+
 from abc import ABC
+import logging
 
 class GenericBlock(ABC):
     """
     Abstract class defining genericl binary stream block
-    GEM detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
+    See detailed data format `description. <https://docs.google.com/
+    spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
 
     .. note ::
         This is a base class and it must be extended
@@ -23,14 +26,15 @@ class GenericBlock(ABC):
         self.setup()
         self.compile()
 
-    def update_key(self, key, sub_block = 'PAYLOAD'):
+    def update_key(self, key, sub_block='PAYLOAD'):
         """
         Convert filed name to a tuple with hardware tagging information
 
         :param str key: Field name
         :param str sub_block: Sub block id
 
-        :return tuple: ``('FIELD NAME', 'BLOCK ID', 'SUB BLOCK', self.slot, self.link, self.pos)``
+        :return tuple: ``('FIELD NAME', 'BLOCK ID', 'SUB BLOCK',
+                          self.slot, self.link, self.pos)``
         """
         return (key, self.block_id, sub_block, self.slot, self.link, self.pos)
 
@@ -39,48 +43,59 @@ class GenericBlock(ABC):
         Read and unpack a word from stream
 
         :param IndexedNDArray stream: An array of 64-bits data words
-        :param dict compiled_dict: Compiled ``bitstruct`` dictionary with word fields in form ``{'Field Name':'Field Type:length'}``
+        :param dict compiled_dict: Compiled ``bitstruct`` dictionary
+            with word fields in form ``{'Field Name':'Field Type:length'}``
         :param int n_words: Number of 64-bit words in the array
 
         :return dict: Dictionary in form ``{'Field Name': Value}``
         """
-        #self.logger.debug("unpack_word called. Stream position %d, word: %s" %(stream.pos, stream[stream.pos]))
-        unpacked_word = compiled_dict.unpack(stream[stream.pos:stream.pos + n_words].tobytes())
+        self.logger.debug("unpack_word called. Stream position %d, word: %s",
+                          stream.pos, stream[stream.pos])
+        unpacked_word = compiled_dict.unpack(
+            stream[stream.pos:stream.pos + n_words].tobytes())
         stream.pos += n_words
-        #self.logger.debug("word reading done. Stream position %d" %(stream.pos))
-        #self.logger.debug('unpacked word %s', unpacked_word)
+        self.logger.debug("word reading done. Stream position %d", stream.pos)
+        self.logger.debug('unpacked word %s', unpacked_word)
         return unpacked_word
 
     def compile(self):
         """
-        Compile bitstruct dictionary. Virtual method, implemented in descendant classes
+        Compile bitstruct dictionary.
+        Virtual method, implemented in descendant classes
         """
-        pass
 
     def setup(self):
         """
-        Define block data fields. Virtual method, implemented in descendant classes
+        Define block data fields.
+        Virtual method, implemented in descendant classes
         """
-        pass
 
     def unpack(self, stream):
         """
-        Unpack block. Virtual method, implemented in descendant classes.
- 
+        Unpack block.
+        Virtual method, implemented in descendant classes.
+
        :param IndexedNDArray stream: An array of 64-bits data words
        :return dict: Dictionary in form ``{Column name tuple: Value}``
        """
-        pass
 
     def pack(self):
-        pass
+        """
+        Placeholder. TBU
+        """
 
-    def print():
-        pass
+    def print(self):
+        """
+        Placeholder. TBU
+        """
 
     def set_logger_console_handler(self):
-        pass
+        """
+        Placeholder. TBU
+        """
 
     def set_logger_file_handler(self):
-        pass
+        """
+        Placeholder. TBU
+        """
diff --git a/gdh/formats/vfat.py b/gdh/formats/vfat.py
index 3e961bccc55766b3fbcf31da79b7b330aac573b2..3a5cb9e3698a9e24bd0026fc17f8cd2ef3255615 100644
--- a/gdh/formats/vfat.py
+++ b/gdh/formats/vfat.py
@@ -1,12 +1,16 @@
+# pylint: disable=missing-module-docstring
+# This module contains only one class which is properly documented
+
 import cbitstruct as bs
 from gdh.formats.generic_block import GenericBlock
 
 class VFAT(GenericBlock):
     """
-    GEM AMC block. 
-    See detailed data format `description. <https://docs.google.com/spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
+    GEM AMC block.
+    See detailed data format `description. <https://docs.google.com/
+    spreadsheets/d/1iKMl68vMsSWgr8ekVdsJYTk7-Pgy8paxq98xj6GtoVo/edit?usp=sharing>`_
 
-    .. todo :: 
+    .. todo ::
        provide detailed description here
     """
     def __init__(self):
@@ -19,20 +23,22 @@ class VFAT(GenericBlock):
 
     def setup(self):
         """
-        Define self header and trailer fields. 
+        Define self header and trailer fields.
 
         .. note ::
            Lossless VFAT data format: 192 bits = 3 64 bit words
 
         .. warning ::
-           ``cbitstruct`` package has a limitation on max field size of 64 bits.
-           Thus, VFAT channel data is presented as a collection of two 64-bit fields:
+           ``cbitstruct`` package has a limitation
+           on max field size of 64 bits.
+           Thus, VFAT channel data is presented as
+           a collection of two 64-bit fields:
 
            * 'CHANNEL DATA M' - Most significant 64 bits
            * 'CHANNEL DATA L' - List significant 64 bits
 
-        .. todo :: 
+        .. todo ::
            Implement zero suppression and calibration data formats
         """
         self.vfat_block = {'POS'           :'u8',
@@ -49,7 +55,9 @@ class VFAT(GenericBlock):
         """
         Compile complete VFAT block
         """
-        self.vfat_block_compiled = bs.compile(''.join(list(self.vfat_block.values())), list(self.vfat_block.keys()))
+        self.vfat_block_compiled = bs.compile(
+            ''.join(list(self.vfat_block.values())),
+            list(self.vfat_block.keys()))
 
     def unpack(self, stream):
         """
diff --git a/gdh/unpacker.py b/gdh/unpacker.py
index 4841fa451129781aaa2a5b5dc560e75c8a16e12a..37e42088cfa8edeaf227dea455e529d451d9cbdb 100644
--- a/gdh/unpacker.py
+++ b/gdh/unpacker.py
@@ -1,6 +1,9 @@
+"""
+Executable script for unpacking GEM binary data
+"""
+
 import argparse
 import logging
-import os
 import pandas as pd
 import numpy as np
 from gdh.utils import IndexedNDArray
@@ -11,14 +14,15 @@
 np.set_printoptions(formatter={'int':hex})
 
 def run(filename, source, nrows=None, verbosity=1):
     """
-    Unpack gem binary file 
+    Unpack gem binary file
 
     :param str filename: Input data binary file
     :param str source: Type of binary stream source
     :param int nrows: Number of events to unpack
     :param int verbosity: Logging level
 
-    :return pandas.DataFrame: A table of events with column names as tuples. Each row corresponds to one event
+    :return pandas.DataFrame: A table of events with column names as tuples.
+        Each row corresponds to one event
     """
 
     if verbosity == 0:
@@ -31,28 +35,28 @@ def run(filename, source, nrows=None, verbosity=1):
         # set level to DEBUG
         logging.basicConfig(level=logging.DEBUG)
 
-    logger.info('Processing file: %s. \n Data source: %s'%(filename, source))
-    cbs = IndexedNDArray(np.fromfile(filename, dtype='>Q').byteswap(inplace=True), pos = 0)
-    #logger.debug('Input data size in 64-bit words: %d'%(cbs.size))
+    logger.info('Processing file: %s. \n Data source: %s', filename, source)
+    cbs = IndexedNDArray(np.fromfile(filename, dtype='>Q').byteswap(inplace=True), pos=0)
+    logger.debug('Input data size in 64-bit words: %d', cbs.size)
 
     # TODO read first word and determine the source type instead of asking to pass
-    if source == 'minidaq' or source == 'ferol':
-        logger.warn('%s format is not supported at the moment'%(source))
-        return
+    if source in ('minidaq', 'ferol'):
+        logger.warning('%s format is not supported at the moment', source)
+        return None
 
-    # Create the CDF instance (which creates every format block downstream). 
-    # This has to be done outside of the loop as the formats are precompiled 
-    # during creation of the objects and there's no need to create them on 
+    # Create the CDF instance (which creates every format block downstream).
+    # This has to be done outside of the loop as the formats are precompiled
+    # during creation of the objects and there's no need to create them on
     # per-event basis
     cdf = CDF()
 
     # Get one event from the stream
    def get():
        while cbs.pos < cbs.size:
            yield cdf.unpack(cbs)
-            #logger.debug('Data size in 64-bit words: %d. Position: %d '%(cbs.size, cbs.pos))
+            logger.debug('Data size in 64-bit words: %d. Position: %d', cbs.size, cbs.pos)
 
     events = pd.DataFrame.from_records(get(), nrows=nrows)
     logger.info(events.head())
-    logger.info('Finished unpacking. Processed %d events.'%(len(events.index)))
+    logger.info('Finished unpacking. Processed %d events.', len(events.index))
     return events
 
 if __name__ == "__main__":
@@ -63,9 +67,10 @@
                         help='Source type. Only "sdram" is currently supported')
     parser.add_argument('-n', '--nevents', dest='nevents', type=int, default=None,
                         help='Maximum number of events to process')
-    parser.add_argument('-v', '--verbosity', dest='verbosity', type=int, choices=[0, 1, 2], default=1,
+    parser.add_argument('-v', '--verbosity', dest='verbosity', type=int,
+                        choices=[0, 1, 2], default=1,
                         help='Logger verbosity levels')
- 
+
     args = parser.parse_args()
 
     run(args.ifilename, args.isource, args.nevents, args.verbosity)
diff --git a/gdh/utils.py b/gdh/utils.py
index e364f88f18a5b99899c26a010159f793d84aa0a7..c1c446bd99b7d5278e2f67ba58dae309f4d94814 100644
--- a/gdh/utils.py
+++ b/gdh/utils.py
@@ -1,11 +1,20 @@
+# pylint: disable=missing-module-docstring
+# This module contains only one class which is properly documented
+
 import numpy as np
 
 class IndexedNDArray(np.ndarray):
     """
     Helper class providing ndarray with extra ``pos`` attribute.
-    This attribute will be used to indicate current position when moving through array values.
-    See `numpy example. <https://numpy.org/doc/stable/user/basics.subclassing.html>`_
+    This attribute will be used to indicate current position
+    when moving through array values.
+    See `numpy example. <https://numpy.org/
+    doc/stable/user/basics.subclassing.html>`_
     """
+
+    # pylint: disable=multiple-statements
+    # pylint: disable=attribute-defined-outside-init
+
     def __new__(cls, input_array, pos=0):
         obj = np.asarray(input_array).view(cls)
         obj.pos = pos
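
The reformatted compile() methods in the patch all follow one idiom: each block keeps a dict mapping field names to bitstruct format codes, joins the codes into a single format string, and compiles it together with the key list so that unpack() later returns a {name: value} dictionary. A minimal sketch of that idiom, outside the patch; the field names and widths below are illustrative only, not real GEM fields (the real maps are the *_fields dictionaries in the block classes):

import cbitstruct as bs

# Illustrative 64-bit word layout: 4 + 12 + 48 bits.
fields = {'FLAG'   : 'u4',
          'COUNT'  : 'u12',
          'PAYLOAD': 'u48'}

# Same construction as in the patch: concatenated format codes plus the name list.
compiled = bs.compile(''.join(fields.values()), list(fields.keys()))

word = bytes.fromhex('a1b2c3d4e5f60718')   # one 8-byte (64-bit) data word
print(compiled.unpack(word))               # -> {'FLAG': ..., 'COUNT': ..., 'PAYLOAD': ...}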
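
The unpacker walks the data with the ``pos`` cursor that IndexedNDArray (gdh/utils.py) adds on top of a numpy array: unpack_word() slices the next n_words 64-bit words at stream.pos and then advances the cursor. A rough usage sketch, assuming the gdh package is installed; the input file name is hypothetical:

import numpy as np
from gdh.utils import IndexedNDArray

# Read big-endian 64-bit words and start the cursor at the beginning of the stream.
stream = IndexedNDArray(np.fromfile('run_000000.raw', dtype='>Q').byteswap(inplace=True), pos=0)

while stream.pos < stream.size:
    word = stream[stream.pos:stream.pos + 1].tobytes()   # next 64-bit word as 8 bytes
    stream.pos += 1                                       # advance the cursor, as unpack_word() does
    # ... hand `word` to a compiled cbitstruct format dictionary here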