From aea5a748eb97a433cbf34ef7e8e02be07604dffd Mon Sep 17 00:00:00 2001
From: Davide Cristoforetti <dcristof@gputest-genoa-01.cms>
Date: Wed, 28 Aug 2024 17:50:54 +0200
Subject: [PATCH] output: Validation scripts and experimental famfs support

---
 src/OutputFileHandler.cc                       | 13 +++++--------
 src/OutputFileHandler.h                        |  1 +
 src/format.h                                   |  2 +-
 test/config/filedma-gmt-muon.json5             |  6 +++---
 test/config/filedma-gmt-muon_famfs.json5       | 68 ++++++++++++
 test/testing_scripts/check_data_consistency.py | 97 +++++++++++++++++
 test/testing_scripts/compute_md5.py            | 22 ++++
 7 files changed, 197 insertions(+), 12 deletions(-)
 create mode 100644 test/config/filedma-gmt-muon_famfs.json5
 create mode 100644 test/testing_scripts/check_data_consistency.py
 create mode 100644 test/testing_scripts/compute_md5.py

diff --git a/src/OutputFileHandler.cc b/src/OutputFileHandler.cc
index a6f51ca4..e8b98eb6 100644
--- a/src/OutputFileHandler.cc
+++ b/src/OutputFileHandler.cc
@@ -135,20 +135,17 @@ void OutputFileHandler::open_new_file() {
   std::string filename = format_filename(current_run_number_, current_index_, ls);
   std::string full_filename = working_files_base_path_ + "/" + filename;
   LOG(TRACE) << "opening file with index " << current_index_ << ", in lumisection " << ls;
-  OutputFile outputFile(fopen(full_filename.c_str(), "wbx"), filename, createFileHeader(ls), ls,
+  OutputFile outputFile(fopen(full_filename.c_str(), "wb+"), filename, createFileHeader(ls), ls,
                         current_index_ % (max_index_per_ls_ + 1), run_dir_);
   // TODO: run_dir_ probably needs also the run number
-  if (!outputFile.exists()) {
+  outputFile_ = outputFile;
+  stop_counter += 1;
+  if (stop_counter > 4) {
     std::string err =
-        tools::strerror("ERROR when creating file '" + outputFile.getFileName() + "'");
+        tools::strerror("Stop condition met when creating file '" + outputFile.getFileName() + "'");
     LOG(ERROR) << err;
     throw std::runtime_error(err);
   }
-  outputFile_ = outputFile;
-  // reserve space for CMSSW header if required
-  if (this->OutputFileHandler::getCMSSWHeaders()) {
-    fseek(outputFile_.getFilePtr(), sizeof(FRDFileHeader_v2), SEEK_SET);
-  }
 }
 
 // Create a properly formatted file name
diff --git a/src/OutputFileHandler.h b/src/OutputFileHandler.h
index 5961e6b6..bc25c9c1 100644
--- a/src/OutputFileHandler.h
+++ b/src/OutputFileHandler.h
@@ -150,6 +150,7 @@ class OutputFileHandler {
   uint32_t ls_NFiles_{};
   uint32_t run_NOrbits_{};
   uint32_t run_NFiles_{};
+  int stop_counter{};  // files opened so far; drives the experimental stop condition in open_new_file()
 };
 
 #endif
\ No newline at end of file
diff --git a/src/format.h b/src/format.h
index 20e28b40..340b67ec 100644
--- a/src/format.h
+++ b/src/format.h
@@ -311,7 +311,7 @@ struct constants {
   static constexpr uint32_t bril_header = 4278781695;
   static constexpr uint32_t NBXPerOrbit = 3564;
   static constexpr uint32_t NFramesInHistoHeader = 9;
-  static constexpr uint64_t N_orbits_per_lumisection = 1 << 18;  // equivalent to pow(2, 18)
+  static constexpr uint64_t N_orbits_per_lumisection = 1 << 6;  // 64 orbits; shortened from 1 << 18 for testing
 };
 
 bool CheckOrbitTrailerStart(const char* frame);
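For scale: one LHC orbit is 3564 bunch crossings (NBXPerOrbit above) at ~25 ns each, roughly 89 µs per orbit, so the original 1 << 18 orbits give the usual ~23 s lumisection while the shortened 1 << 6 = 64 orbits close a lumisection within milliseconds; this matches nOrbitsPerFile: 64 in the test configs below and makes files rotate quickly during famfs testing. A quick sanity check of those figures (standalone sketch; the nominal orbit period is assumed here, it is not defined in this patch):

    # back-of-the-envelope lumisection lengths (nominal LHC orbit period assumed)
    ORBIT_PERIOD_S = 3564 * 25e-9          # ~89 microseconds per orbit
    for n_orbits in (1 << 18, 1 << 6):
        print(n_orbits, "orbits ->", n_orbits * ORBIT_PERIOD_S, "s per lumisection")
    # 262144 orbits -> ~23.4 s (the standard lumisection)
    # 64 orbits     -> ~5.7 ms (shortened for testing)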
diff --git a/test/config/filedma-gmt-muon.json5 b/test/config/filedma-gmt-muon.json5
index a9f48615..62948bff 100644
--- a/test/config/filedma-gmt-muon.json5
+++ b/test/config/filedma-gmt-muon.json5
@@ -50,11 +50,11 @@
     dev_TCPAutoReconnectOnFailure: "false",
     // (Minimum) Logging severity: TRACE DEBUG INFO WARNING ERROR FATAL.
     // Use TRACE to log everything.
-    log_min_severity: "ERROR",
+    log_min_severity: "TRACE",
     threads: 8,
     // Stores fixed number of orbits per file when nOrbitsPerFile > 1
     // If zero, uses a fixed file size (`max_file_size`) instead
-    nOrbitsPerFile: 4096,
+    nOrbitsPerFile: 64,
     cmsswHeaders: "no",
     dthHeaders: "yes",
     source_id: 1,
@@ -65,4 +65,4 @@
         // e.g., kcu1500_ugmt, kcu1500_demux, sb852_bril
         board: "kcu1500_ugmt"
     }
-}
\ No newline at end of file
+}
diff --git a/test/config/filedma-gmt-muon_famfs.json5 b/test/config/filedma-gmt-muon_famfs.json5
new file mode 100644
index 00000000..526de71d
--- /dev/null
+++ b/test/config/filedma-gmt-muon_famfs.json5
@@ -0,0 +1,68 @@
+// filedma-gmt-muon_famfs.json5
+{
+    // Input settings //////////////////////////////////////////////////
+    // Input type, one of:
+    //   "wzdma" for DMA driver from Wojciech M. Zabolotny
+    //   "filedma" for reading from file and simulating DMA
+    //   "micronDMA" for PICO driver
+    //   "tcpip" for TCP/IP input receiving
+    input: "filedma",
+    // DMA device
+    dma_dev: "/dev/xdma0_c2h_0",
+    // Max received packet size in bytes (buffer to reserve)
+    dma_packet_buffer_size: 2097152,
+    // Number of packet buffers to allocate
+    dma_number_of_packet_buffers: 1000,
+    // Print report each N packets, use 0 to disable
+    packets_per_report: 2000,
+    // Number of orbits per DMA packet, in decimal
+    nOrbitsPerPacket: 1,
+    // Extra settings for "filedma" input
+    input_files: [
+        // Must have at least the same cardinality as input_streams; kept isolated since `FileDMA` is a development feature
+        "test/data/testfiles/gmt_muon_testfile.dat"
+    ],
+    // Processing types (unpacking):
+    //   PASS_THROUGH, GMT, CALO, CALOOBJ, CALOSUM, BMTF, UGT, BRIL
+    // Primitive types:
+    //   NONE, MUON, JET, EGAMMA, TAU, SUM, STUB, ALGOBIT
+    input_streams: [
+        {
+            processor_type: "GMT",
+            primitive_type: "MUON",
+            source_id: 1,
+            tcp_dest_port: 10000
+        }
+    ],
+    // Processor settings //////////////////////////////////////////////
+    enable_stream_processor: "yes",
+    // Enable software zero-suppression. Does not affect Calo.
+    doZS: "yes",
+    // Output settings /////////////////////////////////////////////////
+    output_filename_prefix: "scout_MUON",
+    output_filename_base: "/mnt/famfs",
+    output_filename_suffix: ".raw",
+    max_file_size: 8589934592,
+    // Always write data to a file regardless of the run status
+    output_force_write: "yes",
+    // Generic settings ////////////////////////////////////////////////
+    port: 8000,
+    dev_TCPAutoReconnectOnFailure: "false",
+    // (Minimum) Logging severity: TRACE DEBUG INFO WARNING ERROR FATAL.
+    // Use TRACE to log everything.
+    log_min_severity: "TRACE",
+    threads: 8,
+    // Stores fixed number of orbits per file when nOrbitsPerFile > 1
+    // If zero, uses a fixed file size (`max_file_size`) instead
+    nOrbitsPerFile: 64,
+    cmsswHeaders: "no",
+    dthHeaders: "yes",
+    source_id: 1,
+    // Information necessary to issue a reset request for the board
+    scone_address: {
+        host: "scoutdaq-s1d12-34-01",
+        port: 8080,
+        // e.g., kcu1500_ugmt, kcu1500_demux, sb852_bril
+        board: "kcu1500_ugmt"
+    }
+}
diff --git a/test/testing_scripts/check_data_consistency.py b/test/testing_scripts/check_data_consistency.py
new file mode 100644
index 00000000..df9531c3
--- /dev/null
+++ b/test/testing_scripts/check_data_consistency.py
@@ -0,0 +1,97 @@
+import argparse
+import hashlib
+import os
+import subprocess
+
+home_path = "/home/dcristof/"
+# e.g. "scdaq/test/data/run000000/in_progress/" when reading from a run directory
+data_path = ""
+# Offset in bytes at which the file is placed inside the dax device
+byte_start = 400
+
+# Manual retrieval from the dax device, for reference:
+# sudo daxio -o test/data/run000000/in_progress/retrieved_scout_BMTF_000000_002368.raw -i /dev/dax1.0 -l 757760
+
+# BUF_SIZE is arbitrary; tune it for your application.
+BUF_SIZE = 65536  # read in 64 KiB chunks
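+
+# Example round-trip run (hypothetical file name; assumes the daxio tool from
+# PMDK and a devdax device at /dev/dax1.0, as used below):
+#   python3 check_data_consistency.py scout_MUON_000000_000001.raw -v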
+
+def compute_md5(file_path):
+    """Compute the MD5 hash of a file, reading it chunk by chunk."""
+    md5 = hashlib.md5()
+    with open(file_path, 'rb') as f:
+        while True:
+            data = f.read(BUF_SIZE)
+            if not data:
+                break
+            md5.update(data)
+    return md5.hexdigest()
+
+
+def write_file_to_dax(file):
+    """Write the file into the dax device at offset byte_start."""
+    file_path = home_path + data_path + file
+    length = os.path.getsize(file_path)
+    command = [
+        "sudo", "daxio",
+        "-i", file_path,
+        "-o", "/dev/dax1.0",
+        "-l", str(length),
+        "-s", str(byte_start)
+    ]
+    subprocess.run(command, check=True)
+
+
+def copy_file_from_dax(file):
+    """Copy the file back out of the dax device into a 'copied_' twin."""
+    copiedFile_path = home_path + data_path + "copied_" + file
+    file_path = home_path + data_path + file
+    length = os.path.getsize(file_path)
+
+    if os.path.exists(copiedFile_path):
+        os.remove(copiedFile_path)
+
+    command = [
+        "sudo", "daxio",
+        "-i", "/dev/dax1.0",
+        "-o", copiedFile_path,
+        "-l", str(length),
+        "-k", str(byte_start)
+    ]
+    subprocess.run(command, check=True)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Write a file into the dax device, copy it back out, then '
+                    'compare the MD5 hashes of the original and the copy.')
+    parser.add_argument('file', help='Name of the file to round-trip')
+    parser.add_argument('-v', '--verbose', action='store_true', help='Enable verbose output')
+
+    args = parser.parse_args()
+
+    write_file_to_dax(args.file)
+    copy_file_from_dax(args.file)
+
+    copiedFile = home_path + data_path + "copied_" + args.file
+    file = home_path + data_path + args.file
+
+    # Compute MD5 hashes for both files
+    file_md5 = compute_md5(file)
+    copiedFile_md5 = compute_md5(copiedFile)
+
+    # Compare and report
+    if file_md5 == copiedFile_md5:
+        print("MD5 hashes match :-)")
+    else:
+        print("MD5 hashes do NOT match :'(")
+
+    if args.verbose:
+        print("{0} MD5 hash: {1}".format(file, file_md5))
+        print("{0} MD5 hash: {1}".format(copiedFile, copiedFile_md5))
+
+if __name__ == "__main__":
+    main()
diff --git a/test/testing_scripts/compute_md5.py b/test/testing_scripts/compute_md5.py
new file mode 100644
index 00000000..41f85739
--- /dev/null
+++ b/test/testing_scripts/compute_md5.py
@@ -0,0 +1,22 @@
+import argparse
+import hashlib
+
+BUF_SIZE = 65536  # read in 64 KiB chunks
+
+parser = argparse.ArgumentParser(description='Compute and print the MD5 hash of a file.')
+parser.add_argument('file', help='Path to the file')
+
+args = parser.parse_args()
+
+def compute_md5(file_path):
+    """Compute the MD5 hash of a file, reading it chunk by chunk."""
+    md5 = hashlib.md5()
+    with open(file_path, 'rb') as f:
+        while True:
+            data = f.read(BUF_SIZE)
+            if not data:
+                break
+            md5.update(data)
+    return md5.hexdigest()
+
+print("{0} MD5 hash: {1}".format(args.file, compute_md5(args.file)))
-- 
GitLab