diff --git a/src/OutputFileHandler.cc b/src/OutputFileHandler.cc
index a6f51ca4d47a2b030b36aed17a36a4efbb382a1f..e8b98eb626ee87751066744e77b6c518670e3d79 100644
--- a/src/OutputFileHandler.cc
+++ b/src/OutputFileHandler.cc
@@ -135,20 +135,28 @@ void OutputFileHandler::open_new_file() {
   std::string filename = format_filename(current_run_number_, current_index_, ls);
   std::string full_filename = working_files_base_path_ + "/" + filename;
   LOG(TRACE) << "opening file with index " << current_index_ << ", in lumisection " << ls;
-  OutputFile outputFile(fopen(full_filename.c_str(), "wbx"), filename, createFileHeader(ls), ls,
+  OutputFile outputFile(fopen(full_filename.c_str(), "wb+"), filename, createFileHeader(ls), ls,
                         current_index_ % (max_index_per_ls_ + 1),
                         run_dir_);  // TODO: run_dir_ probably needs also the run number
-  if (!outputFile.exists()) {
+  outputFile_ = outputFile;
+  stop_counter += 1;
+  if(stop_counter > 4) {
     std::string err =
-        tools::strerror("ERROR when creating file '" + outputFile.getFileName() + "'");
+        tools::strerror("Stop condition met when creating file '" + outputFile.getFileName() + "'");
     LOG(ERROR) << err;
     throw std::runtime_error(err);
   }
-  outputFile_ = outputFile;
-  // reserve space for CMSSW header if required
-  if (this->OutputFileHandler::getCMSSWHeaders()) {
-    fseek(outputFile_.getFilePtr(), sizeof(FRDFileHeader_v2), SEEK_SET);
-  }
+//  if (!outputFile.exists()) {
+//    std::string err =
+//        tools::strerror("ERROR when creating file '" + outputFile.getFileName() + "'");
+//    LOG(ERROR) << err;
+//    throw std::runtime_error(err);
+//  }
+//  outputFile_ = outputFile;
+//  // reserve space for CMSSW header if required
+//  if (this->OutputFileHandler::getCMSSWHeaders()) {
+//    fseek(outputFile_.getFilePtr(), sizeof(FRDFileHeader_v2), SEEK_SET);
+//  }
 }
 
 // Create a properly formatted file name
diff --git a/src/OutputFileHandler.h b/src/OutputFileHandler.h
index 5961e6b6805bf72cf271b513ec2c146827d0613e..bc25c9c1559fbb7705efe86e85c2ea24cc43df72 100644
--- a/src/OutputFileHandler.h
+++ b/src/OutputFileHandler.h
@@ -150,6 +150,7 @@ class OutputFileHandler {
   uint32_t ls_NFiles_{};
   uint32_t run_NOrbits_{};
   uint32_t run_NFiles_{};
+  int stop_counter{};  // debug aid: counts files created; open_new_file() throws once it exceeds 4
 };
 
 #endif
\ No newline at end of file
diff --git a/src/format.h b/src/format.h
index 20e28b4033e314ff14b2b870f6992d826c25d311..340b67ece33bf3ee4008510eef19712a70bb213e 100644
--- a/src/format.h
+++ b/src/format.h
@@ -311,7 +311,8 @@ struct constants {
   static constexpr uint32_t bril_header = 4278781695;
   static constexpr uint32_t NBXPerOrbit = 3564;
   static constexpr uint32_t NFramesInHistoHeader = 9;
-  static constexpr uint64_t N_orbits_per_lumisection = 1 << 18;  // equivalent to pow(2, 18)
+//  static constexpr uint64_t N_orbits_per_lumisection = 1 << 18;  // equivalent to pow(2, 18)
+  static constexpr uint64_t N_orbits_per_lumisection = 1 << 6;  // reduced from 2^18 for debugging; restore before production
 };
 
 bool CheckOrbitTrailerStart(const char* frame);
diff --git a/test/config/filedma-gmt-muon.json5 b/test/config/filedma-gmt-muon.json5
index a9f48615e26ace7b735e7a89a4d3a1dca0d04094..62948bff9d7d946680b4304193b1e672f6c928ec 100644
--- a/test/config/filedma-gmt-muon.json5
+++ b/test/config/filedma-gmt-muon.json5
@@ -50,11 +50,11 @@
   dev_TCPAutoReconnectOnFailure: "false",
   // (Minimum) Logging severity: TRACE DEBUG INFO WARNING ERROR FATAL.
   // Use TRACE to log everything.
-  log_min_severity: "ERROR",
+  log_min_severity: "TRACE",
   threads: 8,
   // Stores fixed number of orbits per file when nOrbitsPerFile > 1
   // If zero, uses a fixed file size (`max_file_size`) instead
-  nOrbitsPerFile: 4096,
+  nOrbitsPerFile: 64,
   cmsswHeaders: "no",
   dthHeaders: "yes",
   source_id: 1,
@@ -65,4 +65,4 @@
     // e.g., kcu1500_ugmt, kcu1500_demux, sb852_bril
     board: "kcu1500_ugmt"
   }
-}
\ No newline at end of file
+}
diff --git a/test/config/filedma-gmt-muon_famfs.json5 b/test/config/filedma-gmt-muon_famfs.json5
new file mode 100644
index 0000000000000000000000000000000000000000..526de71d0e2b6e93e2ff2d4d5a66b5433c9a074c
--- /dev/null
+++ b/test/config/filedma-gmt-muon_famfs.json5
@@ -0,0 +1,68 @@
+// filedma-gmt-muon_famfs.json5
+{
+  // Input settings //////////////////////////////////////////////////
+  // Input type, one of:
+  //   "wzdma"     for DMA driver from Wojciech M. Zabolotny
+  //   "filedma"   for reading from file and simulating DMA
+  //   "micronDMA" for PICO driver
+  //   "tcpip"     for TCP/IP input receiving
+  input: "filedma",
+  // DMA device
+  dma_dev: "/dev/xdma0_c2h_0",
+  // Max received packet size in bytes (buffer to reserve)
+  dma_packet_buffer_size: 2097152,
+  // Number of packet buffers to allocate
+  dma_number_of_packet_buffers: 1000,
+  // Print report each N packets, use 0 to disable
+  packets_per_report: 2000,
+  // Number of orbits per DMA packet, in decimal
+  nOrbitsPerPacket: 1,
+  // Extra settings for "filedma" input
+  input_files: [
+    // Must have at least the same cardinality as input_streams; kept isolated since `FileDMA` is a development feature
+    "test/data/testfiles/gmt_muon_testfile.dat"
+  ],
+  // Processing types (unpacking):
+  //   PASS_THROUGH, GMT, CALO, CALOOBJ, CALOSUM, BMTF, UGT, BRIL
+  // Primitive types:
+  //   NONE, MUON, JET, EGAMMA, TAU, SUM, STUB, ALGOBIT
+  input_streams: [
+    {
+      processor_type: "GMT",
+      primitive_type: "MUON",
+      source_id: 1,
+      tcp_dest_port: 10000
+    }
+  ],
+  // Processor settings //////////////////////////////////////////////
+  enable_stream_processor: "yes",
+  // Enable software zero-suppression. Does not affect Calo.
+  doZS: "yes",
+  // Output settings /////////////////////////////////////////////////
+  output_filename_prefix: "scout_MUON",
+  output_filename_base:    "/mnt/famfs",
+  output_filename_suffix: ".raw",
+  max_file_size: 8589934592,
+  // Always write data to a file regardless of the run status
+  output_force_write: "yes",
+  // Generic settings ////////////////////////////////////////////////
+  port: 8000,
+  dev_TCPAutoReconnectOnFailure: "false",
+  // (Minimum) Logging severity: TRACE DEBUG INFO WARNING ERROR FATAL.
+  // Use TRACE to log everything.
+  log_min_severity: "TRACE",
+  threads: 8,
+  // Stores fixed number of orbits per file when nOrbitsPerFile > 1
+  // If zero, uses a fixed file size (`max_file_size`) instead
+  nOrbitsPerFile: 64,
+  cmsswHeaders: "no",
+  dthHeaders: "yes",
+  source_id: 1,
+  //  Information necessary to issue a reset request for the board
+  scone_address: {
+    host: "scoutdaq-s1d12-34-01",
+    port: 8080,
+    // e.g., kcu1500_ugmt, kcu1500_demux, sb852_bril
+    board: "kcu1500_ugmt"
+  }
+}
diff --git a/test/testing_scripts/__pycache__/check_data_consistency.cpython-39.pyc b/test/testing_scripts/__pycache__/check_data_consistency.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..471342fd4de496086633b7d09f6ce90387172b1e
Binary files /dev/null and b/test/testing_scripts/__pycache__/check_data_consistency.cpython-39.pyc differ
diff --git a/test/testing_scripts/check_data_consistency.py b/test/testing_scripts/check_data_consistency.py
new file mode 100644
index 0000000000000000000000000000000000000000..df9531c30599871965e6803eaf0d2964ad6b4f86
--- /dev/null
+++ b/test/testing_scripts/check_data_consistency.py
@@ -0,0 +1,104 @@
+import argparse
+import subprocess
+import sys
+import os
+import hashlib
+from pathlib import Path
+
+
+#home_path = "/home/dcristof/" 
+home_path = "/home/dcristof/" 
+#data_path = "scdaq/test/data/run000000/in_progress/"
+data_path = ""
+byte_start = 400
+
+
+#sudo daxio -o test/data/run000000/in_progress/retrieved_scout_BMTF_000000_002368.raw -i /dev/dax1.0 -l 757760
+
+# BUF_SIZE is totally arbitrary, change for your app!
+BUF_SIZE = 65536  # lets read stuff in 64kb chunks!
+
+def compute_md5(file_path):
+    """Compute the MD5 hash of a file."""
+    md5 = hashlib.md5()
+    with open(file_path, 'rb') as f:
+        while True:
+            data = f.read(BUF_SIZE)
+            if not data:
+                break
+            md5.update(data)
+
+
+    
+    return md5.hexdigest()
+
+
+def write_file_to_dax(file):
+    file_path = home_path + data_path + file
+    length = os.path.getsize(file_path)
+    #command = f"sudo daxio -i {file_path} -o /dev/dax1.0 -l {length}"
+    
+    command = [
+        "sudo", "daxio",
+        "-i", file_path,
+        "-o", "/dev/dax1.0",
+        "-l", str(length),
+        "-s", str(byte_start)
+    ]
+
+    subprocess.run(command, check=True)
+    
+def copy_file_from_dax(file):
+    copiedFile_path = home_path + data_path + "copied_" + file
+    file_path = home_path + data_path + file
+    length = os.path.getsize(file_path)
+
+    if os.path.exists(copiedFile_path):
+        os.remove(copiedFile_path)
+    
+    #command = f"daxio -o {copiedFile_path} -i /dev/dax1.0 -l {length}"
+    command = [
+        "sudo", "daxio",
+        "-i", "/dev/dax1.0",
+        "-o", copiedFile_path,
+        "-l", str(length),
+        "-k", str(byte_start)
+    ]
+    subprocess.run(command, check=True)
+
+
+
+
+def main():
+    # Set up argument parsing
+    parser = argparse.ArgumentParser(description='First writes a file into the dax device then it copies it back. Finally it hashes (md5) two files given as input and check if the two hash are the same')
+    parser.add_argument('file', help='Path to the file')
+    parser.add_argument('file2', help='Path to the second file')
+    parser.add_argument('-v', '--verbose', action='store_true', help='Enable verbose output')
+
+    args = parser.parse_args()
+    
+    write_file_to_dax(args.file)
+    copy_file_from_dax(args.file)
+
+
+    copiedFile = home_path + data_path + "copied_" + args.file
+    file = home_path + data_path + args.file
+    
+    # Compute MD5 hashes for both files
+    file_md5 = compute_md5(file)
+    #copiedFile_md5 = compute_md5(copiedFile)
+    copiedFile_md5 = compute_md5(copiedFile)
+
+    # Compare and print results
+    if file_md5 == copiedFile_md5:
+        print("yeeeeee! :-)")
+    else:
+        print("noooooo! :'(")
+
+    if(args.verbose):
+        print("{0} MD5 hash: {1}".format(file, file_md5))
+        print("{0} MD5 hash: {1}".format(copiedFile, copiedFile_md5))
+
+if __name__ == "__main__":
+    main()
diff --git a/test/testing_scripts/compute_md5.py b/test/testing_scripts/compute_md5.py
new file mode 100644
index 0000000000000000000000000000000000000000..41f857394aa26bb866ad97fd25338c5ee1a5bc11
--- /dev/null
+++ b/test/testing_scripts/compute_md5.py
@@ -0,0 +1,28 @@
+import argparse
+import subprocess
+import sys
+import os
+import hashlib
+from pathlib import Path
+
+BUF_SIZE = 65536
+
+parser = argparse.ArgumentParser(description='Compute and print the MD5 hash of the file given as input')
+parser.add_argument('file', help='Path to the file')
+parser.add_argument('-v', '--verbose', action='store_true', help='Enable verbose output')
+
+args = parser.parse_args()
+
+def compute_md5(file_path):
+    """Compute the MD5 hash of a file."""
+    md5 = hashlib.md5()
+    with open(file_path, 'rb') as f:
+        while data := f.read(BUF_SIZE):
+            md5.update(data)
+    
+    return md5.hexdigest()
+
+print(compute_md5(args.file))
+
+
+