diff --git a/LHCData/LHCDataUtils/CMakeLists.txt b/LHCData/LHCDataUtils/CMakeLists.txt
deleted file mode 100644
index 2c4494950d82c0feed642e5ad2d23eca3265b879..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/CMakeLists.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-################################################################################
-# Package: LHCDataUtils
-################################################################################
-
-# Declare the package name:
-atlas_subdir( LHCDataUtils )
-
-atlas_install_python_modules( python/*.py )
-
-atlas_install_scripts( scripts/*.sh scripts/*.py )
diff --git a/LHCData/LHCDataUtils/README.md b/LHCData/LHCDataUtils/README.md
deleted file mode 100644
index 0ca447fe332e00ff7ff6eb90a5d7f31c7885b580..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Utilities to produce and update COOL databases for LHC information
diff --git a/LHCData/LHCDataUtils/python/CoolDataReader.py b/LHCData/LHCDataUtils/python/CoolDataReader.py
deleted file mode 100644
index 238f8987c53f785ee84d0689d63c7d8970ae5a48..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/python/CoolDataReader.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-#
-# CoolDataReader
-#
-# Eric Torrence - October 2010
-#
-# Contents:
-# CoolDataReader - utility object to handle reading of COOL DB folders.
-#                 The benefit over just using AtlCoolLib directly is that each DB connection is
-#                 cached, so multiple connections to the same DB will not be made.
-#
-#                 CoolDataReader.readData() returns a list of the full IObjects for maximal flexibility
-#
-# General usage example
-# myReader = CoolDataReader('COOLONL_TRIGGER/COMP200', '/TRIGGER/LUMI/LBLESTONL')
-# myReader.setIOVRange(startIOV, endIOV)
-# myReader.readData()
-# for obj in myReader.data:
-#   ...
-#
-# One can specify specific channels or IOV ranges if desired, but by default all data will be loaded
-#
-# The CoolDataReader uses the LumiDBHandler internally to cache multiple COOL DB connections
-#
-
-from __future__ import print_function
-from PyCool import cool
-
-# Get our global DB handler object
-from LHCDataUtils.LumiDBHandler import LumiDBHandler
-
-
-class CoolDataReader:
-
-    def __init__(self, dbstr=None, folderstr=None):
-
-        self.verbose = False
-
-        # Defined variables
-        self.dbstr = None
-        self.folderstr = None
-        self.channelIdList = []
-        self.tag = ''
-        self.iovstart = None
-        self.iovend = None
-
-        self.folder = None
-        self.data = []
-        
-        # Initialize to default values
-        self.setChannel()
-        self.setTag()
-        self.setFolder(dbstr, folderstr)
-        self.setIOVRange()
-            
-    def setFolder(self, dbstr, folderstr):
-        # Force re-opening connection if these are different
-        if (dbstr != self.dbstr) or (folderstr != self.folderstr):
-            self.folder = None
-            
-        self.dbstr = dbstr
-        self.folderstr = folderstr
-
-    def setTag(self, tagstr=''):
-        self.tag = tagstr
-
-    def setChannel(self, channelIdList=[]):
-        self.channelIdList = channelIdList
-        
-    def setChannelAll(self):
-        self.setChannel()
-
-    def setChannelId(self, channelId):
-        self.setChannel([channelId])
-        
-    def setIOVRange(self, iovstart=cool.ValidityKeyMin, iovend=cool.ValidityKeyMax):
-        self.iovstart = iovstart
-        self.iovend = iovend
-
-    def setIOVRangeFromRun(self, runnum, startOfNextRun=False):
-        self.iovstart = runnum << 32
-        if startOfNextRun:
-            self.iovend = ((runnum+1) << 32)
-        else:
-            self.iovend = ((runnum+1) << 32) - 1
-
-    # Call to get data after all other parameters are properly set
-    # Data is returned as a list of IObject values, one per DB entry.
-    # This gives maximal flexibility to manipulate the items
-    def readData(self):
-
-        self.data = []
-
-        # Open the DB connection here if needed
-        if self.folder is None:
-            dbHandler = LumiDBHandler()
-            self.folder = dbHandler.getFolder(self.dbstr, self.folderstr)
-            
-            if self.folder is None:
-                print("Can't access DB", self.dbstr, 'folder', self.folderstr, '!')
-                return self.data
-
-        # Create the channel list
-        if len(self.channelIdList) == 0:
-            channels = cool.ChannelSelection.all()
-            self.readChannelList(channels)
-
-        else:
-            # Build the channel list here
-            self.channelIdList.sort()  # Must be sorted!
-
-            # Must read channels 50 at a time due to COOL limit...
-            ichan = 0
-            while (ichan < len(self.channelIdList)) :
-
-                jchan = 0
-                channels = None
-                firstChan = True
-            
-                for channelId in self.channelIdList[ichan:]:
-                    jchan += 1
-                    if firstChan:
-                        firstChan = False
-                        channels = cool.ChannelSelection(channelId)
-                    else:
-                        channels.addChannel(channelId)
-                    if jchan == 50: break 
-
-                # Remember how many we have read for next time
-                if self.verbose:
-                    print('CoolDataReader.readData() - loaded %d channels from %d' % (jchan, ichan))
-                ichan += jchan
-
-                if self.verbose:
-                    print('CoolDataReader.readData() - browsing', self.iovstart, self.iovend, 'with channel', channels, 'and tag', self.tag)
-
-                self.readChannelList(channels)
-
-            # End of loop building channel list and reading
-
-        # End of if statement reading data
-        return self.data
-
-    def readChannelList(self, channels):
-
-        # Open iterator over our defined IOV range
-        try:
-            itr = self.folder.browseObjects(self.iovstart, self.iovend, channels, self.tag)
-        except Exception as e:
-            print('CoolDataReader.readData() - exception reading folder:', self.folderstr)
-            print(e)
-            print('CoolDataReader.readData() - will try to reconnect (once)')
-
-            # Force re-opening connection
-            dbHandler = LumiDBHandler()
-            dbHandler.verbose = True
-            self.folder = dbHandler.getFolder(self.dbstr, self.folderstr, force=True)
-            
-            if self.folder is None:
-                print('CoolDataReader.readData() - forced re-opening failed!')
-                return self.data
-
-            # OK, let's try reading this again
-            print('CoolDataReader.readData() - trying to re-read re-opened folder!')
-            try:
-                itr = self.folder.browseObjects(self.iovstart, self.iovend, channels, self.tag)
-            except Exception as e:
-                print('CoolDataReader.readData() - exception reading folder:', self.folderstr)
-                print(e)
-                return self.data
-                
-        while itr.goToNext():
-            obj = itr.currentRef()
-            # print obj.payload()
-            self.data.append(obj.clone())
-            
-        itr.close()
-
-
-        
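For reference, the header comment above sketches how the removed CoolDataReader was typically driven. A minimal sketch under those stated conventions, assuming the LHCDataUtils package is still importable; the run number and the 'LBAvInstLumi' payload key are illustrative placeholders, not taken from this package:

    from LHCDataUtils.CoolDataReader import CoolDataReader

    # Read the online per-lumiblock luminosity estimate for one run.
    # Run number and payload key are placeholders for illustration only.
    reader = CoolDataReader('COOLONL_TRIGGER/COMP200', '/TRIGGER/LUMI/LBLESTONL')
    reader.setIOVRangeFromRun(360026)      # IOV keys are (run << 32) + lumiblock
    for obj in reader.readData():
        run = obj.since() >> 32
        lb = obj.since() & 0xFFFFFFFF
        print(run, lb, obj.payloadValue('LBAvInstLumi'))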
diff --git a/LHCData/LHCDataUtils/python/LumiBlobConversion.py b/LHCData/LHCDataUtils/python/LumiBlobConversion.py
deleted file mode 100644
index bf57d1d2815a823818662ef5230957343a33e500..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/python/LumiBlobConversion.py
+++ /dev/null
@@ -1,388 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-from __future__ import print_function
-from builtins import range
-import sys
-import array
-import struct
-
-# import cppyy
-# cppyy.gbl.cool.IDatabase # force the load of the dictionary (to stay on the safe side)
-# from cppyy import gbl
-# def blob_read(self, size = -1):
-#     if size < 0:
-#         endpos = self.size()
-#     else:
-#         endpos = self.pos + size
-#     beginpos = self.pos
-#     self.pos = endpos
-#     buf = self.startingAddress()
-#     buf.SetSize(self.size())
-#     return buf[beginpos:endpos]
-
-# add the new functions
-# getattr(gbl,"coral::Blob").read = blob_read
-
-def bConvert(b, nbyte=1):
-    # routine to unpack a single value from a blob: 1- or 2-byte unsigned int, 4-byte float or 8-byte double
-    packopt=dict([[1,'B'],[2,'H'],[4,'f'],[8,'d']])
-    if nbyte in packopt:
-        # print 'bConvert - b:[', b[0:nbyte], '] nbyte:', nbyte, ' fmt:', packopt[nbyte], type(b)
-        ival=struct.unpack(packopt[nbyte], b[0:nbyte])
-    else:
-        print(f'bConvert: Unrecognized pack option {nbyte}')
-        sys.exit()
-
-    return ival[0]
-
-# Optional argument nval specifies the number of values to read
-def bConvertList(b, nbyte=1, nval=1):
-    # routine to unpack a list of values from a blob: 1- or 2-byte unsigned ints, 4-byte floats or 8-byte doubles
-    packopt=dict([[1,'B'],[2,'H'],[4,'f'],[8,'d']])
-    if nbyte in packopt:
-        # print 'bConvert - b:[', b[0:nbyte], '] nbyte:', nbyte, ' fmt:', packopt[nbyte], type(b)
-        fmt = '%d%s' % (nval, packopt[nbyte])
-        ival=struct.unpack(fmt, b[0:nval*nbyte])
-    else:
-        print(f'bConvertList: Unrecognized pack option {nbyte}')
-        sys.exit()
-
-    return list(ival)
-
-# Unpack bunch group bgrp.  By default, bgrp=1 is the physics bunch group. 
-def unpackBunchGroup(blob, bgrp=1):
-    physBG = []
-    if blob is None: return
-    if blob.size() == 0: return
-    
-    blobCopy = blob.read()
-    mask = (1 << int(bgrp))
-    
-    ivallist = bConvertList(blobCopy, 1, 3564)
-    for i in range(3564):
-        if ivallist[i] & mask:
-            physBG.append(i)
-
-#     blobCounter = 0
-#     for i in range(3564):
-#         try:
-#             b = blobCopy[blobCounter:blobCounter+1]
-#             blobCounter += 1
-#             ival = struct.unpack('B', b)
-#             #s = struct.unpack('B', b)
-#             #ival = bConvert(s)
-#         except Exception, e:
-#             print e
-#             ival = 0
-#         if (ival>>1) & 1 == 1:
-#             physBG.append(i)
-
-    return physBG
-            
-# Unpack bunch group bgrp.  By default, bgrp=1 is the physics bunch group. 
-def unpackBunchGroupList(blob, bgrp=[1]):
-    physBG = dict()
-    mask = dict()
-
-    if blob is None: return    
-    if blob.size() == 0: return
-    
-    blobCopy = blob.read()
-
-    for id in bgrp:
-        mask[id] = (1 << int(id))
-        physBG[id] = []
-        
-    ivallist = bConvertList(blobCopy, 1, 3564)
-    for i in range(3564):
-        for id in bgrp:
-            if ivallist[i] & mask[id]:
-                physBG[id].append(i)
-
-    return physBG
-
-# Generic routine to unpack BCID mask
-# The nb1, nb2, nlumi are for backwards compatibility to Run1
-# These are not needed to unpack the Run2 BCID mask
-# Return values are a list of beam1, beam2, and colliding BCIDs
-def unpackBCIDMask(blob,nb1=0,nb2=0,nlumi=0):
-
-    if blob is None:
-        return [],[],[]
-
-    bloblength = blob.size()
-
-    if bloblength == 0:
-        return [],[],[]
-
-    if bloblength == 3564:
-        return unpackRun2BCIDMask(blob)
-    else:
-        return unpackRun1BCIDMask(blob,nb1,nb2,nlumi)
-
-# routine to unpack the BCID mask stored in COOL
-# This is the run2 version
-def unpackRun2BCIDMask(blob):
-    beam1=[]
-    beam2=[]
-    coll=[]
-    blobCopy = blob.read()
-    rawData = bConvertList(blobCopy, 1, 3564)
-
-    for i in range(3564):
-        val = rawData[i]
-        if val & 0x01:
-            beam1.append(i)
-        if val & 0x02: 
-            beam2.append(i)
-        if (val & 0x03) == 0x03:
-            coll.append(i)
-
-    # print('unpackRun2BCIDMask found:')
-    # print(' Beam1:', beam1)
-    # print(' Beam2:', beam2)
-    # print(' Coll: ', coll)
-
-    return beam1,beam2,coll
-
-# routine to unpack the BCID mask stored in COOL
-# This is the run1 version
-def unpackRun1BCIDMask(blob,nb1,nb2,nlumi):
-    beam1=[]
-    beam2=[]
-    coll=[]
-    blobCopy = blob.read()
-    beam1 = bConvertList(blobCopy, 2, nb1)
-    beam2 = bConvertList(blobCopy[2*nb1:], 2, nb2)
-    coll = bConvertList(blobCopy[2*(nb1+nb2):], 2, nlumi)
-    #unpackfmt = '%dH' % nb1
-    #list(struct.unpack(unpackfmt, blobCopy[0:(2*nb1)]))
-    #unpackfmt = '%dH' % nb2
-    #beam2 = list(struct.unpack(unpackfmt, blobCopy[(2*nb1):2*(nb1+nb2)]))
-    #unpackfmt = '%dH' % nlumi
-    #coll = list(struct.unpack(unpackfmt, blobCopy[2*(nb1+nb2):2*(nb1+nb2+nlumi)]))
-                 
-#    blobCounter = 0
-#     for i in range(nb1):
-#         b = blobCopy[blobCounter:blobCounter+2]
-#         blobCounter += 2
-#         val=struct.unpack('H', b)
-#         beam1.append(val)
-        
-#     for i in range(nb2):
-#         b = blobCopy[blobCounter:blobCounter+2]
-#         blobCounter += 2
-#         val=struct.unpack('H', b)
-#         beam2.append(val)
-
-#     for i in range(nlumi):
-#         b = blobCopy[blobCounter:blobCounter+2]
-#         blobCounter += 2
-#         val=struct.unpack('H', b)
-#         coll.append(val)
-
-    return beam1,beam2,coll
-
-# routine to unpack values (raw lumi or currents) stored as blob in COOL
-# blob - COOL blob with per-BCID values
-# mask - BCID mask appropriate for quantity being unpacked (e.g. beam1, collisions, ...)
-# normValue - Normalization value from same COOL folder as BLOB (e.g. B1BunchAverage)
-#
-# Note, the normValue is only used in certain storage modes.  If you want to renormalize, do this yourself.
-# Specifying a different value for the normValue will likely cause unpredictable results.
-
-def unpackBCIDValues(blob, mask=[], normValue=1):
-
-    bss, bcidVec, lvec = unpackBunches(blob, mask)
-    
-    if bss>0:
-      if not (len(bcidVec)==len(lvec)):
-        print('unpackBCIDValues - length mismatch: len(bcidVec)=', len(bcidVec), 'len(lvec)=', len(lvec))
-        sys.exit()
-        
-      bLumi=[]
-      for i in range(len(bcidVec)):
-        if bss<4:
-          bLumi.append(lvec[i]*normValue/pow(100,bss))
-        else:
-          bLumi.append(lvec[i])
-
-      #for i in range(len(bcidVec)):
-      #    print 'BCID:', bcidVec[i], 'Raw:', bLumi[i]
-          
-      return bcidVec,bLumi
-
-    else:
-      return [],[]
-    
-def unpackBunches(blob,mask=[]):
-    # routine to unpack Intensity/Luminosity info stored in COOL
-    # the mask given as input has to match the quantity to be
-    # unpacked (beam1, beam2, or colliding bunches for B1 intensity,
-    # B2 intensity and luminosity, respectively)
-
-    if blob is None or blob.size() == 0:
-        return 0,[],[]
-    
-    blobCopy = blob.read()
-    blobCounter = 0
-    try:
-        b = blobCopy[blobCounter:blobCounter+1]
-        blobCounter += 1
-        flag=bConvert(b)
-        bss=(flag%100)//10
-        smod=flag%10
-        # print 'Storage mode for',str, 'is', smod, 'with bss=', bss
-            
-        if smod==2:
-            b = blobCopy[blobCounter:blobCounter+2]
-            blobCounter += 2
-            vlen=bConvert(b, 2)
-            #print 'Bunch vector has length ',vlen
-            bcidVec=[]
-            bcidVec = bConvertList(blobCopy[blobCounter:], 2, vlen)
-            blobCounter += 2*vlen
-            # for i in range(vlen):
-            #     valb = blobCopy[blobCounter:blobCounter+2]
-            #     blobCounter += 2
-            #     val=struct.unpack('H', valb)
-            #     bcidVec.append(val)
-            
-        elif smod==0:
-            # Make sure this is a list, and sorted (can pass set for example)
-            bcidVec=list(mask)
-            bcidVec.sort()
-            vlen=len(mask)
-        elif smod==3:
-            print('storage mode 3 not implemented in unpackBunches')
-            sys.exit()
-        elif smod==1:
-            bcidVec=[i for i in range(3564)]
-            vlen=3564
-        else:
-            print('Unknown storage mode ',smod)
-            sys.exit()
-        valueVec=[]
-
-        valueVec = bConvertList(blobCopy[blobCounter:], bss, vlen)
-#         for i in range(vlen):
-#             valb = blobCopy[blobCounter:blobCounter+bss]
-#             blobCounter += bss
-#             val=bConvert(valb,bss)
-#             valueVec.append(val)
-
-        return bss,bcidVec,valueVec
-
-    except RuntimeError as e:
-        print(e)
-        return 0,[],[]
-                                  
-# Unpack live fraction into vector keyed by bcid-1
-# Takes payload of /TRIGGER/LUMI/PerBcidDeadtime folder
-def unpackLiveFraction(trigPayload, priority = 'high'):
-
-    liveVec = array.array('f', 3564*[0.])
-    
-    if priority == 'high':
-        blob = trigPayload['HighPriority']
-    elif priority == 'low':
-        blob = trigPayload['LowPriority']
-    else:
-        print('unpackLiveFraction - unknown priority requested %s' % str(priority))
-        return liveVec
-    
-    bloblength = blob.size()
-
-    # Due to a bug, the blob was sometimes written at 3654 rather than desired 3564
-    # Other length bugs exist too, so accept any blob that is long enough
-    if bloblength < 3*3564: #!= 3*3654 and bloblength != 3*3564:
-        # Corrupt, don't trust anything
-        print('unpackLiveFraction found blob length %d!' % bloblength)
-        return liveVec
-
-    blobCopy = blob.read()
-    # blobCounter = 0
-
-    # No counts, no work to do
-    turnCounter = trigPayload['TurnCounter']
-    if not turnCounter > 0:
-        return liveVec
-
-    # Even if longer blob is present, only care about this range
-    
-    byte = bConvertList(blobCopy, 1, 3*3564)
-    
-    for i in range(3564):
-
-        busyCounter = byte[3*i] | (byte[3*i+1] << 8) | (byte[3*i+2] << 16)
-        
-        # byte0 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # byte1 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # byte2 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # busyCounter = byte0 | (byte1 << 8) | (byte2 << 16)
-        
-        liveFrac = 1 - float(busyCounter) / turnCounter
-
-        liveVec[i] = liveFrac
-
-        # print 'BCID: %d Busy: %d Turn: %d Live: %f' % (i+1, busyCounter, turnCounter, liveFrac)
-
-    return liveVec
-
-# Unpack live fraction into vector keyed by bcid-1
-# Takes payload of /TRIGGER/LUMI/PerBcidDeadtime folder
-def unpackLiveFractionRun2(trigPayload, priority = 'high'):
-
-    liveVec = array.array('f', 3564*[0.])
-    
-    if priority == 'high':
-        blob = trigPayload['DT0']
-    elif priority == 'low':
-        blob = trigPayload['DT1']
-    else:
-        print('unpackLiveFractionRun2 - unknown priority requested %s' % str(priority))
-        return liveVec
-    
-    bloblength = blob.size()
-
-    if bloblength < 3*(3564+2): #!= 3*3654 and bloblength != 3*3564:
-        # Corrupt, don't trust anything
-        print('unpackLiveFraction found blob length %d!' % bloblength)
-        return liveVec
-
-    blobCopy = blob.read()
-    # blobCounter = 0
-
-    # Turn counter is now at the end, so we must unpack everything
-    byte = bConvertList(blobCopy, 1, 3*3566)
-
-    i = 3565
-    turnCounter = byte[3*i] | (byte[3*i+1] << 8) | (byte[3*i+2] << 16)
-
-    if not turnCounter > 0:
-        return liveVec
-
-    # Entry 0 is LB number, which we can skip
-    for i in range(1, 3564):
-
-        busyCounter = byte[3*i] | (byte[3*i+1] << 8) | (byte[3*i+2] << 16)
-        
-        # byte0 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # byte1 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # byte2 = struct.unpack('B', blobCopy[blobCounter:blobCounter+1])
-        # blobCounter += 1
-        # busyCounter = byte0 | (byte1 << 8) | (byte2 << 16)
-        
-        liveFrac = float(turnCounter - busyCounter) / turnCounter
-
-        liveVec[i] = liveFrac
-
-        # print 'BCID: %d Busy: %d Turn: %d Live: %f' % (i+1, busyCounter, turnCounter, liveFrac)
-
-    return liveVec
-
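The unpack helpers above were designed to be chained: the BCID mask from FILLPARAMS selects which bunches a per-BCID blob describes. A minimal sketch of that chaining, assuming the two payload dictionaries have already been read (e.g. via CoolDataReader); the 'B1BunchIntensities' and 'B1BunchAverage' key names are assumptions based on the comments above, while 'BCIDmasks' is the key used elsewhere in this package:

    from LHCDataUtils.LumiBlobConversion import unpackBCIDMask, unpackBCIDValues

    def beam1_currents(fillparams_payload, bunchdata_payload):
        # fillparams_payload: payload of /TDAQ/OLC/LHC/FILLPARAMS
        # bunchdata_payload:  payload of a per-BCID currents folder (key names assumed)
        beam1, beam2, coll = unpackBCIDMask(fillparams_payload['BCIDmasks'])
        return unpackBCIDValues(bunchdata_payload['B1BunchIntensities'],
                                mask=beam1,
                                normValue=bunchdata_payload['B1BunchAverage'])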
diff --git a/LHCData/LHCDataUtils/python/LumiDBHandler.py b/LHCData/LHCDataUtils/python/LumiDBHandler.py
deleted file mode 100644
index 7e6d5619b1cc2e0d58215adeea75f2bfc2aee6af..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/python/LumiDBHandler.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-#
-# LumiDBHandler
-#
-# Eric Torrence - October 2010
-#
-# Contents:
-# LumiDBHandler - utility object to handle opening and closing COOL DB connections within
-#                 a large python script.  The benefit over just using AtlCoolLib directly
-#                 is that each DB connection is cached, so multiple connections to the same
-#                 DB will not be made.
-#
-#                 The parent script should call closeAllDB in its __del__ function to close
-#                 the DB connections, even if the script crashes.
-#
-# General usage example
-# dbH = LumiDBHandler()
-# myFolder = dbH.getFolder('COOLONL_TRIGGER/COMP200', '/TRIGGER/LUMI/LBLESTONL')
-#
-# One can then browse the folder as usual using browseObjects
-#
-# The CoolDataReader uses this class internally to make access easier
-#
-
-import CoolConvUtilities.AtlCoolLib as AtlCoolLib
-
-class LumiDBHandler:
-
-    # Define dbDict here at class scope
-    # Then access with self.__class__.dbDict and it will be the same for all instances of the class
-    # This is a Pythonic way to create static (class-level) data
-
-    # Dict to store DB connection indexed by text DB connection name
-    dbDict = dict()
-
-    
-    def __init__(self):
-
-        # Debug output (can be changed for each instance, slick...)
-        self.verbose = False
-        
-    # Return a folder reference for the dbstring, folder specified
-    # DB will be opened if necessary
-    # Example: getFolder('COOLONL_TRIGGER/COMP200', '/TRIGGER/LUMI/LBLESTONL')
-    def getFolder(self, dbstring, folder, force=False):
-
-        if self.verbose:
-            print('LumiDBHandler.getFolder(', dbstring, ',', folder, ') called')
-
-        if not self.openDB(dbstring, force=force):
-            print("LumiDBHandler.getFolder - can't connect to DB!")
-            return None
-
-        return self.__class__.dbDict[dbstring].getFolder(folder)
-    
-    # Open a COOL DB connection based on a name such as "COOLONL_INDET/OFLP200"
-    # Returns True if successful (or DB already open) 
-    def openDB(self, dbstring, oracle=False, debug=False, force=False):
-
-        if self.verbose:
-            print('LumiDBHandler.openDB(', dbstring, ') called')
-
-        # Check if already open
-        if dbstring in self.__class__.dbDict:
-
-            # No force, just return 
-            if not force:
-                if self.verbose:
-                    print('LumiDBHandler.openDB - Connection already exists')
-                return True # Yes it is
-
-            # Force specified, close so we can re-open
-            if self.verbose:
-                print('LumiDBHandler.openDB - Connection already exists, closing first due to force=True')
-            self.closeDB(dbstring)
-
-        # Try to open DB connection
-        if self.verbose:
-            print('LumiDBHandler.openDB - Connecting to', dbstring)
-            
-        try:
-            db = AtlCoolLib.indirectOpen(dbstring, readOnly=True, oracle=oracle, debug=debug)
-        except Exception as e:
-            print(e)
-            return False
-
-        # OK, opened.  Save this to our dict for later use
-        self.__class__.dbDict[dbstring] = db
-        
-        return True
-
-    # Close specific DB
-    def closeDB(self, dbstring):
-
-        if self.verbose:
-            print('LumiDBHandler.closeDB - Closing connection to', dbstring)
-
-        if dbstring not in self.__class__.dbDict:
-            print("LumiDBHandler.closeDB - DB doesn't exist:", dbstring)
-        else:
-            try:
-                self.__class__.dbDict[dbstring].closeDatabase()
-            except Exception as e:
-                print(e)
-            self.__class__.dbDict.pop(dbstring)
-
-    # Called by default in the destructor, but not guaranteed if there are problems
-    def closeAllDB(self):
-        self.closeAll()
-        
-    def closeAll(self):
-        
-        if self.verbose:
-            print('LumiDBHandler.closeAllDB called')
-
-        # Can't use iterkeys here as we are deleting the elements
-        # In Python 3 we must create an explicit list
-        for dbstring in list(self.__class__.dbDict.keys()):
-            self.closeDB(dbstring)
-            
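A minimal sketch of the usage pattern described in the header comment above, including the recommended cleanup in __del__; the DB and folder names are those from the comment, and the surrounding class is purely illustrative:

    from LHCDataUtils.LumiDBHandler import LumiDBHandler

    class LumiFolderClient:
        def __init__(self):
            self.dbH = LumiDBHandler()
            # Connections are cached at class scope, so repeated getFolder calls
            # against the same DB string reuse a single connection.
            self.folder = self.dbH.getFolder('COOLONL_TRIGGER/COMP200',
                                             '/TRIGGER/LUMI/LBLESTONL')

        def __del__(self):
            # Close all cached connections, as the header comment recommends.
            self.dbH.closeAllDB()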
diff --git a/LHCData/LHCDataUtils/scripts/makeLHCFillData.py b/LHCData/LHCDataUtils/scripts/makeLHCFillData.py
deleted file mode 100755
index f0187ab20d4e4d445b949367cf847b8b94f4dc39..0000000000000000000000000000000000000000
--- a/LHCData/LHCDataUtils/scripts/makeLHCFillData.py
+++ /dev/null
@@ -1,734 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sys
-import argparse
-
-import time
-import calendar
-import datetime
-
-from pathlib import Path
-
-#from LHCDataUtils.LumiDBHandler import LumiDBHandler
-#from LHCDataUtils.CoolDataReader import CoolDataReader
-
-from PyCool import cool
-
-# Useful utilities for manipulating COOL files
-# See https://gitlab.cern.ch/atlas/athena/-/blob/master/Database/CoolConvUtilities/python/AtlCoolLib.py
-from CoolConvUtilities.AtlCoolLib import ensureFolder,forceOpen,athenaDesc,timeVal,timeString
-
-
-import CoolConvUtilities.AtlCoolLib as AtlCoolLib
-
-class LumiDBHandler:
-
-    # Define dbDict here at class scope
-    # Then access with self.__class__.dbDict and it will be the same for all instances of the class
-    # This is a Pythonic way to create static (class-level) data
-
-    # Dict to store DB connection indexed by text DB connection name
-    dbDict = dict()
-
-    
-    def __init__(self):
-
-        # Debug output (can be changed for each instance, slick...)
-        self.verbose = False
-        
-    # Return a folder reference for the dbstring, folder specified
-    # DB will be opened if necessary
-    # Example: getFolder('COOLONL_TRIGGER/COMP200', '/TRIGGER/LUMI/LBLESTONL')
-    def getFolder(self, dbstring, folder, force=False):
-
-        if self.verbose:
-            print('LumiDBHandler.getFolder(', dbstring, ',', folder, ') called')
-
-        if not self.openDB(dbstring, force=force):
-            print("LumiDBHandler.getFolder - can't connect to DB!")
-            return None
-
-        return self.__class__.dbDict[dbstring].getFolder(folder)
-    
-    # Open a COOL DB connection based on a name such as "COOLONL_INDET/OFLP200"
-    # Returns True if successful (or DB already open) 
-    def openDB(self, dbstring, oracle=False, debug=False, force=False):
-
-        if self.verbose:
-            print('LumiDBHandler.openDB(', dbstring, ') called')
-
-        # Check if already open
-        if dbstring in self.__class__.dbDict:
-
-            # No force, just return 
-            if not force:
-                if self.verbose:
-                    print('LumiDBHandler.openDB - Connection already exists')
-                return True # Yes it is
-
-            # Force specified, close so we can re-open
-            if self.verbose:
-                print('LumiDBHandler.openDB - Connection already exists, closing first due to force=True')
-            self.closeDB(dbstring)
-
-        # Try to open DB connection
-        if self.verbose:
-            print('LumiDBHandler.openDB - Connecting to', dbstring)
-            
-        try:
-            db = AtlCoolLib.indirectOpen(dbstring, readOnly=True, oracle=oracle, debug=debug)
-        except Exception as e:
-            print(e)
-            return False
-
-        # OK, opened.  Save this to our dict for later use
-        self.__class__.dbDict[dbstring] = db
-        
-        return True
-
-    # Close specific DB
-    def closeDB(self, dbstring):
-
-        if self.verbose:
-            print('LumiDBHandler.closeDB - Closing connection to', dbstring)
-
-        if dbstring not in self.__class__.dbDict:
-            print("LumiDBHandler.closeDB - DB doesn't exist:", dbstring)
-        else:
-            try:
-                self.__class__.dbDict[dbstring].closeDatabase()
-            except Exception as e:
-                print(e)
-            self.__class__.dbDict.pop(dbstring)
-
-    # Called by default in the destructor, but not guaranteed if there are problems
-    def closeAllDB(self):
-        self.closeAll()
-        
-    def closeAll(self):
-        
-        if self.verbose:
-            print('LumiDBHandler.closeAllDB called')
-
-        # Can't use iterkeys here as we are deleting the elements
-        # In Python 3 we must create an explicit list
-        for dbstring in list(self.__class__.dbDict.keys()):
-            self.closeDB(dbstring)
-            
-# End of class LumiDBHandler
-
-
-class CoolDataReader:
-
-    def __init__(self, dbstr=None, folderstr=None):
-
-        self.verbose = False
-
-        # Defined variables
-        self.dbstr = None
-        self.folderstr = None
-        self.channelIdList = []
-        self.tag = ''
-        self.iovstart = None
-        self.iovend = None
-
-        self.folder = None
-        self.data = []
-        
-        # Initialize to default values
-        self.setChannel()
-        self.setTag()
-        self.setFolder(dbstr, folderstr)
-        self.setIOVRange()
-            
-    def setFolder(self, dbstr, folderstr):
-        # Force re-opening connection if these are different
-        if (dbstr != self.dbstr) or (folderstr != self.folderstr):
-            self.folder = None
-            
-        self.dbstr = dbstr
-        self.folderstr = folderstr
-
-    def setTag(self, tagstr=''):
-        self.tag = tagstr
-
-    def setChannel(self, channelIdList=[]):
-        self.channelIdList = channelIdList
-        
-    def setChannelAll(self):
-        self.setChannel()
-
-    def setChannelId(self, channelId):
-        self.setChannel([channelId])
-        
-    def setIOVRange(self, iovstart=cool.ValidityKeyMin, iovend=cool.ValidityKeyMax):
-        self.iovstart = iovstart
-        self.iovend = iovend
-
-    def setIOVRangeFromRun(self, runnum, startOfNextRun=False):
-        self.iovstart = runnum << 32
-        if startOfNextRun:
-            self.iovend = ((runnum+1) << 32)
-        else:
-            self.iovend = ((runnum+1) << 32) - 1
-
-    # Call to get data after all other parameters are properly set
-    # Data is returned as a list of IObject values, one per DB entry.
-    # This gives maximal flexibility to manipulate the items
-    def readData(self):
-
-        self.data = []
-
-        # Open the DB connection here if needed
-        if self.folder is None:
-            dbHandler = LumiDBHandler()
-            self.folder = dbHandler.getFolder(self.dbstr, self.folderstr)
-            
-            if self.folder is None:
-                print("Can't access DB", self.dbstr, 'folder', self.folderstr, '!')
-                return self.data
-
-        # Create the channel list
-        if len(self.channelIdList) == 0:
-            channels = cool.ChannelSelection.all()
-            self.readChannelList(channels)
-
-        else:
-            # Build the channel list here
-            self.channelIdList.sort()  # Must be sorted!
-
-            # Must read channels 50 at a time due to COOL limit...
-            ichan = 0
-            while (ichan < len(self.channelIdList)) :
-
-                jchan = 0
-                channels = None
-                firstChan = True
-            
-                for channelId in self.channelIdList[ichan:]:
-                    jchan += 1
-                    if firstChan:
-                        firstChan = False
-                        channels = cool.ChannelSelection(channelId)
-                    else:
-                        channels.addChannel(channelId)
-                    if jchan == 50: break 
-
-                # Remember how many we have read for next time
-                if self.verbose:
-                    print('CoolDataReader.readData() - loaded %d channels from %d' % (jchan, ichan))
-                ichan += jchan
-
-                if self.verbose:
-                    print('CoolDataReader.readData() - browsing', self.iovstart, self.iovend, 'with channel', channels, 'and tag', self.tag)
-
-                self.readChannelList(channels)
-
-            # End of loop building channel list and reading
-
-        # End of if statement reading data
-        return self.data
-
-    def readChannelList(self, channels):
-
-        # Open iterator over our defined IOV range
-        try:
-            itr = self.folder.browseObjects(self.iovstart, self.iovend, channels, self.tag)
-        except Exception as e:
-            print('CoolDataReader.readData() - exception reading folder:', self.folderstr)
-            print(e)
-            print('CoolDataReader.readData() - will try to reconnect (once)')
-
-            # Force re-opening connection
-            dbHandler = LumiDBHandler()
-            dbHandler.verbose = True
-            self.folder = dbHandler.getFolder(self.dbstr, self.folderstr, force=True)
-            
-            if self.folder is None:
-                print('CoolDataReader.readData() - forced re-opening failed!')
-                return self.data
-
-            # OK, let's try reading this again
-            print('CoolDataReader.readData() - trying to re-read re-opened folder!')
-            try:
-                itr = self.folder.browseObjects(self.iovstart, self.iovend, channels, self.tag)
-            except Exception as e:
-                print('CoolDataReader.readData() - exception reading folder:', self.folderstr)
-                print(e)
-                return self.data
-                
-        while itr.goToNext():
-            obj = itr.currentRef()
-            # print obj.payload()
-            self.data.append(obj.clone())
-            
-        itr.close()
-
-# End of class CoolDataReader
-        
-
-def parse_arguments():
-
-    description = "Script to create LHC data database"
-    parser = argparse.ArgumentParser(description=description)
-
-    parser.add_argument("--verbose", "-v", action="store_true", 
-                        help="Print debugging information")
-
-    parser.add_argument("--fills", "-f", nargs='+', 
-                        help="Fills to find information on")
-
-    parser.add_argument("--recent", action="store_true",
-                        help="Update new fills not already in DB")
-
-    parser.add_argument("--output", "-o", default="fill_data.db", 
-                        help="Specify output DB")
-
-    parser.add_argument("--create", "-c", action="store_true", 
-                        help="Overwrite existing DB")
-
-    return parser.parse_args()
-
-
-# Take a string and turn it into a list of integers
-# Can specify single values, ranges, or comma separated lists of both
-# Can also specify file name with list of numbers
-def parseFillList(filllist):
-
-    fill_list = []
-
-    # Check if this is a file with fill numbers
-    if len(filllist) == 1:
-        path = Path(filllist[0])
-        if path.exists() and path.is_file():
-            print(f"Reading fills from {path}")
-            # Try reading each line as a fill number
-            with path.open() as f: 
-                for line in f.readlines():
-                    line = line.strip()
-                    if len(line) == 0: continue
-                    if line[0] in ['#', '!']: continue
-                    if not line.isnumeric():
-                        print(f"Error parsing {line}")
-                        continue
-                    fill_list.append(int(line))
-            # Done reading file
-            return(fill_list)
-        elif '-' in filllist[0]:
-            pass
-        elif ',' in filllist[0]:
-            pass
-        elif not filllist[0].isnumeric():
-            print(f"File {path} doesn't exist!")
-            return fill_list
-
-    for string in filllist:
-        tokens = string.split(',')
-
-        for segment in tokens:
-
-            if len(segment) == 0: continue
-
-            if '-' in segment:  # Range of fills
-                start, end = segment.split('-')
-                if not start.isnumeric():
-                    print(f"Found invalid fill {start}")
-                    continue
-                if not end.isnumeric():
-                    print(f"Found invalid fill {end}")
-                    continue
-                start = int(start)
-                end = int(end)
-                fill_list.extend(list(range(int(start), int(end)+1)))
-
-            else:
-                if not segment.isnumeric():
-                    print(f"Found invalid fill {segment}")
-                    continue
-                fill_list.append(int(segment))
-
-    return(fill_list)
-
-def getIOVDict(args, first_fill):
-
-    if args.verbose:
-        print(f"Searching for IOV for fill {first_fill}")
-
-    # Utility to read ATLAS DB
-    dbHandler = LumiDBHandler()
-    dbname = "COOLOFL_DCS/CONDBR2"
-    db = dbHandler.getFolder(dbname, "/LHC/DCS/FILLSTATE")
-    db.setPrefetchAll(False)
-
-    # Use channel selector to give us a reverse iterator
-    channel = cool.ChannelSelection(order=cool.ChannelSelection.channelBeforeSinceDesc)
-
-    # Limit how much of DB to read
-    iovstart = 1000000000 * timeVal("2022-06-01:00:00:00")
-    #iovstart = int(1E9) * int(calendar.timegm(time.strptime("2022-06-01", "%Y-%m-%d")))
-
-    if args.verbose:
-        print(f"Starting from {timeString(iovstart)}")
-
-    # Now step backwards reading until we find our first fill
-    itr = db.browseObjects(iovstart, cool.ValidityKeyMax, channel)
-
-    iov_dict = {}
-    last_fill = None
-    while itr.goToNext():
-        obj = itr.currentRef()
-        fill = obj.payloadValue('FillNumber')
-        # Skip any invalid values
-        if not fill.isnumeric():
-            print(f"Found {fill} for FillNumber, skipping!")
-            continue
-
-        # Replace with integer
-        fill = int(obj.payloadValue('FillNumber'))
-
-        if fill == 0:
-            print(f"Found FillNumber = {fill}, skipping!")
-            continue
-
-        # Lots of output...
-        #if args.verbose:
-        #    print(f"Fill {obj.payloadValue('FillNumber')} Since {timeString(obj.since())}")
-
-        # Have we gone far enough?
-        if fill < first_fill: break
-
-        # Check if we found a new fill
-        if not iov_dict.get(fill, None):
-
-            # Update previous fill
-            if last_fill:
-                iov = iov_dict[last_fill]
-                iov_dict[last_fill] = (obj.until(), iov[1])
-
-            last_fill = fill
-            iov_dict[fill] = (obj.since(), obj.until())
-
-        # Update fill range
-        iov = iov_dict[fill]
-        iov_dict[fill] = (obj.since(), iov[1])
-
-    # Done, print out what we found if desired
-    if args.verbose:
-        for fill in sorted(iov_dict.keys()):
-
-            #time_lo = iov_dict[fill][0] // int(1E9)
-            #time_hi = iov_dict[fill][1] // int(1E9)
-
-            #print(f"Fill {fill} from {datetime.datetime.fromtimestamp(time_lo)} to {datetime.datetime.fromtimestamp(time_hi)}")
-            print(f"Fill {fill} from {timeString(iov_dict[fill][0])} to {timeString(iov_dict[fill][1])}")
-
-    # Close our database here
-    dbHandler.closeDB(dbname)
-
-    return iov_dict
-
-
-class FillObject:
-    def __init__(self):
-        self.fillNumber = 0
-        self.machineMode = ''
-        self.beamType1 = 0
-        self.beamType2 = 0
-        self.nBeam1 = 0
-        self.nBeam2 = 0
-        self.nColl = 0
-        self.injScheme = ''
-        self.since = cool.ValidityKeyMin
-        self.until = cool.ValidityKeyMax
-
-        # List with tuple of IOV range and payload dict (since, until, payload)
-        self.iov_list = []
-
-        # List with tuple of IOV range and payload dict (since, until, payload)
-        self.bcid_list = []
-
-    def updateFill(self, obj):
-
-        payload = obj.payload()
-
-        # Set the fill number
-        if self.fillNumber == 0:
-            self.updateFillParams(payload)
-
-        # For bunches, use max
-        self.nBeam1 = max(self.nBeam1, payload["NumBunchBeam1"])
-        self.nBeam2 = max(self.nBeam2, payload["NumBunchBeam2"])
-        self.nColl  = max(self.nColl,  payload["NumBunchColl"])
-
-        self.updateIOVList(obj.since(), obj.until(), obj.payload())
-
-    def updateBCID(self, obj):
-
-        # Need to make sure we don't duplicate IOVs from FILLPARAMS
-        # Truncate records to fit into fill IOV range
-        if obj.since() < self.since: 
-            since = self.since
-        else:
-            since = obj.since()
-
-        if obj.until() > self.until:
-            until = self.until
-        else:
-            until = obj.until()
-
-        if since == until: # Could happen?
-            return
-
-        # Need to copy here?
-        #valdict = {}
-        #for key in ["Beam1Bunches", "Beam2Bunches", "LuminousBunches", "BCIDmasks"]:
-        #    valdict[key] = obj.payload()[key]
-
-        self.bcid_list.append((since, until, obj.payload()))
-
-    def updateIOVList(self, since, until, payload):
-
-        # We want a subset of the payload
-        valdict = {}
-        for key in ["BeamMode", "BetaStar", "CrossingAngle"]:
-            valdict[key] = payload[key]
-
-        # No list, append current value
-        if len(self.iov_list) == 0:
-            self.iov_list.append((since, until, valdict))
-
-        else:
-            # Extend existing IOV to start of this one
-            self.iov_list[-1] = (self.iov_list[-1][0], since, self.iov_list[-1][2])
-
-            # Add new IOV if value is different
-            # Check values separately, as we don't want to update betastar
-            # unless we are in stable beams
-            last_payload = self.iov_list[-1][2]
-            if valdict["BeamMode"] != last_payload["BeamMode"]:
-                self.iov_list.append((since, until, valdict))
-
-            elif valdict["CrossingAngle"] != last_payload["CrossingAngle"]:  
-                self.iov_list.append((since, until, valdict))
-
-            elif valdict["BeamMode"] == "STABLE BEAMS" and valdict["BetaStar"] != last_payload["BetaStar"]:
-                self.iov_list.append((since, until, valdict))
-
-    def updateFillParams(self, payload):
-        # Update things that shouldn't change
-        try:
-            self.fillNumber = int(payload['FillNumber'])
-        except Exception as e:
-            print(f'Error setting fill number from {payload["FillNumber"]}') 
-            print(e)
-
-        self.machineMode = payload['MachineMode']
-        self.beamType1 = payload['BeamType1']
-        self.beamType2 = payload['BeamType2']
-        self.injScheme = payload['InjectionScheme']
-
-    def setLast(self):
-        ''' Set the final entry in the self.iov_list to an open-ended IOV '''
-        if len(self.iov_list) == 0: return
-
-        self.iov_list[-1] = (self.iov_list[-1][0], cool.ValidityKeyMax, self.iov_list[-1][2])
-
-    def __str__(self):
-        return f'Fill: {self.fillNumber} Mode: {self.machineMode} B1/2: {self.beamType1}/{self.beamType2} Bunches B1/B2/Coll: {self.nBeam1}/{self.nBeam2}/{self.nColl} {self.injScheme}'
-
-def findRecentFills(args):
-    fill_list = []
-    print("findRecentFills not implemented!")
-    return fill_list
-
-#
-# Start execution here
-#
-args = parse_arguments()
-
-if args.verbose:
-    print(f"Updating fill {args.fills}")
-    print(f"Recent: {args.recent}")
-    print(f"Output: {args.output}")
-    print(f"Create: {args.create}")
-
-if args.recent:
-
-    if args.fills:
-        print("Can't specify --fills and --recent!")
-        sys.exit(1)
-
-    fill_list = findRecentFills(args)
-
-    if len(fill_list) == 0:
-        print("No new fills found!")
-        sys.exit(0) 
-
-else:
-    if not args.fills:
-        print("No fills specified!  Use --fills to provide fill numbers")
-        sys.exit(1)
-
-    fill_list = parseFillList(args.fills)
-    fill_list.sort()
-
-    if len(fill_list) == 0:
-        print("No fills specified!  Use --fills to provide fill numbers")
-        sys.exit(1)
-
-if args.verbose:
-    print(f"Fill list:\n{fill_list}")
-
-# To speed things up, lets find the IOV ranges for each fill in our fill list
-# Do this by reverse lookup in FILLSTATE
-iov_dict = getIOVDict(args, fill_list[0])
-
-# Open (or create) the database
-connectString = f'sqlite://;schema={args.output};dbname=CONDBR3'
-
-if args.verbose:
-    print(f"Opening DB using connection string {connectString}")
-
-if os.path.exists(args.output):
-    if args.create:
-        print(f"Deleting {args.output} due to --create")
-        os.remove(args.output)
-
-    else:
-        print(f"Output DB file {args.output} already exists!")
-        print(f"Writing in place, use --create to delete")
-
-else:
-    print(f"Creating new DB {args.output}")
-
-# Opens or creates as needed
-db = forceOpen(connectString)
-
-if not db:
-    print("Error with {connectString}")
-    sys.exit(1)
-
-# Create folders (use CoolConvUtilities function)
-description = athenaDesc(runLumi=False, datatype="AthenaAttributeList")
-if args.verbose:
-    print(f"Folder description: {description}")
-
-# Order matters here!
-lhc_spec = cool.RecordSpecification()
-lhc_spec.extend("FillNumber", cool.StorageType.Int32)
-lhc_spec.extend("MachineMode", cool.StorageType.String4k)
-lhc_spec.extend("InjectionScheme", cool.StorageType.String4k)
-lhc_spec.extend("BeamType1", cool.StorageType.Int32)
-lhc_spec.extend("BeamType2", cool.StorageType.Int32)
-lhc_spec.extend("NumBunchBeam1", cool.StorageType.UInt32)
-lhc_spec.extend("NumBunchBeam2", cool.StorageType.UInt32)
-lhc_spec.extend("NumBunchColl", cool.StorageType.UInt32)
-
-beam_spec = cool.RecordSpecification()
-beam_spec.extend("BeamMode", cool.StorageType.String4k)
-beam_spec.extend("BetaStar", cool.StorageType.Float)
-beam_spec.extend("CrossingAngle", cool.StorageType.Float)
-
-bcid_spec = cool.RecordSpecification()
-bcid_spec.extend("Beam1Bunches", cool.StorageType.UInt32)
-bcid_spec.extend("Beam2Bunches", cool.StorageType.UInt32)
-bcid_spec.extend("LuminousBunches", cool.StorageType.UInt32)
-bcid_spec.extend("BCIDmasks", cool.StorageType.Blob64k)
-
-# Ensure folder opens or creates as needed
-# Create storage buffer for writing
-try:
-    lhc_folder  = ensureFolder(db, "/LHC/FillData", lhc_spec, description)
-    beam_folder = ensureFolder(db, "/LHC/BeamData", beam_spec, description)
-    bcid_folder = ensureFolder(db, "/LHC/BCIDData", bcid_spec, description)
-    lhc_folder.setupStorageBuffer()
-    beam_folder.setupStorageBuffer()
-    bcid_folder.setupStorageBuffer()
-
-except Exception as e:
-    print("Could not access or create folders!")
-    print(e)
-    sys.exit(1)
-
-db_lhc  = CoolDataReader("COOLOFL_DCS/CONDBR2", "/LHC/DCS/FILLSTATE")
-db_bcid = CoolDataReader("COOLONL_TDAQ/CONDBR2", "/TDAQ/OLC/LHC/FILLPARAMS")
-
-for fill in fill_list:
-
-    if not iov_dict.get(fill, None):  # Should have just found this
-        print(f"Can't find fill {fill} in IOV dictionary!")
-        sys.exit(1)
-
-    time_lo = iov_dict[fill][0]
-    time_hi = iov_dict[fill][1]
-    if args.verbose:
-        print(f"Working on fill {fill} from {timeString(time_lo)} to {timeString(time_hi)}")
-
-    fill_obj = FillObject()
-    fill_obj.since = time_lo
-    fill_obj.until = time_hi
-
-    db_lhc.setIOVRange(time_lo, time_hi)
-    db_lhc.readData()
-
-    for obj in db_lhc.data:
-        fill_obj.updateFill(obj)
-
-    # Is this the last fill?
-    if fill == fill_list[-1]:
-        fill_obj.setLast()
-
-    db_bcid.setIOVRange(time_lo, time_hi)
-    db_bcid.readData()
-
-    for obj in db_bcid.data:
-        fill_obj.updateBCID(obj)
-
-    # Now we want to fill our folders
-    lhc_record = cool.Record(lhc_spec)
-    lhc_record["FillNumber"] = fill_obj.fillNumber
-    lhc_record["MachineMode"] = fill_obj.machineMode
-    lhc_record["InjectionScheme"] = fill_obj.injScheme
-    lhc_record["BeamType1"] = fill_obj.beamType1
-    lhc_record["BeamType2"] = fill_obj.beamType2
-    lhc_record["NumBunchBeam1"] = fill_obj.nBeam1
-    lhc_record["NumBunchBeam2"] = fill_obj.nBeam2
-    lhc_record["NumBunchColl"] = fill_obj.nColl
-
-    if args.verbose: 
-        print(f"Writing fill {fill_obj.fillNumber}")
-        print(fill_obj)
-
-    chan = 0 # No channels here, but need to pass dummy argument
-    lhc_folder.storeObject(fill_obj.since, fill_obj.until, lhc_record, chan)
-
-    if args.verbose: print("Writing beam folder:")
-
-    for since, until, payload in fill_obj.iov_list:
-        beam_record = cool.Record(beam_spec)
-        for key in payload:
-            beam_record[key] = payload[key]
-
-        if args.verbose:
-            print(f"{timeString(since)} - {timeString(until)}: {beam_record}")
-
-        beam_folder.storeObject(since, until, beam_record, chan)
-
-    for since, until, payload in fill_obj.bcid_list:
-        bcid_record = cool.Record(bcid_spec)
-        for key in payload:
-            bcid_record[key] = payload[key]
-
-        if args.verbose:
-            print(f"{timeString(since)} - {timeString(until)}: {bcid_record}")
-
-        bcid_folder.storeObject(since, until, bcid_record, chan)
-
-# Make sure everything is written
-lhc_folder.flushStorageBuffer()
-beam_folder.flushStorageBuffer()
-bcid_folder.flushStorageBuffer()
-
-# End of loop over fills
-
-db.closeDatabase()
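A minimal read-back sketch for the sqlite file this script produces, reusing the same connect string and the payload keys defined in lhc_spec above; the file name is simply the script's default output and may differ in practice:

    from PyCool import cool
    from CoolConvUtilities.AtlCoolLib import forceOpen, timeString

    # Open the fill-data DB written by the script above (default output name assumed)
    db = forceOpen('sqlite://;schema=fill_data.db;dbname=CONDBR3')
    folder = db.getFolder('/LHC/FillData')
    itr = folder.browseObjects(cool.ValidityKeyMin, cool.ValidityKeyMax,
                               cool.ChannelSelection.all())
    while itr.goToNext():
        obj = itr.currentRef()
        payload = obj.payload()
        print(timeString(obj.since()), payload['FillNumber'], payload['MachineMode'])
    itr.close()
    db.closeDatabase()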