diff --git a/functions/TFileCache.py b/functions/TFileCache.py
new file mode 100644
index 0000000000000000000000000000000000000000..741be3cc64d515f843955fb16459af95a0b9fcdf
--- /dev/null
+++ b/functions/TFileCache.py
@@ -0,0 +1,88 @@
+import os
+import shutil # to copy files
+
+"""
+TFileCache( pathToFile , tFileModality = "READ")
+
+This is a helper for running ROOT from a Linux container on a Windows machine.
+There is some overhead when accessing files from within the container that are stored in the Windows file system.
+
+This overhead can be meaningful when processing larger files.
+As a workaround, TFileCache copies the file to a folder in the Linux file system before opening the TFile.
+
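+Usage, a minimal sketch (the file path below is hypothetical):
+
+    from functions.TFileCache import TFileCache
+    aTFile = TFileCache( "/mnt/c/Users/someUser/someHistograms.root" ) # opens a cached local copy in "READ" mode
+    # ... use aTFile like any other ROOT.TFile ...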
+"""
+
+
+def copyToTemp( inputFile, targetDir = os.path.join(os.path.expanduser("~"), "temp") ):
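+    """Copy inputFile into targetDir (created if it does not exist yet) and return the path of the copy. Copies are not cleaned up automatically."""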
+
+    if not os.path.exists(targetDir): os.makedirs(targetDir) # makedirs also creates intermediate directories, in case targetDir is nested
+
+    fileName = os.path.basename(inputFile)
+
+    targetPath = os.path.join(targetDir,fileName)
+
+    shutil.copyfile(inputFile, targetPath) # overwrites any previous copy with the same name
+
+    return targetPath
+
+
+def TFileCache( pathToFile , tFileModality = "READ"):
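+    """Copy pathToFile into the local cache via copyToTemp and open the copy as a ROOT TFile."""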
+
+    import ROOT # imported here rather than at the top, so that copyToTemp can be used without a ROOT installation
+
+    cachedFilePath = copyToTemp( pathToFile ) # copy the file to a temporary folder in the Linux file system, to speed up access when using a container
+
+    return ROOT.TFile(cachedFilePath, tFileModality)
+
+
+
+if __name__ == '__main__':
+
+
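+    # self-test: write a small random file, copy it with copyToTemp, and check that the copy matches the original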
+    def generate_big_random_letters(filename,size):
+        """
+        generate big random letters/alphabets to a file
+        :param filename: the filename
+        :param size: the size in bytes
+        :return: void
+        """
+        import random
+        import string
+
+        chars = ''.join( random.choice(string.ascii_letters) for _ in range(size) ) # string.ascii_letters: string.letters existed only in python 2
+
+
+        with open(filename, 'w') as f:
+            f.write(chars)
+        return None
+
+
+
+    referenceFileName = "test.txt"
+
+    generate_big_random_letters(referenceFileName,100)
+
+    cachedFileName = copyToTemp( referenceFileName )
+
+
+    with open(referenceFileName, "r") as referenceFile:
+        referenceOutput = referenceFile.read()
+
+    os.remove(referenceFileName)
+
+    with open(cachedFileName, "r") as cachedFile:
+        cachedFileOutput = cachedFile.read()
+
+    os.remove(cachedFileName)
+
+    assert referenceOutput == cachedFileOutput, "content of the cached copy differs from the original"
+
+    print( "TFileCache / copyToTemp passed the test!")
+
+
+
diff --git a/plotPostProcess.py b/plotPostProcess.py
index 0013fab59550c909282c90e0de8857cb755aa18e..3808eb5fcf2440ff0530dbcc6b599442c812d09c 100644
--- a/plotPostProcess.py
+++ b/plotPostProcess.py
@@ -23,8 +23,11 @@ import numpy as np # good ol' numpy
 import os
 import collections # so we can use collections.defaultdict to more easily construct nested dicts on the fly
 import resource # print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
+import time # for measuring execution time
+import limitSetting.limitFunctions.reportMemUsage as reportMemUsage # to report memory usage at the end of the run
 
 import functions.histNumpyTools as histNumpyTools # to convert ROOT.TH1 histograms to numpy arrays
+from functions.TFileCache import TFileCache
 
 import makeReducibleShapes.makeReducibleShapes as makeReducibleShapes
 
@@ -827,6 +830,11 @@ if __name__ == '__main__':
     parser.add_argument( "--skipReducible", default=False, action='store_true' , 
     help = "If run with '--skipReducible' we will not include any 'reducible' MC in the plots " ) 
 
+    parser.add_argument( "--cacheForDockerOnWSL", default=False, action='store_true' , 
+    help = "Use when opening larger files while running in a docker container on a Windows machine: the input file is copied to the Linux file system before it is opened" ) 
+
+    startTime = time.time() # remember the start time, so that we can report the total runtime at the end
+
     args = parser.parse_args()
 
     skipZX = True
@@ -885,7 +893,8 @@ if __name__ == '__main__':
     mainBackgrounds.extend( myDSIDHelper.analysisMapping["H4l"])
     mainBackgrounds.extend( myDSIDHelper.analysisMapping["ZZ"])
 
-    postProcessedData = ROOT.TFile(args.input,"READ"); # open the file with te data from the ZdZdPostProcessing
+    # open the file with the data from the ZdZdPostProcessing
+    if args.cacheForDockerOnWSL : postProcessedData = TFileCache(args.input,"READ") # opens a cached local copy, see functions/TFileCache.py
+    else:                         postProcessedData = ROOT.TFile(args.input,"READ")
 
     myDSIDHelper.fillSumOfEventWeightsDict(postProcessedData)
 
@@ -943,7 +952,7 @@ if __name__ == '__main__':
 
         plotTitle = idPlotTitle(path, myDSIDHelper, DSID=DSID)
 
-        systematicChannel = path.split("/")[2]
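+        # the systematic variation (e.g. "Nominal") is the second-to-last element of the path; indexing from the end is robust to extra leading path elements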
+        systematicChannel = path.split("/")[-2]
 
         # skip the not 'nominal' histograms, unless we are doing systematic plots
         if not args.makeSystematicsPlots  and "Nominal" != systematicChannel: continue
@@ -1327,6 +1336,8 @@ if __name__ == '__main__':
 
     printSubsetOfHists( canvasList, searchStrings=["M34","M4l"], outputDir = "supportnoteFigs")
 
+    reportMemUsage.reportMemUsage(startTime) # report memory usage and the elapsed time since startTime
+
     print("All plots processed!")
     #import pdb; pdb.set_trace() # import the debugger and instruct it to stop here