diff --git a/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt b/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
index 0ea8406d7061d981a356d229ab5691b2f2381844..c133a1a53a84e1023619ade11ecce5fad602d775 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
+++ b/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
@@ -1,13 +1,11 @@
-################################################################################
-# Package: InDetAlignExample
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( InDetAlignExample )
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py )
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py share/InDetAlignExampleFlags/*.py share/Data/*.txt )
-atlas_install_runtime( test/InDetAlignExample_TestConfiguration.xml macros/*.cc html/*.html )
+atlas_install_runtime( macros/*.cc html/*.html )
 atlas_install_scripts( share/RunIterator_Run2Rel19.py )
 
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py b/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
index 0fb0488194beab57db1550508b21d71a2fb724ae..3615f9183d49890b699cb3ef30bdb7b945bda104 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
@@ -6,13 +6,8 @@
 #    Authors: Jike Wang      (jike.wang@cern.ch)
 #################################################################
 
-from __future__ import print_function
-
-import os, types
+import os
 import sys
-
-from future import standard_library
-standard_library.install_aliases()
 import subprocess
 
 
@@ -105,14 +100,14 @@ class ConfiguredIDAlignDatasets:
 		if ("mc09" == self.__datasetType or "MC09" == self.__datasetType) :
 			oneDatasetName = "mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731"
 			return oneDatasetName
-	 
+
 		elif self.containType("Customed") and "Customed" == topology :
 			oneDatasetName = self.__DatasetsOptions["CustomedDatasetsNameList"][0]
 
 		elif ("Collision" == topology) :  
 			if self.stream() == "MinBias" and self.containType("900GeV") :
 				oneDatasetName = "data09_900GeV.%08d.physics_MinBias.recon.ESD.%s" % ( int(self.__DatasetsOptions["CollisionRunList"][0]), self.__DatasetsOptions["CollisionRecoTag"][0] )
-									                                  
+
 			if self.stream() == "MinBias" and self.containType("7TeV"):
 				oneDatasetName = "data10_7TeV.%08d.physics_MinBias.recon.ESD.%s"   % ( int(self.__DatasetsOptions["CollisionRunList"][0]), self.__DatasetsOptions["CollisionRecoTag"][0] )
 
@@ -244,8 +239,6 @@ class ConfiguredIDAlignDatasets:
 				namesList.append(datasetName)
 			str = ",".join(namesList)
 			return str
-                #elif("CosmicBon" == topology)
-
 
 
 	def recoScript(self, topology = "", i = 0) :
@@ -257,8 +250,8 @@ class ConfiguredIDAlignDatasets:
 
 			#elif "mc" in self.__DatasetsOptions["CustomedDatasetsNameList"][i] :
 			#	recoScript = "InDetAlignExample/loadInDetRec.py"                               
-	     	         
-			else:                           
+
+			else:
 				recoScript = "InDetAlignExample/loadInDetRec_new.py"
 
 			return recoScript 
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
index 92705175ff3deb49c4dd3e171a859d69ad4db2eb..71d109bfb4642fde8c01fca8e5119274b46c37cc 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
@@ -5,7 +5,6 @@
 #  Class for local processing
 #
 # =====================================================================
-from __future__ import print_function
 
 from threading import Thread
 import os
@@ -43,87 +42,85 @@ class runProcess(Thread):
                 self.status=1
                 break
             
-import os
 class SortCpus:
-	def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel):
-		def sort_by_value(d):
-			""" Returns the keys of dictionary d sorted by their values """
-			items=d.items()
-			backitems=[ [v[1],v[0]] for v in items]
-			backitems.sort()
-			backitems.reverse()	
-			return [ backitems[i][1] for i in range(0,len(backitems))]		
-		self.OutputLevel = OutputLevel
-		inputfiles = open(FILELIST, "r")
-		filelist = inputfiles.read().split()
-		inputfiles.close()
-		if not LOCALDIR:
-			print ("Reading Custom File")
-			FinalListSorted = []
-			for line in filelist:
-				if line and line[0] != '#':
-					FinalListSorted.append(line)
-			print (FinalListSorted)
-			
-		elif "castor" in LOCALDIR:
-			print ("Reading castor directory. Please wait...")
-			extendedFileList = os.popen("rfdir "+ LOCALDIR[7:]).read().splitlines()
-		else:
-			print ("Reading directory. Please wait...")
-			extendedFileList = os.popen("ls -l "+ LOCALDIR).read().splitlines()
+        def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel):
+                def sort_by_value(d):
+                        """ Returns the keys of dictionary d sorted by their values """
+                        items=d.items()
+                        backitems=[ [v[1],v[0]] for v in items]
+                        backitems.sort()
+                        backitems.reverse()
+                        return [ backitems[i][1] for i in range(0,len(backitems))]
+                self.OutputLevel = OutputLevel
+                inputfiles = open(FILELIST, "r")
+                filelist = inputfiles.read().split()
+                inputfiles.close()
+                if not LOCALDIR:
+                        print ("Reading Custom File")
+                        FinalListSorted = []
+                        for line in filelist:
+                                if line and line[0] != '#':
+                                        FinalListSorted.append(line)
+                        print (FinalListSorted)
+
+                elif "castor" in LOCALDIR:
+                        print ("Reading castor directory. Please wait...")
+                        extendedFileList = os.popen("rfdir "+ LOCALDIR[7:]).read().splitlines()
+                else:
+                        print ("Reading directory. Please wait...")
+                        extendedFileList = os.popen("ls -l "+ LOCALDIR).read().splitlines()
 
-		if LOCALDIR:
-			i = 0
-			SizeList = {}
-			for line in extendedFileList:
-				curr = line.split()
-				SizeList[i] = {}
-				SizeList[i][0] = curr[8]
-				SizeList[i][1] = curr[4]
-				i = i+1
-			FinalList = {}
-			count = 0
-			for i in range(0,len(SizeList)):
-				if SizeList[i][0] in filelist:
-					#print (SizeList[i][0], " size:", SizeList[i][1])
-					FinalList[SizeList[i][0]] = int(SizeList[i][1])
+                if LOCALDIR:
+                        i = 0
+                        SizeList = {}
+                        for line in extendedFileList:
+                                curr = line.split()
+                                SizeList[i] = {}
+                                SizeList[i][0] = curr[8]
+                                SizeList[i][1] = curr[4]
+                                i = i+1
+                        FinalList = {}
+                        for i in range(0,len(SizeList)):
+                                if SizeList[i][0] in filelist:
+                                        #print (SizeList[i][0], " size:", SizeList[i][1])
+                                        FinalList[SizeList[i][0]] = int(SizeList[i][1])
 
-			#SizeListSorted = [ (k,SizeList[k]) for k in sorted(SizeList.values())] 
-			FinalListSorted = sort_by_value(FinalList)
-			#print ("Sorted list" )
-			#for i in range(0,len(FinalListSorted)):
-			#	print (FinalListSorted[i], "\tsize:\t", FinalList[FinalListSorted[i]])
-		currCPU = 0
-		reverse = False
-		self.CPUsFiles = {}
-		for i in range(0,len(FinalListSorted)):
-			#print (FinalListSorted[i], "CPU: ", currCPU)
-			if currCPU in self.CPUsFiles:
-				self.CPUsFiles[currCPU].append(LOCALDIR+FinalListSorted[i])
-			else:
-				self.CPUsFiles[currCPU] = [LOCALDIR+FinalListSorted[i]]
-			if(not reverse):
-				currCPU = currCPU + 1
-				if(currCPU == TOTALCPUS):
-					#currCPU = currCPU - 1
-					reverse = not reverse
-			if(reverse):
-				currCPU = currCPU - 1
-				if(currCPU == -1):
-					currCPU = currCPU + 1
-					reverse = not reverse
+                        #SizeListSorted = [ (k,SizeList[k]) for k in sorted(SizeList.values())]
+                        FinalListSorted = sort_by_value(FinalList)
+                        #print ("Sorted list" )
+                        #for i in range(0,len(FinalListSorted)):
+                        #       print (FinalListSorted[i], "\tsize:\t", FinalList[FinalListSorted[i]])
+                currCPU = 0
+                reverse = False
+                self.CPUsFiles = {}
+                for i in range(0,len(FinalListSorted)):
+                        #print (FinalListSorted[i], "CPU: ", currCPU)
+                        if currCPU in self.CPUsFiles:
+                                self.CPUsFiles[currCPU].append(LOCALDIR+FinalListSorted[i])
+                        else:
+                                self.CPUsFiles[currCPU] = [LOCALDIR+FinalListSorted[i]]
+                        if(not reverse):
+                                currCPU = currCPU + 1
+                                if(currCPU == TOTALCPUS):
+                                        #currCPU = currCPU - 1
+                                        reverse = not reverse
+                        if(reverse):
+                                currCPU = currCPU - 1
+                                if(currCPU == -1):
+                                        currCPU = currCPU + 1
+                                        reverse = not reverse
                     
                                         
-	def getCPU(self,CURRENTCPU):
-		if self.OutputLevel=='DEBUG':
-			print ("|",40*"-"," CPU #: ", CURRENTCPU, 40*"-", "|")
-			for line in self.CPUsFiles[CURRENTCPU]:
-				print ("|  - ",line)
-			print ("|",93*"-","|")
-		return self.CPUsFiles[CURRENTCPU]
+        def getCPU(self,CURRENTCPU):
+                if self.OutputLevel=='DEBUG':
+                        print ("|",40*"-"," CPU #: ", CURRENTCPU, 40*"-", "|")
+                        for line in self.CPUsFiles[CURRENTCPU]:
+                                print ("|  - ",line)
+                        print ("|",93*"-","|")
+                return self.CPUsFiles[CURRENTCPU]
            
                 
-		
+
 class writeJob:
     def __init__(self,
                  OutputPath,
@@ -196,7 +193,7 @@ class writeJob:
         topOptions=open(topOptionFileName,'r')
             
         #job=open(TempPath+"/"+self.JOBNAME,'w')
-        job=open(self.JOBNAME,'w')	
+        job=open(self.JOBNAME,'w')
         job.write(topOptions.readline())
         job.write(topOptions.readline())
         job.write("\n")
@@ -315,7 +312,7 @@ class writeScript:
         script.write("source %s/../InnerDetector/InDetExample/InDetAlignExample/cmt/setup.sh \n" % self.CMTDIR)
 
 #        script.write("source %s/../%s/InnerDetector/InDetExample/InDetAlignExample/cmt/setup.sh \n" % (self.CMTDIR,self.ATHENAREL))
-		
+
 #        script.write("cd %s \n" % temppath)
         script.write("cd %s \n" % self.RUNPATH)
 
@@ -375,7 +372,6 @@ class collectRAmodules:
         print ("------------------------------------------")
         print ("  Collecting Iter%d RA module files" % self.i)
         print ("------------------------------------------")
-        HOME = os.environ['HOME']
         os.chdir("%s/Iter%d" % (self.OutputPath,self.i))
         os.mkdir("moduleRA")
         
@@ -452,7 +448,6 @@ class mergeMatrix:
         print ("------------------------------------------")
         print ("  Merging Iter%d GX2 Matrices" % self.i)
         print ("------------------------------------------")
-        HOME = os.environ['HOME']
         if os.environ['HOSTNAME'] != 'tst01.ific.uv.es':
             os.chdir(self.addbigPath)
             print (self.addbigPath)
@@ -598,7 +593,6 @@ class COG:
                     ATHENAREL,
                     TAGS,
                     RUNPATH):
-        TempPath="%s/Iter%d/" % (self.OutputPath, self.iter)
         script=open(self.SCRIPTNAME,'w')
         script.write("#BSUB -J %s_Iter%dCog \n" % (self.preName,self.iter))         
         script.write("#BSUB -o %s/Iter%d/logs/Iter%dCog.log \n" % (self.OutputPath,self.iter,self.iter))
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
index a8355f37075d720267adf5667a177581e4d5a011..1fadf029526ffc9139835386e1a2ca16575c35fb 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
@@ -2,15 +2,10 @@
 
 # Written by Juerg Beringer in April 2008.
 
-from __future__ import print_function
-
 import math
 import os
 import socket
 import time
-
-from future import standard_library
-standard_library.install_aliases()
 import subprocess
 
 
@@ -161,7 +156,7 @@ class InDetAlignJobRunner:
 
     def submitBackground(self,jobnr):
         """Execute a configured job in the background"""
-        if not jobnr in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
+        if jobnr not in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
         scriptfile = self.jobs[jobnr]['scriptfile']
         logfile = self.jobs[jobnr]['logfile']
         os.system(scriptfile+' >& '+logfile+' &')
@@ -169,7 +164,7 @@ class InDetAlignJobRunner:
 
     def submitLSF(self,jobnr):
         """Execute a configured job as a LSF batch job"""
-        if not jobnr in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
+        if jobnr not in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
         batchCmd = 'bsub -q %(batchQueue)s -J %(jobname)s -o %(logfile)s %(scriptfile)s' % self.jobs[jobnr]
         print (batchCmd)
         os.system(batchCmd)
@@ -178,7 +173,7 @@ class InDetAlignJobRunner:
     def run(self):
         """Run all jobs either in batch or in the background, as specified by option batchType."""
         batchType = self.options['batchType']    # Currently the same for all jobs
-        if not batchType in ('LSF','background','configureOnly'): raise InDetAlignJobRunnerError ('Cannot run job type %s' % type)
+        if batchType not in ('LSF','background','configureOnly'): raise InDetAlignJobRunnerError ('Cannot run job type %s' % type)
         filesPerJob = self.options['filesPerJob']
         njobs = int(math.ceil(float(len(self.inputfiles))/filesPerJob))
         self.options['njobs'] = njobs
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
index 3971c62b2ddc233c4d059b5e1a44bebddd24f71d..51ed2f0cf71ceecd88c44d5bf70e845509654b3a 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
@@ -6,13 +6,9 @@
 #             Song-Ming Wang (smwang@phys.sinica.edu.tw)
 # =====================================================================
 
-from __future__ import print_function
-
-
 from threading import Thread
 import os
-import time,datetime
-import string
+import time
 import sys
 
 from future import standard_library
@@ -21,157 +17,156 @@ import subprocess
 
 
 class runProcess(Thread):
-	def __init__ (self,
-		      jobOptions,
-		      iter,
-		      part,
-		      OutputPath,
-		      OutputLevel):
-		Thread.__init__(self)
-		self.jobOptions = jobOptions
-		self.i = iter
-		self.j = part
-		self.OutputPath = OutputPath
-		self.OutputLevel = OutputLevel
-		self.status = -1
-
-	def run(self):
-		if self.j == -1:
-			print ("----------------------------------------------")
-			print ("  Running Iter%d - Solve in local machine" % (self.i))
-			print ("----------------------------------------------")
-			process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dSolve.log" % (self.jobOptions, self.OutputPath, self.i, self.i))
-		else:
-			print ("----------------------------------------------")
-			print ("  Running Iter%d - Part%02d in local machine" % (self.i, self.j))
-			print ("----------------------------------------------")
-			print (" - output path: %s/Iter%02d/%02d" % (self.OutputPath, self.i, self.j))
-			process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dPart%02d.log" % (self.jobOptions, self.OutputPath, self.i, self.i ,self.j))
-
-		print ("Running...")
-		while 1:
-			line = process.readline()
-			if self.OutputLevel == 'DEBUG':
-				print (line, end='')
-			if not line:
-				self.status=1
-				break
-			
+        def __init__ (self,
+                      jobOptions,
+                      iter,
+                      part,
+                      OutputPath,
+                      OutputLevel):
+                Thread.__init__(self)
+                self.jobOptions = jobOptions
+                self.i = iter
+                self.j = part
+                self.OutputPath = OutputPath
+                self.OutputLevel = OutputLevel
+                self.status = -1
+
+        def run(self):
+                if self.j == -1:
+                        print ("----------------------------------------------")
+                        print ("  Running Iter%d - Solve in local machine" % (self.i))
+                        print ("----------------------------------------------")
+                        process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dSolve.log" % (self.jobOptions, self.OutputPath, self.i, self.i))
+                else:
+                        print ("----------------------------------------------")
+                        print ("  Running Iter%d - Part%02d in local machine" % (self.i, self.j))
+                        print ("----------------------------------------------")
+                        print (" - output path: %s/Iter%02d/%02d" % (self.OutputPath, self.i, self.j))
+                        process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dPart%02d.log" % (self.jobOptions, self.OutputPath, self.i, self.i ,self.j))
+
+                print ("Running...")
+                while 1:
+                        line = process.readline()
+                        if self.OutputLevel == 'DEBUG':
+                                print (line, end='')
+                        if not line:
+                                self.status=1
+                                break
+
 
 
 
 def prepareForThisIter(iteration, GridFileOptions):
 
-	GridFileOptions["GridAccSubJobID"]  = 'GridAccSubJobID_%02d.txt'  % (iteration)
-	GridFileOptions["GridAccSubInfo"]   = 'GridAccSubInfo_%02d.txt'   % (iteration)
-	GridFileOptions["GridAccOutDS"]     = 'GridAccOutDS_%02d.txt'     % (iteration)
-	GridFileOptions["GridSolvingOutDS"] = 'GridSolvingOutDS_%02d.txt' % (iteration)
+        GridFileOptions["GridAccSubJobID"]  = 'GridAccSubJobID_%02d.txt'  % (iteration)
+        GridFileOptions["GridAccSubInfo"]   = 'GridAccSubInfo_%02d.txt'   % (iteration)
+        GridFileOptions["GridAccOutDS"]     = 'GridAccOutDS_%02d.txt'     % (iteration)
+        GridFileOptions["GridSolvingOutDS"] = 'GridSolvingOutDS_%02d.txt' % (iteration)
+
 
 
-			
 class writeJobGrid:
-	def __init__(self,
-		     OutputPath,
-		     CosmicsBoff,
-		     CosmicsBon,
-		     iter,
-		     part,
-		     JOBNAME,
-		     RecoOptions      = {},
-		     extraOptions     = {},
-		     AlignmentOptions = {},
-		     RecoScript       = "InDetAlignExample/NewTopOptions.py",
-		     #AlignmentScript  = "InDetAlignExample/NewInDetAlignAlgSetup.py",
-		     AlignmentScript  = "NewInDetAlignAlgSetup.py",
-		     AlignmentLevels  = "InDetAlignExample/NewInDetAlignLevels.py",
-		     MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py"
-		     ):
-		self.OutputPath       = OutputPath
-		self.CosmicsBoff      = CosmicsBoff
-		self.CosmicsBon       = CosmicsBon
-		self.i                = iter
-		self.part             = part
-		self.JOBNAME          = JOBNAME
-		self.RecoOptions      = RecoOptions
-		self.extraOptions     = extraOptions
-		self.RecoScript       = RecoScript
-		self.AlignmentOptions = AlignmentOptions
-		self.AlignmentScript  = AlignmentScript
-		self.AlignmentLevels  = AlignmentLevels
-		self.MonitoringScript = MonitoringScript
-		
-	def write(self):
-		PrefixName="Iter%02d_" % self.i
-		if self.part == "Solve":
-			TempPath         = "%s/Iter%02d/"                 % (self.OutputPath, self.i)
-		else:
-			if self.CosmicsBoff:
-				TempPath = "%s/Iter%02d/CosmicsBoff/" % (self.OutputPath, self.i)
-			elif self.CosmicsBon:
-				TempPath = "%s/Iter%02d/CosmicsBon/"   % (self.OutputPath, self.i)
-			else:
-				TempPath = "%s/Iter%02d/Collision/"       % (self.OutputPath, self.i)
-
-		os.system('mkdir -p %s' % TempPath)
-		os.system('mkdir -p %s' % TempPath+"/InDetRecExample")
-		if self.MonitoringScript != "":
-			os.system('ln -s %s %s' % (self.MonitoringScript,TempPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
-
-		# Make Monitoring script dir and file here
-
-		#job=open(TempPath+self.JOBNAME,'w')
-		job=open(self.JOBNAME,'w')
-		job.write('##-------- Alignment Configuration --------------------\n')
-
-		for option in self.AlignmentOptions:
-			if type(self.AlignmentOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.AlignmentOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.AlignmentOptions[option])+'\n'
-				job.write(customoption)
-			
-		job.write("\n")
-		job.write('##-------- Loading the Alignment Levels --------------------\n')
-		job.write('include("'+str(self.AlignmentLevels)+'") \n')
-		job.write("\n")
-		job.write('##-------- Reconstruction Configuration --------------------\n')
-		
-		for option in self.RecoOptions:
-			if type(self.RecoOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.RecoOptions[option])+'\n'
-				job.write(customoption)
-
-		if len(self.extraOptions):
-			job.write("\n")
-			job.write('##-------- Extra Configuration --------------------\n')
-		
-		for option in self.extraOptions:
-			if type(self.extraOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.extraOptions[option])+'\n'
-				job.write(customoption)
-
-		job.write("\n")
-		job.write('##-------- End of custom options -------------\n')
-
-		# Need to write the InDetRec Options Here:
-		job.write("\n")
-		job.write('##-------- Load Reconstruction or purely build geometry ---------------\n')
-		job.write('include("'+str(self.RecoScript)+'") \n')
-
-		job.write("\n")
-		job.write('##-------- Load Alignment --------------------\n')
-		job.write('include("'+str(self.AlignmentScript)+'") \n')
-		job.write("\n")
-		job.close()
-
-	
+        def __init__(self,
+                     OutputPath,
+                     CosmicsBoff,
+                     CosmicsBon,
+                     iter,
+                     part,
+                     JOBNAME,
+                     RecoOptions      = {},
+                     extraOptions     = {},
+                     AlignmentOptions = {},
+                     RecoScript       = "InDetAlignExample/NewTopOptions.py",
+                     #AlignmentScript  = "InDetAlignExample/NewInDetAlignAlgSetup.py",
+                     AlignmentScript  = "NewInDetAlignAlgSetup.py",
+                     AlignmentLevels  = "InDetAlignExample/NewInDetAlignLevels.py",
+                     MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py"
+                     ):
+                self.OutputPath       = OutputPath
+                self.CosmicsBoff      = CosmicsBoff
+                self.CosmicsBon       = CosmicsBon
+                self.i                = iter
+                self.part             = part
+                self.JOBNAME          = JOBNAME
+                self.RecoOptions      = RecoOptions
+                self.extraOptions     = extraOptions
+                self.RecoScript       = RecoScript
+                self.AlignmentOptions = AlignmentOptions
+                self.AlignmentScript  = AlignmentScript
+                self.AlignmentLevels  = AlignmentLevels
+                self.MonitoringScript = MonitoringScript
+
+        def write(self):
+                if self.part == "Solve":
+                        TempPath         = "%s/Iter%02d/"                 % (self.OutputPath, self.i)
+                else:
+                        if self.CosmicsBoff:
+                                TempPath = "%s/Iter%02d/CosmicsBoff/" % (self.OutputPath, self.i)
+                        elif self.CosmicsBon:
+                                TempPath = "%s/Iter%02d/CosmicsBon/"   % (self.OutputPath, self.i)
+                        else:
+                                TempPath = "%s/Iter%02d/Collision/"       % (self.OutputPath, self.i)
+
+                os.system('mkdir -p %s' % TempPath)
+                os.system('mkdir -p %s' % TempPath+"/InDetRecExample")
+                if self.MonitoringScript != "":
+                        os.system('ln -s %s %s' % (self.MonitoringScript,TempPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
+
+                # Make Monitoring script dir and file here
+
+                #job=open(TempPath+self.JOBNAME,'w')
+                job=open(self.JOBNAME,'w')
+                job.write('##-------- Alignment Configuration --------------------\n')
+
+                for option in self.AlignmentOptions:
+                        if type(self.AlignmentOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.AlignmentOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.AlignmentOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- Loading the Alignment Levels --------------------\n')
+                job.write('include("'+str(self.AlignmentLevels)+'") \n')
+                job.write("\n")
+                job.write('##-------- Reconstruction Configuration --------------------\n')
+
+                for option in self.RecoOptions:
+                        if type(self.RecoOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.RecoOptions[option])+'\n'
+                                job.write(customoption)
+
+                if len(self.extraOptions):
+                        job.write("\n")
+                        job.write('##-------- Extra Configuration --------------------\n')
+
+                for option in self.extraOptions:
+                        if type(self.extraOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.extraOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- End of custom options -------------\n')
+
+                # Need to write the InDetRec Options Here:
+                job.write("\n")
+                job.write('##-------- Load Reconstruction or purely build geometry ---------------\n')
+                job.write('include("'+str(self.RecoScript)+'") \n')
+
+                job.write("\n")
+                job.write('##-------- Load Alignment --------------------\n')
+                job.write('include("'+str(self.AlignmentScript)+'") \n')
+                job.write("\n")
+                job.close()
+
+
 
 
 
@@ -179,1373 +174,1373 @@ class writeJobGrid:
 
 class writeScriptGridForTFile :
 
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     part,
-		     ATHENAREL,
-		     SCRIPTNAME,
-		     Datasets,
-		     outDS,
-		     AccSubJobID,
-		     AccSubInfo,
-		     JOBNAMES,
-		     thisJobDir,
-		     jobId,
-		     retryNo,
-		     GridOptions = {},
-		     AlignmentOptions = {}
-       		     ):
-
-			self.OutputPath       = OutputPath
-			self.preName          = preName
-			self.i                = iter
-			self.part             = part
-			self.ATHENAREL	      = ATHENAREL
-			self.SCRIPTNAME       = SCRIPTNAME
-			self.Datasets         =  Datasets
-			self.outDS            = outDS
-			self.AccSubJobID      = AccSubJobID
-			self.AccSubInfo       = AccSubInfo     
-			self.JOBNAMES         = JOBNAMES
-			self.thisJobDir       = thisJobDir
-			self.jobId            = -99
-			self.retryNo	      = 0
-			self.GridOptions      = GridOptions
-			self.AlignmentOptions = AlignmentOptions
-
-
-
-	def write(self) :
-
-
-		print (" start to create the job submit command line ...\n "   )
-		extOutFileStr    = "AlignmentTFile.root"
-		extOutFileStrAcc = extOutFileStr
-
-		if self.GridOptions["doMonitoring"] :
-			extOutFileStrAcc = extOutFileStr + ",monitoring.root"
-		if self.GridOptions["doTrkNtuple"]  :
-			extOutFileStrAcc += ",TrkValidation.root"
-
-		if self.part == "Accumulate":
-			self.SCRIPTNAME
-			script = open(self.SCRIPTNAME,'w')
-
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for index in range(len(self.Datasets.namesList("Customed"))) :
-						if ( "group" in self.GridOptions["userIDnum"] ) :
-							scriptStr   = "pathena --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][index])
-						else :
-							scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
-
-						if ("accumulateLibDS" in self.GridOptions) and (self.GridOptions["accumulateLibDS"] != "") :
-							scriptStr += " --libDS %s "          % self.GridOptions["accumulateLibDS"]
-							scriptStr += " --excludeFile %s "    % "*.cxx,*.h,*.o"
-
-						if ("removeFileList" in self.GridOptions) and (self.GridOptions["removeFileList"]   != "") :
-							scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
-
-						if ("fileList"       in self.GridOptions) and (self.GridOptions["fileList"]         != "") :
-							scriptStr += " --fileList %s "       % self.GridOptions["fileList"]
-
-						if ("siteName"	     in self.GridOptions) and (self.GridOptions["siteName"]         != "") :
-							scriptStr += " --site %s "           % self.GridOptions["siteName"]
-
-						if ("excludedSite"   in self.GridOptions) and (self.GridOptions["excludedSite"]     != "") :
-							scriptStr += " --excludedSite %s "   % self.GridOptions["excludedSite"]
-
-						if ("SkipFiles"      in self.GridOptions) and (self.GridOptions["SkipFiles"]        != "") :
-							scriptStr += " --nSkipFiles %s "     % self.GridOptions["SkipFiles"][self.i]
-
-
-						if self.GridOptions["dbRelease"]    == "":
-							scriptStr += " --dbRelease LATEST "
-						else :
-							scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-
-						scriptStr += " --extFile "
-
-						if (0 == self.i) : 
-							if os.path.isfile("initial_AlignmentConstants.root") :
-								scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "   
-							else : 
-								scriptStr += " Scaling.root " 
-						else :
-							lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-							if os.path.isfile("%s"  % lastAlignConstant ) : 
-								scriptStr += "  %s,Scaling.root "      % lastAlignConstant 
-								
-						
-		
-						if self.GridOptions["debugLevel"] == 4 :
-							self.GridOptions["doNFilesPerJobSplit"] = False
-							scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
-  	
-						if self.GridOptions["doNFilesPerJobSplit" ] :
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-							else : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) ) 
-						else : 
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-							
-							else :   
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							
-						script.write(scriptStr)
-							
-					print (" You are running alignment accumulation and using TFile for bookkeeping \n" )
-					print (" You are running on customed dataset and doing detailed tags configuration \n")
-					print (" the Grid job submission command line is : \n")
-					print (scriptStr)
-
-
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     part,
+                     ATHENAREL,
+                     SCRIPTNAME,
+                     Datasets,
+                     outDS,
+                     AccSubJobID,
+                     AccSubInfo,
+                     JOBNAMES,
+                     thisJobDir,
+                     jobId,
+                     retryNo,
+                     GridOptions = {},
+                     AlignmentOptions = {}
+                     ):
+
+                        self.OutputPath       = OutputPath
+                        self.preName          = preName
+                        self.i                = iter
+                        self.part             = part
+                        self.ATHENAREL        = ATHENAREL
+                        self.SCRIPTNAME       = SCRIPTNAME
+                        self.Datasets         =  Datasets
+                        self.outDS            = outDS
+                        self.AccSubJobID      = AccSubJobID
+                        self.AccSubInfo       = AccSubInfo
+                        self.JOBNAMES         = JOBNAMES
+                        self.thisJobDir       = thisJobDir
+                        self.jobId            = -99
+                        self.retryNo          = 0
+                        self.GridOptions      = GridOptions
+                        self.AlignmentOptions = AlignmentOptions
+
+
+
+        def write(self) :
+
+
+                print (" start to create the job submit command line ...\n "   )
+                extOutFileStr    = "AlignmentTFile.root"
+                extOutFileStrAcc = extOutFileStr
+
+                if self.GridOptions["doMonitoring"] :
+                        extOutFileStrAcc = extOutFileStr + ",monitoring.root"
+                if self.GridOptions["doTrkNtuple"]  :
+                        extOutFileStrAcc += ",TrkValidation.root"
+
+                if self.part == "Accumulate":
+                        self.SCRIPTNAME
+                        script = open(self.SCRIPTNAME,'w')
+
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for index in range(len(self.Datasets.namesList("Customed"))) :
+                                                if ( "group" in self.GridOptions["userIDnum"] ) :
+                                                        scriptStr   = "pathena --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][index])
+                                                else :
+                                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
+
+                                                if ("accumulateLibDS" in self.GridOptions) and (self.GridOptions["accumulateLibDS"] != "") :
+                                                        scriptStr += " --libDS %s "          % self.GridOptions["accumulateLibDS"]
+                                                        scriptStr += " --excludeFile %s "    % "*.cxx,*.h,*.o"
+
+                                                if ("removeFileList" in self.GridOptions) and (self.GridOptions["removeFileList"]   != "") :
+                                                        scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
+
+                                                if ("fileList"       in self.GridOptions) and (self.GridOptions["fileList"]         != "") :
+                                                        scriptStr += " --fileList %s "       % self.GridOptions["fileList"]
+
+                                                if ("siteName"       in self.GridOptions) and (self.GridOptions["siteName"]         != "") :
+                                                        scriptStr += " --site %s "           % self.GridOptions["siteName"]
+
+                                                if ("excludedSite"   in self.GridOptions) and (self.GridOptions["excludedSite"]     != "") :
+                                                        scriptStr += " --excludedSite %s "   % self.GridOptions["excludedSite"]
+
+                                                if ("SkipFiles"      in self.GridOptions) and (self.GridOptions["SkipFiles"]        != "") :
+                                                        scriptStr += " --nSkipFiles %s "     % self.GridOptions["SkipFiles"][self.i]
+
+
+                                                if self.GridOptions["dbRelease"]    == "":
+                                                        scriptStr += " --dbRelease LATEST "
+                                                else :
+                                                        scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+
+                                                scriptStr += " --extFile "
+
+                                                if (0 == self.i) :
+                                                        if os.path.isfile("initial_AlignmentConstants.root") :
+                                                                scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "
+                                                        else :
+                                                                scriptStr += " Scaling.root "
+                                                else :
+                                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                                scriptStr += "  %s,Scaling.root "      % lastAlignConstant
+
+                                                
+
+                                                if self.GridOptions["debugLevel"] == 4 :
+                                                        self.GridOptions["doNFilesPerJobSplit"] = False
+                                                        scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
+
+                                                if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                else :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+
+                                                script.write(scriptStr)
+
+                                        print (" You are running alignment accumulation and using TFile for bookkeeping \n" )
+                                        print (" You are running on customed dataset and doing detailed tags configuration \n")
+                                        print (" the Grid job submission command line is : \n")
+                                        print (scriptStr)
+
+
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
+
+
+                                if ("fileList" in self.GridOptions) and (self.GridOptions["fileList"]     != "") :
+                                        scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
+
+                                if ("siteName" in self.GridOptions ) and (self.GridOptions["siteName"]     != "") :
+                                        scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                                if ("excludedSite" in self.GridOptions) and (self.GridOptions["excludedSite"] != "") :
+                                        scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                                if self.GridOptions["dbRelease"]    == "":
+                                        scriptStr += "--dbRelease LATEST "
+                                else:
+                                        scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+
+
+                                scriptStr += " --extFile "
+
+                                if (0 == self.i) :
+                                        if os.path.isfile("initial_AlignmentConstants.root") :
+                                                scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "
+                                        else :
+                                                scriptStr += " Scaling.root "
+                                else :
+                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                scriptStr += "  %s,Scaling.root "      % lastAlignConstant
+
+
+
+
+                                if self.GridOptions["ColCPUs"][self.i] :
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forCollision)
+
+
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBon)
+
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBoff)
+
+                        script.close()
+
+
+                elif self.part == "Prun" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
+
+                        prunSolve = open("prunSolve.py",'w')
+                        prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n\n')
+                        prunSolve.write('inputTFiles = []\ninFiles = []\n\n\n')
+                        prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
+                        prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
+                        prunSolve.write('lines = out.split(\",\")\n')
+                        prunSolve.write('for line in lines:\n')
+                        prunSolve.write('\tif \"AlignmentTFile.root\" in str(line):\n\t\tinputTFiles.append(line)\n')
+
+                        tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
+                        prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
+
+                        tmpStr = ""
+
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+
+
+                        else :
+
+                                alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
+                                if os.path.isfile(alignConstants) :
+                                        tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
+                                else :
+                                        print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
+                                        sys.exit()
+
+
+                        prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
+                        prunSolve.write('jofile.write(\"inputTFiles = \" + str(inputTFiles) + \'\\n\')\n')
+
+                        prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )
+                        prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
+
+                        prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+
+                        # debugging ...
+                        print ("prunSolve.py: ")
+                        os.system(" cat prunSolve.py ")
+                        print (" newSolveJO_Iter%02d.py: " % self.i)
+                        os.system(" cat %s " % tmpStrJO)
+
+
+                        extOutFileStrSol = ""
+
+                        if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or
+                                ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                                ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                                ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                                ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                                ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                                print ("hmn, you are going to run L3 alignment, Eigen is being used, so no eigen value information!!! ")
+
+                        else :
 
-				if self.GridOptions["ColCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
-
-
-				if ("fileList" in self.GridOptions) and (self.GridOptions["fileList"]     != "") :
-					scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
-
-				if ("siteName" in self.GridOptions ) and (self.GridOptions["siteName"]     != "") :
-					scriptStr += "--site %s "         % self.GridOptions["siteName"]
-						
-				if ("excludedSite" in self.GridOptions) and (self.GridOptions["excludedSite"] != "") :
-					scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
-						                       
-				if self.GridOptions["dbRelease"]    == "":
-					scriptStr += "--dbRelease LATEST "
-				else:
-					scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-
-
-				scriptStr += " --extFile "
-
-				if (0 == self.i) : 
-					if os.path.isfile("initial_AlignmentConstants.root") :
-						scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "   
-					else : 
-						scriptStr += " Scaling.root " 
-				else :
-					lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-					if os.path.isfile("%s"  % lastAlignConstant ) : 
-						scriptStr += "  %s,Scaling.root "      % lastAlignConstant 
+                                if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.bin,"
+                                        extOutFileStrSol += "eigenvalues.bin,"
 
+                                if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.txt,"
+                                        extOutFileStrSol += "eigenvalues.txt,"
 
 
+                        extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,alignlogfile.txt" % (self.i)
 
-				if self.GridOptions["ColCPUs"][self.i] :
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )   
-					else :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forCollision)
-
-
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBon)
-
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBoff)
-	
-			script.close()
-
-
-		elif self.part == "Prun" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
-
-			prunSolve = open("prunSolve.py",'w')
-			prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n\n')
-			prunSolve.write('inputTFiles = []\ninFiles = []\n\n\n')
-			prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
-			prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
-			prunSolve.write('lines = out.split(\",\")\n')
-			prunSolve.write('for line in lines:\n')
-			prunSolve.write('\tif \"AlignmentTFile.root\" in str(line):\n\t\tinputTFiles.append(line)\n')
-	
-			tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
-			prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
-
-			tmpStr = ""
 
-			if (0 == self.i) :
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+                        extFileStr = ""
 
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        extFileStr = " initial_AlignmentConstants.root,Scaling.root "
+                                else :
+                                        extFileStr = " Scaling.root "
 
-			else : 
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        extFileStr = lastAlignConstant + ",Scaling.root "
 
-				alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
-				if os.path.isfile(alignConstants) :
-					tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
-				else :
-					print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
-					sys.exit()
 
+                        TNFiles = 0
+                        tmpInDS = []
+                        print (" self.outDS : " , self.outDS)
 
-			prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
-			prunSolve.write('jofile.write(\"inputTFiles = \" + str(inputTFiles) + \'\\n\')\n')
-	
-			prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )                               
-			prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
-	
-			prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+                        print (" self.outDS[Customed]: " , self.outDS["Customed"])
+                        for item in self.outDS["Customed"] :
+                                print ("self.outDS[Customed] item:  " , item)
+                                tmpInDS.append(item+"/")
+                                rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
+                                print (" nFiles of this outDS[Customed] :" , nFiles)
+                                TNFiles += int(nFiles)
+                        solveInDS = ",".join(tmpInDS)
 
-			# debugging ...
-			print ("prunSolve.py: ")
-			os.system(" cat prunSolve.py ")
-			print (" newSolveJO_Iter%02d.py: " % self.i)
-			os.system(" cat %s " % tmpStrJO)
+                        #scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --official --voms=atlas:/atlas/det-indet/Role=production --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
+                        else :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
 
-			extOutFileStrSol = ""
 
-			if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-				("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-				("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-				("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-				("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-				("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
-				print ("hmn, you are going to run L3 alignment, Eigen is being used, so no eigen value information!!! ")
-			 
-			else : 
+                        if  (True is self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-				if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.bin,";
-					extOutFileStrSol += "eigenvalues.bin," ;
-			   
-				if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.txt,";
-					extOutFileStrSol += "eigenvalues.txt," ;
 
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-			extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,alignlogfile.txt" % (self.i)
-
-
-			extFileStr = ""
-
-			if (0 == self.i) :
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					extFileStr = " initial_AlignmentConstants.root,Scaling.root "
-				else : 
-					extFileStr = " Scaling.root "
+                        script.write(scriptStr)
+                        script.close()
 
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					extFileStr = lastAlignConstant + ",Scaling.root "
 
+                else :
+                        print ("Hi, except accumulate and solve, where do you want to go?")
 
-			TNFiles = 0
-			tmpInDS = []
-			print (" self.outDS : " , self.outDS)
-
-			print (" self.outDS[Customed]: " , self.outDS["Customed"])
-			for item in self.outDS["Customed"] :
-				print ("self.outDS[Customed] item:  " , item)
-				tmpInDS.append(item+"/")
-				rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
-				print (" nFiles of this outDS[Customed] :" , nFiles)
-				TNFiles += int(nFiles)
-			solveInDS = ",".join(tmpInDS)
 
-			#scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-			if ( "group" in self.GridOptions["userIDnum"] ) : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --official --voms=atlas:/atlas/det-indet/Role=production --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-			else : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-
-
-
-			if  (True == self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
-				scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
+        def send(self) :
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
+                os.system("pwd")
+                os.system("chmod 777 %s" % self.SCRIPTNAME)
 
-			script.write(scriptStr)
-			script.close()
+                print ("----------------------------------------------")
+                if self.part == "Accumulate":
+                        print ("Sending the %dth iteration accumulation job to grid at site ... %s" % (self.i, self.GridOptions["siteName"]))
 
+                        if self.Datasets.containType("Customed") :
+                                for i in range(len(self.Datasets.namesList("Customed"))) :
+                                        os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-		else : 
-			print ("Hi, except accumulate and solve, where do you want to go?")
+                        os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
+                        os.system("source %s" % self.SCRIPTNAME)
 
 
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for i in range(len(self.Datasets.namesList("Customed"))) :
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
+                                else :
+                                        print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
 
-	def send(self) :
 
-		os.system("pwd")
-		os.system("chmod 777 %s" % self.SCRIPTNAME)
-
-		print ("----------------------------------------------")
-		if self.part == "Accumulate":           
-			print ("Sending the %dth iteration accumulation job to grid at site ... %s" % (self.i, self.GridOptions["siteName"]))
-		 
-			if self.Datasets.containType("Customed") :
-				for i in range(len(self.Datasets.namesList("Customed"))) :   
-					os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
-		     
-			os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
-			os.system("source %s" % self.SCRIPTNAME)
-		     
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for i in range(len(self.Datasets.namesList("Customed"))) :
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
-				else :
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
-			     	                                       
-			     	
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				if self.GridOptions["ColCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
-				 	
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
-				   	
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Prun" :
 
-			print ("  Sending the %dth iteration prun matrix solving job to grid site ... %s" % (self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_prunSolve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
-	
-			print ("----------------------------------------------")
-		
-		else : 
+                elif self.part == "Prun" :
 
-			print ("Hi, where do you want to go?")
+                        print ("  Sending the %dth iteration prun matrix solving job to grid site ... %s" % (self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_prunSolve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
+                        print ("----------------------------------------------")
 
+                else :
 
+                        print ("Hi, where do you want to go?")
 
-	def wait(self,logfilename):
-		print ("Pathena wait()" )
-	
-		if self.jobId == -99:
-			print ("logiflename: ",logfilename)
-			ret, out = subprocess.getstatusoutput("cat "+logfilename)
-			lines = out.split('\n')
+
+
+
+        def wait(self,logfilename):
+                print ("Pathena wait()" )
+
+                if self.jobId == -99:
+                        print ("logfilename: ",logfilename)
+                        ret, out = subprocess.getstatusoutput("cat "+logfilename)
+                        lines = out.split('\n')
              
-			# looping over all the job IDs
-			for line in lines:
-				items = line.split()
-				if len(items)>0 and items[0]=="JobID" :
-					self.jobId = int(items[2])
-					print ("jobId = ",self.jobId)
-
-				# check status of each job ID
-				# while self.bjobs() == 0:
-				while self.bjobs() != 1:       
-					print (" waiting for jobID ",self.jobId,"...")
-					time.sleep(300)
-
-
-
-
-
-	def bjobs(self) :
-		if self.jobId == -99:
-			print ("need jobId")
-			sys.exit(3)
-	
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		print ("statusoutput: ",out)
-		for line in out.split("\n") :
-			items_1 = line.split()
-			if len(items_1)>0 and items_1[0] != "jobStatus" :
-				continue
-	
-			if len(items_1)>2 :
-				if items_1[2] == "frozen" :
-					print ("jobStatus: " , items_1[2])
-	
-					### search the libDS #######   
-					for line2 in out.split("\n") :
-						print (" line2: " , line2)
-						items_2 = line2.split()
-						if items_2[0] == "libDS" :
-							break 
-	
-					if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :   
-						self.GridOptions["accumulateLibDS"] = items_2[2]
-						print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
-	
-					if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
-						self.GridOptions["solveLibDS"     ] = items_2[2]                               
-						print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
-	                                  
-					return 1
-				else :
-					print ("jobStatus: ",items_1[2])
-					return 0
-	
-
-
-	def whetherRetry(self) :               
-		nfailed   = 0
-		nfinished = 0
-		if self.jobId == -99 :
-			print ("need jobId, JOB SUBMISSION FAILED!!!, check the log files")
-			sys.exit(3)
-	
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
+                        # looping over all the job IDs
+                        for line in lines:
+                                items = line.split()
+                                if len(items)>0 and items[0]=="JobID" :
+                                        self.jobId = int(items[2])
+                                        print ("jobId = ",self.jobId)
+
+                                # check status of each job ID
+                                # while self.bjobs() == 0:
+                                while self.bjobs() != 1:
+                                        print (" waiting for jobID ",self.jobId,"...")
+                                        time.sleep(300)
+
+
+
+
+
+        def bjobs(self) :
+                if self.jobId == -99:
+                        print ("need jobId")
+                        sys.exit(3)
+
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
+                print ("statusoutput: ",out)
+                for line in out.split("\n") :
+                        items_1 = line.split()
+                        if len(items_1)>0 and items_1[0] != "jobStatus" :
+                                continue
+
+                        if len(items_1)>2 :
+                                if items_1[2] == "frozen" :
+                                        print ("jobStatus: " , items_1[2])
+
+                                        ### search the libDS #######
+                                        for line2 in out.split("\n") :
+                                                print (" line2: " , line2)
+                                                items_2 = line2.split()
+                                                if items_2[0] == "libDS" :
+                                                        break
+
+                                        if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :
+                                                self.GridOptions["accumulateLibDS"] = items_2[2]
+                                                print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
+
+                                        if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
+                                                self.GridOptions["solveLibDS"     ] = items_2[2]
+                                                print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
+
+                                        return 1
+                                else :
+                                        print ("jobStatus: ",items_1[2])
+                                        return 0
+
+
+
+        def whetherRetry(self) :
+                nfailed   = 0
+                nfinished = 0
+                if self.jobId == -99 :
+                        print ("need jobId, JOB SUBMISSION FAILED!!!, check the log files")
+                        sys.exit(3)
+
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
                
-		lines  = out.split("\n")
-		nlines = len(lines)
-		print (" nlines: " , nlines)
-	
-		for i in range(0, nlines) :
-			items = lines[i].split()
-			print (" items: " , items)
-			if "failed"   in items :
-				nfailed   = int(items[-1])
-			if "finished" in items :
-				nfinished = int(items[-1])
-				# Hi, just stop, next line will cause the loop crash ...
-				break
-
-
-		if (self.retryNo == 0) and (self.i == 0) : 
-			if ( 0 == (nfailed + nfinished - 1) ) : 
-				successRatio = -1 
-			else : 
-				successRatio = float(nfinished - 1)/(nfailed + nfinished - 1) 
-		else : 
-			successRatio = float(nfinished)/(nfailed + nfinished) 
-
-
-		print ("the success ratio: ", successRatio)
-		if successRatio >= self.GridOptions["successRatioCut"] :
-			print ("The success ratio is higher than the cut, will not retry ---")
-			return False
-		else :
-			print ("The success ratio is lower than the cut, will retry ---")
-			return True
-	
-
-
-
-
-	def retry(self) :
-		jobId = self.jobId
-		retrystring = "pbook -c 'retry(" + str(jobId) + ")'"   
-		ret, out    = subprocess.getstatusoutput(retrystring)
-		print (" out1: " , out)
-		#self.jobId =  self.jobId + 2
-
-		## get the new JobID ## 
-		for line in out.split("\n") : 
-			items = line.split() 
-			nitems = len(items) 
-			for i in range(0, nitems) : 
-				if items[i] == "New" : 
-					jobstring = items[i+1].split("=") 
-					self.jobId = int(jobstring[-1]) 
-					print ("new JobID: " , self.jobId )
-					break 
-		self.retryNo = self.retryNo + 1 
-
-
-
-		while self.bjobs() != 1 :
-			print (" waiting for the first retry jobID " , self.jobId , "...")
-			time.sleep(300)
-	
-		if self.whetherRetry() :
-			jobId = self.jobId
-			retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
-			ret, out    = subprocess.getstatusoutput(retrystring)
-			print (" out2: " , out)
-	
-			#self.jobId = self.jobId + 2
-
-			## get the new JobID ## 
-			for line in out.split("\n") : 
-				items = line.split() 
-				nitems = len(items) 
-				for i in range(0, nitems) : 
-					if items[i] == "New" : 
-						jobstring = items[i+1].split("=") 
-						self.jobId = int(jobstring[-1]) 
-						print ("new JobID: " , self.jobId )
-						break 
-			self.retryNo = self.retryNo + 1
-
-
-
-			while self.bjobs() != 1 :
-				print (" waiting for the second retry jobID " , self.jobId,"...")
-				time.sleep(300)
-	
-			
-	
-	def getGridOptions(self, option = "") :
-		return self.GridOptions[option]
-	
-	def setGridOptions(self, option1 = "", option2 = "") :
-		self.GridOptions[option1] = option2
-
-
-
-
-
-
-
-
-
-class writeScriptGrid :		
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     part,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS,
-		     SCRIPTNAME,
-		     Datasets,
-		     outDS,
-		     AccSubJobID,
-		     AccSubInfo,
-		     JOBNAMES,
-		     thisJobDir,
-		     jobId,
-		     GridOptions = {},
-		     AlignmentOptions = {}
-		     ):
-		self.OutputPath       = OutputPath
-		self.preName          = preName
-		self.i                = iter
-		self.part             = part
-		self.CMTDIR           = CMTDIR
-		self.ATHENAREL        = ATHENAREL
-		self.TAGS             = TAGS
-		self.SCRIPTNAME       = SCRIPTNAME
-		self.Datasets         =  Datasets
-		self.outDS            = outDS
-		self.AccSubJobID      = AccSubJobID 
-		self.AccSubInfo       = AccSubInfo	
-		self.JOBNAMES         = JOBNAMES
-		self.thisJobDir       = thisJobDir
-		self.jobId            = -99
-		self.GridOptions      = GridOptions
-		self.AlignmentOptions = AlignmentOptions
-
-
-	def write(self):
-
-
-		extOutFile = []
-
-		extOutFile.append("hitmap.bin")
-		extOutFile.append("matrix.bin")
-		extOutFile.append("vector.bin")
-			
-
-		if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-			("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-			("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-			("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-			("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-			("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) : 
-			#extOutFile.append("vector.txt")
-			print ("hmmn, you plan to do Level 3 alignment ... ")
-
-			### put the three flags writeMatrixFileTxt,writeHitmapTxt,writeEigenMatTxt in the GridOptions, not in the AlignmentOptions
-			if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] == True  ) : 
-				extOutFile.append("matrix.txt")
-				extOutFile.append("vector.txt")
-			if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"]     == True  ) :        
-				extOutFile.append("hitmap.txt")
-
-		else :
-			extOutFile.append("matrix.txt")
-			extOutFile.append("hitmap.txt")				
-			if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] == False  ) :        
-				extOutFile.remove("matrix.txt")
-			if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"    ] == False  ) :        
-				extOutFile.remove("hitmap.txt")
-
-			
-		extOutFileStr = ",".join(extOutFile)
-
-
-		if self.GridOptions["doMonitoring"] :
-			extOutFileStrAcc = extOutFileStr + ",monitoring.root"
-		if self.GridOptions["doTrkNtuple"]  :
-			extOutFileStrAcc += ",TrkValidation.root"
-
-
-		if self.part == "Accumulate":
-			print (self.SCRIPTNAME)
-			script = open(self.SCRIPTNAME,'w')
-
-			if self.Datasets.containType("Customed") : 
-				if self.Datasets.doDetailedTagsConfig() : 
-					for index in range(len(self.Datasets.namesList("Customed"))) :
-
-						if ("group" in self.GridOptions["userIDnum"] ) :
-							scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production %s  " % (self.JOBNAMES["Customed"][index])
-						else : 
-							scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
-						
-						if self.GridOptions["accumulateLibDS"]   != "":
-							scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
-							scriptStr += " --excludeFile %s "  % "*.cxx,*.h,*.o"
-
-						if  "removeFileList" in self.GridOptions : 
-							if self.GridOptions["removeFileList"] != "" :
-								scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"] 
-
-						if self.GridOptions["fileList"]     != "":
-							scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
-						if self.GridOptions["siteName"]     != "":
-							scriptStr += " --site %s "         % self.GridOptions["siteName"]
-						if self.GridOptions["excludedSite"] != "":
-							scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
-						if self.GridOptions["dbRelease"]    == "":
-							scriptStr += " --dbRelease LATEST "
-						else:
-							scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-						#if self.GridOptions["reUseAccumulateLibDS"] :
-						if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") : 
-							scriptStr += " --extFile  initial_AlignmentConstants.root,Scaling.root  "    
-						else :
-							lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-							if os.path.isfile("%s"  % lastAlignConstant ) :  
-								scriptStr += " --extFile %s,Scaling.root  "      % lastAlignConstant    
-						
-						#print (index)
-						#print (self.Datasets.namesList("Customed")[index])
-						#print (self.outDS["Customed"][index])
-						#print (self.Datasets.nFiles("Customed")[index])
-						#print (self.GridOptions["CustomedCPUs"][index])
-
-
-						if self.GridOptions["debugLevel"] == 4 :
-							self.GridOptions["doNFilesPerJobSplit"] = False 
-							scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
-
-						if self.GridOptions["doNFilesPerJobSplit" ] :
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							else : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-						else : 
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							else :   
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-						script.write(scriptStr)
-
-				else : 
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with a LONG inDS: ")
-
-					if ( "group" in self.GridOptions["userIDnum"] ) :
-						scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][0])
-					else :
-						scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][0])
-
-					#if self.GridOptions["accumulateLibDS"]   != "":
-					#	scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
-					if self.GridOptions["fileList"]     != "":
-						scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
-					if self.GridOptions["siteName"]     != "":
-						scriptStr += " --site %s "         % self.GridOptions["siteName"]
-					if self.GridOptions["excludedSite"] != "":
-						scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
-					if self.GridOptions["dbRelease"]    == "":
-						scriptStr += " --dbRelease LATEST "
-					else:
-						scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-					#if self.GridOptions["reUseAccumulateLibDS"] :
-					if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-						scriptStr += " --extFile  initial_AlignmentConstants.root "      
-					else :
-						lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-						if os.path.isfile("%s"  % lastAlignConstant ) :
-							scriptStr += " --extFile %s "      % lastAlignConstant
-
-
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedNFPJ"][0], self.GridOptions["TmpWorkDir"] )
-					else : 
-						scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedCPUs"][0], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr)
-
-
-			######################################################################################################################################		
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-
-				if self.GridOptions["ColCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
-
-				#if self.GridOptions["accumulateLibDS"]   != "":
-				#	scriptStr += "--libDS %s "        % self.GridOptions["accumulateLibDS"]
-
-				if self.GridOptions["fileList"]     != "":
-					scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
-
-				if self.GridOptions["siteName"]     != "":
-					scriptStr += "--site %s "         % self.GridOptions["siteName"]
-
-				if self.GridOptions["excludedSite"] != "":
-					scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
-			
-				if self.GridOptions["dbRelease"]    == "":
-					scriptStr += "--dbRelease LATEST "
-				else:
-					scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"] 
-		
-
-				#if self.GridOptions["reUseAccumulateLibDS"] :
-
-				if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-					scriptStr += " --extFile  initial_AlignmentConstants.root "
-				else :                                         
-					lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-					if os.path.isfile("%s"  % lastAlignConstant ) :    
-						scriptStr += " --extFile %s "      % lastAlignConstant
-
-
-        	                ########################## for submitting different topology jobs ########################################################################
-				if self.GridOptions["ColCPUs"][self.i] :
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )   
-					else : 
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forCollision)
-
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBon)
-
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBoff)
-			script.close()
-
-
-
-		elif self.part == "Grid" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
-		
-			extOutFileStrSol = extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
- 			#  should consider more details about how to get one files and the corresponding geometry/condition tags, below is just temporary solution 
-			'''
-			if self.Datasets.containType("Customed") :
-				scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Customed"), self.Datasets.oneFileName("Customed"),   self.outDS["Solve"], extOutFileStrSol)
-
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Collision"), self.Datasets.oneFileName("Collision"), self.outDS["Solve"], extOutFileStrSol)
-			'''
+                lines  = out.split("\n")
+                nlines = len(lines)
+                print (" nlines: " , nlines)
+
+                for i in range(0, nlines) :
+                        items = lines[i].split()
+                        print (" items: " , items)
+                        if "failed"   in items :
+                                nfailed   = int(items[-1])
+                        if "finished" in items :
+                                nfinished = int(items[-1])
+                                # stop once "finished" is seen: parsing the lines after the
+                                # summary would make this loop crash
+                                break
+
+
+                if (self.retryNo == 0) and (self.i == 0) :
+                        if ( 0 == (nfailed + nfinished - 1) ) :
+                                successRatio = -1
+                        else :
+                                successRatio = float(nfinished - 1)/(nfailed + nfinished - 1)
+                else :
+                        successRatio = float(nfinished)/(nfailed + nfinished)
+
+
+                print ("the success ratio: ", successRatio)
+                if successRatio >= self.GridOptions["successRatioCut"] :
+                        print ("The success ratio is higher than the cut, will not retry ---")
+                        return False
+                else :
+                        print ("The success ratio is lower than the cut, will retry ---")
+                        return True
+
+
+
+
+
+        def retry(self) :
+                jobId = self.jobId
+                retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                ret, out    = subprocess.getstatusoutput(retrystring)
+                print (" out1: " , out)
+                #self.jobId =  self.jobId + 2
+
+                ## get the new JobID ##
+                for line in out.split("\n") :
+                        items = line.split()
+                        nitems = len(items)
+                        for i in range(0, nitems) :
+                                if items[i] == "New" :
+                                        jobstring = items[i+1].split("=")
+                                        self.jobId = int(jobstring[-1])
+                                        print ("new JobID: " , self.jobId )
+                                        break
+                self.retryNo = self.retryNo + 1
+
+
+
+                while self.bjobs() != 1 :
+                        print (" waiting for the first retry jobID " , self.jobId , "...")
+                        time.sleep(300)
+
+                if self.whetherRetry() :
+                        jobId = self.jobId
+                        retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                        ret, out    = subprocess.getstatusoutput(retrystring)
+                        print (" out2: " , out)
+
+                        #self.jobId = self.jobId + 2
+
+                        ## get the new JobID ##
+                        for line in out.split("\n") :
+                                items = line.split()
+                                nitems = len(items)
+                                for i in range(0, nitems) :
+                                        if items[i] == "New" :
+                                                jobstring = items[i+1].split("=")
+                                                self.jobId = int(jobstring[-1])
+                                                print ("new JobID: " , self.jobId )
+                                                break
+                        self.retryNo = self.retryNo + 1
+
+
+
+                        while self.bjobs() != 1 :
+                                print (" waiting for the second retry jobID " , self.jobId,"...")
+                                time.sleep(300)
+
+
+        
+        def getGridOptions(self, option = "") :
+                return self.GridOptions[option]
+
+        def setGridOptions(self, option1 = "", option2 = "") :
+                self.GridOptions[option1] = option2
+
+
+
+
+
+
+
+
+
+class writeScriptGrid :
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     part,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS,
+                     SCRIPTNAME,
+                     Datasets,
+                     outDS,
+                     AccSubJobID,
+                     AccSubInfo,
+                     JOBNAMES,
+                     thisJobDir,
+                     jobId,
+                     GridOptions = {},
+                     AlignmentOptions = {}
+                     ):
+                self.OutputPath       = OutputPath
+                self.preName          = preName
+                self.i                = iter
+                self.part             = part
+                self.CMTDIR           = CMTDIR
+                self.ATHENAREL        = ATHENAREL
+                self.TAGS             = TAGS
+                self.SCRIPTNAME       = SCRIPTNAME
+                self.Datasets         =  Datasets
+                self.outDS            = outDS
+                self.AccSubJobID      = AccSubJobID
+                self.AccSubInfo       = AccSubInfo
+                self.JOBNAMES         = JOBNAMES
+                self.thisJobDir       = thisJobDir
+                self.jobId            = -99
+                self.GridOptions      = GridOptions
+                self.AlignmentOptions = AlignmentOptions
+
+
+        def write(self):
+
+
+                extOutFile = []
+
+                extOutFile.append("hitmap.bin")
+                extOutFile.append("matrix.bin")
+                extOutFile.append("vector.bin")
+
+
+                if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
+                        ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                        ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                        ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                        ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                        ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                        #extOutFile.append("vector.txt")
+                        print ("hmmn, you plan to do Level 3 alignment ... ")
+
+                        ### put the three flags writeMatrixFileTxt,writeHitmapTxt,writeEigenMatTxt in the GridOptions, not in the AlignmentOptions
+                        if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] is True  ) :
+                                extOutFile.append("matrix.txt")
+                                extOutFile.append("vector.txt")
+                        if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"]     is True  ) :
+                                extOutFile.append("hitmap.txt")
+
+                else :
+                        extOutFile.append("matrix.txt")
+                        extOutFile.append("hitmap.txt")
+                        if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] is False  ) :
+                                extOutFile.remove("matrix.txt")
+                        if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"    ] is False  ) :
+                                extOutFile.remove("hitmap.txt")
+
+
+                extOutFileStr = ",".join(extOutFile)
+
+
+                if self.GridOptions["doMonitoring"] :
+                        extOutFileStrAcc = extOutFileStr + ",monitoring.root"
+                if self.GridOptions["doTrkNtuple"]  :
+                        extOutFileStrAcc += ",TrkValidation.root"
+
+
+                if self.part == "Accumulate":
+                        print (self.SCRIPTNAME)
+                        script = open(self.SCRIPTNAME,'w')
+
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for index in range(len(self.Datasets.namesList("Customed"))) :
+
+                                                if ("group" in self.GridOptions["userIDnum"] ) :
+                                                        scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production %s  " % (self.JOBNAMES["Customed"][index])
+                                                else :
+                                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
+
+                                                if self.GridOptions["accumulateLibDS"]   != "":
+                                                        scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
+                                                        scriptStr += " --excludeFile %s "  % "*.cxx,*.h,*.o"
+
+                                                if  "removeFileList" in self.GridOptions :
+                                                        if self.GridOptions["removeFileList"] != "" :
+                                                                scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
+
+                                                if self.GridOptions["fileList"]     != "":
+                                                        scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
+                                                if self.GridOptions["siteName"]     != "":
+                                                        scriptStr += " --site %s "         % self.GridOptions["siteName"]
+                                                if self.GridOptions["excludedSite"] != "":
+                                                        scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
+                                                if self.GridOptions["dbRelease"]    == "":
+                                                        scriptStr += " --dbRelease LATEST "
+                                                else:
+                                                        scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                                #if self.GridOptions["reUseAccumulateLibDS"] :
+                                                if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                                        scriptStr += " --extFile  initial_AlignmentConstants.root,Scaling.root  "
+                                                else :
+                                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                                scriptStr += " --extFile %s,Scaling.root  "      % lastAlignConstant
+
+                                                #print (index)
+                                                #print (self.Datasets.namesList("Customed")[index])
+                                                #print (self.outDS["Customed"][index])
+                                                #print (self.Datasets.nFiles("Customed")[index])
+                                                #print (self.GridOptions["CustomedCPUs"][index])
+
+
+                                                if self.GridOptions["debugLevel"] == 4 :
+                                                        self.GridOptions["doNFilesPerJobSplit"] = False
+                                                        scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
+
+                                                if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                else :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                script.write(scriptStr)
+
+                                else :
+                                        print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with a LONG inDS: ")
+
+                                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                                scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][0])
+                                        else :
+                                                scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][0])
+
+                                        #if self.GridOptions["accumulateLibDS"]   != "":
+                                        #       scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
+                                        if self.GridOptions["fileList"]     != "":
+                                                scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
+                                        if self.GridOptions["siteName"]     != "":
+                                                scriptStr += " --site %s "         % self.GridOptions["siteName"]
+                                        if self.GridOptions["excludedSite"] != "":
+                                                scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
+                                        if self.GridOptions["dbRelease"]    == "":
+                                                scriptStr += " --dbRelease LATEST "
+                                        else:
+                                                scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                        #if self.GridOptions["reUseAccumulateLibDS"] :
+                                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                                scriptStr += " --extFile  initial_AlignmentConstants.root "
+                                        else :
+                                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                        scriptStr += " --extFile %s "      % lastAlignConstant
+
+
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedNFPJ"][0], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedCPUs"][0], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr)
+
+
+                        ######################################################################################################################################
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
+
+                                #if self.GridOptions["accumulateLibDS"]   != "":
+                                #       scriptStr += "--libDS %s "        % self.GridOptions["accumulateLibDS"]
+
+                                if self.GridOptions["fileList"]     != "":
+                                        scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
+
+                                if self.GridOptions["siteName"]     != "":
+                                        scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                                if self.GridOptions["excludedSite"] != "":
+                                        scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                                if self.GridOptions["dbRelease"]    == "":
+                                        scriptStr += "--dbRelease LATEST "
+                                else:
+                                        scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                #if self.GridOptions["reUseAccumulateLibDS"] :
+
+                                if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                        scriptStr += " --extFile  initial_AlignmentConstants.root "
+                                else :
+                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                scriptStr += " --extFile %s "      % lastAlignConstant
+
+
+                                ########################## for submitting different topology jobs ########################################################################
+                                if self.GridOptions["ColCPUs"][self.i] :
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forCollision)
+
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBon)
+
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBoff)
+                        script.close()
+
+
+
+                elif self.part == "Grid" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
+
+                        extOutFileStrSol = extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
+                        #  should consider in more detail how to get one file and the corresponding geometry/condition tags; below is just a temporary solution
+                        '''
+                        if self.Datasets.containType("Customed") :
+                                scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Customed"), self.Datasets.oneFileName("Customed"),   self.outDS["Solve"], extOutFileStrSol)
+
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Collision"), self.Datasets.oneFileName("Collision"), self.outDS["Solve"], extOutFileStrSol)
+                        '''
  
-			if ( "group" in self.GridOptions["userIDnum"] ) :
-				scriptStr  = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
-			      
-			else :
-				scriptStr  = "pathena %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr  = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
+
+                        else :
+                                scriptStr  = "pathena %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
+
+
+                        if ( True is self.GridOptions["reUseSolveLibDS"] ) and self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += "--libDS %s "        % self.GridOptions["solveLibDS"]
+
+                        if self.GridOptions["siteName"]     != "":
+                                scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                        if self.GridOptions["excludedSite"] != "":
+                                scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                        if self.GridOptions["dbRelease"]    == "":
+                                scriptStr += "--dbRelease LATEST "
+                        else:
+                                scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
 
 
-			if ( True == self.GridOptions["reUseSolveLibDS"] ) and self.GridOptions["solveLibDS"]   != "":
-				scriptStr += "--libDS %s "        % self.GridOptions["solveLibDS"]
+                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                scriptStr += " --extFile  initial_AlignmentConstants.root,"
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        scriptStr += " --extFile %s,"      % lastAlignConstant
 
-			if self.GridOptions["siteName"]     != "":
-				scriptStr += "--site %s "         % self.GridOptions["siteName"]
+                        scriptStr += "*.bin "
 
-			if self.GridOptions["excludedSite"] != "":
-				scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+                        if self.GridOptions["reUseSolveLibDS"] :
+                                #ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
+                                ret, out = subprocess.getstatusoutput("cat GridAccOutDS.txt")
+                                print ("out: ",out)
+                                #lines = out.split('\n')
+                                #extFiles = []
 
-			if self.GridOptions["dbRelease"]    == "":
-				scriptStr += "--dbRelease LATEST "
-			else:
-				scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
 
+                                #fileName = open('GridAccOutDS.txt', 'r+')
+                                #ret, out = subprocess.getstatusoutput('fileName.read()')
+                                #lines = out.split('\n')
+                                #extFiles = []
 
+                                #for line in lines :
+                                #       print ("line: ", line)
+                                #       thisStr = "%s/\*.bin" % line
+                                #       extFiles.append(thisStr)
+                                #print (" thisStr: " , thisStr)
+                                #extFileStr = ",".join(extFiles)
+                                #print (" extFileStr: " , extFileStr)
+                                #scriptStr += " --extFile %s "     %  extFileStr
 
-			if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-				scriptStr += " --extFile  initial_AlignmentConstants.root,"
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					scriptStr += " --extFile %s,"      % lastAlignConstant
 
-			scriptStr += "*.bin "
 
-			if self.GridOptions["reUseSolveLibDS"] : 
-				#ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
-				ret, out = subprocess.getstatusoutput("cat GridAccOutDS.txt")
-				print ("out: ",out)
-				lines = out.split('\n')
-				extFiles = []
-				 
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_solve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-				#fileName = open('GridAccOutDS.txt', 'r+')				
- 				#ret, out = subprocess.getstatusoutput('fileName.read()')
-				#lines = out.split('\n')
-				#extFiles = []
+                        script.write(scriptStr)
+                        script.close()
 
-				#for line in lines :
-				#	print ("line: ", line)
-				#	thisStr = "%s/\*.bin" % line
-				#	extFiles.append(thisStr)
-				#print (" thisStr: " , thisStr)
-				#extFileStr = ",".join(extFiles)
-				#print (" extFileStr: " , extFileStr)
-				#scriptStr += " --extFile %s "     %  extFileStr
+                elif self.part == "Prun" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
 
 
+                        prunSolve = open("prunSolve.py",'w')
+                        prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n')
+                        prunSolve.write('inputHitmapFiles = []\ninputMatrixFiles = []\ninputVectorFiles = []\ninFiles = []\n\n\n')
+                        prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
+                        prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
+                        prunSolve.write('lines = out.split(\",\")\n')
+                        prunSolve.write('for line in lines:\n')
+                        prunSolve.write('\tif \"hitmap.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineHitmap = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineHitmap)\n\t\t\tinputHitmapFiles.append(lineHitmap)\n\t\telse : \n\t\t\tinputHitmapFiles.append(line)\n')
+                        prunSolve.write('\tif \"vector.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineVector = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineVector)\n\t\t\tinputVectorFiles.append(lineVector)\n\t\telse : \n\t\t\tinputVectorFiles.append(line)\n')
+                        prunSolve.write('\tif \"matrix.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineMatrix = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineMatrix)\n\t\t\tinputMatrixFiles.append(lineMatrix)\n\t\telse : \n\t\t\tinputMatrixFiles.append(line)\n')
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_solve.txt; \n" % self.GridOptions["TmpWorkDir"]
+                        tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
+                        prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
 
-			script.write(scriptStr)
-			script.close()
 
-		elif self.part == "Prun" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
+                        tmpStr = ""
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+                        else :
+                                alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
+                                if os.path.isfile(alignConstants) :
+                                        tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
+                                else :
+                                        print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
+                                        sys.exit()
 
 
-			prunSolve = open("prunSolve.py",'w')
-			prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n')
-			prunSolve.write('inputHitmapFiles = []\ninputMatrixFiles = []\ninputVectorFiles = []\ninFiles = []\n\n\n')
-			prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
-			prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
-			prunSolve.write('lines = out.split(\",\")\n')
-			prunSolve.write('for line in lines:\n')
-			prunSolve.write('\tif \"hitmap.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineHitmap = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineHitmap)\n\t\t\tinputHitmapFiles.append(lineHitmap)\n\t\telse : \n\t\t\tinputHitmapFiles.append(line)\n')
-			prunSolve.write('\tif \"vector.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineVector = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineVector)\n\t\t\tinputVectorFiles.append(lineVector)\n\t\telse : \n\t\t\tinputVectorFiles.append(line)\n')
-			prunSolve.write('\tif \"matrix.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineMatrix = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineMatrix)\n\t\t\tinputMatrixFiles.append(lineMatrix)\n\t\telse : \n\t\t\tinputMatrixFiles.append(line)\n')
+                        prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
+                        prunSolve.write('jofile.write(\"inputHitmapFiles = \" + str(inputHitmapFiles) + \'\\n\')\n')
+                        prunSolve.write('jofile.write(\"inputVectorFiles = \" + str(inputVectorFiles) + \'\\n\')\n')
+                        prunSolve.write('jofile.write(\"inputMatrixFiles = \" + str(inputMatrixFiles) + \'\\n\')\n')
 
-			tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
-			prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
+                        prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )
+                        prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
 
+                        prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
 
-			tmpStr = ""
-			if (0 == self.i) : 
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
-			else :
-				alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
-				if os.path.isfile(alignConstants) :
-					tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
-				else : 
-					print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
-					sys.exit()
+                        # Debugging aid: dump the generated prunSolve.py and job-options file to the log
+                        print ("prunSolve.py: ")
+                        os.system(" cat prunSolve.py ")
+                        print (" newSolveJO_Iter%02d.py: " % self.i)
+                        os.system(" cat %s " % tmpStrJO)
 
 
-			prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
-			prunSolve.write('jofile.write(\"inputHitmapFiles = \" + str(inputHitmapFiles) + \'\\n\')\n')
-			prunSolve.write('jofile.write(\"inputVectorFiles = \" + str(inputVectorFiles) + \'\\n\')\n')
-			prunSolve.write('jofile.write(\"inputMatrixFiles = \" + str(inputMatrixFiles) + \'\\n\')\n')
+                        #extOutFileStrSol = "prunSolve.py,newSolveJO_Iter%02d.py,"  % (self.i)
+                        extOutFileStrSol = ""
 
-			prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )                               
-			prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
+                        if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or
+                                ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                                ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                                ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                                ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                                ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                                print ("hmn, you are going to run L3 alignment, Eigen is going to be used, so no eigen value information!!! ")
 
-			prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+                        else :
+                                if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.bin,"
+                                        extOutFileStrSol += "eigenvalues.bin,"
 
-			# debugging ...
-			print ("prunSolve.py: ")
-			os.system(" cat prunSolve.py ")
-			print (" newSolveJO_Iter%02d.py: " % self.i)
-			os.system(" cat %s " % tmpStrJO)
+                                if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.txt,"
+                                        extOutFileStrSol += "eigenvalues.txt,"
 
+                        extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
 
-			#extOutFileStrSol = "prunSolve.py,newSolveJO_Iter%02d.py,"  % (self.i)	
-			extOutFileStrSol = ""
+                        extFileStr = ""
+                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                extFileStr = " initial_AlignmentConstants.root "
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
 
-			if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-				("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-				("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-				("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-				("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-				("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) : 
-				print ("hmn, you are going to run L3 alignment, Eigen is going to be used, so no eigen value information!!! ")
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        extFileStr = lastAlignConstant
 
-			else : 
-				if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.bin,";
-					extOutFileStrSol += "eigenvalues.bin," ;
 
-				if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.txt,";
-					extOutFileStrSol += "eigenvalues.txt," ;
 
-			extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
+                        TNFiles = 0
+                        tmpInDS = []
+                        print (" self.outDS : " , self.outDS)
 
-			extFileStr = ""
-			if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-				extFileStr = " initial_AlignmentConstants.root "
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                        print (" self.outDS[Customed]: " , self.outDS["Customed"])
+                        for item in self.outDS["Customed"] :
+                                print ("self.outDS[Customed] item:  " , item)
+                                tmpInDS.append(item+"/")
+                                rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
+                                print (" nFiles of this outDS[Customed] :" , nFiles)
+                                TNFiles += int(nFiles)
 
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					extFileStr = lastAlignConstant
+                        solveInDS = ",".join(tmpInDS)
+                        #scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --athenaTag 15.8.0,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( (self.outDS["Customed"][0]+"/") , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(float(nFiles)) )
 
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --official --voms=atlas:/atlas/det-indet/Role=production --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
+                        else :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
-			TNFiles = 0
-			tmpInDS = []
-			print (" self.outDS : " , self.outDS)
+                        if  (True is self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-			print (" self.outDS[Customed]: " , self.outDS["Customed"])
-			for item in self.outDS["Customed"] : 
-				print ("self.outDS[Customed] item:  " , item)
-				tmpInDS.append(item+"/")
-				rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
-				print (" nFiles of this outDS[Customed] :" , nFiles)
-				TNFiles += int(nFiles)
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-			solveInDS = ",".join(tmpInDS) 
-			#scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --athenaTag 15.8.0,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( (self.outDS["Customed"][0]+"/") , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(float(nFiles)) )
+                        script.write(scriptStr)
+                        script.close()
 
-			if ( "group" in self.GridOptions["userIDnum"] ) :
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --official --voms=atlas:/atlas/det-indet/Role=production --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
-			else : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
+                else:
+                   print ("Hi, except accumulate and solve, where do you want to go?")
 
-			if  (True == self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
-				scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
+        def send(self):
+                os.system("pwd")
+                os.system("chmod 777 %s" % self.SCRIPTNAME)
 
-			script.write(scriptStr)
-			script.close()
+                print ("----------------------------------------------")
+                if self.part == "Accumulate":
+                        print ("Sending %s_Iter%02d accumulation job to grid at site ... %s" % (self.preName, self.i, self.GridOptions["siteName"]))
 
+                        if self.Datasets.containType("Customed") :
+                                for i in range(len(self.Datasets.namesList("Customed"))) :
+                                        os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-		else:
-		   print ("Hi, except accumulate and solve, where do you want to go?")
-			
-			
-	def send(self):
-		os.system("pwd")
-		os.system("chmod 777 %s" % self.SCRIPTNAME)
+                        os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
+                        os.system("source %s" % self.SCRIPTNAME)
 
-		print ("----------------------------------------------")
-		if self.part == "Accumulate":		
-			print ("Sending %s_Iter%02d accumulation job to grid at site ... %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for i in range(len(self.Datasets.namesList("Customed"))) :
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
+                                else :
+                                        print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
 
-			if self.Datasets.containType("Customed") :
-				for i in range(len(self.Datasets.namesList("Customed"))) :   
-					os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-			os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
-			os.system("source %s" % self.SCRIPTNAME)
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for i in range(len(self.Datasets.namesList("Customed"))) :
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
-				else : 
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
-					
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				if self.GridOptions["ColCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                elif self.part == "Grid" :
 
+                        print ("  Sending %s_Iter%02d matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_solve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_solve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_solve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Grid" :
+                        print ("----------------------------------------------")
 
-			print ("  Sending %s_Iter%02d matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_solve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_solve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_solve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                elif self.part == "Prun" :
 
-			print ("----------------------------------------------")
+                        print ("  Sending %s_Iter%02d prun matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_prunSolve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Prun" :
+                        print ("----------------------------------------------")
 
-			print ("  Sending %s_Iter%02d prun matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_prunSolve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			print ("----------------------------------------------")
+                else:
+                        print ("Hi, where do you want to go?")
 
 
-		else:
-			print ("Hi, where do you want to go?")
 
 
+        # Adapted from the Muon alignment implementation in Jobs.py
+        def wait(self,logfilename):
+                print ("Pathena wait()" )
 
+                if self.jobId == -99:
+                        print ("logiflename: ",logfilename)
+                        ret, out = subprocess.getstatusoutput("cat "+logfilename)
+                        lines = out.split('\n')
 
-	# take this part from Muon alignment in Jobs.py
-	def wait(self,logfilename):
-		print ("Pathena wait()" )
-
-		if self.jobId == -99:
-			print ("logiflename: ",logfilename)
-			ret, out = subprocess.getstatusoutput("cat "+logfilename)
-			lines = out.split('\n')
-			
-			# looping over all the job IDs
-			for line in lines:
-				items = line.split()
-				if len(items)>0 and items[0]=="JobID" :
-					self.jobId = int(items[2])
-					print ("jobId = ",self.jobId)
-
-				# check status of each job ID
-				# while self.bjobs() == 0:
-				while self.bjobs() != 1:	
-					print (" waiting for jobID ",self.jobId,"...")
-					time.sleep(300)
-
-
-
-	def bjobs(self) :
-		if self.jobId == -99:
-			print ("need jobId")
-			sys.exit(3)
-
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		print ("statusoutput: ",out)
-		for line in out.split("\n") :
-			items_1 = line.split()
-			if len(items_1)>0 and items_1[0] != "jobStatus" :
-				continue
-
-			if len(items_1)>2 :
-				if items_1[2] == "frozen" :
-					print ("jobStatus: " , items_1[2])
-
-					### search the libDS #######	
-					for line2 in out.split("\n") : 
-						print (" line2: " , line2 )
-						items_2 = line2.split()
-						if items_2[0] == "libDS" :
-							break 
-
-					if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" : 	
-						self.GridOptions["accumulateLibDS"] = items_2[2] 
-						print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
-
-					if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
-						self.GridOptions["solveLibDS"     ] = items_2[2]				
-						print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
-
-					
-					return 1
-				else :
-					print ("jobStatus: ",items_1[2])
-					return 0
-
+                        # looping over all the job IDs
+                        for line in lines:
+                                items = line.split()
+                                if len(items)>0 and items[0]=="JobID" :
+                                        self.jobId = int(items[2])
+                                        print ("jobId = ",self.jobId)
 
+                                # check status of each job ID
+                                # while self.bjobs() == 0:
+                                while self.bjobs() != 1:
+                                        print (" waiting for jobID ",self.jobId,"...")
+                                        time.sleep(300)
 
 
-	def whetherRetry(self) :	  	
-		nfailed   = 0
-		nfinished = 0
-		if self.jobId == -99 :
-			print ("need jobId")
-			sys.exit(3)
 
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		
-		lines  = out.split("\n")
-		nlines = len(lines)
-		print (" nlines: " , nlines)
+        def bjobs(self) :
+                if self.jobId == -99:
+                        print ("need jobId")
+                        sys.exit(3)
 
-		for i in range(0, nlines) : 
-			items = lines[i].split()
-			print (" items: " , items)
-			if "failed"   in items :
-				nfailed   = int(items[-1])
-			if "finished" in items :
-				nfinished = int(items[-1])
-				# Hi, just stop, next line will cause the loop crash ...
-				break
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
+                print ("statusoutput: ",out)
+                for line in out.split("\n") :
+                        items_1 = line.split()
+                        if len(items_1)>0 and items_1[0] != "jobStatus" :
+                                continue
 
-		if (self.retryNo == 0) and (self.i == 0) : 
-			if ( 0 == (nfailed + nfinished - 1) ) : 
-				successRatio = -1 
-			else : 
-				successRatio = float(nfinished - 1)/(nfailed + nfinished - 1) 
-		else : 
-			successRatio = float(nfinished)/(nfailed + nfinished) 
+                        if len(items_1)>2 :
+                                if items_1[2] == "frozen" :
+                                        print ("jobStatus: " , items_1[2])
 
+                                        ### search the libDS #######
+                                        for line2 in out.split("\n") :
+                                                print (" line2: " , line2 )
+                                                items_2 = line2.split()
+                                                if items_2[0] == "libDS" :
+                                                        break
 
+                                        if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :
+                                                self.GridOptions["accumulateLibDS"] = items_2[2]
+                                                print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
 
-		print ("the success ratio: ", successRatio)
-		if successRatio >= self.GridOptions["successRatioCut"] :
-			print ("The success ratio is higher than the cut, will not retry ---")
-			return False
-		else :
-			print ("The success ratio is lower than the cut, will retry ---")
-			return True
+                                        if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
+                                                self.GridOptions["solveLibDS"     ] = items_2[2]
+                                                print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
 
 
+                                        return 1
+                                else :
+                                        print ("jobStatus: ",items_1[2])
+                                        return 0
 
-	def retry(self) :
-		jobId = self.jobId
-		retrystring = "pbook -c 'retry(" + str(jobId) + ")'"	
-		ret, out    = subprocess.getstatusoutput(retrystring)
-		print (" out1: " , out )
-		#self.jobId =  self.jobId + 2
 
-		## get the new JobID ## 
-		for line in out.split("\n") : 
-			items = line.split() 
-			nitems = len(items) 
-			for i in range(0, nitems) : 
-				if items[i] == "New" : 
-					jobstring = items[i+1].split("=") 
-					self.jobId = int(jobstring[-1]) 
-					print ("new JobID: " , self.jobId )
-					break 
-		self.retryNo = self.retryNo + 1
 
 
+        def whetherRetry(self) :
+                nfailed   = 0
+                nfinished = 0
+                if self.jobId == -99 :
+                        print ("need jobId")
+                        sys.exit(3)
 
-		while self.bjobs() != 1 :
-			print (" waiting for the first retry jobID " , self.jobId , "...")
-			time.sleep(300)
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
 
-		if self.whetherRetry() :
-			jobId = self.jobId
-			retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
-			ret, out    = subprocess.getstatusoutput(retrystring)
-			print (" out2: " , out)
+                lines  = out.split("\n")
+                nlines = len(lines)
+                print (" nlines: " , nlines)
 
-			#self.jobId =self.jobId + 2
+                for i in range(0, nlines) :
+                        items = lines[i].split()
+                        print (" items: " , items)
+                        if "failed"   in items :
+                                nfailed   = int(items[-1])
+                        if "finished" in items :
+                                nfinished = int(items[-1])
+                                # Stop after the "finished" count: parsing the lines that follow would crash this loop
+                                break
 
+                if (self.retryNo == 0) and (self.i == 0) :
+                        if ( 0 == (nfailed + nfinished - 1) ) :
+                                successRatio = -1
+                        else :
+                                successRatio = float(nfinished - 1)/(nfailed + nfinished - 1)
+                else :
+                        successRatio = float(nfinished)/(nfailed + nfinished)
 
-			## get the new JobID ## 
-			for line in out.split("\n") : 
-				items = line.split() 
-				nitems = len(items) 
-				for i in range(0, nitems) : 
-					if items[i] == "New" : 
-						jobstring = items[i+1].split("=") 
-						self.jobId = int(jobstring[-1]) 
-						print ("new JobID: " , self.jobId )
-						break 
-			self.retryNo = self.retryNo + 1 
 
 
-			while self.bjobs() != 1 :
-				print (" waiting for the second retry jobID " , self.jobId,"...")
-				time.sleep(300)
+                print ("the success ratio: ", successRatio)
+                if successRatio >= self.GridOptions["successRatioCut"] :
+                        print ("The success ratio is higher than the cut, will not retry ---")
+                        return False
+                else :
+                        print ("The success ratio is lower than the cut, will retry ---")
+                        return True
 
 
 
+        def retry(self) :
+                jobId = self.jobId
+                retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                ret, out    = subprocess.getstatusoutput(retrystring)
+                print (" out1: " , out )
+                #self.jobId =  self.jobId + 2
 
+                ## get the new JobID ##
+                for line in out.split("\n") :
+                        items = line.split()
+                        nitems = len(items)
+                        for i in range(0, nitems) :
+                                if items[i] == "New" :
+                                        jobstring = items[i+1].split("=")
+                                        self.jobId = int(jobstring[-1])
+                                        print ("new JobID: " , self.jobId )
+                                        break
+                self.retryNo = self.retryNo + 1
 
-	def getGridOptions(self, option = "") : 
-		return self.GridOptions[option]
 
-	def setGridOptions(self, option1 = "", option2 = "") : 
-		self.GridOptions[option1] = option2
+
+                while self.bjobs() != 1 :
+                        print (" waiting for the first retry jobID " , self.jobId , "...")
+                        time.sleep(300)
+
+                if self.whetherRetry() :
+                        jobId = self.jobId
+                        retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                        ret, out    = subprocess.getstatusoutput(retrystring)
+                        print (" out2: " , out)
+
+                        #self.jobId =self.jobId + 2
+
+
+                        ## get the new JobID ##
+                        for line in out.split("\n") :
+                                items = line.split()
+                                nitems = len(items)
+                                for i in range(0, nitems) :
+                                        if items[i] == "New" :
+                                                jobstring = items[i+1].split("=")
+                                                self.jobId = int(jobstring[-1])
+                                                print ("new JobID: " , self.jobId )
+                                                break
+                        self.retryNo = self.retryNo + 1
+
+
+                        while self.bjobs() != 1 :
+                                print (" waiting for the second retry jobID " , self.jobId,"...")
+                                time.sleep(300)
+
+
+
+
+
+        def getGridOptions(self, option = "") :
+                return self.GridOptions[option]
+
+        def setGridOptions(self, option1 = "", option2 = "") :
+                self.GridOptions[option1] = option2
 
 
 
 
 
 class writeScriptAFS : 
-	def __init__(self,
-		     iter,
-		     JOBNAME,
-		     SCRIPTNAME,
-		     preName,
-		     QUEUE          = "8nh",
-		     CMTDIR         = "",
-		     ATHENAREL      = "",
-		     TAGS           = "",
-		     inputPoolFiles = ""
-		     ):
-
-		self.i              = iter
-		self.JOBNAME        = JOBNAME
-		self.SCRIPTNAME     = SCRIPTNAME
-		self.preName        = preName
-		self.QUEUE          = QUEUE
-		self.CMTDIR         = CMTDIR
-		self.ATHENAREL      = ATHENAREL
-		self.TAGS           = TAGS
-		self.inputPoolFiles = inputPoolFiles
-
-	def write(self) : 
-		script = open(self.SCRIPTNAME,'w')
-
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		for file in self.inputPoolFiles:
-			if "atlasdatadisk" in file: 
-				script.write("export STAGE_SVCCLASS=atldata\n")
-				break
-		for file in self.inputPoolFiles:
-			if "DAQ" in file:
-				script.write("export STAGE_SVCCLASS=atlcal\n")
-				break
-
-		for file in self.inputPoolFiles:
-			if 'ESD' in file or 'AlignmentConstants' in file:
-				script.write("pool_insertFileToCatalog "+ file + " \n")
-
-		nowPath = os.getcwd()
-		print ("current path: ", nowPath)
-
-		script.write("athena %s \n" % (nowPath + "/" + self.JOBNAME) )
-		script.close()
-
-
-	def send(self, runmode) :
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		if "Local" == runmode : 
-			print ("Running Iter%02dSolve job locally ..." % (self.i))
-			os.system("sh %s | tee Iter%02dSolveLocally.log \n" % (self.SCRIPTNAME, self.i))
-
-		if "Batch" == runmode :
-			print ("Submitting Iter%02dSolve job to queue %s ..." % (self.i, self.QUEUE))
-			os.system("bsub -q %s  %s" % (self.QUEUE, self.SCRIPTNAME) )
-
-
-	def wait(self) : 
-		print ("Processing in lxbatch...")
-		time.sleep(60)
-		while os.popen('bjobs -w').read().find(self.preName) != -1:
-			time.sleep(30)
+        def __init__(self,
+                     iter,
+                     JOBNAME,
+                     SCRIPTNAME,
+                     preName,
+                     QUEUE          = "8nh",
+                     CMTDIR         = "",
+                     ATHENAREL      = "",
+                     TAGS           = "",
+                     inputPoolFiles = ""
+                     ):
+
+                self.i              = iter
+                self.JOBNAME        = JOBNAME
+                self.SCRIPTNAME     = SCRIPTNAME
+                self.preName        = preName
+                self.QUEUE          = QUEUE
+                self.CMTDIR         = CMTDIR
+                self.ATHENAREL      = ATHENAREL
+                self.TAGS           = TAGS
+                self.inputPoolFiles = inputPoolFiles
+
+        def write(self) :
+                script = open(self.SCRIPTNAME,'w')
+
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                for file in self.inputPoolFiles:
+                        if "atlasdatadisk" in file:
+                                script.write("export STAGE_SVCCLASS=atldata\n")
+                                break
+                for file in self.inputPoolFiles:
+                        if "DAQ" in file:
+                                script.write("export STAGE_SVCCLASS=atlcal\n")
+                                break
+
+                for file in self.inputPoolFiles:
+                        if 'ESD' in file or 'AlignmentConstants' in file:
+                                script.write("pool_insertFileToCatalog "+ file + " \n")
+
+                nowPath = os.getcwd()
+                print ("current path: ", nowPath)
+
+                script.write("athena %s \n" % (nowPath + "/" + self.JOBNAME) )
+                script.close()
+
+
+        def send(self, runmode) :
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                if "Local" == runmode :
+                        print ("Running Iter%02dSolve job locally ..." % (self.i))
+                        os.system("sh %s | tee Iter%02dSolveLocally.log \n" % (self.SCRIPTNAME, self.i))
+
+                if "Batch" == runmode :
+                        print ("Submitting Iter%02dSolve job to queue %s ..." % (self.i, self.QUEUE))
+                        os.system("bsub -q %s  %s" % (self.QUEUE, self.SCRIPTNAME) )
+
+
+        def wait(self) :
+                print ("Processing in lxbatch...")
+                time.sleep(60)
+                while os.popen('bjobs -w').read().find(self.preName) != -1:
+                        time.sleep(30)
 
 
 
 def prepareForNextIter(OutputPath, iteration, GridSolvingOutDS, runSolveMode):
 
-	if ( "Grid"  == runSolveMode or "Prun" == runSolveMode ) :
-		print ("GridSolveOutDS = ", (GridSolvingOutDS))
-		ret, outDS = subprocess.getstatusoutput("cat %s" % GridSolvingOutDS)
-		print ("solve outDS: ",outDS)
-
-		ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
-		rootlist = []
-
-		print ("out: ",out)
-		lines = out.split('\n')
-		for line in lines :
-			items = line.split()
-			for item in items :
-				print (" item : " , item)
-				if item.find("AlignmentConstants.root") != -1 :
-					rootlist.append(item)
-		rootstr = ",".join(rootlist)
-		print ("rootstr : " , rootstr)
-		os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( rootstr, outDS, outDS ) )
-	
-
-		jobstring2  = "mv %s/*.Iter%02d_AlignmentConstants.root  Iter%02d_AlignmentConstants.root\n\n" % \
-			(outDS, iteration, iteration)
-	 
-		'''
-		jobstring2 += "mv %s/*.OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n\n" % \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.mycool.db   %s/Iter%02d/mycool.db\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.vector.txt  %s/Iter%02d/vector.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.vector.bin  %s/Iter%02d/vector.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.matrix.txt  %s/Iter%02d/matrix.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.matrix.bin  %s/Iter%02d/matrix.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.hitmap.txt  %s/Iter%02d/hitmap.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.hitmap.bin  %s/Iter%02d/hitmap.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.alignlogfile.txt  %s/Iter%02d/alignlogfile.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.log.tgz  %s/Iter%02d/\n\n" % (outDS, OutputPath, iteration)     
-		# move the merged total monitoring file into every iteration directory
-		jobstring2 += "mv TotalMonitoring.root %s/Iter%02d/\n\n" % (OutputPath, iteration)
-		'''
-		os.system(jobstring2)
-
-	else : 	
-		jobstring3 = ""
-		jobstring3 += "mv ./OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n" %     ( OutputPath, iteration)
-		jobstring3 += "mv ./OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n" %  ( OutputPath, iteration)
-		jobstring3 += "mv ./mycool.db              %s/Iter%02d/mycool.db\n" %              ( OutputPath, iteration)
-		jobstring3 += "mv ./vector.txt             %s/Iter%02d/vector.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./vector.bin             %s/Iter%02d/vector.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./matrix.txt             %s/Iter%02d/matrix.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./matrix.bin             %s/Iter%02d/matrix.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./hitmap.txt             %s/Iter%02d/hitmap.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./hitmap.bin             %s/Iter%02d/hitmap.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./alignlogfile.txt       %s/Iter%02d/alignlogfile.txt\n" %       ( OutputPath, iteration)
-		os.system(jobstring3)
+        if ( "Grid"  == runSolveMode or "Prun" == runSolveMode ) :
+                print ("GridSolveOutDS = ", (GridSolvingOutDS))
+                ret, outDS = subprocess.getstatusoutput("cat %s" % GridSolvingOutDS)
+                print ("solve outDS: ",outDS)
+
+                ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
+                rootlist = []
+
+                print ("out: ",out)
+                lines = out.split('\n')
+                for line in lines :
+                        items = line.split()
+                        for item in items :
+                                print (" item : " , item)
+                                if item.find("AlignmentConstants.root") != -1 :
+                                        rootlist.append(item)
+                rootstr = ",".join(rootlist)
+                print ("rootstr : " , rootstr)
+                os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( rootstr, outDS, outDS ) )
+
+
+                jobstring2  = "mv %s/*.Iter%02d_AlignmentConstants.root  Iter%02d_AlignmentConstants.root\n\n" % \
+                        (outDS, iteration, iteration)
+
+                '''
+                jobstring2 += "mv %s/*.OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n\n" % \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.mycool.db   %s/Iter%02d/mycool.db\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.vector.txt  %s/Iter%02d/vector.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.vector.bin  %s/Iter%02d/vector.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.matrix.txt  %s/Iter%02d/matrix.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.matrix.bin  %s/Iter%02d/matrix.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.hitmap.txt  %s/Iter%02d/hitmap.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.hitmap.bin  %s/Iter%02d/hitmap.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.alignlogfile.txt  %s/Iter%02d/alignlogfile.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.log.tgz  %s/Iter%02d/\n\n" % (outDS, OutputPath, iteration)
+                # move the merged total monitoring file into every iteration directory
+                jobstring2 += "mv TotalMonitoring.root %s/Iter%02d/\n\n" % (OutputPath, iteration)
+                '''
+                os.system(jobstring2)
+
+        else :
+                jobstring3 = ""
+                jobstring3 += "mv ./OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n" %     ( OutputPath, iteration)
+                jobstring3 += "mv ./OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n" %  ( OutputPath, iteration)
+                jobstring3 += "mv ./mycool.db              %s/Iter%02d/mycool.db\n" %              ( OutputPath, iteration)
+                jobstring3 += "mv ./vector.txt             %s/Iter%02d/vector.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./vector.bin             %s/Iter%02d/vector.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./matrix.txt             %s/Iter%02d/matrix.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./matrix.bin             %s/Iter%02d/matrix.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./hitmap.txt             %s/Iter%02d/hitmap.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./hitmap.bin             %s/Iter%02d/hitmap.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./alignlogfile.txt       %s/Iter%02d/alignlogfile.txt\n" %       ( OutputPath, iteration)
+                os.system(jobstring3)
 
 
 
@@ -1555,225 +1550,222 @@ def prepareForNextIter(OutputPath, iteration, GridSolvingOutDS, runSolveMode):
 
 def mergeMatrix(OutputPath, iteration, GridAccOutDS, GridOptions):
 
-	matrixlist = []
-	vectorlist = []
-	hitmaplist = []
-
-	print ("GridAccOutDS = ", (GridAccOutDS))
-
-	ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
-	print ("out: ",out)
-	lines=out.split('\n')
-
-	nowDir = os.getcwd()
-	os.chdir(GridOptions["TmpWorkDir"])
-
-	# looping over all output dataset names
-	for line in lines:
-		items = line.split()
-		if len(items)>0 :
-			outDS = items[0]
-			print ("when merging matrixes and vectors, this outDS name : ", outDS)
-			binlist = []
-			ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
-			print (" dq2-ls -f, out : ", out)
-			lines = out.split('\n')
-			for line in lines :
-				items = line.split()
-				for item in items :
-					print (" item : " , item)
-					if item.find(".bin") != -1 :
-						binlist.append(item)
-			binstr = ",".join(binlist)
-			print ("binary files string : " , binstr)
-			os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( binstr, outDS, outDS ))
-
-
-			jobstr2 = "ls %s/*.matrix.bin" % (outDS)
-			job2 = open("job2.sh",'w')
-			os.system("chmod 777 job2.sh")
-			job2.write(jobstr2)
-			job2.close()
-			ret, out = subprocess.getstatusoutput('sh job2.sh')
-			for line in out.split("\n"):
-				MatrixPath =  line
-				print ("MatrixPath: ",MatrixPath)
-				#if os.path.isfile(MatrixPath):
-				matrixlist.append(MatrixPath)
-
-
-			jobstr3 = "ls %s/*.vector.bin" % (outDS)
-			job3 = open("job3.sh",'w')
-			os.system("chmod 777 job3.sh")
-			job3.write(jobstr3)
-			job3.close()
-			print ("job3: ",job3)
-			ret, out = subprocess.getstatusoutput('sh job3.sh')
-			for line in out.split("\n"):
-				VectorPath = line
-				#		if os.path.isfile(VectorPath):
-				vectorlist.append(VectorPath)
-
-			print ("vectorlist: ",vectorlist)
-
-			jobstr4 = "ls %s/*.hitmap.bin" % (outDS)
-			job4 = open("job4.sh",'w')
-			os.system("chmod 777 job4.sh")
-			job4.write(jobstr4)
-			job4.close()
-			print ("job4: ",job4)
-			ret, out = subprocess.getstatusoutput('sh job4.sh')
-			for line in out.split("\n"):
-				HitmapPath =  line
-				#		if os.path.isfile(HitmapPath):
-				hitmaplist.append(HitmapPath)
-			print ("hitmaplist: ",hitmaplist)
-		
-		else:
-			print ("Problem getting the outDS files")
-			
+        matrixlist = []
+        vectorlist = []
+        hitmaplist = []
+
+        print ("GridAccOutDS = ", (GridAccOutDS))
+
+        ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
+        print ("out: ",out)
+        lines=out.split('\n')
+
+        nowDir = os.getcwd()
+        os.chdir(GridOptions["TmpWorkDir"])
+
+        # looping over all output dataset names
+        for line in lines:
+                items = line.split()
+                if len(items)>0 :
+                        outDS = items[0]
+                        print ("when merging matrixes and vectors, this outDS name : ", outDS)
+                        binlist = []
+                        ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
+                        print (" dq2-ls -f, out : ", out)
+                        lines = out.split('\n')
+                        for line in lines :
+                                items = line.split()
+                                for item in items :
+                                        print (" item : " , item)
+                                        if item.find(".bin") != -1 :
+                                                binlist.append(item)
+                        binstr = ",".join(binlist)
+                        print ("binary files string : " , binstr)
+                        os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( binstr, outDS, outDS ))
+
+
+                        jobstr2 = "ls %s/*.matrix.bin" % (outDS)
+                        job2 = open("job2.sh",'w')
+                        os.system("chmod 777 job2.sh")
+                        job2.write(jobstr2)
+                        job2.close()
+                        ret, out = subprocess.getstatusoutput('sh job2.sh')
+                        for line in out.split("\n"):
+                                MatrixPath =  line
+                                print ("MatrixPath: ",MatrixPath)
+                                #if os.path.isfile(MatrixPath):
+                                matrixlist.append(MatrixPath)
+
+
+                        jobstr3 = "ls %s/*.vector.bin" % (outDS)
+                        job3 = open("job3.sh",'w')
+                        os.system("chmod 777 job3.sh")
+                        job3.write(jobstr3)
+                        job3.close()
+                        print ("job3: ",job3)
+                        ret, out = subprocess.getstatusoutput('sh job3.sh')
+                        for line in out.split("\n"):
+                                VectorPath = line
+                                #               if os.path.isfile(VectorPath):
+                                vectorlist.append(VectorPath)
+
+                        print ("vectorlist: ",vectorlist)
+
+                        jobstr4 = "ls %s/*.hitmap.bin" % (outDS)
+                        job4 = open("job4.sh",'w')
+                        os.system("chmod 777 job4.sh")
+                        job4.write(jobstr4)
+                        job4.close()
+                        print ("job4: ",job4)
+                        ret, out = subprocess.getstatusoutput('sh job4.sh')
+                        for line in out.split("\n"):
+                                HitmapPath =  line
+                                #               if os.path.isfile(HitmapPath):
+                                hitmaplist.append(HitmapPath)
+                        print ("hitmaplist: ",hitmaplist)
+
+                else:
+                        print ("Problem getting the outDS files")
+
   
-	print ("------------------------------------------")
-	print ("  Setting Matrices list" )
-	print ("------------------------------------------")
+        print ("------------------------------------------")
+        print ("  Setting Matrices list" )
+        print ("------------------------------------------")
+
+        os.system("rm *.sh")
+        os.chdir(nowDir)
 
-	os.system("rm *.sh")
-	os.chdir(nowDir)
+        return matrixlist,vectorlist,hitmaplist
+
+                
 
-	return matrixlist,vectorlist,hitmaplist
-				
-		
-			 
 # For the merging of the monitoring Files
 class mergeMonitoringScript:
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     CosmicsBoff,
-		     CosmicsBon,
-		     Collision,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS,
-		     SCRIPTNAME,
-		     JOBNAME, 
-		     GridAccOutDS
-		     ):
-		self.OutputPath      = OutputPath
-		self.preName         = preName
-		self.i               = iter
-		self.CosmicsBoff     = CosmicsBoff
-		self.CosmicsBon      = CosmicsBon
-		self.Collision       = Collision
-		self.CMTDIR          = CMTDIR
-		self.ATHENAREL       = ATHENAREL
-		self.TAGS            = TAGS
-		self.SCRIPTNAME      = SCRIPTNAME
-		self.JOBNAME         = JOBNAME
-		self.GridAccOutDS    = GridAccOutDS
-
-
-	def write(self):
-		TempPath="%s/Iter%02d" % (self.OutputPath, self.i)
-		#self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
-		# list of Files to be merged
-		mergeMonitoringFilesName = 'mergeMonitoringFiles.txt'
-
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J %s_Iter%02dMerge \n" % (self.preName, self.i))
-		script.write("#BSUB -o %s/Iter%02d/logs/Iter%02dMerge.log \n" % (self.OutputPath,self.i,self.i))
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		#script.write("cd %s/Iter%d/ \n" % (self.OutputPath,self.i))
-		script.write("DQHistogramMerge.py %s TotalMonitoring.root True\n" % mergeMonitoringFilesName)
-		script.close()
-
-		mergeMonitoringFile = open(mergeMonitoringFilesName,"w")
-		ret, out = subprocess.getstatusoutput("cat %s" % self.GridAccOutDS)
-		print ("out: ",out)
-		lines = out.split('\n')
-		# looping over all output dataset names
-		for line in lines:
-			items = line.split()
-			if len(items)>0 :
-				outDS = items[0]
-				print ("outDS = ",outDS)
-				#print ("hmn", glob.glob(("%s/*.root") % outDS))
-				os.system("find %s/*.root >> %s \n " % (outDS, mergeMonitoringFilesName))
-
-		mergeMonitoringFile.close()
-
-
-	def send(self):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("in doing merge----------------------------------------------")
-		os.system("sh %s  \n"   % self.SCRIPTNAME)
-		print ("after doing merge----------------------------------------------")
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     CosmicsBoff,
+                     CosmicsBon,
+                     Collision,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS,
+                     SCRIPTNAME,
+                     JOBNAME,
+                     GridAccOutDS
+                     ):
+                self.OutputPath      = OutputPath
+                self.preName         = preName
+                self.i               = iter
+                self.CosmicsBoff     = CosmicsBoff
+                self.CosmicsBon      = CosmicsBon
+                self.Collision       = Collision
+                self.CMTDIR          = CMTDIR
+                self.ATHENAREL       = ATHENAREL
+                self.TAGS            = TAGS
+                self.SCRIPTNAME      = SCRIPTNAME
+                self.JOBNAME         = JOBNAME
+                self.GridAccOutDS    = GridAccOutDS
+
+
+        def write(self):
+                #TempPath="%s/Iter%02d" % (self.OutputPath, self.i)
+                #self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
+                # list of Files to be merged
+                mergeMonitoringFilesName = 'mergeMonitoringFiles.txt'
+
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J %s_Iter%02dMerge \n" % (self.preName, self.i))
+                script.write("#BSUB -o %s/Iter%02d/logs/Iter%02dMerge.log \n" % (self.OutputPath,self.i,self.i))
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                #script.write("cd %s/Iter%d/ \n" % (self.OutputPath,self.i))
+                script.write("DQHistogramMerge.py %s TotalMonitoring.root True\n" % mergeMonitoringFilesName)
+                script.close()
+
+                mergeMonitoringFile = open(mergeMonitoringFilesName,"w")
+                ret, out = subprocess.getstatusoutput("cat %s" % self.GridAccOutDS)
+                print ("out: ",out)
+                lines = out.split('\n')
+                # looping over all output dataset names
+                for line in lines:
+                        items = line.split()
+                        if len(items)>0 :
+                                outDS = items[0]
+                                print ("outDS = ",outDS)
+                                #print ("hmn", glob.glob(("%s/*.root") % outDS))
+                                os.system("find %s/*.root >> %s \n " % (outDS, mergeMonitoringFilesName))
+
+                mergeMonitoringFile.close()
+
+
+        def send(self):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("in doing merge----------------------------------------------")
+                os.system("sh %s  \n"   % self.SCRIPTNAME)
+                print ("after doing merge----------------------------------------------")
 
 
 
 
 # For Comparing the before and after monitoring files 
 class compareMonitoringScript:
-	def __init__(self,
-		     OutputPath,
-		     numIter,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS
-		     ):
-		self.OutputPath = OutputPath
-		self.numIter    = numIter
-		self.CMTDIR     = CMTDIR
-		self.ATHENAREL  = ATHENAREL
-		self.TAGS       = TAGS
-
-		
-	def write(self):
-		# Names of the Job and the Script
-		self.SCRIPTNAME = self.OutputPath + '/MonitoringComparison.lsf'
-		self.JOBNAME = 'MonitoringComparison.py'
-		
-		# Write the Script
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J MonitoringComparision \n")
-		script.write("#BSUB -o "+self.OutputPath+"/MonitoringComparision.log \n")
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		script.write("cd "+self.OutputPath+" \n")
-		script.write("athena.py "+self.JOBNAME+" \n")
-		script.write("cd -")
-		script.close()
-		
-		# Write the Job
-		job=open(self.OutputPath+"/"+self.JOBNAME,"w")
-		job.write(" \n")
-		job.write("# ==================================== \n")
-		job.write("# The Reference File (Black)           \n")
-		job.write('afterAlignmentFile = "'+self.OutputPath+'/Iter'+str(self.numIter-1)+'/TotalMonitoring.root" \n')
-		job.write(" \n")
-		job.write("# ==================================== \n")
-		job.write("# The Monitored File (Red)             \n")
-		job.write('beforeAlignmentFile = "'+self.OutputPath+'/Iter0/TotalMonitoring.root"\n')
-		job.write("\n")
-		job.write("# ==================================== \n")
-		job.write("# The Output File                      \n")
-		job.write('outputFile = "AlignmentOutput.root"    \n')
-		job.write("\n")
-		job.write("# ==================================== \n")
-		job.write("include('InDetAlignmentMonitoring/makeComparision.py') \n")
-		job.write(" \n")
-		job.close()
-		
-	def send(self):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		print ("  Running MonitoringComparision.lsf job")
-		os.system("sh "+self.SCRIPTNAME+" | tee "+self.OutputPath+"/MonitoringComparison.log \n")
-		print ("----------------------------------------------")
-			
-         
-
+        def __init__(self,
+                     OutputPath,
+                     numIter,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS
+                     ):
+                self.OutputPath = OutputPath
+                self.numIter    = numIter
+                self.CMTDIR     = CMTDIR
+                self.ATHENAREL  = ATHENAREL
+                self.TAGS       = TAGS
+
+
+        def write(self):
+                # Names of the Job and the Script
+                self.SCRIPTNAME = self.OutputPath + '/MonitoringComparison.lsf'
+                self.JOBNAME = 'MonitoringComparison.py'
+
+                # Write the Script
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J MonitoringComparision \n")
+                script.write("#BSUB -o "+self.OutputPath+"/MonitoringComparision.log \n")
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                script.write("cd "+self.OutputPath+" \n")
+                script.write("athena.py "+self.JOBNAME+" \n")
+                script.write("cd -")
+                script.close()
+
+                # Write the Job
+                job=open(self.OutputPath+"/"+self.JOBNAME,"w")
+                job.write(" \n")
+                job.write("# ==================================== \n")
+                job.write("# The Reference File (Black)           \n")
+                job.write('afterAlignmentFile = "'+self.OutputPath+'/Iter'+str(self.numIter-1)+'/TotalMonitoring.root" \n')
+                job.write(" \n")
+                job.write("# ==================================== \n")
+                job.write("# The Monitored File (Red)             \n")
+                job.write('beforeAlignmentFile = "'+self.OutputPath+'/Iter0/TotalMonitoring.root"\n')
+                job.write("\n")
+                job.write("# ==================================== \n")
+                job.write("# The Output File                      \n")
+                job.write('outputFile = "AlignmentOutput.root"    \n')
+                job.write("\n")
+                job.write("# ==================================== \n")
+                job.write("include('InDetAlignmentMonitoring/makeComparision.py') \n")
+                job.write(" \n")
+                job.close()
+
+        def send(self):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                print ("  Running MonitoringComparision.lsf job")
+                os.system("sh "+self.SCRIPTNAME+" | tee "+self.OutputPath+"/MonitoringComparison.log \n")
+                print ("----------------------------------------------")
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
index c147e3dacf98655c7f0e484626a2fcb64bb75383..ba1e1bb493995d930d1950ec57be3fcbf5062f4e 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
@@ -1,202 +1,199 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 import os
 
 class setupChallenge:
-	def __init__ (self
+        def __init__ (self
                       ,thisChallenge
                       ,geometrySetting):
-		self.thisChallenge = thisChallenge
-		self.GlobalTag = ""
-		self.DetDescrVersion = ""
-		self.DetDescrVersionNoField = ""
-		
-		if self.thisChallenge == "CSC":
-			if os.environ['HOSTNAME'] == 'tst01.ific.uv.es':
-				collisionDir = "/data1/cescobar/data/csc/multimuons/"
-			else:
-				collisionDir = "castor:/castor/cern.ch/user/a/atlidali/data/csc/multimuons/"
-				
-			self.setup(CollisionDirectory = collisionDir
-			      ,CollisionFileList = "inputData_CSC_multimuons_digits.txt"
-			      ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/TRTBarrel/misaligned/"
-			      ,CosmicBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/NoField/TRTBarrel/misaligned/"  
-			      ,CosmicNoBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOff.txt")
-
-			if geometrySetting == 'Nominal':
-			    self.DetDescrVersion = "ATLAS-CSC-01-00-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-00-00'
-			if geometrySetting == 'Perfect':
-			    self.DetDescrVersion = "ATLAS-CSC-01-02-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-01-00'
-			if geometrySetting == 'Aligned':
-			    self.DetDescrVersion = "ATLAS-CSC-01-00-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-01-05'
-
-		if self.thisChallenge == 'FDR1':
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/h/hawkings/calibstream/fdr1/"
-			      ,CollisionFileList  = "inputData_FDR1_idcalibstream.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-			if geometrySetting == 'Nominal':
-				self.GlobalTag = 'COMCOND-REPC-003-00'
-			if geometrySetting == 'Aligned':
-				self.GlobalTag = 'COMCOND-ES1C-000-00'
-
-		if self.thisChallenge == 'FDR2':
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/b/bvendapi/FDR2_Pi/BS_files/CalibStream/"
-			      ,CollisionFileList = "inputData_FDR2_idcalibstream.txt"
-			      ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/b/bvendapi/cosmics_fdr2/"
-			      ,CosmicBFieldFileLiast = "inputData_FDR2_cosmicstream.txt")
-
-
-		if self.thisChallenge == 'M8plus':
-			self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
-				   ,CosmicBFieldFileList  = "inputData_M8plus_Cosmic_91800.txt"
-				   ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
-				   ,CosmicNoBFieldFileList  = "inputData_M8plus_Cosmic_NoBField.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-03-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-04-00-00"
-
-		if self.thisChallenge == 'Cosmic09':
-			self.setup(CosmicBFieldDirectory = ""
-			      #,CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
-			      ,CosmicBFieldFileList  = "inputData_Cosmic09_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = ""
-			      #,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
-			      ,CosmicNoBFieldFileList  = "inputData_Cosmic09_BFOff.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-			
-			if geometrySetting == 'Nominal':
-				self.GlobalTag = 'COMCOND-REPC-003-00'
-			if geometrySetting == 'Aligned':
-				self.GlobalTag = 'COMCOND-ES1C-000-00'
-
-
-		if self.thisChallenge == 'CosmicStream':
-			self.setup(CosmicBFieldFileList =  "inputData_CosmicStream.txt"
-			      ,CosmicBFieldDirectory = "rfio:/castor/cern.ch/user/s/sfyrla/91338_PEB/")
-
-		if self.thisChallenge == 'CosmicsRel14':
-			self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEO-03-00-00/dig/"
-			      ,CosmicBFieldFileList  = "inputData_CosmicsRel14_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEONF-04-00-00/"
-			      ,CosmicNoBFieldFileList  = "inputData_CosmicsRel14_BFOff.txt")
-
-		if self.thisChallenge == 'CalibrationStream':
-			self.setup(CollisionDirectory = ""
-			      ,CollisionFileList = "inputData_CalibrationStream.txt")
-
-		if self.thisChallenge == 'CosmicSim09':
-			self.setup(CosmicBFieldFileList = "inputData_CosmicSim2009_BOn.txt"
-			      ,CosmicNoBFieldFileList = "inputData_CosmicSim2009_BOff.txt")
-
-
-		if self.thisChallenge == "FDR09":
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/FDR2_IDCalibStream/ESD/"
-			      ,CollisionFileList = "inputData_FDR09.txt")
-			
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-CSC-02-00-00"
-				self.GlobalTag = 'OFLCOND-FDR-02-08-00'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-CSC-02-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-01-00-00'
-
-		if self.thisChallenge == "MC09":
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/perf-idtracking/dq2/mc09_valid/ESD/e436_s561_r731/mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731_tid076491/"
-			      ,CollisionFileList = "inputData_multimuons09.txt")
-			
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-00-00-07'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-01-00-00'
-
-		if self.thisChallenge == "MinBias":
-			self.setup(CollisionDirectory = "/afs/cern.ch/user/a/atlidali/w0/data/minbias/"
-			      ,CollisionFileList = "inputData_minbias.txt")
-
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-00-00-00'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-01-00-00'
-			if geometrySetting == 'Aligned':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-00-01-05'
-
-		if self.thisChallenge == "Halo":
-			# Beam halo events
-			self.setup(BeamHaloDirectory = "castor:/castor/cern.ch/user/t/tcorneli/"
-			      ,BeamHaloFileList = "inputData_halo_digits.txt")
-
-		if self.thisChallenge == "BeamGas":
-			# Beam Gas O events
-			self.setup(BeamGasDirectory ="castor:/castor/cern.ch/user/s/stradlin/BeamGas_1.1/digit/hijing.O.digit/"
-			      ,BeamGasFileList  = "inputData_beamgas_digits.txt")
-
-		if self.thisChallenge == "900GeV":
-			self.setup(CollisionDirectory = "",
-				   CollisionFileList  = "CustomRun_900GeV.txt",
-				   CosmicNoBFieldFileList = "GoodRunList_Cosmic_NoBF_DPD_atlasdatadisk.txt")
-
-			if 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-02"
-				# self.GlobalTag = "COMCOND-ES1PST-001-00"
-				self.GlobalTag = "COMCOND-ES1PS-001-00"
-#			self.DetDescrVersion = "ATLAS-GEO-08-00-02"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-		
-		if self.thisChallenge == "7TeV":
-			self.setup(CollisionDirectory = "",
-				   CollisionFileList  = "CustomRun_7TeV.txt",
-				   CosmicNoBFieldFileList = "CustomRun_CosmicsNoBF_7TeV.txt",
-				   CosmicBFieldFileList = "CustomRun_CosmicsBF_7TeV.txt")
-			#self.GlobalTag = "COMCOND-ES1PST-002-00"
-			self.GlobalTag = ""  #2010 data
-			#self.DetDescrVersion = "ATLAS-GEO-10-00-00"
-			self.DetDescrVersion = ""
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-		
-		
-		if self.thisChallenge == "SingleBeam":
-			self.setup(CollisionDirectory = ""
-				   ,CollisionFileList  = "CustomRun_SingleBeam.txt")
-			if 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-03-00-00"
-				self.GlobalTag = "COMCOND-ES1C-000-00"
-		
-	
-	def setup(self
+                self.thisChallenge = thisChallenge
+                self.GlobalTag = ""
+                self.DetDescrVersion = ""
+                self.DetDescrVersionNoField = ""
+
+                if self.thisChallenge == "CSC":
+                        if os.environ['HOSTNAME'] == 'tst01.ific.uv.es':
+                                collisionDir = "/data1/cescobar/data/csc/multimuons/"
+                        else:
+                                collisionDir = "castor:/castor/cern.ch/user/a/atlidali/data/csc/multimuons/"
+
+                        self.setup(CollisionDirectory = collisionDir
+                              ,CollisionFileList = "inputData_CSC_multimuons_digits.txt"
+                              ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/TRTBarrel/misaligned/"
+                              ,CosmicBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/NoField/TRTBarrel/misaligned/"
+                              ,CosmicNoBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOff.txt")
+
+                        if geometrySetting == 'Nominal':
+                            self.DetDescrVersion = "ATLAS-CSC-01-00-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-00-00'
+                        if geometrySetting == 'Perfect':
+                            self.DetDescrVersion = "ATLAS-CSC-01-02-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-01-00'
+                        if geometrySetting == 'Aligned':
+                            self.DetDescrVersion = "ATLAS-CSC-01-00-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-01-05'
+
+                if self.thisChallenge == 'FDR1':
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/h/hawkings/calibstream/fdr1/"
+                              ,CollisionFileList  = "inputData_FDR1_idcalibstream.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+                        if geometrySetting == 'Nominal':
+                                self.GlobalTag = 'COMCOND-REPC-003-00'
+                        if geometrySetting == 'Aligned':
+                                self.GlobalTag = 'COMCOND-ES1C-000-00'
+
+                if self.thisChallenge == 'FDR2':
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/b/bvendapi/FDR2_Pi/BS_files/CalibStream/"
+                              ,CollisionFileList = "inputData_FDR2_idcalibstream.txt"
+                              ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/b/bvendapi/cosmics_fdr2/"
+                              ,CosmicBFieldFileList = "inputData_FDR2_cosmicstream.txt")
+
+
+                if self.thisChallenge == 'M8plus':
+                        self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
+                                   ,CosmicBFieldFileList  = "inputData_M8plus_Cosmic_91800.txt"
+                                   ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
+                                   ,CosmicNoBFieldFileList  = "inputData_M8plus_Cosmic_NoBField.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-03-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-04-00-00"
+
+                if self.thisChallenge == 'Cosmic09':
+                        self.setup(CosmicBFieldDirectory = ""
+                              #,CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
+                              ,CosmicBFieldFileList  = "inputData_Cosmic09_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = ""
+                              #,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
+                              ,CosmicNoBFieldFileList  = "inputData_Cosmic09_BFOff.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+                        if geometrySetting == 'Nominal':
+                                self.GlobalTag = 'COMCOND-REPC-003-00'
+                        if geometrySetting == 'Aligned':
+                                self.GlobalTag = 'COMCOND-ES1C-000-00'
+
+
+                if self.thisChallenge == 'CosmicStream':
+                        self.setup(CosmicBFieldFileList =  "inputData_CosmicStream.txt"
+                              ,CosmicBFieldDirectory = "rfio:/castor/cern.ch/user/s/sfyrla/91338_PEB/")
+
+                if self.thisChallenge == 'CosmicsRel14':
+                        self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEO-03-00-00/dig/"
+                              ,CosmicBFieldFileList  = "inputData_CosmicsRel14_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEONF-04-00-00/"
+                              ,CosmicNoBFieldFileList  = "inputData_CosmicsRel14_BFOff.txt")
+
+                if self.thisChallenge == 'CalibrationStream':
+                        self.setup(CollisionDirectory = ""
+                              ,CollisionFileList = "inputData_CalibrationStream.txt")
+
+                if self.thisChallenge == 'CosmicSim09':
+                        self.setup(CosmicBFieldFileList = "inputData_CosmicSim2009_BOn.txt"
+                              ,CosmicNoBFieldFileList = "inputData_CosmicSim2009_BOff.txt")
+
+
+                if self.thisChallenge == "FDR09":
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/FDR2_IDCalibStream/ESD/"
+                              ,CollisionFileList = "inputData_FDR09.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-CSC-02-00-00"
+                                self.GlobalTag = 'OFLCOND-FDR-02-08-00'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-CSC-02-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-01-00-00'
+
+                if self.thisChallenge == "MC09":
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/perf-idtracking/dq2/mc09_valid/ESD/e436_s561_r731/mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731_tid076491/"
+                              ,CollisionFileList = "inputData_multimuons09.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-00-00-07'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-01-00-00'
+
+                if self.thisChallenge == "MinBias":
+                        self.setup(CollisionDirectory = "/afs/cern.ch/user/a/atlidali/w0/data/minbias/"
+                              ,CollisionFileList = "inputData_minbias.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-00-00-00'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-01-00-00'
+                        if geometrySetting == 'Aligned':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-00-01-05'
+
+                if self.thisChallenge == "Halo":
+                        # Beam halo events
+                        self.setup(BeamHaloDirectory = "castor:/castor/cern.ch/user/t/tcorneli/"
+                              ,BeamHaloFileList = "inputData_halo_digits.txt")
+
+                if self.thisChallenge == "BeamGas":
+                        # Beam Gas O events
+                        self.setup(BeamGasDirectory ="castor:/castor/cern.ch/user/s/stradlin/BeamGas_1.1/digit/hijing.O.digit/"
+                              ,BeamGasFileList  = "inputData_beamgas_digits.txt")
+
+                if self.thisChallenge == "900GeV":
+                        self.setup(CollisionDirectory = "",
+                                   CollisionFileList  = "CustomRun_900GeV.txt",
+                                   CosmicNoBFieldFileList = "GoodRunList_Cosmic_NoBF_DPD_atlasdatadisk.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-02"
+                                # self.GlobalTag = "COMCOND-ES1PST-001-00"
+                                self.GlobalTag = "COMCOND-ES1PS-001-00"
+#                       self.DetDescrVersion = "ATLAS-GEO-08-00-02"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+                if self.thisChallenge == "7TeV":
+                        self.setup(CollisionDirectory = "",
+                                   CollisionFileList  = "CustomRun_7TeV.txt",
+                                   CosmicNoBFieldFileList = "CustomRun_CosmicsNoBF_7TeV.txt",
+                                   CosmicBFieldFileList = "CustomRun_CosmicsBF_7TeV.txt")
+                        #self.GlobalTag = "COMCOND-ES1PST-002-00"
+                        self.GlobalTag = ""  #2010 data
+                        #self.DetDescrVersion = "ATLAS-GEO-10-00-00"
+                        self.DetDescrVersion = ""
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+
+                if self.thisChallenge == "SingleBeam":
+                        self.setup(CollisionDirectory = ""
+                                   ,CollisionFileList  = "CustomRun_SingleBeam.txt")
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-03-00-00"
+                                self.GlobalTag = "COMCOND-ES1C-000-00"
+
+
+        def setup(self
                   ,CollisionDirectory = ""
                   ,CollisionFileList = ""
                   ,CosmicBFieldDirectory = ""
                   ,CosmicBFieldFileList  = ""
                   ,CosmicNoBFieldDirectory = ""
                   ,CosmicNoBFieldFileList  = ""):
-		
-		self.CollisionDirectory = CollisionDirectory
-		self.CollisionFileList = CollisionFileList
-		if self.CollisionFileList:
-			os.system("get_files -jo %s >/dev/null" % self.CollisionFileList)
 
-		self.CosmicBFieldDirectory = CosmicBFieldDirectory
-		self.CosmicBFieldFileList = CosmicBFieldFileList
-		if self.CosmicBFieldFileList:
-			os.system("get_files -jo %s >/dev/null" % self.CosmicBFieldFileList)
+                self.CollisionDirectory = CollisionDirectory
+                self.CollisionFileList = CollisionFileList
+                if self.CollisionFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CollisionFileList)
 
-		self.CosmicNoBFieldDirectory = CosmicNoBFieldDirectory
-		self.CosmicNoBFieldFileList = CosmicNoBFieldFileList
-		if self.CosmicNoBFieldDirectory:
-			os.system("get_files -jo %s >/dev/null" % self.CosmicNoBFieldFileList)
-
-            
+                self.CosmicBFieldDirectory = CosmicBFieldDirectory
+                self.CosmicBFieldFileList = CosmicBFieldFileList
+                if self.CosmicBFieldFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CosmicBFieldFileList)
 
+                self.CosmicNoBFieldDirectory = CosmicNoBFieldDirectory
+                self.CosmicNoBFieldFileList = CosmicNoBFieldFileList
+                if self.CosmicNoBFieldFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CosmicNoBFieldFileList)
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
index b091ed52a1a77a5a9c84dc0b80074f0d9e1b498c..64e4885f8470a50738bbfae233579d187be5c8cb 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
@@ -6,9 +6,6 @@
 #
 # =====================================================================
 
-from __future__ import print_function
-
-from threading import Thread
 import os
 import time
 
@@ -26,11 +23,11 @@ class getAthenaConfig:
         
         try:
             self.athenaTestArea = os.environ['TestArea']
-        except:
+        except Exception:
             self.athenaTestArea = ""
         try:
             self.athenaTags = os.environ['AtlasProject'] + ',' + self.athenaConf.replace("-",",")
-        except:
+        except Exception:
             self.athenaTags = ""
         if self.atlasSet == "AtlasSetup":
             self.atlasSetupPath = os.environ['AtlasSetup']
@@ -119,8 +116,7 @@ class manageJob:
         #   os.system('ln -s %s %s' % (self.MonitoringScript,self.RunPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
 
     def writeJO(self):
-        PrefixName="Iter%d%s_" % (self.i, self.folderSuffix)
-        
+
         job=open(self.RunPath + self.JOBNAME,'w')
         job.write('##-------- Alignment Configuration --------------------\n')
 
@@ -267,7 +263,6 @@ class manageJob:
         while (os.popen('bjobs -w').read().find(self.preName)!=-1 and os.popen('bjobs -w').read().find(self.folderSuffix)!=-1):
             time.sleep(30)
             
-import os
 class SortCpus:
     def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel,doDetailedSplitting = False, nEventsPerFile=-1):
             def sort_by_value(d):
@@ -323,7 +318,6 @@ class SortCpus:
                             SizeList[i][1] = curr[4].rstrip()
                             i = i+1
                     FinalList = {}
-                    count = 0
                     
                     for i in range(0,len(SizeList)):
                         #print (SizeList[i][0])
@@ -786,8 +780,6 @@ def HandleRunOptions():
     User_ColCPUs = 0
     Col_CPUs = Def_ColCPUs
 
-    argNum = 1
-    argMax = len(sys.argv)-1
     argCurr = -1 # jut to make that first time it points to 0
     
     #print (' >>> user input has %s arguments' % argMax)
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
index 7332c5397065314c2342852cac5834fc23d3d99b..2d0027b91ca84188effda5dffb56018022292ce3 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
@@ -1,210 +1,208 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-from __future__ import print_function
-
 import os, time
 
 class manageJobEoverPMaps:
-	def __init__(self,
-		     OutputPath,
-		     dataName,
-		     iter,
-		     part,
-		     JOBNAME,
-		     preName,
-     	     ATHENACFG,
-		     RecoOptions={},
-		     extraOptions={},
-		     AlignmentOptions={},
-		     RecoScript="InDetAlignExample/NewTopOptions.py",
-		     AlignmentScript="InDetAlignExample/NewInDetAlignAlgSetup.py",
-		     AlignmentLevels = "InDetAlignExample/NewInDetAlignLevels.py",
-		     #MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py", 
-		     QUEUE = "1nh",
-		     CMTDIR = "",
-		     inputPoolFiles = "",
-		     SCRIPTNAME = "",
-		     RunPath = ""):
-		self.OutputPath = OutputPath
-		self.preName = preName
-		self.i=iter
-		self.j=part
-		self.dataName = dataName
-		self.QUEUE=QUEUE
-		self.CMTDIR=CMTDIR
-		self.ATHENACFG=ATHENACFG
-		self.inputPoolFiles = inputPoolFiles
-		self.JOBNAME=JOBNAME
-		self.RecoOptions = RecoOptions
-		self.extraOptions = extraOptions
-		self.RecoScript = RecoScript
-		self.AlignmentOptions = AlignmentOptions
-		self.AlignmentScript = AlignmentScript
-		self.AlignmentLevels = AlignmentLevels
-		#self.MonitoringScript = MonitoringScript
-		self.SCRIPTNAME = SCRIPTNAME
-		self.RunPath = RunPath
-
-		
-	def createDirectories(self):
-		os.system('mkdir -p %s' % self.RunPath)
-
-	def writeJO(self):		
-		job=open(self.RunPath + self.JOBNAME,'w')
-		
-
-		job.write('##-------- Reconstruction Configuration --------------------\n')
-		
-		for option in self.RecoOptions:
-			if type(self.RecoOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				if option == "inputFiles":
-					customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
-				else:
-					customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
-				job.write(customoption)
-
-		if len(self.extraOptions):
-			job.write("\n")
-			job.write('##-------- Extra Configuration --------------------\n')
-		
-		for option in self.extraOptions:
-			if type(self.extraOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t" +  str(self.extraOptions[option])+'\n'
-				job.write(customoption)
-
-		job.write("\n")
-		job.write('##-------- End of custom options -------------\n')
-
-		# Need to write the InDetRec Options Here:
-		job.write("\n")
-		job.write('##-------- Load Reconstruction --------------------\n')
-		job.write('include("'+str(self.RecoScript)+'") \n')
-
-		job.close()
-
-	def writeScript(self):
-		self.SCRIPTNAME = self.RunPath + '/' + self.SCRIPTNAME
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J %s_%s_Part%02d \n" % (self.preName,self.dataName, self.j))
-		script.write("#BSUB -o %s/logs/%s_Part%02d.log \n" % (self.OutputPath,self.dataName,self.j))
-		script.write("#BSUB -e %s/logs/%s_Part%02d.err \n" % (self.OutputPath,self.dataName,self.j))
-					
-		script.write("#BSUB -q %s \n" % self.QUEUE)
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		if self.ATHENACFG.atlasSetup() == "CMTHOME":
-			script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
-		elif "single" in self.ATHENACFG.AtlasSetupOptions():
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		else:
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		script.write("export STAGE_SVCCLASS=atldata\n")
-		
-
-		script.write("cd %s \n" % self.RunPath)
-
-		for file in self.inputPoolFiles:
-			#if 'ESD' in file or 'AlignmentConstants' in file:
-			if 'AlignmentConstants' in file or 'ESD' in file:
-				script.write("pool_insertFileToCatalog "+ file + " \n")
-		
-		script.write("athena %s \n" % self.JOBNAME)
-		script.write("cd -")
-		script.close()
-		
-	def send(self,runmode):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		if runmode == "batch":
-			os.system("bsub <%s" % self.SCRIPTNAME)
-		   
-			print ("  Sending %s_%s_Part%02d job to LxBatch" % (self.preName,self.dataName, self.j))
-				  
-		elif runmode == "local":
-			print ("  Running %s_%s_Part%02d job" % (self.preName,self.dataName,self.j))
-			os.system("sh %s | tee %s/logs/%s_Part%02d.log \n" % (self.SCRIPTNAME, self.OutputPath,self.dataName,self.j))
-
-			
-		print ("----------------------------------------------")
-
-	def wait(self):
-		print ("Processing in lxbatch...")
-		# Wait for signal
-		time.sleep(30)
-		while os.popen('bjobs -w').read().find(self.preName)!=-1:
-			time.sleep(30)
-			
+        def __init__(self,
+                     OutputPath,
+                     dataName,
+                     iter,
+                     part,
+                     JOBNAME,
+                     preName,
+                     ATHENACFG,
+                     RecoOptions={},
+                     extraOptions={},
+                     AlignmentOptions={},
+                     RecoScript="InDetAlignExample/NewTopOptions.py",
+                     AlignmentScript="InDetAlignExample/NewInDetAlignAlgSetup.py",
+                     AlignmentLevels = "InDetAlignExample/NewInDetAlignLevels.py",
+                     #MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py",
+                     QUEUE = "1nh",
+                     CMTDIR = "",
+                     inputPoolFiles = "",
+                     SCRIPTNAME = "",
+                     RunPath = ""):
+                self.OutputPath = OutputPath
+                self.preName = preName
+                self.i=iter
+                self.j=part
+                self.dataName = dataName
+                self.QUEUE=QUEUE
+                self.CMTDIR=CMTDIR
+                self.ATHENACFG=ATHENACFG
+                self.inputPoolFiles = inputPoolFiles
+                self.JOBNAME=JOBNAME
+                self.RecoOptions = RecoOptions
+                self.extraOptions = extraOptions
+                self.RecoScript = RecoScript
+                self.AlignmentOptions = AlignmentOptions
+                self.AlignmentScript = AlignmentScript
+                self.AlignmentLevels = AlignmentLevels
+                #self.MonitoringScript = MonitoringScript
+                self.SCRIPTNAME = SCRIPTNAME
+                self.RunPath = RunPath
+
+
+        def createDirectories(self):
+                os.system('mkdir -p %s' % self.RunPath)
+
+        def writeJO(self):
+                job=open(self.RunPath + self.JOBNAME,'w')
+
+
+                job.write('##-------- Reconstruction Configuration --------------------\n')
+
+                for option in self.RecoOptions:
+                        if type(self.RecoOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                if option == "inputFiles":
+                                        customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
+                                else:
+                                        customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
+                                job.write(customoption)
+
+                if len(self.extraOptions):
+                        job.write("\n")
+                        job.write('##-------- Extra Configuration --------------------\n')
+
+                for option in self.extraOptions:
+                        if type(self.extraOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t" +  str(self.extraOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- End of custom options -------------\n')
+
+                # Need to write the InDetRec Options Here:
+                job.write("\n")
+                job.write('##-------- Load Reconstruction --------------------\n')
+                job.write('include("'+str(self.RecoScript)+'") \n')
+
+                job.close()
+
+        def writeScript(self):
+                self.SCRIPTNAME = self.RunPath + '/' + self.SCRIPTNAME
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J %s_%s_Part%02d \n" % (self.preName,self.dataName, self.j))
+                script.write("#BSUB -o %s/logs/%s_Part%02d.log \n" % (self.OutputPath,self.dataName,self.j))
+                script.write("#BSUB -e %s/logs/%s_Part%02d.err \n" % (self.OutputPath,self.dataName,self.j))
+
+                script.write("#BSUB -q %s \n" % self.QUEUE)
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                if self.ATHENACFG.atlasSetup() == "CMTHOME":
+                        script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
+                elif "single" in self.ATHENACFG.AtlasSetupOptions():
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                else:
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                script.write("export STAGE_SVCCLASS=atldata\n")
+
+
+                script.write("cd %s \n" % self.RunPath)
+
+                for file in self.inputPoolFiles:
+                        #if 'ESD' in file or 'AlignmentConstants' in file:
+                        if 'AlignmentConstants' in file or 'ESD' in file:
+                                script.write("pool_insertFileToCatalog "+ file + " \n")
+
+                script.write("athena %s \n" % self.JOBNAME)
+                script.write("cd -")
+                script.close()
+
+        def send(self,runmode):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                if runmode == "batch":
+                        os.system("bsub <%s" % self.SCRIPTNAME)
+
+                        print ("  Sending %s_%s_Part%02d job to LxBatch" % (self.preName,self.dataName, self.j))
+
+                elif runmode == "local":
+                        print ("  Running %s_%s_Part%02d job" % (self.preName,self.dataName,self.j))
+                        os.system("sh %s | tee %s/logs/%s_Part%02d.log \n" % (self.SCRIPTNAME, self.OutputPath,self.dataName,self.j))
+
+
+                print ("----------------------------------------------")
+
+        def wait(self):
+                print ("Processing in lxbatch...")
+                # Wait for signal
+                time.sleep(30)
+                while os.popen('bjobs -w').read().find(self.preName)!=-1:
+                        time.sleep(30)
+
 class mergeScriptEoverPMaps:
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     QUEUE,
-		     CMTDIR,
-		     ATHENACFG,
-		     SCRIPTNAME,
-		     JOBNAME
-		     ):
-		self.OutputPath = OutputPath
-		self.preName = preName
-		self.QUEUE=QUEUE
-		self.CMTDIR=CMTDIR
-		self.ATHENACFG=ATHENACFG
-		self.SCRIPTNAME=SCRIPTNAME
-		self.JOBNAME=JOBNAME
-
-	def write(self):
-		TempPath = self.OutputPath
-		self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
-		
-		# list of Files to be merged
-		mergeFilesName = TempPath + '/merge_Files.txt'
-		script=open(self.SCRIPTNAME,'w')
-		
-		script.write("#BSUB -J %s_Merge \n" % (self.preName))
-		script.write("#BSUB -o %s/logs/%s_Merge.log \n" % (self.OutputPath,self.preName))
-		script.write("#BSUB -e %s/logs/%s_Merge.err \n" % (self.OutputPath,self.preName))
-		
-		script.write("#BSUB -q %s \n" % self.QUEUE)
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		if self.ATHENACFG.atlasSetup() == "CMTHOME":
-			script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
-
-		elif "single" in self.ATHENACFG.AtlasSetupOptions():
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-
-		else:
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		script.write("cd %s \n" % (self.OutputPath))
-		script.write("DQHistogramMerge.py %s ./eoverpValidationMerge.root True\n" %(mergeFilesName))
-		script.write("cd -")
-		script.close()
-		
-		mergeFiles=open(mergeFilesName,"w")
-		os.chdir(self.OutputPath)
-		files = os.popen("find -name 'eoverpValidationOut.root'").readlines()
-		for line in files:
-			mergeFiles.write("%s" % line)
-		mergeFiles.close()
-				
-
-	def send(self,runmode):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		if runmode == "batch":
-			os.system("bsub <%s" % self.SCRIPTNAME)
-			
-			print ("  Sending %s_Merge job " % self.preName)
-			
-		elif runmode == "local":
-			print ("  Running %s_Merge job" % self.preName)
-			os.system("sh %s | tee %s/logs/Merge.log \n" % (self.SCRIPTNAME, self.OutputPath))
-			
-		print ("----------------------------------------------")
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     QUEUE,
+                     CMTDIR,
+                     ATHENACFG,
+                     SCRIPTNAME,
+                     JOBNAME
+                     ):
+                self.OutputPath = OutputPath
+                self.preName = preName
+                self.QUEUE=QUEUE
+                self.CMTDIR=CMTDIR
+                self.ATHENACFG=ATHENACFG
+                self.SCRIPTNAME=SCRIPTNAME
+                self.JOBNAME=JOBNAME
+
+        def write(self):
+                TempPath = self.OutputPath
+                self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
+
+                # list of Files to be merged
+                mergeFilesName = TempPath + '/merge_Files.txt'
+                script=open(self.SCRIPTNAME,'w')
+
+                script.write("#BSUB -J %s_Merge \n" % (self.preName))
+                script.write("#BSUB -o %s/logs/%s_Merge.log \n" % (self.OutputPath,self.preName))
+                script.write("#BSUB -e %s/logs/%s_Merge.err \n" % (self.OutputPath,self.preName))
+
+                script.write("#BSUB -q %s \n" % self.QUEUE)
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                if self.ATHENACFG.atlasSetup() == "CMTHOME":
+                        script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
+
+                elif "single" in self.ATHENACFG.AtlasSetupOptions():
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+
+                else:
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                script.write("cd %s \n" % (self.OutputPath))
+                script.write("DQHistogramMerge.py %s ./eoverpValidationMerge.root True\n" %(mergeFilesName))
+                script.write("cd -")
+                script.close()
+
+                mergeFiles=open(mergeFilesName,"w")
+                os.chdir(self.OutputPath)
+                files = os.popen("find -name 'eoverpValidationOut.root'").readlines()
+                for line in files:
+                        mergeFiles.write("%s" % line)
+                mergeFiles.close()
+
+
+        def send(self,runmode):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                if runmode == "batch":
+                        os.system("bsub <%s" % self.SCRIPTNAME)
+
+                        print ("  Sending %s_Merge job " % self.preName)
+
+                elif runmode == "local":
+                        print ("  Running %s_Merge job" % self.preName)
+                        os.system("sh %s | tee %s/logs/Merge.log \n" % (self.SCRIPTNAME, self.OutputPath))
+
+                print ("----------------------------------------------")
                        
diff --git a/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml b/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml
deleted file mode 100644
index 2a7210c3414b6bb385731640ad9b3925ec73da85..0000000000000000000000000000000000000000
--- a/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml
+++ /dev/null
@@ -1,78 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE unifiedTestConfiguration SYSTEM "http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/unifiedTestConfiguration.dtd">
-
-<unifiedTestConfiguration>
- <atn>
-   <!-- Test has been failing for a long time and no one is willing to fix it.
-        See ATLIDALIGN-7.
-   <TEST name="InDetAlignExample_ATNSimple" type="athena" suite="Examples">
-     <options_atn>InDetAlignExample/AlignmentATNSimple.py</options_atn>
-     <author> Atlas Developer </author>
-     <mailto>lacuesta@ific.uv.es, john.alison@cern.ch, anthony.morley@cern.ch, salvador.marti@ific.uv.es, jike.wang@cern.ch, regina.moles.valls@cern.ch</mailto>
-     <expectations>
-       <returnValue>0</returnValue>
-     </expectations>
-   </TEST>
-   -->
-   
- </atn>
-
- <rtt xmlns="http://www.hep.ucl.ac.uk/atlas/AtlasTesting/rtt">
-   <rttContactPerson>InDetAlignment Monitoring Group</rttContactPerson>
-   <mailto>Salvador.Marti@ific.uv.es, moles@ific.uv.es, lacuesta@ific.uv.es</mailto>
-   <refRelease>15.0.0</refRelease>
-   <jobList>
-      <classification>
-         <displayClass>OfflineValidation</displayClass>
-         <displayProcess>Reco</displayProcess>
-         <displayComponent>Det-InDet</displayComponent>
-      </classification>
-      <jobTransform userJobId="NewInDetSiLevel1Alignment"> 
-	<doc>Script to run 2 iterations of level 1 silicon only alignment.</doc> 
-	<jobTransformJobName>NewInDetSiLevel1Alignment</jobTransformJobName>
-        <jobTransformCmd>RunIterator.py</jobTransformCmd>
-        <group>NewInDetSiLevel1Alignment</group>
-        <queue>long</queue>
-	<castorSvcClass>atlasgroupdisk</castorSvcClass>
-	<castorStageHost>castoratlas</castorStageHost>
-      </jobTransform> 
-   
-   </jobList>
-   
-   <jobGroups>
-     <jobGroup name="NewInDetSiLevel1Alignment" parent="Transform">
-       <keepFilePattern>Iter0/*</keepFilePattern>
-       <keepFilePattern>Iter0/logs/*</keepFilePattern>
-       <keepFilePattern>Iter0/Collision/*</keepFilePattern>
-       <keepFilePattern>Iter1/*</keepFilePattern>
-       <keepFilePattern>Iter1/logs/*</keepFilePattern>
-       <keepFilePattern>Iter1/Collision/*</keepFilePattern>
-       <keepFilePattern>*.html</keepFilePattern>
-       <keepFilePattern>*.eps</keepFilePattern>
-       <keepFilePattern>*.png</keepFilePattern>
-       <keepFilePattern>*.html</keepFilePattern>
-       <auxFilePattern>NewSiL1AlignRTT.cc</auxFilePattern>
-       <auxFilePattern>Constants_L1.cc</auxFilePattern>
-       <auxFilePattern>InDetAlignExample_NewInDetSiLevel1Alignment.html</auxFilePattern>
-       <action>
-	 <modulename>RttLibraryTools</modulename>
-	 <testname>ROOTMacroRunner</testname>
-	 <arg>
-           <argname>macro</argname>
-           <argvalue>NewSiL1AlignRTT.cc</argvalue>
-	 </arg>
-	 <arg>
-           <argname>macro</argname>
-           <argvalue>Constants_L1.cc</argvalue>
-	 </arg>
-       </action>
-       <testToRemove>
-         <jobGroupName>Top</jobGroupName> 
-	 <testidentifier>CheckFileRunner0</testidentifier> 
-       </testToRemove>
-     </jobGroup>
-   </jobGroups>
-      
- </rtt>
-</unifiedTestConfiguration>
-