diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/README.md b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a39ba21681fd21c793e91a0d551e96be6fe02d
--- /dev/null
+++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/README.md
@@ -0,0 +1,64 @@
+2018 IBL Leakage current measurement
+
+Code started by Nick Dann and further developed by Jennet Dickinson (jdickinson@lbl.gov).
+For questions, please contact Jennet and/or Ben Nachman (bnachman@cern.ch).
+
+Contents:
+
+grabAllAutoStave.py
+* Nick's modified script; downloads data from DCS when run on lxplus.
+  Jennet's raw 2018 data is stored at
+  /eos/atlas/atlascerngroupdisk/det-ibl/data/leakage-current/rawData/
+  The function grabData fetches one day at a time. The function grabDataStave divides each
+  day into smaller requests and then merges them into one file per day. Use grabDataStave
+  if grabData returns errors like: "Detected very big DB data request"
+
+getIBLDate.py
+* Nick's modified script; reads the raw data and appends new days to the processed
+  .ssv file for each module.
+  Processed data is stored at
+  /eos/atlas/atlascerngroupdisk/det-ibl/data/leakage-current/processedData
+
+lumiFormat.py
+* Processes the luminosity from COOL to match the format of Nick's lumi text files.
+  Thank you to Aidan Grummer and Eric Torrence for the COOL setup.
+  From the COOL output, take the instantaneous luminosity for each lumi block from the
+  "Lumi" column and multiply by the length of the lumi block in seconds (column "Len (s)").
+  This gives the integrated luminosity collected during that lumi block. Warning:
+  the column "IntLumi" looks useful, but it counts integrated luminosity from the beginning
+  of the fill, not from the beginning of the LB.
+
+avgData.py
+* Saves a pandas dataframe containing HV_VMeas [V], PP4LV [V], TModule [C], HV_IMeas [mA], and
+  integrated luminosity [fb-1] per LB for each module. Also plots HV_VMeas [V], PP4LV [V],
+  TModule [C], and HV_IMeas [mA] against integrated luminosity. See output here:
+  /eos/atlas/atlascerngroupdisk/det-ibl/data/leakage-current/processedData/means_data
+
+  Fills all variables for each lumi block, then keeps only those lumi blocks where a
+  measurement of HV_IMeas is made.
+
+  Run by doing
+  python avgData.py ${moduleName}
+
+avgSim.py
+* Saves a pandas dataframe containing HV_VMeas [V], PP4LV [V], TModule [C], HV_IMeas [mA], and
+  integrated luminosity [fb-1] per LB for each module. Also plots HV_VMeas [V], PP4LV [V],
+  TModule [C], and HV_IMeas [mA] against integrated luminosity. See output here:
+  /eos/atlas/atlascerngroupdisk/det-ibl/data/leakage-current/processedData/means_sim
+
+  Fills all variables for each hour, including during shutdowns. Used as input for
+  Hamburg model simulations.
+ + Run by doing + python avgSim.py ${moduleName} + +submit_avg.sh and run_avg.sh +* Scripts for submitting batch jobs on lxplus to do the average with avgOverLB.py + Submits one job per module + +graphs.py +* Averages over module groups at similar z and creates TGraphs for plotting + +plot.C +* Draws the plots diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgData.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgData.py new file mode 100644 index 0000000000000000000000000000000000000000..e09c28627a3d5be29a5d5c2efee7aceeb1190f91 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgData.py @@ -0,0 +1,219 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os, sys +import numpy as np +import pandas as pd +import datetime, time + +# Factor that corrects the leakage current to 20 degrees C +def tempCorr(Temp,Eg): + + kB = 8.617*pow(10,-5) # eV/K + Tref = 273.0 # Reference temperature in K + Temp = Temp + 273 # Convert to K + + return pow(1.0*Tref/Temp,2)*np.exp((-0.5*Eg/kB)*(1.0/Tref - 1.0/Temp)) + +# Jennet shamelessly steals Nick's code for bookkeeping +def averageData (m,lumi_df): + + #home directory definition + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate,False,"LI_S00_0_M0"] + homeDirectory = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + + # Define path to folder + dataFolder = homeDirectory + "/IBLData/processedData/means_dat/" + inputFolder = homeDirectory + "/IBLData/processedData/" + runDataFolder = homeDirectory + "/IBLData/processedData/Lumi/runData.txt" + + if not os.path.exists(dataFolder): + os.mkdir(dataFolder) + + # Time bins = every day + b = lumi_df["start"].drop_duplicates() + + output_dict = pd.DataFrame({}) + + # Loop over lumi blocks + lumis = [] + total_lumi = 0 + for l in lumi_df["intlumi"]: + total_lumi += l/(10**9) + lumis += [ total_lumi ] + + print(total_lumi) + lumi_df["totlumi"] = lumis + +# tmp_dict = lumi_df.groupby(pd.cut(lumi_df["start"],bins=b),as_index=False).mean() +# tmp_dict.fillna(method='ffill',inplace=True) + output_dict["start"] = b + output_dict["intlumi"] = lumi_df["totlumi"] + output_dict.fillna(method='ffill',inplace=True) + times = [datetime.datetime.utcfromtimestamp(s) for s in b] + + plt.scatter(times,output_dict["intlumi"],marker=".") + plt.title(m) + plt.savefig(dataFolder+"intlumi/"+m+"_time.png") + plt.close() + + # Jennet gets these from https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelConditionsRUN2 + voltage_settings = [] + + volume = [] + sensorSize_planar = 50*250*200*1E-12 #cm3 + sensorSize_3D = 50*250*230*1E-12 #cm3 + + sensorsPerModule = 336*80 + + if m.endswith("M4"): # 3D module + for s in output_dict["start"]: + volume += [sensorSize_3D*sensorsPerModule*4] + + if m == "LI_S11_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + elif s < time.mktime(datetime.date(2018,1,1).timetuple()): + voltage_settings += [30.0] + else: + voltage_settings += [20.0] + continue + if m == "LI_S12_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + elif s < time.mktime(datetime.date(2018,1,1).timetuple()): + voltage_settings += [21.0] + else: + voltage_settings += [30.0] + continue + if m == 
"LI_S13_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [15.0] + else: + voltage_settings += [40.0] + continue + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + else: + voltage_settings += [40.0] + else: # Planar module + for s in output_dict["start"]: + volume += [sensorSize_planar*sensorsPerModule*4] + + if s < time.mktime(datetime.date(2016,9,16).timetuple()): + voltage_settings += [80.0] + elif s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [150.0] + elif s < time.mktime(datetime.date(2017,11,7).timetuple()): + voltage_settings += [350.0] + else: + voltage_settings += [400.0] + + output_dict["HV_VSet"] = voltage_settings + output_dict["volume"] = volume + + dataTypes = ["PP4LV","TModule","ENV_TT","HV_VMeas","HV_IMeas"] + dataType_index = 0 + + for dataType in dataTypes: + + print ("Investigating " + dataType ) + + if not os.path.exists(dataFolder+dataType): + os.mkdir(dataFolder+dataType) + + # DO THE AVERAGES + infile = inputFolder + dataType + "/" + m + ".ssv" + meas_header=["module_name","measurement_date","measurement_time","unix-timestamp",dataType] + meas_dict = pd.read_csv(infile, names=meas_header, delimiter=' ', skiprows=1) + output_dict[dataType] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + + if dataType == "TModule" or dataType == "PP4LV" or dataType == "ENV_TT": + output_dict.fillna(method='ffill',inplace=True) + + if dataType == "HV_VMeas": + output_dict["HV_VMeas_0"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + output_dict.fillna(method='ffill',inplace=True) + output_dict["HV_VMeas_1"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + output_dict.fillna(method='bfill',inplace=True) + output_dict["HV_VMeas"] = output_dict[["HV_VMeas_0","HV_VMeas_1"]].mean(axis=1) + + output_dict.plot.scatter(x="intlumi",y=dataType,marker=".") + plt.title(m) + plt.savefig(dataFolder+dataType+"/"+m+".png") + plt.close() + + plt.scatter(times,output_dict[dataType],marker=".") + plt.title(m) + plt.savefig(dataFolder+dataType+"/"+m+"_time.png") + plt.close() + + # Take cooling pipe temp +# output_dict['TModule'] = np.where(output_dict['TModule'] < -20, output_dict['ENV_TT'], output_dict['TModule']) + + plt.scatter(times,output_dict["TModule"],marker=".",s=1,label="TModule") + plt.scatter(times,output_dict["ENV_TT"],marker=".",s=1,label="ENV_TT") + plt.legend() + plt.title(m) + plt.savefig(m+".png") + plt.close() + + saveFileName = dataFolder + m + "_nocuts.ssv" + if os.path.exists(saveFileName): + os.remove(saveFileName) + output_dict.to_csv(saveFileName,index=False) + + output_dict.dropna(inplace=True) + + # Veto + output_dict = output_dict[abs(output_dict["HV_VMeas"]-output_dict["HV_VSet"])<1.0] + + # Correct + output_dict["I_Eg1.12"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.12) / row["volume"] for i, row in output_dict.iterrows() ] + output_dict["I_Eg1.21"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.21) / row["volume"] for i, row in output_dict.iterrows() ] + output_dict["I_Eg1.30"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.30) / row["volume"] for i, row in output_dict.iterrows() ] + + if not os.path.exists(dataFolder+"I_Eg1.12"): + os.mkdir(dataFolder+"I_Eg1.12") + output_dict.plot.scatter("intlumi","I_Eg1.12",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.12/"+m+".png") + plt.close() + + if not 
os.path.exists(dataFolder+"I_Eg1.21"): + os.mkdir(dataFolder+"I_Eg1.21") + output_dict.plot.scatter("intlumi","I_Eg1.21",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.21/"+m+".png") + plt.close() + + if not os.path.exists(dataFolder+"I_Eg1.30"): + os.mkdir(dataFolder+"I_Eg1.30") + output_dict.plot.scatter("intlumi","I_Eg1.30",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.30/"+m+".png") + plt.close() + + saveFileName = dataFolder + m + ".ssv" + if os.path.exists(saveFileName): + os.remove(saveFileName) + output_dict.to_csv(saveFileName,index=False) + +# Begin script +def main(): + + infile_lumi = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/Lumi/runData.txt" + lumi_header=["run","fill","lb","start","len","0","1","lumiall","intlumi"] + lumi_df=pd.read_csv(infile_lumi, names=lumi_header, delimiter=' ', skiprows=0) + +# lumi_df.drop_duplicates(subset='intlumi',keep='first',inplace=True) + + input_module = sys.argv[1] + averageData(input_module,lumi_df) + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgSim.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgSim.py new file mode 100644 index 0000000000000000000000000000000000000000..77929ac49003745e18f9bd96652f8c48eb72bb10 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/avgSim.py @@ -0,0 +1,220 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os, sys +import numpy as np +import pandas as pd +import datetime, time + +# Factor that corrects the leakage current to 20 degrees C +def tempCorr(Temp,Eg): + + kB = 8.617*pow(10,-5) # eV/K + Tref = 273.0 # Reference temperature in K + Temp = Temp + 273 # Convert to K + + return pow(1.0*Tref/Temp,2)*np.exp((-0.5*Eg/kB)*(1.0/Tref - 1.0/Temp)) + +# Jennet shamelessly steals Nick's code for bookkeeping +def averageData (m,lumi_df): + + #home directory definition + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate,False,"LI_S00_0_M0"] + homeDirectory = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + + # Define path to folder + dataFolder = homeDirectory + "/IBLData/processedData/means_sim/" + inputFolder = homeDirectory + "/IBLData/processedData/" + runDataFolder = homeDirectory + "/IBLData/processedData/Lumi/runData.txt" + + if not os.path.exists(dataFolder): + os.mkdir(dataFolder) + + # Time bins = every day + s_time = 1433310297 + e_time = 1541545141 + stepsize = 3600 + nbins = int((e_time - s_time)/stepsize) + b = [s_time + stepsize*x for x in range(0,nbins)] + + output_dict = pd.DataFrame({}) + + # Loop over lumi blocks + lumis = [] + total_lumi = 0 + for l in lumi_df["intlumi"]: + total_lumi += l/(10**9) + lumis += [ total_lumi ] + + print(total_lumi) + lumi_df["totlumi"] = lumis + + tmp_dict = lumi_df.groupby(pd.cut(lumi_df["start"],bins=b),as_index=False).mean() + tmp_dict.fillna(method='ffill',inplace=True) + output_dict["start"] = b + output_dict["intlumi"] = tmp_dict["totlumi"] + times = [datetime.datetime.utcfromtimestamp(s) for s in b] + + plt.scatter(times,output_dict["intlumi"],marker=".") + plt.title(m) + plt.savefig(dataFolder+"intlumi/"+m+"_time.png") + plt.close() + + # Jennet gets these from 
https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelConditionsRUN2 + voltage_settings = [] + + volume = [] + sensorSize_planar = 50*250*200*1E-12 #cm3 + sensorSize_3D = 50*250*230*1E-12 #cm3 + + sensorsPerModule = 336*80 + + if m.endswith("M4"): # 3D module + for s in output_dict["start"]: + volume += [sensorSize_3D*sensorsPerModule*4] + + if m == "LI_S11_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + elif s < time.mktime(datetime.date(2018,1,1).timetuple()): + voltage_settings += [30.0] + else: + voltage_settings += [20.0] + continue + if m == "LI_S12_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + elif s < time.mktime(datetime.date(2018,1,1).timetuple()): + voltage_settings += [21.0] + else: + voltage_settings += [30.0] + continue + if m == "LI_S13_A_M4": + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [15.0] + else: + voltage_settings += [40.0] + continue + if s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [20.0] + else: + voltage_settings += [40.0] + else: # Planar module + for s in output_dict["start"]: + volume += [sensorSize_planar*sensorsPerModule*4] + + if s < time.mktime(datetime.date(2016,9,16).timetuple()): + voltage_settings += [80.0] + elif s < time.mktime(datetime.date(2017,1,1).timetuple()): + voltage_settings += [150.0] + elif s < time.mktime(datetime.date(2017,11,7).timetuple()): + voltage_settings += [350.0] + else: + voltage_settings += [400.0] + + output_dict["HV_VSet"] = voltage_settings + output_dict["volume"] = volume + + dataTypes = ["PP4LV","TModule","ENV_TT","HV_VMeas","HV_IMeas"] + dataType_index = 0 + + for dataType in dataTypes: + + print ("Investigating " + dataType ) + + if not os.path.exists(dataFolder+dataType): + os.mkdir(dataFolder+dataType) + + # DO THE AVERAGES + infile = inputFolder + dataType + "/" + m + ".ssv" + meas_header=["module_name","measurement_date","measurement_time","unix-timestamp",dataType] + meas_dict = pd.read_csv(infile, names=meas_header, delimiter=' ', skiprows=1) + output_dict[dataType] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + + if dataType == "TModule" or dataType == "PP4LV" or dataType == "ENV_TT": + output_dict.fillna(method='ffill',inplace=True) + + if dataType == "HV_VMeas": + output_dict["HV_VMeas_0"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + output_dict.fillna(method='ffill',inplace=True) + output_dict["HV_VMeas_1"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType] + output_dict.fillna(method='bfill',inplace=True) + output_dict["HV_VMeas"] = output_dict[["HV_VMeas_0","HV_VMeas_1"]].mean(axis=1) + + output_dict.plot.scatter(x="intlumi",y=dataType,marker=".") + plt.title(m) + plt.savefig(dataFolder+dataType+"/"+m+".png") + plt.close() + + plt.scatter(times,output_dict[dataType],marker=".") + plt.title(m) + plt.savefig(dataFolder+dataType+"/"+m+"_time.png") + plt.close() + + # Take cooling pipe temp + output_dict['TModule'] = np.where(output_dict['TModule'] < -20, output_dict['ENV_TT'], output_dict['TModule']) + + plt.scatter(times,output_dict["TModule"],marker=".",s=1,label="TModule") + plt.scatter(times,output_dict["ENV_TT"],marker=".",s=1,label="ENV_TT") + plt.legend() + plt.title(m) + plt.savefig(m+".png") + plt.close() + + saveFileName = dataFolder + m + "_nocuts.ssv" + if os.path.exists(saveFileName): + 
os.remove(saveFileName) + output_dict.to_csv(saveFileName,index=False) + + output_dict.dropna(inplace=True) + + # Veto + output_dict = output_dict[abs(output_dict["HV_VMeas"]-output_dict["HV_VSet"])<1.0] + + # Correct + output_dict["I_Eg1.12"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.12) / row["volume"] for i, row in output_dict.iterrows() ] + output_dict["I_Eg1.21"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.21) / row["volume"] for i, row in output_dict.iterrows() ] + output_dict["I_Eg1.30"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.30) / row["volume"] for i, row in output_dict.iterrows() ] + + if not os.path.exists(dataFolder+"I_Eg1.12"): + os.mkdir(dataFolder+"I_Eg1.12") + output_dict.plot.scatter("intlumi","I_Eg1.12",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.12/"+m+".png") + plt.close() + + if not os.path.exists(dataFolder+"I_Eg1.21"): + os.mkdir(dataFolder+"I_Eg1.21") + output_dict.plot.scatter("intlumi","I_Eg1.21",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.21/"+m+".png") + plt.close() + + if not os.path.exists(dataFolder+"I_Eg1.30"): + os.mkdir(dataFolder+"I_Eg1.30") + output_dict.plot.scatter("intlumi","I_Eg1.30",marker=".") + plt.title(m) + plt.savefig(dataFolder+"I_Eg1.30/"+m+".png") + plt.close() + + saveFileName = dataFolder + m + ".ssv" + if os.path.exists(saveFileName): + os.remove(saveFileName) + output_dict.to_csv(saveFileName,index=False) + +# Begin script +def main(): + + infile_lumi = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/Lumi/runData.txt" + lumi_header=["run","fill","lb","start","len","0","1","lumiall","intlumi"] + lumi_df=pd.read_csv(infile_lumi, names=lumi_header, delimiter=' ', skiprows=0) + + input_module = sys.argv[1] + averageData(input_module,lumi_df) + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/downloadSingle.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/downloadSingle.py new file mode 100644 index 0000000000000000000000000000000000000000..3a8de604b8be1b142436c0c9faecc77419d8e0cb --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/downloadSingle.py @@ -0,0 +1,59 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +# A script that re-downloads a single day's worth of data +# Very useful if something went wrong in the initial download +# Replace s and e with the desired start and end dates + +import datetime +import os + +url = 'http://atlas-ddv.cern.ch:8089/multidata/getDataSafely' +url2 = 'http://atlas-ddv.cern.ch:8089/multidata/downloadTxtData' + + +s = datetime.datetime(2016,12,30,0,0,0) +e = datetime.datetime(2016,12,31,0,0,0) + +dataFolder = '/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/rawData/' +dataType = 'ENV_TT' + +if not os.path.exists(dataFolder+dataType+'/'): + os.mkdir(dataFolder+dataType+'/') + +if not os.path.exists(dataFolder+dataType+'Stave/'): + os.mkdir(dataFolder+dataType+'Stave/') + +saveFileName2 = dataFolder+dataType+'/'+s.strftime("%Y_%m_%d") +'-' +e.strftime("%Y_%m_%d")+ '.txt' +if os.path.exists(saveFileName2): + os.remove(saveFileName2) + +saveFile = open(saveFileName2,'w') + +staveString="stave" +for staveNumber in range (1,15): + if staveNumber<10: + staveString = "0" + str(staveNumber) + else: + staveString = str(staveNumber) + + # Generate save file name. I save in format YYYY/MM/DD, so it is in alphabetical order. 
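+    # An illustrative example (values not from the source): with the default dates
+    # above (s = 2016-12-30, e = 2016-12-31) and stave 03, the per-stave file
+    # generated below would be
+    #   /eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/rawData/ENV_TTStave/2016_12_30-2016_12_31Stave03.txt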
+ saveFileName = dataFolder+dataType+'Stave/'+s.strftime("%Y_%m_%d") +'-' +e.strftime("%Y_%m_%d")+ 'Stave'+ staveString + '.txt' + if os.path.exists(saveFileName): + os.remove(saveFileName) + + # Create wget command + cmd = 'wget --post-data "queryInfo=atlas_pvssPIX, alias, LI_S' + staveString + '_A_M1_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_A_M2_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_A_M3_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_A_M4_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_C_M1_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_C_M2_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_C_M3_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!atlas_pvssPIX, alias, LI_S' + staveString + '_C_M4_' + dataType + ', ' + s.strftime("%d-%m-%Y") + ' 00:00, ' + e.strftime("%d-%m-%Y") + ' 00:01, , , , , ,no, , +2!" ' + url + ' --output-document='+ saveFileName + + # Execute wget command + os.system(cmd) + + bloop = open(saveFileName,'r') + +# if (staveNumber!=1): +# saveFile.write("!!!") + + for bloopLine in bloop: + saveFile.write(bloopLine) + + bloop.close() + diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getIBLDate.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getIBLDate.py new file mode 100644 index 0000000000000000000000000000000000000000..e0082f22bf2ebeb217a64aeb457ab69d66d4f6f6 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getIBLDate.py @@ -0,0 +1,513 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +# Python script from Nick Dann +# Clean-ups for clarity and additional comments from Jennet Dickinson +# Search for JENNET to find where to replace file paths etc +# Nick set this up multithreaded, but Jennet prefers not to run this way, +# so she can more easily see what is going on + +# Comments from Nick: +# Multithreaded python script which finds files downloaded from the ATLAS DDV, reads in the data, then saves the data in seperate files for each DCS group. +# Files from DDV should be in format YYYY_MM_DD-YYYY_MM_DD.txt +# I save to SENSORNAME.ssv (space seperated variable). I usually save times as YYYY-MM-DD HH:MM:SS UTC timestamp value. Times are CERN time (CEST or CET). + +# scanDataThread is the thread declaration +# scanData (dataType) is the function which you should call to start the searching; datatype should be a string. 
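+# As a sketch of the output format: each .ssv row reads
+#   <module>_<dataType> YYYY-MM-DD HH:MM:SS <unix timestamp> <value>
+# and can be read back with pandas the way avgData.py does (module name and
+# data type below are just examples):
+#   >>> import pandas as pd
+#   >>> cols = ["module_name","measurement_date","measurement_time","unix-timestamp","HV_IMeas"]
+#   >>> df = pd.read_csv("HV_IMeas/LI_S01_A_M1.ssv", names=cols, delimiter=' ', skiprows=1)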
+ +#!/usr/bin/env python +import os +import shutil +import subprocess +import datetime, time +import fileinput +import sys +import random +from os.path import expanduser +import threading + +def sensorToNumber (stave,side,module): #converts from IBL stave, senors and side to DCS group number (<112) + + + number = 8 * (int(stave)-1) + module + if side == 'C': + number+=4 + return number-1 + +def findFiles (searchFolderPath): #find all files in searchFolderPath, return as list + + try: + os.chdir(searchFolderPath) + except IOError: + print('No entries in ' + searchFolderPath) + return -1 + + todaysList=[] + + #SOMEHOW, this searches the search Folder and returns a list of contained files in well... files. + for src_dir, dirs, files in os.walk(searchFolderPath): + #i parse through the files, I think. For each, I check if there's a reference to it in the + #diary entry for the day. If not, I go ahead and append the figure to the relevent diary entry. + for file_ in files: + sortedFile = os.path.join(src_dir, file_) + todaysList.append(str(sortedFile)) + #now we've got a list containing all files we want to add, sort it alphabetically and add them. + todaysList.sort() + return todaysList + +#Function that actually reads in data from fileName, saves it to... somewhere. Probably dataFolder/datatype/modulename.ssv . It also returns +#stuff like the most recent dates for all DCS groups (endDates), and number of datapoints found for each DCS group (tempDataNumbers). +#Probably says something if it can't read the data as well. Gonna be honest, DDV is a bit of a nightmare and I've spent too long messing +#around with it to remember what my bodges are. +def appendEntries(fileName,dataType,dataFolder,dateEntries,largestDates,firstDate,lastDate,borkDate): + + try: + rawFiles = open(fileName,'r') + except IOError: + print ("could not open file " + str(fileName) + " fool") + return firstDate #So this probably crashes if filename can't be opened. Try not to let that happen. + + returnList = [firstDate,largestDates,borkDate,dateEntries,firstDate] + + print(firstDate) + + index_Jennet = 0 + for dataLine in rawFiles: #read in each line of fileName and do things to it. Does this work with empty files? I'm guessing so. + + print(index_Jennet) + index_Jennet = index_Jennet + 1 + #We actually have two types of data file from DDV, one containing lots of commas, one which doesn't. The len(commasplit) thing is how I + #differentiate between the two types. + + if len(dataLine)>5: + commaSplit = dataLine.split(',') + + if '!!!!' in dataLine: #Our entry is blank, leg it! + return returnList + + if len(commaSplit)<2: + + #file is from DownloadTxtData. Erm, I'm not gonna comment this, rough idea is split tempLines, 0th element is the sensor name, + #grab the DCS flags from the name, use it to make teh module name. If that module name is unexpected, break and report an error + + tempLines = dataLine.split() + name = tempLines[0] + A = name[4:6] + B = name[7] + C = name[10] + + moduleName = 'LI_S' + str(A) +'_'+ str(B) + '_M' + str(C) + + moduleNumber = int(8 * (int(A)-1) + int(C)-1) + if B =='C': + moduleNumber +=4 + + if (name!=moduleName+"_"+dataType) or (len(tempLines)<3) : #This section is true if the data is bad; break and return + returnList[2] = lastDate + print ("\nBORK BORK BORK! 
Error in data values of " + fileName + " for " + dataType + " borkDate " + str(returnList[2]) )
+                    print ("First line reads " + dataLine+"\n")
+                    print ("should read " + moduleName+"_"+dataType+"\n")
+
+                    borkRepork = open(dataFolder+dataType+"BORKBORKBORK.txt",'w')
+                    borkRepork.write("filename " + fileName + " date range " + str(firstDate) + "-" + str(lastDate) + " first line " + dataLine + '\n')
+                    borkRepork.close()
+                    return returnList
+
+                outName = dataFolder+dataType+'/' + moduleName +'.ssv' #save file name made here!
+
+                date = tempLines[1].split('-')
+                time1 = tempLines[2].split(':')
+                data = 0
+
+                if dataType == 'TModule': #Oh yeah, DDV switched from reporting signed values to magnitudes for bias and current at some point.
+                    data = tempLines[3]
+                else: #if current or bias, only use absolute values instead of, y'know, signed values. Stupid DDV.
+                    data = abs(float(tempLines[3]))
+                dateTime = datetime.datetime(int(date[2]), int(date[1]), int(date[0]), int(time1[0]), int(time1[1]), int(time1[2]),int(time1[3]))
+
+                #This checks if the values are within the expected range; if not, the below is true and it borks and returns.
+                if (dateTime>lastDate+datetime.timedelta(hours = 2)) or (dateTime<firstDate-datetime.timedelta(hours = 2)):
+                    returnList[2] = lastDate #timestamp outside the expected window: record the bork date and bail
+                    return returnList
+
+                if dateTime>returnList[1][moduleNumber]: #if the end time for data is bigger than the previous max value for this DCS group. I think
+                    returnList[3][moduleNumber]+=1
+
+                    #format the line for saving to the finished file area. Change this if you want to change the output format.
+                    outputLine = moduleName+'_'+dataType + " " + str(dateTime) + " " + str( time.mktime(dateTime.timetuple()) ) + " " + str(data) + "\n"
+
+                    returnList[1][moduleNumber] = dateTime
+
+                    #check if the output file already exists; create it if not, append to it if so.
+                    if os.path.isfile(outName) == False:
+                        output = open(outName,'w')
+                        output.write(outputLine)
+                        output.close()
+                    else:
+                        output = open(outName,'a')
+                        output.write(outputLine)
+                        output.close()
+
+            else:
+                #from getDataSafely; this format splits data based on !!! between DCS groups.
+                moduleList = dataLine.split('!!!')
+
+                for module in moduleList:
+
+                    elements = module.split(',')
+                    name = elements[0]
+                    A="A"
+                    B="A"
+                    C="A"
+                    if len(name)>9:
+                        A = name[4:6]
+                        B = name[7]
+                        C = name[10]
+
+                    moduleName = 'LI_S' + str(A) +'_'+ str(B) + '_M' + str(C)
+
+                    try:
+                        moduleNumber = int(8 * (int(A)-1) + int(C)-1)
+                        if B =='C':
+                            moduleNumber +=4
+
+                        outName = dataFolder+dataType+'/' + moduleName +'.ssv'
+
+                        #check if the file exists; make it if not, append to it if so.
+                        if os.path.isfile(outName) == False:
+                            output = open(outName,'w')
+                        else:
+                            output = open(outName,'a')
+
+                        position=0
+
+                        #Each element is a single reading (time plus sensor value), which makes this process slow.
+                        for element in elements:
+
+                            if position!=0: #the first element is the DCS group name; not very useful.
+
+                                tempLines = element.split()
+                                date = tempLines[1].split('-')
+                                time1 = tempLines[2].split(':')
+                                data = 0
+
+                                #Make sure we use absolute values for current and bias.
+                                if dataType == 'TModule':
+                                    data = tempLines[0]
+                                else:
+                                    data = abs(float(tempLines[0]))
+
+                                dateTime = datetime.datetime(int(date[2]), int(date[1]), int(date[0]), int(time1[0]), int(time1[1]), int(time1[2]),int(time1[3]))
+
+                                #Veto timestamps outside the expected range, as above.
+                                if (dateTime>lastDate+datetime.timedelta(hours = 2)) or (dateTime<firstDate-datetime.timedelta(hours = 2)):
+                                    returnList[2] = lastDate #timestamp outside the expected window: record the bork date and bail
+                                    return returnList
+
+                                if dateTime>returnList[1][moduleNumber]: #if time in right range, output.
+ returnList[3][moduleNumber]+=1 + outputLine = moduleName + "_" + dataType + " " + str(dateTime) + " " + str( time.mktime(dateTime.timetuple()) ) + " " + str(data) + "\n" + + returnList[1][moduleNumber] = dateTime + + output.write(outputLine) + + + + position+=1 + + output.close() + + except: #we failed at something, so print out an error message and run away screaming + returnList[2] = lastDate + print("Something broke :( \n") + print("Could be an error in data values of " + fileName + " for " + dataType + " borkDate " + str(returnList[2]) ) + print("First line reads " + name + "\n") + + borkRepork = open(dataFolder+dataType+"BORKBORKBORK.txt",'w') + borkRepork.write("filename " + fileName + "date range " + str(firstDate) + "-" + str(lastDate) + " first line " + dataLine + '\n') + borkRepork.close() + print("Try running again... often this is an issue with the connection to eos") + return returnList + + return returnList + + +def scanData (dataType): + + #home directory definition + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate,False,"LI_S00_0_M0"] + # JENNET setsfile paths + homeDirectory = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + + #define path to folder + dataFolder = homeDirectory + "/IBLData/processedData/" + inputFolder = homeDirectory + "/IBLData/rawData/"+dataType+"/" + entriesFolder = homeDirectory + "/IBLData/rawData/entriesPerDay/" + dataType + "/" + + #make directories if they don't exist + if not os.path.exists(dataFolder+dataType): + os.mkdir(dataFolder+dataType) + + if not os.path.exists(entriesFolder): + os.mkdir(entriesFolder) + + #declare some stuff, so it's in scope when we need it + startingDates=[] + endDates=[] + filesProcessed=[] + dataPoints=[] + dailyPoints=[] + smallestEndDate=fillerDate + + + #This section checks to see if there's a file containing the dates we've worked up till. + if not os.path.exists(dataFolder+dataType+".txt"): + + #No dates file found, so create one. + print("No any file found! 
at " + dataFolder+dataType +" Making default values") + #set default max and min values for each sensor + + datesFile = open(dataFolder+dataType+".txt",'w') + firstTempDate = datetime.datetime(2015,5,1,0,0,0,0) + lastTempDate = datetime.datetime(2015,5,1,0,0,0,1) + smallestEndDate = lastTempDate + + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + for DCSGroup in range(1,5): + moduleName = 'LI_S' + str(staveString) + '_' + side + '_M' + str(DCSGroup) + datesFile.write(moduleName +" " + str(firstTempDate) + " " + str(lastTempDate) +" 0 0\n") + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + filesProcessed.append(0) + dataPoints.append(0) + dailyPoints.append(0) + + datesFile.close() + + else: #dates file exists, so read dates for each DCS group + print("Found " + dataFolder+dataType+".txt") + datesFile = open(dataFolder+dataType+".txt",'r') + + holder=0 + + for dateLine in datesFile: #read each line in, each line should correspond to one DCS group + + tempDatesLine = dateLine.split() + filesProcessed.append(int(tempDatesLine[5]) ) + dataPoints.append( int( tempDatesLine[6]) ) + dailyPoints.append(0) + + firstTemp = tempDatesLine[1].split('-') + lastTemp = tempDatesLine[3].split('-') + + firstTempTime = tempDatesLine[2].split(':') + lastTempTime = tempDatesLine[4].split(':') + + firstTempTimes = firstTempTime[2].split('.') + lastTempTimes = lastTempTime[2].split('.') + + if len(firstTempTimes)<2: + firstTempTimes.append(0) + if len(lastTempTimes)<2: + lastTempTimes.append(0) + + firstTempDate = datetime.datetime(int(firstTemp[0]), int(firstTemp[1]), int(firstTemp[2]), int(firstTempTime[0]),int(firstTempTime[1]), int(firstTempTimes[0]), int(firstTempTimes[1])) + lastTempDate = datetime.datetime(int(lastTemp[0]), int(lastTemp[1]), int(lastTemp[2]), int(lastTempTime[0]), int(lastTempTime[1]),int(lastTempTimes[0]), int(lastTempTimes[1])) + + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + if holder==0: + returnList[0] = firstTempDate + returnList[1] = lastTempDate + smallestEndDate=lastTempDate + + + else: + if firstTempDatereturnList[1]: + returnList[1] = lastTempDate + if lastTempDate < smallestEndDate: + smallestEndDate = lastTempDate + + + holder+=1 + + datesFile.close() + print ("Investigating " + dataType + " from " + str(smallestEndDate)) + + + holder = 0 + + #call function to return list of all files in input folder + fileList = findFiles(inputFolder) + + firstTempDate = startingDates[0] + lastTempDate = endDates[0] + numberFiles = len(fileList) + fileNumber = 0 + + #iterate through all files from file list, opening them if they're in the time period of interest. 
+ for fileName in fileList: + +# print(fileName) + + end = len(fileName) + endDate1 = fileName[end-4:end] + endDate2 = fileName[end-7:end-6] + + if endDate1=='.txt' and endDate2=='_': #check file ends with text, and contains underscore in expected place; Could make this more rigorous + + tempDataNumber = 0 + startDate = fileName[end-23:end-15] + endDate = fileName[end-12:end-4] + endDateSplit = endDate.split('_') + endDateFile = datetime.datetime(2000+int(endDateSplit[0]),int(endDateSplit[1]),int(endDateSplit[2]),0,0,0,1) + + startDateSplit = startDate.split('_') + startDateFile = datetime.datetime(2000+int(startDateSplit[0]),int(startDateSplit[1]),int(startDateSplit[2]),0,0,0,1) + + if endDateFile > smallestEndDate: #data from region of interest + #APPEND FILES DOING THINGS HERE + + lastTempDate = endDateFile + [firstTempDate,endDates,fillerDate,tempDataNumbers,smallestEndDate] = appendEntries(fileName,dataType,dataFolder,dailyPoints,endDates,startDateFile,endDateFile,fillerDate) + #append entries called here. Editing of data files done at that location. + + for i in range(0,112): #112 DCS groups for IBL, if you're doing something else, change that number. + + filesProcessed[i] +=1 #number of files + dataPoints[i]+=tempDataNumbers[i] #number of data points for each DCS group + + holderX=0 + + #this does something. Probably recording number of data points per day for each DCS group + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + for DCSGroup in range(1,5): + moduleName = 'LI_S' + str(staveString) + '_' + side + '_M' + str(DCSGroup) + outName = entriesFolder + moduleName + ".txt" + dataLine = str(startDate) + " " + str(tempDataNumbers[holderX]) + "\n" + tempDataNumbers[holderX]=0 + + if os.path.isfile(outName) == False: + output = open(outName,'w') + output.write(dataLine) + output.close() + + else: + output = open(outName,'a') + output.write(dataLine) + output.close() + holderX +=1 + + #check if the list is bork-a-dorked. If borked, save and break + if returnList[2]!=fillerDate: + returnList[2] = fillerDate + datesFile = open(dataFolder+dataType+".txt",'w') + tempHolder=0 + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + for DCSGroup in range(1,5): + moduleName = 'LI_S' + str(staveString) + '_' + side + '_M' + str(DCSGroup) + datesFile.write(moduleName +" " + str(startingDates[tempHolder]) + " " + str(endDates[tempHolder]) +" " + str(filesProcessed[tempHolder]) + " " + str ( dataPoints[tempHolder] ) + "\n") + tempHolder+=1 + + datesFile.close() + + + return returnList + + sys.stdout.flush() + fileNumber+=1 + + #open file, go through lists, if value < smallest, set to that, + + #if number bigger, set to that + #if any value smaller than previous largest value, bork bork! 
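+    # For reference, each line of the bookkeeping file rewritten below looks like
+    # (the counts here are made up):
+    #   LI_S01_A_M1 2015-05-01 00:00:00 2018-11-06 00:00:00.000001 42 123456
+    # i.e. DCS group, first date+time, last date+time, files processed, data points.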
+ + holder+=1 + + datesFile = open(dataFolder+dataType+".txt",'w') + tempHolder=0 + for staveX in range(1,15): + staveStringX = str(staveX) + if staveX<10: + staveStringX="0"+str(staveX) + for sideX in ['A','C']: + for DCSGroupX in range(1,5): + + moduleNameX = 'LI_S' + str(staveStringX) + '_' + sideX + '_M' + str(DCSGroupX) + + datesFile.write(moduleNameX +" " + str(startingDates[tempHolder]) + " " + str(endDates[tempHolder]) +" " + str(filesProcessed[tempHolder]) + " " + str ( dataPoints[tempHolder] ) +"\n") + + tempHolder+=1 + + datesFile.close() + returnList[3]=True + + return returnList + +class scanDataThread (threading.Thread): + def __init__(self, threadID, name): + threading.Thread.__init__(self) + self.threadID = threadID + self.name = name + + def run(self): + print ("Starting getIBLDate.py for " + self.name) + returnList = scanData(self.name) + print ( "Exiting getIBLDate.py for " + self.name + " with no issues, data range from " + str(returnList[0]) + " to " + str(returnList[1]) ) + +################################################################################ +# PROGRAM STARTS HERE # +################################################################################ +def main(): + + thread1 = scanDataThread (1,'HV_VMeas' ) + thread1.start() + + thread2 = scanDataThread (2,'PP4LV' ) + thread2.start() + + thread3 = scanDataThread (3,'HV_IMeas' ) + thread3.start() + + thread4 = scanDataThread (4,'TModule' ) + thread4.start() + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getPipeDate.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getPipeDate.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f4ecc58ffa543bc4b3412018e9aec5c4a40c8a --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/getPipeDate.py @@ -0,0 +1,416 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +# Python script from Nick Dann +# Clean-ups for clarity and additional comments from Jennet Dickinson +# Search for JENNET to find where to replace file paths etc +# Nick set this up multithreaded, but Jennet prefers not to run this way, +# so she can more easily see what is going on + +# Comments from Nick: +# Multithreaded python script which finds files downloaded from the ATLAS DDV, reads in the data, then saves the data in seperate files for each DCS group. +# Files from DDV should be in format YYYY_MM_DD-YYYY_MM_DD.txt +# I save to SENSORNAME.ssv (space seperated variable). I usually save times as YYYY-MM-DD HH:MM:SS UTC timestamp value. Times are CERN time (CEST or CET). + +# scanDataThread is the thread declaration +# scanData (dataType) is the function which you should call to start the searching; datatype should be a string. 
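+# Note: this variant handles the cooling pipe temperature (ENV_TT), which is
+# recorded per stave side rather than per module DCS group, so output files are
+# named like LI_S01_A.ssv / LI_S01_C.ssv (28 groups: 14 staves x 2 sides).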
+ +#!/usr/bin/env python +import os +import shutil +import subprocess +import datetime, time +import fileinput +import sys +import random +from os.path import expanduser +import threading + +def sensorToNumber (stave,side,module): #converts from IBL stave, senors and side to DCS group number (<112) + + + number = 8 * (int(stave)-1) + module + if side == 'C': + number+=4 + return number-1 + +def findFiles (searchFolderPath): #find all files in searchFolderPath, return as list + + try: + os.chdir(searchFolderPath) + except IOError: + print('No entries in ' + searchFolderPath) + return -1 + + todaysList=[] + + #SOMEHOW, this searches the search Folder and returns a list of contained files in well... files. + for src_dir, dirs, files in os.walk(searchFolderPath): + #i parse through the files, I think. For each, I check if there's a reference to it in the + #diary entry for the day. If not, I go ahead and append the figure to the relevent diary entry. + for file_ in files: + sortedFile = os.path.join(src_dir, file_) + todaysList.append(str(sortedFile)) + #now we've got a list containing all files we want to add, sort it alphabetically and add them. + todaysList.sort() + return todaysList + +#Function that actually reads in data from fileName, saves it to... somewhere. Probably dataFolder/datatype/modulename.ssv . It also returns +#stuff like the most recent dates for all DCS groups (endDates), and number of datapoints found for each DCS group (tempDataNumbers). +#Probably says something if it can't read the data as well. Gonna be honest, DDV is a bit of a nightmare and I've spent too long messing +#around with it to remember what my bodges are. +def appendEntries(fileName,dataType,dataFolder,dateEntries,largestDates,firstDate,lastDate,borkDate): + + try: + rawFiles = open(fileName,'r') + except IOError: + print ("could not open file " + str(fileName) + " fool") + return firstDate #So this probably crashes if filename can't be opened. Try not to let that happen. + + returnList = [firstDate,largestDates,borkDate,dateEntries,firstDate] + + print(firstDate) + + index_Jennet = 0 + for dataLine in rawFiles: #read in each line of fileName and do things to it. Does this work with empty files? I'm guessing so. + + index_Jennet = index_Jennet + 1 + #We actually have two types of data file from DDV, one containing lots of commas, one which doesn't. The len(commasplit) thing is how I + #differentiate between the two types. + + if len(dataLine)>5: + commaSplit = dataLine.split(',') + + if '!!!!' in dataLine: #Our entry is blank, leg it! + return returnList + + if len(commaSplit)<2: + print("Dead") + else: + #from getDataSafely, format here splits data based on !!! between DCS groups. + + moduleList = dataLine.split('!!!') + + for module in moduleList: #Hah, look at me using accurate labels for my variables! + + elements = module.split(',') + name = elements[0] + A="A" + B="A" + C="A" + if len(name)>9: + A = name[4:6] + B = name[7] + C = 1 + + moduleName = 'LI_S' + str(A) +'_'+ str(B) + try: + moduleNumber = int(2 * (int(A)-1)-1 ) + if B =='C': + moduleNumber +=4 + + outName = dataFolder+dataType+'/' + moduleName +'.ssv' + + #check if file exists, make it if not, append it if so. + if os.path.isfile(outName) == False: + output = open(outName,'w') + else: + output = open(outName,'a') + + position=0 + + #I'm like 90% sure each element is a single time (so, time and sensor reading). This makes it slow. + for element in elements: + + if position!=0: #the first element is the DCS group name; not hella useful. 
+ + tempLines = element.split() + date= tempLines[1].split('-') + time1 = tempLines[2].split(':') + data = tempLines[0] + + dateTime = datetime.datetime(int(date[2]), int(date[1]), int(date[0]), int(time1[0]), int(time1[1]), int(time1[2]),int(time1[3])) + #true if data stuff + if (dateTime>lastDate+datetime.timedelta(hours = 2)) or (dateTimereturnList[1][moduleNumber]: #if time in right range, output. + returnList[3][moduleNumber]+=1 + outputLine = moduleName + "_" + dataType + " " + str(dateTime) + " " + str( time.mktime(dateTime.timetuple()) ) + " " + str(data) + "\n" + + returnList[1][moduleNumber] = dateTime + + output.write(outputLine) + + + + position+=1 + + output.close() + + except: #we failed at something, so print out an error message and run away screaming + returnList[2] = lastDate + print("Something broke :( \n") + print("Could be an error in data values of " + fileName + " for " + dataType + " borkDate " + str(returnList[2]) ) + print("First line reads " + name + "\n") + + borkRepork = open(dataFolder+dataType+"BORKBORKBORK.txt",'w') + borkRepork.write("filename " + fileName + "date range " + str(firstDate) + "-" + str(lastDate) + " first line " + dataLine + '\n') + borkRepork.close() + print("Try running again... often this is an issue with the connection to eos") + return returnList + + return returnList + + +def scanData (dataType): + + #home directory definition + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate,False,"LI_S00_0_M0"] + # JENNET setsfile paths + homeDirectory = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + + #define path to folder + dataFolder = homeDirectory + "/IBLData/processedData/" + inputFolder = homeDirectory + "/IBLData/rawData/"+dataType+"/" + entriesFolder = homeDirectory + "/IBLData/rawData/entriesPerDay/" + dataType + "/" + + #make directories if they don't exist + if not os.path.exists(dataFolder+dataType): + os.mkdir(dataFolder+dataType) + + if not os.path.exists(entriesFolder): + os.mkdir(entriesFolder) + + #declare some stuff, so it's in scope when we need it + startingDates=[] + endDates=[] + filesProcessed=[] + dataPoints=[] + dailyPoints=[] + smallestEndDate=fillerDate + + + #This section checks to see if there's a file containing the dates we've worked up till. + if not os.path.exists(dataFolder+dataType+".txt"): + + #No dates file found, so create one. + print("No any file found! 
at " + dataFolder+dataType +" Making default values") + #set default max and min values for each sensor + + datesFile = open(dataFolder+dataType+".txt",'w') + firstTempDate = datetime.datetime(2015,5,1,0,0,0,0) + lastTempDate = datetime.datetime(2015,5,1,0,0,0,1) + smallestEndDate = lastTempDate + + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + moduleName = 'LI_S' + str(staveString) + '_' + side + datesFile.write(moduleName +" " + str(firstTempDate) + " " + str(lastTempDate) +" 0 0\n") + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + filesProcessed.append(0) + dataPoints.append(0) + dailyPoints.append(0) + + datesFile.close() + + else: #dates file exists, so read dates for each DCS group + print("Found " + dataFolder+dataType+".txt") + datesFile = open(dataFolder+dataType+".txt",'r') + + holder=0 + + for dateLine in datesFile: #read each line in, each line should correspond to one DCS group + + tempDatesLine = dateLine.split() + filesProcessed.append(int(tempDatesLine[5]) ) + dataPoints.append( int( tempDatesLine[6]) ) + dailyPoints.append(0) + + firstTemp = tempDatesLine[1].split('-') + lastTemp = tempDatesLine[3].split('-') + + firstTempTime = tempDatesLine[2].split(':') + lastTempTime = tempDatesLine[4].split(':') + + firstTempTimes = firstTempTime[2].split('.') + lastTempTimes = lastTempTime[2].split('.') + + if len(firstTempTimes)<2: + firstTempTimes.append(0) + if len(lastTempTimes)<2: + lastTempTimes.append(0) + + firstTempDate = datetime.datetime(int(firstTemp[0]), int(firstTemp[1]), int(firstTemp[2]), int(firstTempTime[0]),int(firstTempTime[1]), int(firstTempTimes[0]), int(firstTempTimes[1])) + lastTempDate = datetime.datetime(int(lastTemp[0]), int(lastTemp[1]), int(lastTemp[2]), int(lastTempTime[0]), int(lastTempTime[1]),int(lastTempTimes[0]), int(lastTempTimes[1])) + + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + if holder==0: + returnList[0] = firstTempDate + returnList[1] = lastTempDate + smallestEndDate=lastTempDate + + + else: + if firstTempDatereturnList[1]: + returnList[1] = lastTempDate + if lastTempDate < smallestEndDate: + smallestEndDate = lastTempDate + + + holder+=1 + + datesFile.close() + print ("Investigating " + dataType + " from " + str(smallestEndDate)) + + + holder = 0 + + #call function to return list of all files in input folder + fileList = findFiles(inputFolder) + + firstTempDate = startingDates[0] + lastTempDate = endDates[0] + numberFiles = len(fileList) + fileNumber = 0 + + #iterate through all files from file list, opening them if they're in the time period of interest. 
+ for fileName in fileList: + +# print(fileName) + + end = len(fileName) + endDate1 = fileName[end-4:end] + endDate2 = fileName[end-7:end-6] + + if endDate1=='.txt' and endDate2=='_': #check file ends with text, and contains underscore in expected place; Could make this more rigorous + + tempDataNumber = 0 + startDate = fileName[end-23:end-15] + endDate = fileName[end-12:end-4] + endDateSplit = endDate.split('_') + endDateFile = datetime.datetime(2000+int(endDateSplit[0]),int(endDateSplit[1]),int(endDateSplit[2]),0,0,0,1) + + startDateSplit = startDate.split('_') + startDateFile = datetime.datetime(2000+int(startDateSplit[0]),int(startDateSplit[1]),int(startDateSplit[2]),0,0,0,1) + + if endDateFile > smallestEndDate: #data from region of interest + #APPEND FILES DOING THINGS HERE + + lastTempDate = endDateFile + [firstTempDate,endDates,fillerDate,tempDataNumbers,smallestEndDate] = appendEntries(fileName,dataType,dataFolder,dailyPoints,endDates,startDateFile,endDateFile,fillerDate) + #append entries called here. Editing of data files done at that location. + + for i in range(0,28): #112 DCS groups for IBL, if you're doing something else, change that number.# + filesProcessed[i] +=1 #number of files + dataPoints[i]+=tempDataNumbers[i] #number of data points for each DCS group + + holderX=0 + + #this does something. Probably recording number of data points per day for each DCS group + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + moduleName = 'LI_S' + str(staveString) + '_' + side + outName = entriesFolder + moduleName + ".txt" + dataLine = str(startDate) + " " + str(tempDataNumbers[holderX]) + "\n" + tempDataNumbers[holderX]=0 + + if os.path.isfile(outName) == False: + output = open(outName,'w') + output.write(dataLine) + output.close() + + else: + output = open(outName,'a') + output.write(dataLine) + output.close() + holderX +=1 + + #check if the list is bork-a-dorked. If borked, save and break + if returnList[2]!=fillerDate: + returnList[2] = fillerDate + datesFile = open(dataFolder+dataType+".txt",'w') + tempHolder=0 + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + moduleName = 'LI_S' + str(staveString) + '_' + side + datesFile.write(moduleName +" " + str(startingDates[tempHolder]) + " " + str(endDates[tempHolder]) +" " + str(filesProcessed[tempHolder]) + " " + str ( dataPoints[tempHolder] ) + "\n") + tempHolder+=1 + + datesFile.close() + + + return returnList + + sys.stdout.flush() + fileNumber+=1 + + #open file, go through lists, if value < smallest, set to that, + + #if number bigger, set to that + #if any value smaller than previous largest value, bork bork! 
+ + holder+=1 + + datesFile = open(dataFolder+dataType+".txt",'w') + tempHolder=0 + for staveX in range(1,15): + staveStringX = str(staveX) + if staveX<10: + staveStringX="0"+str(staveX) + for sideX in ['A','C']: + moduleNameX = 'LI_S' + str(staveStringX) + '_' + sideX + datesFile.write(moduleNameX +" " + str(startingDates[tempHolder]) + " " + str(endDates[tempHolder]) +" " + str(filesProcessed[tempHolder]) + " " + str ( dataPoints[tempHolder] ) +"\n") + + tempHolder+=1 + + datesFile.close() + returnList[3]=True + + return returnList + +class scanDataThread (threading.Thread): + def __init__(self, threadID, name): + threading.Thread.__init__(self) + self.threadID = threadID + self.name = name + + def run(self): + print ("Starting getPipeDate.py for " + self.name) + returnList = scanData(self.name) + print ( "Exiting getPipeDate.py for " + self.name + " with no issues, data range from " + str(returnList[0]) + " to " + str(returnList[1]) ) + +################################################################################ +# PROGRAM STARTS HERE # +################################################################################ +def main(): + + thread1 = scanDataThread (1,'ENV_TT' ) + thread1.start() + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabAllAutoStave.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabAllAutoStave.py new file mode 100644 index 0000000000000000000000000000000000000000..942dfe122c95d08638c03d21bcac2e40793d0a3c --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabAllAutoStave.py @@ -0,0 +1,334 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +# Python script from Nick Dann +# Clean-ups for clarity and additional comments from Jennet Dickinson +# Search for JENNET to find where to replace file paths etc + +# Comments from Nick: +# Python script to grab IBL DCS data from the ATLAS DCS data viewer. +# DO NOT USE THE MULTITHREADING FUNCTION! +# grabData('dataType') is the function you should call to download DCS data. Program works from inside CERN network +# There are some strings you'll have to change to point to locations on your file system. BE CAREFUL; no idea what this will do if it can't find the right file structure. +# Timezones might be an issue; I used a PC running on UTC to get around this; there's probably a better solution, but that's what I did + +#!/usr/bin/env python3 +import os +import time +import shutil +import subprocess +import datetime +import fileinput +import sys +import random +from os.path import expanduser +import threading +from datetime import date, timedelta + +# Function to iterate through dates between start and end in steps of delta. ~ish +def perdelta(start, end, delta): + curr = start + while curr < end: + yield curr, min(curr + delta, end) + curr += delta + +# Function to iterate through dates between end and start in steps of delta. ~ish +def perdeltadown(start, end, delta): + curr = end + while curr > start: + yield max(curr - delta, start),curr + curr -= delta + +# Function to return list of all files in search folder path, sorted alphabetically +def findFiles (searchFolderPath): + + try: + os.chdir(searchFolderPath) + except IOError: + print('No entries in ' + searchFolderPath) + return -1 + + todaysList=[] + + # SOMEHOW, this searches the searchFolderPath and returns a list of contained files in well... files. 
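+    # os.walk yields (directory, subdirectories, files) triples; each file name
+    # is joined onto its directory, so the returned list holds full paths.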
+ for src_dir, dirs, files in os.walk(searchFolderPath): + + # Parse through the files, appending the file name to a list of files. + for file_ in files: + + sortedFile = os.path.join(src_dir, file_) + todaysList.append(str(sortedFile)) + + # Now we've got a list containing all files we want to add, sort it alphabetically and return them. + todaysList.sort() + return todaysList + +# Function to return required dates +def checkDates(dataType,dataFolder): + + # Placeholder declarations + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate] + + startingDates=[] + endDates=[] + + # Check: what dates have I already downloaded? + # If no file containing previously used dates, create one with default values. + # "No any file found!" is what my motherboard says whilst booting my home PC, I found it funny and copied it over here + if not os.path.exists(dataFolder+dataType+".txt"): + print("No any file found! at " + dataFolder+dataType +" Making default values") + + datesFile = open(dataFolder+dataType+".txt",'w') + firstTempDate = datetime.datetime(2018,1,10,1,1,1,1) + lastTempDate = datetime.datetime(2017,12,21,1,1,1) + + datesFile.write(dataType + " " + str(firstTempDate) + " " + str(lastTempDate) +"\n") + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + datesFile.close() + + # If dates file exists, read from it + else: + print("Found " + dataFolder+dataType+".txt") + datesFile = open(dataFolder+dataType+".txt",'r') + + for dateLine in datesFile: + tempDatesLine = dateLine.split() + firstTemp = tempDatesLine[1].split('-') + lastTemp = tempDatesLine[3].split('-') + + firstTempTime = tempDatesLine[2].split(':') + lastTempTime = tempDatesLine[4].split(':') + + firstTempTimes = firstTempTime[2].split('.') + lastTempTimes = lastTempTime[2].split('.') + + if len(firstTempTimes)<2: + firstTempTimes.append(0) + if len(lastTempTimes)<2: + lastTempTimes.append(0) + + firstTempDate = datetime.datetime(int(firstTemp[0]), int(firstTemp[1]), int(firstTemp[2]), int(firstTempTime[0]),int(firstTempTime[1]), int(firstTempTimes[0]), int(firstTempTimes[1])) + lastTempDate = datetime.datetime(int(lastTemp[0]), int(lastTemp[1]), int(lastTemp[2]), int(lastTempTime[0]), int(lastTempTimes[0]),int(lastTempTimes[0]), int(lastTempTimes[1])) + + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + datesFile.close() + + datesList=[startingDates,endDates] + # Return start and end dates for each sensor + return datesList + +# Function to save data of type dataType in dataFolder for all sensors, for specified dates. +# Currently used fof HV_VMease, PP4LV and TModules +def grabData(dataType): + + url = 'http://atlas-ddv.cern.ch:8089/multidata/getDataSafely' + url2 = 'http://atlas-ddv.cern.ch:8089/multidata/downloadTxtData' + + # JENNET changed the output file path + defaultPath = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + # The data is saved to dataFolder / dataType / moduleGroup / startDate-endDate.txt + # This script will autogenerate folders for this. + dataFolder = defaultPath + "/IBLData/rawData/" + + # Check top level directory exists, then make folder for HV I, T, HV V, LV I and.. 
+    if not os.path.exists(dataFolder):
+        os.mkdir(dataFolder)
+
+    if not os.path.exists(dataFolder+dataType):
+        os.mkdir(dataFolder+dataType)
+
+    [startDates,endDates] = checkDates(dataType,dataFolder)
+    currentDay = date.today()
+    firstDay = endDates[0].date()-timedelta(days=1)
+    # Iterate through all data in the dates below, with a time step of timedelta (so from 1/1/2016 - 15/5/2016, with one file per day). You could probably increase the timedelta, but it'll increase the chance of the program dying, so I didn't. You could also create a bunch of folders to hold everything, or something similar.... I should do that. I did that!
+    # Jennet has issues for TModule with time delta = 1 day. Had to switch to grabDataStave
+
+    sensorNumber=0
+    if firstDay+timedelta(days=1) < currentDay:
+        # [Gap: the remainder of grabData (its per-day DDV request loop) and the start
+        # of grabDataStave did not survive in this copy. What follows is the tail of
+        # grabDataStave's per-day loop: after writing a day's file it checks that a day
+        # with more than 30 entries in total has at least 2 entries for every stave.]
+            for checker in range (0,14):
+                if introubleCount[checker]<2:
+                    # If a sensor's data has less than 2 entries, whilst there are more than 30 entries for the day
+                    print ( "borkalork in stave " + str(checker+1) )
+                    breakThatStick=True
+
+            if breakThatStick == True:
+                print ("breaking now at " + s.strftime("%d-%m-%Y") )
+                break
+
+            saveFile.close()
+            firstTempDate = min ( datetime.datetime(s.year, s.month, s.day,0,0,0,0), startDates[0] )
+            startDates[0] = firstTempDate
+
+            lastTempDate = max ( datetime.datetime(e.year, e.month, e.day,0,0,0,0), endDates[0])
+            endDates[0] = lastTempDate
+
+            datesFile = open(dataFolder+dataType+".txt",'w')
+
+            datesFile.write(dataType + " " + str(firstTempDate) + " " + str(lastTempDate) +"\n")
+
+            datesFile.close()
+
+            sleepTime = random.randint(10, 30)
+            time.sleep(sleepTime)
+
+################################################################################
+#                           PROGRAM STARTS HERE                                #
+################################################################################
+def main():
+
+    grabData('HV_VMeas')
+    grabData('PP4LV')
+
+    # grabData no longer works reliably for HV_IMeas; use grabDataStave instead.
+    # Use this if grabData returns an error about a very big DB request
+    grabDataStave('HV_IMeas')
+    grabDataStave('TModule')
+
+if __name__ == "__main__":
+    main()
diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabPipeTemp.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabPipeTemp.py
new file mode 100644
index 0000000000000000000000000000000000000000..77d0d03d7f8cc00c27aa6ce088cb18919e6d63dd
--- /dev/null
+++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/grabPipeTemp.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python3
+#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+
+# Python script from Nick Dann
+# Clean-ups for clarity and additional comments from Jennet Dickinson
+# Search for JENNET to find where to replace file paths etc
+
+# Comments from Nick:
+# Python script to grab IBL DCS data from the ATLAS DCS data viewer.
+# DO NOT USE THE MULTITHREADING FUNCTION!
+# grabData('dataType') is the function you should call to download DCS data. The program only works from inside the CERN network.
+# There are some strings you'll have to change to point to locations on your file system. BE CAREFUL; no idea what this will do if it can't find the right file structure.
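+# (The random 10-30 second sleep between daily requests at the end of grabDataStave is
+# presumably there to avoid hammering the DDV server; keep it if you adapt this script.)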
+# Timezones might be an issue; I used a PC running on UTC to get around this; there's probably a better solution, but that's what I did
+
+import os
+import time
+import shutil
+import subprocess
+import datetime
+import fileinput
+import sys
+import random
+from os.path import expanduser
+import threading
+from datetime import date, timedelta
+
+# Yield (start, start+delta) pairs covering [start, end); the last pair is clipped at end
+def perdelta(start, end, delta):
+    curr = start
+    while curr < end:
+        yield curr, min(curr + delta, end)
+        curr += delta
+
+# Same as perdelta, but iterating backwards from end towards start
+def perdeltadown(start, end, delta):
+    curr = end
+    while curr > start:
+        yield max(curr - delta, start),curr
+        curr -= delta
+
+# Function to return list of all files in search folder path, sorted alphabetically
+def findFiles (searchFolderPath):
+
+    try:
+        os.chdir(searchFolderPath)
+    except IOError:
+        print('No entries in ' + searchFolderPath)
+        return -1
+
+    todaysList=[]
+
+    # Walk searchFolderPath recursively, collecting the full path of every file found
+    for src_dir, dirs, files in os.walk(searchFolderPath):
+
+        # Parse through the files, appending the file name to a list of files.
+        for file_ in files:
+
+            sortedFile = os.path.join(src_dir, file_)
+            todaysList.append(str(sortedFile))
+
+    # Now we've got a list containing all files we want to add, sort it alphabetically and return them.
+    todaysList.sort()
+    return todaysList
+
+# Function to return required dates
+def checkDates(dataType,dataFolder):
+
+    # Placeholder declarations
+    fillerDate = datetime.datetime(2000,1,1,1,1,1,1)
+    tempDate = datetime.datetime(2000,1,1,1,1,1,1)
+    returnList =[fillerDate,fillerDate,fillerDate]
+
+    startingDates=[]
+    endDates=[]
+
+    # Check: what dates have I already downloaded?
+    # If no file containing previously used dates, create one with default values.
+    # "No any file found!" is what my motherboard says whilst booting my home PC, I found it funny and copied it over here
+    if not os.path.exists(dataFolder+dataType+".txt"):
+        print("No any file found! at " + dataFolder+dataType +" Making default values")
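+        # The timestamps in this bookkeeping file are written with str(datetime.datetime),
+        # e.g. "2015-01-01 01:01:01.000001", and the ".%f" part is absent when the
+        # microseconds are zero, hence the length checks when re-reading. The manual
+        # split() parsing below is equivalent to
+        #   datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S.%f")
+        # whenever the fractional part is present.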
at " + dataFolder+dataType +" Making default values") + + datesFile = open(dataFolder+dataType+".txt",'w') + firstTempDate = datetime.datetime(2015,1,1,1,1,1,1) + lastTempDate = datetime.datetime(2015,1,1,1,1,1) + + datesFile.write(dataType + " " + str(firstTempDate) + " " + str(lastTempDate) +"\n") + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + datesFile.close() + + # If dates file exists, read from it + else: + print("Found " + dataFolder+dataType+".txt") + datesFile = open(dataFolder+dataType+".txt",'r') + + for dateLine in datesFile: + tempDatesLine = dateLine.split() + firstTemp = tempDatesLine[1].split('-') + lastTemp = tempDatesLine[3].split('-') + + firstTempTime = tempDatesLine[2].split(':') + lastTempTime = tempDatesLine[4].split(':') + + firstTempTimes = firstTempTime[2].split('.') + lastTempTimes = lastTempTime[2].split('.') + + if len(firstTempTimes)<2: + firstTempTimes.append(0) + if len(lastTempTimes)<2: + lastTempTimes.append(0) + + firstTempDate = datetime.datetime(int(firstTemp[0]), int(firstTemp[1]), int(firstTemp[2]), int(firstTempTime[0]),int(firstTempTime[1]), int(firstTempTimes[0]), int(firstTempTimes[1])) + lastTempDate = datetime.datetime(int(lastTemp[0]), int(lastTemp[1]), int(lastTemp[2]), int(lastTempTime[0]), int(lastTempTimes[0]),int(lastTempTimes[0]), int(lastTempTimes[1])) + + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + datesFile.close() + + datesList=[startingDates,endDates] + # Return start and end dates for each sensor + return datesList + +# Function to save data of type dataType in dataFolder for all sensors, for specified dates. Only used to HV_IMeas +def grabDataStave(dataType): + + # url is the line you're supposed to use. url2 is the line that works better. + url = 'http://atlas-ddv.cern.ch:8089/multidata/getDataSafely' + url2 = 'http://atlas-ddv.cern.ch:8089/multidata/downloadTxtData' + + # JENNET changed the output file path + defaultPath = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/') + # The data is saved to dataFolder / dataType / moduleGroup / startDate-endDate.txt + # This script will autogenerate folders for this. + dataFolder = defaultPath + "/IBLData/rawData/" + + # Check top level directory exists, then make folder for HV I, T, HV V, LV I and.. actually should get LV V + if not os.path.exists(dataFolder): + os.mkdir(dataFolder) + # For dataType in ['HV_IMeas', 'TModule','HV_VMeas','PP4LV']: + if not os.path.exists(dataFolder+dataType): + os.mkdir(dataFolder+dataType) + + [startDates,endDates] = checkDates(dataType,dataFolder) + currentDay = date.today() + firstDay = endDates[0].date()-timedelta(days=1) + +# firstDay = datetime.date(2018,3,7) +# currentDay = datetime.date(2018,3,10) + # Iterate through all data in below dates, with a time step of time delta. (So from 1/1/2016 - 15/5/2016, with one file per day). + + # Iterate through staves, modules and sides + sensorNumber=0 + + if firstDay+timedelta(days=1) 30: + for checker in range (0,14): + if introubleCount[checker]<2: +# If a sensor's data is less than 2 entries, whilst there are more than 30 entries for the day. 
+ print ( "borkalork in stave " + str(checker+1) ) + breakThatStick=True + + if breakThatStick == True: + print ("breaking now at " + s.strftime("%d-%m-%Y") ) + break + + saveFile.close() + firstTempDate = min ( datetime.datetime(s.year, s.month, s.day,0,0,0,0), startDates[0] ) + startDates[0] = firstTempDate + + lastTempDate = max ( datetime.datetime(e.year, e.month, e.day,0,0,0,0), endDates[0]) + endDates[0] = lastTempDate + + datesFile = open(dataFolder+dataType+".txt",'w') + + datesFile.write(dataType + " " + str(firstTempDate) + " " + str(lastTempDate) +"\n") + + datesFile.close() + + sleepTime = random.randint(10, 30) + time.sleep(sleepTime) + +################################################################################ +# PROGRAM STARTS HERE # +################################################################################ +def main(): + +# grabData('HV_VMeas') +# grabData('PP4LV') + + # grabData no longer works reliably for HV_IMeas, use grabStaveData instead + # Use this if grabData returns an error about very big DB request +# grabDataStave('HV_IMeas') +# grabDataStave('TModule') + grabDataStave('ENV_TT') + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/graphs.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/graphs.py new file mode 100644 index 0000000000000000000000000000000000000000..7a4791cb70a05846ce4fd48adc084f64700e969a --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/graphs.py @@ -0,0 +1,59 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +import matplotlib +#matplotlib.use('Agg') +import matplotlib.pyplot as plt +import os, sys +import numpy as np +import pandas as pd + +# Jennet averages over modules and saves TGraphs +def averageModules (suffix): + + blacklist = ["LI_S01_C_M4","LI_S03_A_M4","LI_S05_C_M4","LI_S11_A_M4","LI_S12_A_M4","LI_S13_C_M4"] + + indir = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/means/" + header = ["unix-timestamp","HV_VMeas","start","intlumi","HV_VSet","volume","HV_VMeas_0","HV_VMeas_1","PP4LV","TModule","HV_IMeas","ENV_TT","I_Eg1.12","I_Eg1.21","I_Eg1.30"] + + df_concat = pd.DataFrame({}) + # loop over modules and add the ones with the right suffix + for stave in range(1,15): + staveString = str(stave) + if stave<10: + staveString="0"+str(stave) + m = "LI_S" + str(staveString) + "_" + suffix + print(m) + this_infile = indir + m + ".ssv" + if m in blacklist: + print(m + " is blacklisted. 
Skipping...") + continue + + this_infile = indir + "LI_S" + str(staveString) + "_" + suffix + ".ssv" + this_df = pd.read_csv(this_infile, names=header, delimiter=',', skiprows=1) + df_concat = df_concat.append(this_df) + + df_avg = df_concat.groupby(df_concat.index).mean() + df_avg = df_avg[["intlumi","I_Eg1.12","I_Eg1.21","I_Eg1.30"]] +# df_avg.plot.scatter("intlumi","I_Eg1.12",marker=".") + +# print((1.0*max(df_avg["start"])-min(df_avg["start"]))/31557600,max(df_avg["intlumi"])) + + saveFileName = indir + suffix + ".ssv" + if os.path.exists(saveFileName): + os.remove(saveFileName) + df_avg.to_csv(saveFileName,index=False) + +# Begin script +def main(): + + averageModules("A_M1") + averageModules("C_M1") + averageModules("A_M2") + averageModules("C_M2") + averageModules("A_M3") + averageModules("C_M3") + averageModules("A_M4") + averageModules("C_M4") + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/listModules.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/listModules.py new file mode 100644 index 0000000000000000000000000000000000000000..743e2e8f24425214d2400a2bce709828977361b0 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/listModules.py @@ -0,0 +1,27 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +# Write a text file containing all module names +def main(): + + modules = [] + # Get the module names + + i=0 + for stave in range(1,15): + staveString = str(stave) + + if stave<10: + staveString="0"+str(stave) + for side in ['A','C']: + for DCSGroup in range(1,5): + modules += ['LI_S' + str(staveString) + '_' + side + '_M' + str(DCSGroup)] + + filename = "modules.txt" + f = open(filename,"w") + for m in modules: + f.write(m+"\n") + + f.close() + +if __name__ == "__main__": + main() diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/lumiFormat.py b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/lumiFormat.py new file mode 100644 index 0000000000000000000000000000000000000000..08583f79fde396cda9270a7862d66a5cc14d2ff8 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/lumiFormat.py @@ -0,0 +1,125 @@ +#Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + +import os +import datetime, time + +# Function to iterate through dates between start and end in steps of delta. ~ish +def perdelta(start, end, delta): + curr = start + while curr < end: + yield curr, min(curr + delta, end) + curr += delta + +# Function to return required dates +def checkDates(datesFileName): + #placeholder declarations + fillerDate = datetime.datetime(2000,1,1,1,1,1,1) + tempDate = datetime.datetime(2000,1,1,1,1,1,1) + returnList =[fillerDate,fillerDate,fillerDate] + tempRun = 0 + + startingDates=[] + endDates=[] + + #if no file containing previously used dates, create one with default values. + #"No any file found!" is what my motherboard says whilst booting my home PC, I found it funny and copied it over here + if not os.path.exists(datesFileName): + print("No any file found! at " + datesFileName + ". 
Making default values") + + datesFile = open(datesFileName,'w') + firstTempDate = datetime.datetime(2015,4,2,1,1,1,1) + lastTempDate = datetime.datetime(2015,4,2,1,1,1) + + datesFile.write(str(tempRun) + " " + str(firstTempDate) + " " + str(lastTempDate) +"\n") + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + datesFile.close() + + #if dates file exists, read from it + else: + print("Found " + datesFileName) + datesFile = open(datesFileName,'r') + + for dateLine in datesFile: + tempDatesLine = dateLine.split() + firstTemp = tempDatesLine[1].split('-') + lastTemp = tempDatesLine[3].split('-') + + firstTempTime = tempDatesLine[2].split(':') + lastTempTime = tempDatesLine[4].split(':') + + firstTempTimes = firstTempTime[2].split('.') + lastTempTimes = lastTempTime[2].split('.') + + if len(firstTempTimes)<2: + firstTempTimes.append(0) + if len(lastTempTimes)<2: + lastTempTimes.append(0) + + firstTempDate = datetime.datetime(int(firstTemp[0]), int(firstTemp[1]), int(firstTemp[2]), int(firstTempTime[0]),int(firstTempTime[1]), int(firstTempTimes[0]), int(firstTempTimes[1])) + lastTempDate = datetime.datetime(int(lastTemp[0]), int(lastTemp[1]), int(lastTemp[2]), int(lastTempTime[0]), int(lastTempTimes[0]),int(lastTempTimes[0]), int(lastTempTimes[1])) + + startingDates.append(firstTempDate) + endDates.append(lastTempDate) + + datesFile.close() + + datesList=[startingDates,endDates] + return datesList + +outPath = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/Lumi/" +outputName = outPath + "/runData.txt" +outputSummary = outPath + "/runs.txt" + +inPath = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/rawData/Lumi/" + +[startDates,endDates] = checkDates(outputSummary) + +currentDay = datetime.date(2018,11,7) +firstDay = startDates[0].date() + +fout = open(outputName,'a+') + +if firstDay + datetime.timedelta(days=1)SaveAs(title_eps.c_str()); + c->SaveAs(title_png.c_str()); + c->SaveAs(title_pdf.c_str()); + + return; +} + +// Apply smoothing +TGraph* smooth(TGraph* g1){ + + TGraph* g1sm = new TGraph(); + + double* all_x = g1->GetX(); + double* all_y = g1->GetY(); + + for(int i=0; iGetN()-5; i++){ + + if(i+4 > g1->GetN() ) continue; + + double x[] = {all_x[i],all_x[i+1],all_x[i+2],all_x[i+3],all_x[i+4]}; + double y[] = {all_y[i],all_y[i+1],all_y[i+2],all_y[i+3],all_y[i+4]}; + + double med_lumi = TMath::Median(5,x); + double med_ileak = TMath::Median(5,y); + + g1sm->SetPoint(i,med_lumi,med_ileak); + } + + return g1sm; +} + +// Function for converting ssv to TGraph +void from_ssv(string suffix){ + + TGraph* g1 = new TGraph(); + TGraph* g2 = new TGraph(); + TGraph* g3 = new TGraph(); + + string indir = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/means/"; + string filename = indir + suffix + ".ssv"; + + string line; int i=-1; + // Read necessary input parameters from text file + std::ifstream inputFile(filename.c_str()); + while(getline(inputFile, line)) { + + // skip empty lines and first line + if (!line.length() || i == -1){ + i++; + continue; + } + i++; + + string value; + std::istringstream iss(line); + double vals[4]; + int j = 0; + while(getline(iss,value,',')){ + vals[j] = stod(value); + j++; + } + + double lumi = vals[0]; + double I1 = vals[1]; + double I2 = vals[2]; + double I3 = vals[3]; + + g1->SetPoint(i, lumi, I1); + g2->SetPoint(i, lumi, I2); + g3->SetPoint(i, lumi, I3); + } + + TGraph* g1sm = smooth(g1); + TGraph* g2sm = smooth(g2); + TGraph* g3sm = smooth(g3); + + string 
+    string out_filename=indir+suffix+"_coarse.root";
+    TFile outfile(out_filename.c_str(),"recreate");
+
+    g1->SetTitle("I_Eg1_12");
+    g2->SetTitle("I_Eg1_21");
+    g3->SetTitle("I_Eg1_30");
+
+    g1->SetName("I_Eg1_12");
+    g2->SetName("I_Eg1_21");
+    g3->SetName("I_Eg1_30");
+
+    g1->Write();
+    g2->Write();
+    g3->Write();
+
+    string out_filename2=indir+suffix+".root";
+    TFile outfile2(out_filename2.c_str(),"recreate");
+
+    g1sm->SetTitle("I_Eg1_12");
+    g2sm->SetTitle("I_Eg1_21");
+    g3sm->SetTitle("I_Eg1_30");
+
+    g1sm->SetName("I_Eg1_12");
+    g2sm->SetName("I_Eg1_21");
+    g3sm->SetName("I_Eg1_30");
+
+    g1sm->Write();
+    g2sm->Write();
+    g3sm->Write();
+
+    return;
+}
+
+// Calculate the ratio hists
+void get_ratios(string gname){
+
+    vector<string> suffix = {"A_M1","A_M2","A_M3","A_M4","C_M1","C_M2","C_M3","C_M4"};
+    vector<TGraph*> g;
+    vector<TGraph*> g_rat;
+
+    string indir = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/means/";
+
+    for(int i=0; i<suffix.size(); i++){
+        string filename = indir+suffix.at(i)+".root";
+        TFile* infile = new TFile(filename.c_str());
+        g.push_back(new TGraph());
+        TGraph* gtmp = (TGraph*)infile->Get(gname.c_str());
+        g.at(i) = (TGraph*)gtmp->Clone(suffix.at(i).c_str());
+    }
+
+    string out_filename=indir+"Ratios_"+gname+".root";
+    TFile outfile(out_filename.c_str(),"recreate");
+
+    for(int i=0; i<suffix.size(); i++){
+        TGraph* gtmp = new TGraph();
+        double* lumi = g.at(i)->GetX();
+
+        // A-side groups (indices 0-3) are divided by A_M4; C-side groups by C_M4
+        if( i<=3 ){
+            for(int j=0; j<g.at(i)->GetN(); j++){
+                gtmp->SetPoint(j,lumi[j],g.at(i)->Eval(lumi[j])/g.at(3)->Eval(lumi[j]));
+            }
+        }
+        else{
+            for(int j=0; j<g.at(i)->GetN(); j++){
+                gtmp->SetPoint(j,lumi[j],g.at(i)->Eval(lumi[j])/g.at(7)->Eval(lumi[j]));
+            }
+        }
+
+        g_rat.push_back(smooth(gtmp));
+
+        g_rat.at(i)->SetTitle(suffix.at(i).c_str());
+        g_rat.at(i)->SetName(suffix.at(i).c_str());
+        g_rat.at(i)->Write();
+    }
+
+    return;
+
+}
+
+// Draw it real pretty
+void overlay(string gname){
+
+    gStyle->SetOptTitle(0);
+
+    vector<string> suffix = {"A_M1","C_M1","A_M2","C_M2","A_M3","C_M3","A_M4","C_M4"};
+    vector<string> leg_names = {"Planar: z #epsilon [0,8] cm", "Planar: z #epsilon [-8,0] cm",
+                                "Planar: z #epsilon [8,16] cm", "Planar: z #epsilon [-16,-8] cm",
+                                "Planar: z #epsilon [16,24] cm", "Planar: z #epsilon [-24,-16] cm",
+                                "3D: z > 24 cm", "3D: z < -24 cm"};
+    vector<int> colors = {1,4,2,5,3,6,7,15};
+    vector<TGraph*> g;
+
+    string indir = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/means/";
+
+    double x1=0.13, y1=0.78;
+    TLegend* leg = new TLegend(x1,y1,x1+0.45,y1-.15);
+    leg->SetBorderSize(0);
+    leg->SetNColumns(2);
+    leg->SetTextSize(0.025);
+
+    for(int i=0; i<suffix.size(); i++){
+        string filename = indir+suffix.at(i)+".root";
+        TFile* infile = new TFile(filename.c_str());
+        g.push_back(new TGraph());
+        TGraph* gtmp = (TGraph*)infile->Get(gname.c_str());
+        g.at(i) = (TGraph*)gtmp->Clone(suffix.at(i).c_str());
+
+        g.at(i)->GetXaxis()->SetTitle("Delivered integrated luminosity [fb^{-1}]");
+        g.at(i)->GetYaxis()->SetTitle("Leakage current at 0#circC [mA/cm^{3}]");
+        g.at(i)->SetMarkerColor(colors.at(i));
+        g.at(i)->SetLineColor(kGray);
+        g.at(i)->SetFillColor(colors.at(i));
+
+        string legtext = leg_names.at(i);
+        leg->AddEntry(g.at(i),legtext.c_str(),"f");
+    }
+
+    TCanvas* c1 =new TCanvas(gname.c_str(),gname.c_str(),800,600);
+    gPad->SetTickx();
+    gPad->SetTicky();
+
+    g.at(0)->Draw("ap");
+    for(int i=1; i<suffix.size(); i++){
+        g.at(i)->Draw("psame");
+    }
+    leg->Draw();
+
+    // Add text
+    TLatex l1;
+    l1.SetNDC();
+    l1.SetTextColor(1);
+    l1.DrawLatex(0.15,0.83, "#it{ATLAS} Pixel Preliminary");
+
+    string eg = gname.substr(4);
+    eg.replace(1,1,".");
+
+    TLatex l2;
+    l2.SetNDC();
+    l2.SetTextSize(0.03);
+    l2.SetTextColor(1);
+    l2.DrawLatex(0.15,0.79, ("#bf{E_{g} = " + eg + " eV}").c_str());
+
+    double ymax = TMath::MaxElement(g.at(0)->GetN(),g.at(0)->GetY());
+
+    TLine *l_2015 = new TLine(lumi_2015, 0., lumi_2015, 0.5*ymax);
+    l_2015->SetLineWidth(1);
+    l_2015->SetLineColor(15);
+    l_2015->SetLineStyle(1);
+    l_2015->Draw("same");
+    TLine *l_2016 = new TLine(lumi_2016, 0., lumi_2016, 0.5*ymax);
+    l_2016->SetLineWidth(1);
+    l_2016->SetLineColor(15);
+    l_2016->SetLineStyle(1);
+    l_2016->Draw("same");
+
+    TLine *l_2017 = new TLine(lumi_2017, 0., lumi_2017, 0.65*ymax);
+    l_2017->SetLineWidth(1);
+    l_2017->SetLineColor(15);
+    l_2017->SetLineStyle(1);
+    l_2017->Draw("same");
+
+    TLatex years;
+    years.SetNDC();
+    years.SetTextColor(15);
+    years.SetTextSize(0.03);
+    years.DrawLatex(0.13,.4, "2016");
+    years.DrawLatex(0.31,.4, "2017");
+    years.DrawLatex(0.55,.2, "2018");
+
+    save(c1,gname);
+
+    return;
+}
+
+// Draw it real pretty
+void overlay_ratio(string gname){
+
+    gStyle->SetOptTitle(0);
+
+    vector<string> suffix = {"A_M1","C_M1","A_M2","C_M2","A_M3","C_M3"};
+
+    vector<string> leg_names = {"Planar: z #epsilon [0,8] cm", "Planar: z #epsilon [-8,0] cm",
+                                "Planar: z #epsilon [8,16] cm", "Planar: z #epsilon [-16,-8] cm",
+                                "Planar: z #epsilon [16,24] cm", "Planar: z #epsilon [-24,-16] cm"};
+    vector<int> colors = {1,4,2,5,3,6};
+    vector<TGraph*> g;
+
+    string indir = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/means/";
+
+    double x1=.45, y1=.85;
+    TLegend* leg = new TLegend(x1,y1,x1+0.45,y1-.15);
+    leg->SetBorderSize(0);
+    leg->SetNColumns(2);
+
+    string filename = indir+"Ratios_"+gname+".root";
+    TFile* infile = new TFile(filename.c_str());
+
+    for(int i=0; i<suffix.size(); i++){
+        g.push_back(new TGraph());
+        TGraph* gtmp = (TGraph*)infile->Get(suffix.at(i).c_str());
+        g.at(i) = smooth((TGraph*)gtmp->Clone(suffix.at(i).c_str()));
+
+        g.at(i)->GetXaxis()->SetTitle("Delivered integrated luminosity [fb^{-1}]");
+        g.at(i)->GetYaxis()->SetTitle("Ratio of leakage current to 3Ds at 0#circC [mA/cm^{3}]");
+        g.at(i)->SetMarkerColor(colors.at(i));
+        g.at(i)->SetLineColor(kGray);
+        g.at(i)->SetFillColor(colors.at(i));
+        g.at(i)->SetMaximum(2.5);
+        g.at(i)->SetMinimum(0.8);
+
+        string legtext = leg_names.at(i);
+        leg->AddEntry(g.at(i),legtext.c_str(),"f");
+    }
+
+    TCanvas* c1 =new TCanvas(gname.c_str(),gname.c_str(),800,600);
+    gPad->SetTickx();
+    gPad->SetTicky();
+
+    g.at(0)->Draw("ap");
+    for(int i=1; i<suffix.size(); i++){
+        g.at(i)->Draw("psame");
+    }
+    leg->Draw();
+
+    // Add text
+    TLatex l1;
+    l1.SetNDC();
+    l1.SetTextColor(1);
+    l1.DrawLatex(0.13,0.8, "#it{ATLAS} Preliminary");
+
+    string eg = gname.substr(4);
+    eg.replace(1,1,".");
+
+    TLatex l2;
+    l2.SetNDC();
+    l2.SetTextSize(0.03);
+    l2.SetTextColor(1);
+    l2.DrawLatex(0.13,0.75, ("#bf{E_{g} = " + eg + " eV}").c_str());
+
+    TLine *l_2015 = new TLine(lumi_2015, 0.8, lumi_2015, 2);
+    l_2015->SetLineWidth(1);
+    l_2015->SetLineColor(15);
+    l_2015->SetLineStyle(1);
+    l_2015->Draw("same");
+
+    TLine *l_2016 = new TLine(lumi_2016, 0.8, lumi_2016, 2);
+    l_2016->SetLineWidth(1);
+    l_2016->SetLineColor(15);
+    l_2016->SetLineStyle(1);
+    l_2016->Draw("same");
+
+    TLine *l_2017 = new TLine(lumi_2017, 0.8, lumi_2017, 2);
+    l_2017->SetLineWidth(1);
+    l_2017->SetLineColor(15);
+    l_2017->SetLineStyle(1);
+    l_2017->Draw("same");
+
+    TLine *l_1 = new TLine(33.98, 0.8, 33.98, 2);
+    l_1->SetLineWidth(1);
+    l_1->SetLineColor(15);
+    l_1->SetLineStyle(2);
+    l_1->Draw("same");
+
+    TLatex years;
+    years.SetNDC();
+    years.SetTextColor(15);
+    years.SetTextSize(0.03);
+    years.DrawLatex(0.13,.6, "2016 (80 V)");
+    //years.DrawLatex(0.27,.55, "(150 V)");
+    years.DrawLatex(0.31,.6, "2017 (350 V)");
+    years.DrawLatex(0.55,.6, "2018 (400 V)");
+
+    save(c1,"Ratio_"+gname);
+
+    return;
+}
+
+void plot(){
+
+    from_ssv("A_M1");
+    from_ssv("C_M1");
+    from_ssv("A_M2");
+    from_ssv("C_M2");
+    from_ssv("A_M3");
+    from_ssv("C_M3");
+    from_ssv("A_M4");
+    from_ssv("C_M4");
+
+    get_ratios("I_Eg1_30");
+    //get_ratios("I_Eg1_21");
+    get_ratios("I_Eg1_12");
+
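+    // Note: get_ratios() must run before the overlay_ratio() calls below, which read
+    // back the Ratios_*.root files written above.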
overlay("I_Eg1_30"); + //overlay("I_Eg1_21"); + overlay("I_Eg1_12"); + + overlay_ratio("I_Eg1_30"); + //overlay_ratio("I_Eg1_21"); + overlay_ratio("I_Eg1_12"); + + return; +} diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/run_avg.sh b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/run_avg.sh new file mode 100644 index 0000000000000000000000000000000000000000..35a4c23e17410a312780e26d258745236b35bbbd --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/run_avg.sh @@ -0,0 +1,7 @@ +#!/bin/bash +m=$1 + +echo "Sourcing setup scripts" +source /afs/cern.ch/work/j/jdickins/Pixel/LeakgeCurrent/code_Nick/setup_env.sh + +python /afs/cern.ch/work/j/jdickins/Pixel/LeakgeCurrent/code_Nick/avgOverLB.py $m diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup.sh b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup.sh new file mode 100644 index 0000000000000000000000000000000000000000..6b2132be9336f72799be8fa77ca4338be4ed4c62 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup.sh @@ -0,0 +1,2 @@ +setupATLAS +lsetup "root 6.14.04-x86_64-slc6-gcc62-opt" \ No newline at end of file diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup_env.sh b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup_env.sh new file mode 100644 index 0000000000000000000000000000000000000000..70e8ab9a12d6ca656f013a65a49122f57d96e9b0 --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/setup_env.sh @@ -0,0 +1,2 @@ +#Need to setup miniconda first +source ~/miniconda2/bin/activate pix \ No newline at end of file diff --git a/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/submit_avg.sh b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/submit_avg.sh new file mode 100644 index 0000000000000000000000000000000000000000..dbedb8ea1970302cf0cff637c1d62baa4a0c420f --- /dev/null +++ b/InnerDetector/InDetCalibAlgs/PixelCalibAlgs/RadDamage/IBLLeakageCurrentData/submit_avg.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +marray=`cat modules.txt` + +#m=LI_S01_A_M2 +#bsub -C 0 -q 1nh -o ${m}.log -e ${m}.err -R 'pool>4000' run_avg.sh $m + +for m in $marray +do + echo $m + + subfile=means_HVON/$m.sub + rm $subfile + + echo "executable = run_avg.sh" >> $subfile + echo "arguments = $m" >> $subfile + echo "output = means_HVON/run_avg_$m.out" >> $subfile + echo "error = means_HVON/run_avg_$m.err" >> $subfile + echo "log = means_HVON/run_avg_$m.log" >> $subfile + echo "queue" >> $subfile + + condor_submit $subfile + +# bsub -C 0 -q 8nh -o ${m}.log -e ${m}.err -R 'pool>4000' run_avg.sh $m +# ./run_avg.sh $m + +done +