diff --git a/TopAnalysis/scripts/steerTOP17010.sh b/TopAnalysis/scripts/steerTOP17010.sh index 1d8b790e324feaf9c178948bdc656a1f16d4e7d2..c2905be046d3b81069eb904b54954980d78fd777 100644 --- a/TopAnalysis/scripts/steerTOP17010.sh +++ b/TopAnalysis/scripts/steerTOP17010.sh @@ -25,7 +25,7 @@ fi export LSB_JOB_REPORT_MAIL=N -queue=workday +queue=1nd lumi=35922 lumiSpecs="" lumiUnc=0.025 @@ -34,8 +34,9 @@ myletter=${whoami:0:1} eosdir=/store/cmst3/group/top/ReReco2016/b312177 dataeosdir=/store/cmst3/group/top/ReReco2016/be52dbe_03Feb2017 summaryeosdir=/store/cmst3/group/top/TOP-17-010/ -COMBINERELEASE=~/scratch0/CMSSW_7_4_7/src/ +COMBINERELEASE=~/CMSSW_7_4_7/src/ outdir=/afs/cern.ch/work/${myletter}/${whoami}/TOP-17-010/ +anadir=${outdir}/$2 wwwdir=~/www/TOP-17-010/ @@ -122,15 +123,17 @@ case $WHAT in cp test/index.php ${wwwdir}/comb ;; HYPOTEST ) - mainHypo=1.0 + mainHypo=100 CATS=( - "lowptEE1b,lowptEE2b,highptEE1b,highptEE2b,lowptEM1b,lowptEM2b,highptEM1b,highptEM2b,lowptMM1b,lowptMM2b,highptMM1b,highptMM2b" - "lowptEE1b,lowptEE2b,highptEE1b,highptEE2b,lowptMM1b,lowptMM2b,highptMM1b,highptMM2b" - "lowptEM1b,lowptEM2b,highptEM1b,highptEM2b" - ) - TAGS=("inc" "ll" "em") - altHypo=(0.2 0.4 0.6 0.8 1.0 1.2 1.4 1.6 1.8 2.0 2.2 2.4 2.6 2.8 3.0 3.5 4.0) - data=(-1.0 1.0 4.0) + "EE1blowpt,EE2blowpt,EE1bhighpt,EE2bhighpt,EM1blowpt,EM2blowpt,EM1bhighpt,EM2bhighpt,MM1blowpt,MM2blowpt,MM1bhighpt,MM2bhighpt") + # "EE1blowpt,EE2blowpt,EE1bhighpt,EE2bhighpt,MM1blowpt,MM2blowpt,MM1bhighpt,MM2bhighpt" + # "EM1blowpt,EM2blowpt,EM1bhighpt,EM2bhighpt" + #) + TAGS=("inclesscpu1000") # "ll" "em") + altHypo=(20 40 60 80 100 120 140 160 180 200 220 240 260 280 300 350 400) + #altHypo=(20 40 60 80 120 140 160 180 200 220 240 280 300 350) + #data=(-1 100 400) + data=(100) #still to be debugged #cmd="${cmd} --addBinByBin 0.3" for h in ${altHypo[@]}; do @@ -143,14 +146,15 @@ case $WHAT in cmd="python test/TopWidthAnalysis/runHypoTestDatacards.py" cmd="${cmd} --combine ${COMBINERELEASE}" cmd="${cmd} --mainHypo=${mainHypo} --altHypo ${h} --pseudoData=${d}" - cmd="${cmd} -s tbart,tW --replaceDYshape" + cmd="${cmd} -s tbart --replaceDYshape" cmd="${cmd} --dist incmlb" + cmd="${cmd} --nToys 2000" cmd="${cmd} -i ${outdir}/analysis/plots/plotter.root" cmd="${cmd} --systInput ${outdir}/analysis/plots/syst_plotter.root" cmd="${cmd} -c ${icat}" cmd="${cmd} --rebin 2" - if [ "$h" == "2.2" ]; then - if [ "$d" == "-1.0" ]; then + if [ "$h" == "220" ]; then + if [ "$d" == "-1" ]; then echo " validation will be included" cmd="${cmd} --doValidation" fi @@ -159,18 +163,18 @@ case $WHAT in echo "Submitting ($mainHypo,$h,$d,$itag,$icat)" stdcmd="${cmd} -o ${outdir}/datacards_${itag}/" bsub -q ${queue} ${CMSSW_BASE}/src/TopLJets2015/TopAnalysis/scripts/wrapLocalAnalysisRun.sh ${stdcmd}; - if [ "$itag" == "inc" ]; then - if [ "$d" == "1.0" ]; then - echo " injecting pseudo-data from nloproddec" - nlocmd="${cmd} --pseudoDataFromWgt nloproddec -o ${outdir}/datacards_${itag}_nloproddec" - bsub -q ${queue} ${CMSSW_BASE}/src/TopLJets2015/TopAnalysis/scripts/wrapLocalAnalysisRun.sh ${nlocmd}; - echo " injecting pseudo-data from widthx4" - width4cmd="${cmd} --pseudoDataFromSim=t#bar{t}_widthx4 -o ${outdir}/datacards_${itag}_widthx4" - bsub -q ${queue} sh ${CMSSW_BASE}/src/TopLJets2015/TopAnalysis/scripts/wrapLocalAnalysisRun.sh ${width4cmd}; - fi - fi + #if [ "$itag" == "inc" ]; then + #if [ "$d" == "100" ]; then + # #echo " injecting pseudo-data from nloproddec" + # #nlocmd="${cmd} --pseudoDataFromWgt nloproddec -o 
${outdir}/datacards_${itag}_nloproddec" + # #bsub -q ${queue} ${CMSSW_BASE}/src/TopLJets2015/TopAnalysis/scripts/wrapLocalAnalysisRun.sh ${nlocmd}; + # #echo " injecting pseudo-data from widthx4" + # #width4cmd="${cmd} --pseudoDataFromSim=t#bar{t}_widthx4 -o ${outdir}/datacards_${itag}_widthx4" + # #bsub -q ${queue} sh ${CMSSW_BASE}/src/TopLJets2015/TopAnalysis/scripts/wrapLocalAnalysisRun.sh ${width4cmd}; + #fi + #fi done - done + done done ;; PLOTHYPOTEST ) @@ -190,7 +194,7 @@ case $WHAT in itag=${TAGS[${i}]} mkdir -p ${wwwdir}/hypo${itag} cp ${outdir}/datacards${itag}/*.{png,pdf} ${wwwdir}/hypo${itag}; - cp ${outdir}/datacards${itag}/hypotest_1.0vs2.2_data/*.{png,pdf} ${wwwdir}/hypo${itag}; + cp ${outdir}/datacards${itag}/hypotest_100vs220_data/*.{png,pdf} ${wwwdir}/hypo${itag}; cp test/index.php ${wwwdir}/hypo${itag} done ;; diff --git a/TopAnalysis/test/TopWidthAnalysis/createShapesFromPlotter.py b/TopAnalysis/test/TopWidthAnalysis/createShapesFromPlotter.py index a2aea55715742023cc2b9c6004bdd0e55309e784..a4492befd7a4f595245f4c88ba1d747c760de486 100755 --- a/TopAnalysis/test/TopWidthAnalysis/createShapesFromPlotter.py +++ b/TopAnalysis/test/TopWidthAnalysis/createShapesFromPlotter.py @@ -253,12 +253,12 @@ def main(): return False # HARDCODE systematics - tWRateSystList=['tW'] + tWRateSystList=['Singletop'] ttRateSystList=['tbart'] - tWIsSig=('tW' in rawSignalList) + tWIsSig=('Singletop' in rawSignalList) ttIsSig=('tbart' in rawSignalList) for wid in modWidList : - if tWIsSig : tWRateSystList += ["tW%s"%wid] + if tWIsSig : tWRateSystList += ["Singletop%s"%wid] if ttIsSig : ttRateSystList += ["tbart%s"%wid] rateSysts=[ @@ -272,7 +272,7 @@ def main(): #('sel', 1.02, 'lnN', tWRateSystList+ttRateSystList+['DY','W','tch','Multiboson','tbartV'], []), ] - Mtop =['tbartm=171.5','tbartm=173.5']#,'tWm=169.5','tWm=175.5'] + Mtop =['tbartm=171.5','tbartm=173.5']#,'Singletopm=171.5','Singletopm=173.5'] #ttParton_tt=['tbartscaledown','tbartscaleup'] #ttParton_tW=['tWscaledown','tWscaleup'] #ME={ "muF": ['%sgen%i'%(sig,ind) for sig in ['tbart'] for ind in [3,4]], @@ -282,10 +282,10 @@ def main(): #tWinterf=['tWDS'] #amcNLO =['tbartaMCNLO'] #Herwig =['tbartHerwig'] - ISR =['tbartisrdn','tbartisrup'] - FSR =['tbartfsrdn','tbartfsrup'] - hdamp =['tbarthdampdn','tbarthdampup'] - undEve =['tbartUEdn','tbartUEup'] + ISR =['tbartisrdn','tbartisrup']#,'Singletopisrdn','Singletopisrup'] + FSR =['tbartfsrdn','tbartfsrup']#,'Singletopfsrdn','Singletopfsrup'] + hdamp =['tbarthdampdn','tbarthdampup']#,'Singletophdampdn','Singletophdampup'] + undEve =['tbartUEdn','tbartUEup']#,'SingletopUEdn','SingletopUEup'] #cflip =['tbartcflip'] #noSC =['tbartnoSC'] @@ -305,8 +305,8 @@ def main(): #noSCMap={} for wid in modWidList : - MtopMap[ 'tbart%s'%wid]=['tbartm=171.5%s' %(wid),'tbartm=173.5%s'%(wid)] - #MtopMap[ 'tW%s' %wid]=['tWm=171.5%s' %(wid),'tWm=173.5%s' %(wid)] + MtopMap[ 'tbart%s'%wid]=['tbartm=171.5%s' %(wid),'tbartm=173.5%s'%(wid)] + #MtopMap['Singletop%s'%wid]=['Singletopm=171.5%s'%(wid),'Singletopm=173.5%s'%(wid)] #ttPartonMap['tbart%s'%wid]=['tbartscaledown%s'%(wid),'tbartscaleup%s'%(wid)] #twPartonMap['tW%s' %wid]=['tWscaledown%s' %(wid),'tWscaleup%s' %(wid)] @@ -314,11 +314,16 @@ def main(): #amcNLOMap['tbart%s'%wid]=['tbartaMCNLO%s'%(wid)] #HerwigMap['tbart%s'%wid]=['tbartHerwig%s'%(wid)] ISRMap['tbart%s'%wid]=['tbartisrdn%s'%(wid),'tbartisrup%s'%(wid)] + #ISRMap['Singletop%s'%wid]=['Singletopisrdn%s'%(wid),'Singletopisrup%s'%(wid)] + FSRMap['tbart%s'%wid]=['tbartfsrdn%s'%(wid),'tbartfsrup%s'%(wid)] + 
#FSRMap['Singletop%s'%wid]=['Singletopfsrdn%s'%(wid),'Singletopfsrup%s'%(wid)] + hdampMap['tbart%s'%wid]=['tbarthdampdn%s'%(wid),'tbarthdampup%s'%(wid)] + #hdampMap['Singletop%s'%wid]=['Singletophdampdn%s'%(wid),'Singletophdampup%s'%(wid)] + UEMap['tbart%s'%wid]=['tbartUEdn%s'%(wid),'tbartUEup%s'%(wid)] - #cflMap['tbart%s'%wid]=['tbartcflip%s'%(wid)] - #noSCMap['tbart%s'%wid]=['tbartnoSC%s'%(wid)] + #UEMap['Singletop%s'%wid]=['SingletopUEdn%s'%(wid),'SingletopUEup%s'%(wid)] #tWinterfMap['tW%s' %wid]=['tWDS%s' %(wid)] @@ -806,15 +811,25 @@ def main(): if len(upShapes)==0 : continue #export to shapes file - if systVar == "trig" : + if systVar in ["sel","les","trig"] : systVar=systVar+lfs extraRateSyst={} - if systVar in ['jes']: + if systVar in ['ISR','FSR','hdamp','UE','btag','btag%s'%lfs,'ltag','les','les%s'%lfs,'jer','pu','sel','sel%s'%lfs,'toppt','jes','trig%s'%lfs] : for proc in downShapes: + nomproc = proc if "NOM" not in proc else proc.replace('NOM','1p0w') + nevtsnm=exp[nomproc].Integral() + nevtsup,nevtsdn=upShapes[proc].Integral(),downShapes[proc].Integral() if nevtsdn==0 : continue + + # compute rate rateVarInProc=1.0+0.5*ROOT.TMath.Abs(1.0-nevtsup/nevtsdn) extraRateSyst[proc]=rateVarInProc + + # normalize variations to nominal integral + upShapes[ proc].Scale(nevtsnm/nevtsup) + downShapes[proc].Scale(nevtsnm/nevtsdn) + saveToShapesFile(outFile,downShapes,lbCat+lfs+cat+"_"+dist+"_"+systVar+'Down') saveToShapesFile(outFile,upShapes,lbCat+lfs+cat+"_"+dist+"_"+systVar+'Up') @@ -841,6 +856,7 @@ def main(): #add extra rate systs if len(extraRateSyst)>0: + #if "trigMM" in systVar or "selMM" in systVar : continue datacard.write('%26s lnN'%(systVar+'rate')) for sig in signalList: if ("%s%s"%(sig,modNomWid)) in exp and ("%s%s"%(sig,modNomWid)) in extraRateSyst: diff --git a/TopAnalysis/test/TopWidthAnalysis/getRatiosFromPlotter.py b/TopAnalysis/test/TopWidthAnalysis/getRatiosFromPlotter.py index d5fb063c9004a5981a461db593172cb09e3f5c52..56df89e8ff75d1896dade0b694b438d3e2948751 100644 --- a/TopAnalysis/test/TopWidthAnalysis/getRatiosFromPlotter.py +++ b/TopAnalysis/test/TopWidthAnalysis/getRatiosFromPlotter.py @@ -15,10 +15,10 @@ def main(): parser = optparse.OptionParser(usage) parser.add_option('-i', '--inDir', dest='inDir' , help='input directory', default="./plotter.root", type='string') parser.add_option('-d', '--divideFile', dest='inDiv' , help='comparison input plotter', default="./syst_plotter.root", type='string') - parser.add_option('-l', '--lumi', dest='lumi' , help='lumi to print out', default=41.6, type=float) + parser.add_option('-l', '--lumi', dest='lumi' , help='lumi to print out', default=35.9, type=float) parser.add_option( '--saveLog', dest='saveLog' , help='save log versions of the plots', default=False, action='store_true') - parser.add_option('--wids', dest='wids' , help='widths to compare to nominal in div', default="4.0", type='string') - parser.add_option('--obs', dest='obs' , help='observables to process', default="incmlb,sncmlb,mt2mlb", type='string') + parser.add_option('--wids', dest='wids' , help='widths to compare to nominal in div', default="8.0", type='string') + parser.add_option('--obs', dest='obs' , help='observables to process', default="incmlb", type='string') parser.add_option('--dists', dest='dists' , help='non-observable distributions to process', default="tmass", type='string') parser.add_option('--sigs', dest='sigs' , help='signal processes to plot ratios for', default="t#bar{t}", type='string') parser.add_option('--ptCh', dest='ptchs' , help='pt 
categories to consider', default="highpt,lowpt", type='string') @@ -39,7 +39,7 @@ def main(): obsList =opt.obs.split(',') sigList =opt.sigs.split(',') - # useful settings (currenty magic) + # useful settings divWid="1.0" #show plots @@ -51,12 +51,12 @@ def main(): # collect a list of all distributions to plot plots = [("%s_%sw/%s_%sw_%s"%(dist,wid,dist,wid,sig), - "%s_%sw/%s_%sw_%s widthx4"%(dist,divWid,dist,divWid,sig)) + "%s_%sw/%s_%sw_%s widthx8"%(dist,divWid,dist,divWid,sig)) for dist in distList for wid in widList for sig in sigList] plots+= [("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s"%(ptC,ch,bc,obs,wid,ptC,ch,bc,obs,wid,sig), - "%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx4"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)) + "%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx8"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)) for ptC in ptChList for ch in chList for bc in bcatList @@ -103,6 +103,7 @@ def main(): CMS_lumi.CMS_lumi(canvas,4,0) canvas.SaveAs("%s/%s.pdf"%(outDir,p[0].split('/')[1].replace('.','w').replace('#','').replace('{','').replace('}',''))) + canvas.SaveAs("%s/%s.png"%(outDir,p[0].split('/')[1].replace('.','w').replace('#','').replace('{','').replace('}',''))) for wid,obs,sig in [(a,b,c) for a in widList for b in obsList for c in sigList]: miniCanvas=ROOT.TCanvas() @@ -131,7 +132,7 @@ def main(): bc = bcatList[ibc] wgtHist=wgtFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s"%(ptC,ch,bc,obs,wid,ptC,ch,bc,obs,wid,sig)).Clone() - divHists["%i %i %i"%(iptC,ich,ibc)]=divFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx4"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)).Clone() + divHists["%i %i %i"%(iptC,ich,ibc)]=divFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx8"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)).Clone() divHist=divHists["%i %i %i"%(iptC,ich,ibc)] divHist.Divide(wgtHist) @@ -159,6 +160,7 @@ def main(): #CMS_lumi.CMS_lumi(miniCanvas,4,0) miniCanvas.SaveAs("%s/RatioGrid_%s_%s_%s.pdf"%(outDir,obs,sig,wid.replace('#','').replace('{','').replace('}',''))) + miniCanvas.SaveAs("%s/RatioGrid_%s_%s_%s.png"%(outDir,obs,sig,wid.replace('#','').replace('{','').replace('}',''))) # use full dataset for comparison for wid,sig in [(a,c) for a in widList for c in sigList]: @@ -193,7 +195,7 @@ def main(): wgtHist=wgtFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s"%(ptC,ch,bc,obs,wid,ptC,ch,bc,obs,wid,sig)).Clone() xHist=wgtFin.Get("%s%s%s_%s_1.0w/%s%s%s_%s_1.0w_%s"%(ptC,ch,bc,obs,ptC,ch,bc,obs,sig)).Clone() - divHist=divFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx4"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)).Clone() + divHist=divFin.Get("%s%s%s_%s_%sw/%s%s%s_%s_%sw_%s widthx8"%(ptC,ch,bc,obs,divWid,ptC,ch,bc,obs,divWid,sig)).Clone() wgtHist.SetDirectory(0) xHist.SetDirectory(0) @@ -221,7 +223,7 @@ def main(): totObsHist=obsHists[obs]["1x"] totObsHist.SetDirectory(0) totObsHist.Divide(obsHists[obs]["4x"]) - totObsHist.GetYaxis().SetTitle("1#timesSM Events / 4#times Events") + totObsHist.GetYaxis().SetTitle("1#timesSM Events / 8.0#times Events") totObsHist.GetYaxis().SetTitleSize(0.065) totObsHist.GetYaxis().SetRangeUser(0.7,1.3) totObsHist.GetXaxis().SetTitleSize(0.065) diff --git a/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py b/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py index 04ba92719f4575e932a8fc4e064a8402c4911308..b8dc5df8d6b0d9795a987797dca7d6cfff501c27 100644 --- a/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py +++ b/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py @@ -184,7 +184,7 @@ if not options.doAll : print obsErrYArr for ix in xrange(0,len(xarr)): - x[ix] 
= float(xarr[ix]) + x[ix] = float(xarr[ix])/100 y[ix] = float(yarr[ix]) if not options.unblind: continue obsY[ix]=obsYArr[ix] @@ -193,7 +193,12 @@ if not options.doAll : if options.prepost != "post" and not options.addpre: continue preY[ix]=preYArr[ix] + ty = [a for (b,a) in sorted(zip(x,y))] + tx = sorted(x) + for ix in xrange(0,len(x)) : + x[ix] = tx[ix] + y[ix] = ty[ix] clsGr=ROOT.TGraph(x,y) clsGr.GetXaxis().SetTitle("Generator-level #Gamma_{t} [GeV]") @@ -339,7 +344,7 @@ if options.doAll : # loop over widths, parse array info for wid in rawWidList : - latexWid = wid.replace('p','.').replace('w','$\\times\\Gamma_{\\rm SM}$') + latexWid = float(wid.replace('p','.').replace('w','$\\times\\Gamma_{\\rm SM}$')) statsFileName="%s/stats__%s__%s.txt"%(options.indir,wid,dist) for line in open(statsFileName,"r"): diff --git a/TopAnalysis/test/TopWidthAnalysis/hypoTestResultTreeTopWid.cxx b/TopAnalysis/test/TopWidthAnalysis/hypoTestResultTreeTopWid.cxx index c8ca278c79c0704e95c6349afab5b88e23d72698..ff0c9d11cd0de208a3182033291eb67dda57ab8c 100644 --- a/TopAnalysis/test/TopWidthAnalysis/hypoTestResultTreeTopWid.cxx +++ b/TopAnalysis/test/TopWidthAnalysis/hypoTestResultTreeTopWid.cxx @@ -117,8 +117,19 @@ void hypoTestResultTreeTopWid(TString fOutName, /* * Outputting LaTeX table with useful statistics */ - TH1D *hnullstat = new TH1D("hnullstat","Null Hypothesis",1000,-1000,1000); - TH1D *haltstat = new TH1D("haltstat" ,"Alternate Hypothesis",1000,-1000,1000); + double maxToyValue = -2*tree->GetMinimum("q"); + double minToyValue = -2*tree->GetMaximum("q"); + maxToyValue = maxToyValue + 0.1*TMath::Abs(maxToyValue-minToyValue); + minToyValue = minToyValue - 0.1*TMath::Abs(maxToyValue-minToyValue); + + std::cout<<"Max is: "<<maxToyValue<<" and min is "<<minToyValue<<std::endl; + + TH1D *hnullstat = new TH1D("hnullstat","Null Hypothesis", + maxToyValue-minToyValue, + minToyValue,maxToyValue); + TH1D *haltstat = new TH1D("haltstat" ,"Alternate Hypothesis", + maxToyValue-minToyValue, + minToyValue,maxToyValue); tree->Draw("-2*q>>hnullstat","type>0","goff"); tree->Draw("-2*q>>haltstat" ,"type<0","goff"); @@ -127,8 +138,8 @@ void hypoTestResultTreeTopWid(TString fOutName, // // Get x position where toy histograms approximately intersect // - gnull = new TF1("gnull","gaus",-1000,1000); - galt = new TF1("galt", "gaus",-1000,1000); + gnull = new TF1("gnull","gaus",minToyValue,maxToyValue); + galt = new TF1("galt", "gaus",minToyValue,maxToyValue); hnullstat->Fit(gnull); haltstat->Fit(galt); @@ -144,8 +155,8 @@ void hypoTestResultTreeTopWid(TString fOutName, // std::cout << " - getting separation... 
" << std::endl; - Double_t galtNorm = galt->Integral(-1000,1000); - Double_t gnullNorm=gnull->Integral(-1000,1000); + Double_t galtNorm = galt->Integral(minToyValue,maxToyValue); + Double_t gnullNorm=gnull->Integral(minToyValue,maxToyValue); if(galtNorm == 0 or gnullNorm == 0) { std::cout<< " \n\nWARNING : INTEGRAL OF ONE FIT IS 0 \n\n " << std::endl; @@ -153,11 +164,11 @@ void hypoTestResultTreeTopWid(TString fOutName, Double_t separation = 0; if(hnullstat->GetMean() <= haltstat->GetMean()) { - separation = galt->Integral(-1000,xint)/galtNorm - + gnull->Integral(xint,1000)/gnullNorm; + separation = galt->Integral(minToyValue,xint)/galtNorm + + gnull->Integral(xint,maxToyValue)/gnullNorm; } else { - separation = gnull->Integral(-1000,xint)/gnullNorm - + galt->Integral(xint,1000)/galtNorm; + separation = gnull->Integral(minToyValue,xint)/gnullNorm + + galt->Integral(xint,maxToyValue)/galtNorm; } separation = 1 - separation; @@ -197,11 +208,11 @@ void hypoTestResultTreeTopWid(TString fOutName, Double_t nullFitMedian = gnull->GetParameter("Mean"); Double_t altFitMedian = galt->GetParameter("Mean"); if(nullFitMedian < altFitMedian) { - nullExceededDensity=gnull->Integral(altFitMedian,1000)/gnullNorm; - altExceededDensity= galt->Integral(-1000,nullFitMedian)/galtNorm; + nullExceededDensity=gnull->Integral(altFitMedian,maxToyValue)/gnullNorm; + altExceededDensity= galt->Integral(minToyValue,nullFitMedian)/galtNorm; } else { - nullExceededDensity=gnull->Integral(-1000,altFitMedian)/gnullNorm; - altExceededDensity= galt->Integral(nullFitMedian,1000)/galtNorm; + nullExceededDensity=gnull->Integral(minToyValue,altFitMedian)/gnullNorm; + altExceededDensity= galt->Integral(nullFitMedian,maxToyValue)/galtNorm; } Double_t CLsExp = altExceededDensity/0.5; @@ -263,11 +274,12 @@ void hypoTestResultTreeTopWid(TString fOutName, std::cout << "Creating plots" << std::endl; - Double_t plotMinMax = 2*TMath::Max(TMath::Abs(gnull->GetParameter("Mean")-3*gnull->GetParameter(2)), - TMath::Abs( galt->GetParameter("Mean")-3* galt->GetParameter(2)))*1.2; - - TH1D *hnull = new TH1D("hnull","Null Hypothesis",TMath::FloorNint(2*plotMinMax*100/40)/5,-plotMinMax,plotMinMax); - TH1D *halt = new TH1D("halt" ,"Alternate Hypothesis",TMath::FloorNint(2*plotMinMax*100/40)/5,-plotMinMax,plotMinMax); + TH1D *hnull = new TH1D("hnull","Null Hypothesis", + TMath::CeilNint((maxToyValue-minToyValue)/4), + minToyValue,maxToyValue); + TH1D *halt = new TH1D("halt" ,"Alternate Hypothesis", + TMath::CeilNint((maxToyValue-minToyValue)/4), + minToyValue,maxToyValue); tree->Draw("-2*q>>hnull","type>0","goff"); tree->Draw("-2*q>>halt" ,"type<0","goff"); diff --git a/TopAnalysis/test/TopWidthAnalysis/runHypoTestDatacards.py b/TopAnalysis/test/TopWidthAnalysis/runHypoTestDatacards.py index ad4fac33bb87ebc2f0be47b832499320c2c36624..e7ba8381f7a117f3206f1592c09a7b7c743f9e49 100755 --- a/TopAnalysis/test/TopWidthAnalysis/runHypoTestDatacards.py +++ b/TopAnalysis/test/TopWidthAnalysis/runHypoTestDatacards.py @@ -20,23 +20,49 @@ def getDistsFromDirIn(url,indir,applyFilter=''): fIn.Close() return obs,exp -def getDistsForHypoTest(cat,rawSignalList,opt,outDir=""): +def getRowFromTH2(tempHist2D,columnName) : + tempBinNum = tempHist2D.GetYaxis().FindBin(columnName); + new1DProj = tempHist2D.ProjectionX(tempHist2D.GetName()+"_"+columnName, + tempBinNum, + tempBinNum).Clone(tempHist2D.GetName()) + new1DProj.SetDirectory(0) - obs,exp=getDistsFromDirIn(opt.input,'%s_%s_1.0w'%(cat,opt.dist)) + return new1DProj + +def 
getDistsForHypoTest(cat,rawSignalList,opt,outDir="",systName="",systIsGen=False): + + # do we want systematics? + systExt = "" + if systIsGen : systExt = "_gen" + elif systName != "" : systExt = "_exp" + + # get main dists + obs,exp=getDistsFromDirIn(opt.input,'%s_%s_w100%s'%(cat,opt.dist,systExt)) + + #run through exp and convert into TH1s + if systName != "" : + for tHist2D in exp : + exp[tHist2D] = getRowFromTH2(exp[tHist2D],systName) #signal hypothesis expMainHypo=exp.copy() - if opt.mainHypo!=1.0: _,expMainHypo=getDistsFromDirIn(opt.input,'%s_%s_%3.1fw'%(cat,opt.dist,opt.mainHypo)) + if opt.mainHypo!=1.0: _,expMainHypo=getDistsFromDirIn(opt.input,'%s_%s_w%.0f'%(cat,opt.dist,opt.mainHypo)) expAltHypo=None if len(opt.altHypoFromSim)>0 : - _,expAltHypo=getDistsFromDirIn(opt.systInput,'%s_%s_%3.1fw'%(cat,opt.dist,opt.altHypo),opt.expAltHypoFromSim) + _,expAltHypo=getDistsFromDirIn(opt.systInput,'%s_%s_w%.0f%s'%(cat,opt.dist,opt.altHypo,systExt),opt.expAltHypoFromSim) + if systName != "" : + for tHist2D in expAltHypo : + expAltHypo[tHist2D] = getRowFromTH2(expAltHypo[tHist2D],systName) else: - _,expAltHypo=getDistsFromDirIn(opt.input,'%s_%s_%3.1fw'%(cat,opt.dist,opt.altHypo)) + _,expAltHypo=getDistsFromDirIn(opt.input,'%s_%s_w%.0f%s'%(cat,opt.dist,opt.altHypo,systExt)) + if systName != "" : + for tHist2D in expAltHypo : + expAltHypo[tHist2D] = getRowFromTH2(expAltHypo[tHist2D],systName) #replace DY shape from alternative sample try: if opt.replaceDYshape: - _,altDY=getDistsFromDirIn(opt.systInput,'%s_%s_%3.1fw'%(cat,opt.dist,opt.mainHypo),'DY') + _,altDY=getDistsFromDirIn(opt.systInput,'%s_%s_w%.0f'%(cat,opt.dist,opt.mainHypo),'DY') nbins=exp['DY'].GetNbinsX() sf=exp['DY'].Integral(0,nbins+1)/altDY['DY'].Integral(0,nbins+1) altDY['DY'].Scale(sf) @@ -64,19 +90,27 @@ def getDistsForHypoTest(cat,rawSignalList,opt,outDir=""): #add signal hypothesis to expectations for proc in rawSignalList: + if 'Singletop' in proc : + newProc=('Singletopw%.0f'%(opt.altHypo)).replace('.','p') + print exp['Singletop'] + exp[newProc] = exp['Singletop'].Clone(newProc) + exp[newProc].SetDirectory(0) try: - newProc=('%s%3.1fw'%(proc,opt.mainHypo)).replace('.','p') + newProc=('%sw%.0f'%(proc,opt.mainHypo)).replace('.','p') exp[newProc]=expMainHypo[proc].Clone(newProc) exp[newProc].SetDirectory(0) - newProc=('%s%3.1fw'%(proc,opt.altHypo)).replace('.','p') + newProc=('%sw%.0f'%(proc,opt.altHypo)).replace('.','p') if opt.mainHypo==opt.altHypo: newProc+='a' - exp[newProc]=expAltHypo[proc].Clone(newProc) + if 'Singletop' in proc : + exp[newProc]=exp[proc].Clone(newProc) + else : + exp[newProc]=expAltHypo[proc].Clone(newProc) exp[newProc].SetDirectory(0) except: pass #delete the nominal expectations - for proc in rawSignalList: + for proc in rawSignalList: try: exp[proc].Delete() del exp[proc] @@ -91,14 +125,14 @@ prepare the steering script for combine """ def doCombineScript(opt,args,outDir,dataCardList): - altHypoTag=('%3.1fw'%opt.altHypo).replace('.','p') + altHypoTag=('w%.0f'%opt.altHypo).replace('.','p') if opt.altHypo==opt.mainHypo : altHypoTag+='a' scriptname='%s/steerHypoTest.sh'%outDir script=open(scriptname,'w') print 'Starting script',scriptname script.write('#\n') - script.write('# Generated by %s with git hash %s for standard (alternative) hypothesis %3.1f (%3.1f)\n' % (getpass.getuser(), + script.write('# Generated by %s with git hash %s for standard (alternative) hypothesis %.0f (%.0f)\n' % (getpass.getuser(), commands.getstatusoutput('git log --pretty=format:\'%h\' -n 1')[1], opt.mainHypo, opt.altHypo) ) 
@@ -124,9 +158,10 @@ def doCombineScript(opt,args,outDir,dataCardList): script.write('combine workspace.root -M MaxLikelihoodFit -m 172.5 --redefineSignalPOIs x --minimizerTolerance 0.001 -n x_fit_obs --saveWorkspace --robustFit=1;\n') script.write('\n') script.write('### CLs\n') - script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --generateExt=1 --generateNuis=0 --expectedFromGrid 0.5 -n cls_prefit_exp;\n'%opt.nToys) - script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --frequentist --expectedFromGrid 0.5 -n cls_postfit_exp;\n'%opt.nToys) - script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --frequentist -n cls_postfit_obs;\n'%opt.nToys) + # do not write CLs -- python can't launch scripts with forking + #script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --saveWorkspace --saveToys --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --generateExt=1 --generateNuis=0 --expectedFromGrid 0.5 -n cls_prefit_exp;\n'%opt.nToys) + #script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --saveWorkspace --saveToys --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --frequentist --expectedFromGrid 0.5 -n cls_postfit_exp;\n'%opt.nToys) + #script.write('combine workspace.root -M HybridNew --seed 8192 --saveHybridResult -m 172.5 --saveWorkspace --saveToys --testStat=TEV --singlePoint 1 -T %d -i 2 --fork 6 --clsAcc 0 --fullBToys --frequentist -n cls_postfit_obs;\n'%opt.nToys) script.write('\n') script.close() @@ -142,13 +177,13 @@ def doDataCards(opt,args): ttScenarioList=['tbart'] mainSignalList,altSignalList=[],[] if 'tbart' in rawSignalList: - ttScenarioList = [('tbart%3.1fw'%h).replace('.','p') for h in [opt.mainHypo,opt.altHypo]] + ttScenarioList = [('tbartw%.0f'%h).replace('.','p') for h in [opt.mainHypo,opt.altHypo]] if opt.mainHypo==opt.altHypo: ttScenarioList[1]+='a' mainSignalList += [ttScenarioList[0]] altSignalList += [ttScenarioList[1]] - tWScenarioList=['tW'] - if 'tW' in rawSignalList: - tWScenarioList = [('tW%3.1fw'%h).replace('.','p') for h in [opt.mainHypo,opt.altHypo]] + tWScenarioList=['Singletop'] + if 'Singletop' in rawSignalList: + tWScenarioList = [('Singletopw%.0f'%h).replace('.','p') for h in [opt.mainHypo,opt.altHypo]] if opt.mainHypo==opt.altHypo: tWScenarioList[1]+='a' mainSignalList += [tWScenarioList[0]] altSignalList += [tWScenarioList[1]] @@ -163,42 +198,57 @@ def doDataCards(opt,args): ('tnorm_th', 1.044, 'lnN', ['tch'], []), ('VVnorm_th', 1.20, 'lnN', ['Multiboson'], []), ('tbartVnorm_th', 1.30, 'lnN', ['tbartV'], []), - ('sel_*CH*', 1.02, 'lnN', tWScenarioList+ttScenarioList+['W','tch','Multiboson','tbartV'], ['DY']), ] - + #define the SHAPE systematics from weighting, varying object scales, efficiencies, etc. 
# syst,weightList,whiteList,blackList,shapeTreatement=0 (none), 1 (shape only), 2 (factorizeRate),nsigma - weightingSysts=[ - ('jes', ['jes'], [], ['DY'], 2, 1.0), + weightingSysts=[ + ('ees', ['ees'], [], ['DY'], 2, 1.0), + ('mes', ['mes'], [], ['DY'], 2, 1.0), ('jer', ['jer'], [], ['DY'], 2, 1.0), ('trig_*CH*', ['trig'], [], ['DY'], 2, 1.0), - #('sel_*CH*', ['sel'], [], ['DY'], 2, 1.0), - ('les_*CH*', ['les'], [], ['DY'], 2, 1.0), + ('sel_E', ['esel'], [], ['DY'], 2, 1.0), + ('sel_M', ['msel'], [], ['DY'], 2, 1.0), ('ltag', ['ltag'], [], ['DY'], 2, 1.0), ('btag', ['btag'], [], ['DY'], 2, 1.0), + ('bfrag', ['bfrag'], [], ['DY'], 2, 1.0), + ('semilep', ['semilep'], [], ['DY'], 2, 1.0), ('pu', ['pu'], [], ['DY'], 1, 1.0), ('tttoppt', ['toppt'], ttScenarioList, [], 1, 1.0), ('ttMEqcdscale', ['gen%d'%ig for ig in[3,5,6,4,8,10] ], ttScenarioList, [], 1, 1.0), ('ttPDF', ['gen%d'%(11+ig) for ig in xrange(0,100) ], ttScenarioList, [], 0, 1.0) ] + for ig in xrange(0,29) : + weightingSysts += [('jes%s'%ig, ['jes%d'%ig], [], ['DY'], 1, 1.0)] #define the SHAPE systematics from dedicated samples : syst,{procs,samples}, shapeTreatment (see above) nsigma fileShapeSysts = [ - ('mtop' , {'tbart':['t#bar{t} m=169.5','t#bar{t} m=175.5'], - 'tW': ['tW m=169.5','tW m=175.5']} , 0, 1./6.), - ('ttPSScale' , {'tbart':['t#bar{t} scale down','t#bar{t} scale up']} , 2, 1.0 ), - ('ttGenerator', {'tbart':['t#bar{t} amc@nlo FxFx']}, 1, 1.0 ), - ('ttPartonShower', {'tbart':['t#bar{t} Herwig++']}, 1, 1.0 ), - ('tWttInterf', {'tW': ['tW DS']}, 1, 1.0 ), - ('tWQCDScale', {'tW': ['tW scale down','tW scale up']}, 1, 1.0 ) + ('mtop', {'tbart':['t#bar{t} m=171.5', 't#bar{t} m=173.5'], + 'Singletop':['Single top m=169.5', 'Single top m=175.5']}, + 0, {'tbart':1./2.,'Singletop':1./6.}), + ('UE', {'tbart':['t#bar{t} UEdn', 't#bar{t} UEup']} , 2, 1.0 ), + ('hdamp', {'tbart':['t#bar{t} hdamp dn', 't#bar{t} hdamp up']} , 2, 1.0 ), + ('ISR', {'tbart':['t#bar{t} isr dn', 't#bar{t} isr up'], + 'Singletop':['Single top isr dn', 'Single top isr up']} + , 2, 1.0 ), + ('FSR', {'tbart':['t#bar{t} fsr dn', 't#bar{t} fsr up'], + 'Singletop':['Single top fsr dn', 'Single top fsr up']} + , 2, 1.0 ), + #('ttPSScale' , {'tbart':['t#bar{t} scale down','t#bar{t} scale up']} , 2, 1.0 ), + #('ttGenerator', {'tbart':['t#bar{t} amc@nlo FxFx']}, 1, 1.0 ), + #('ttPartonShower', {'tbart':['t#bar{t} Herwig++']}, 1, 1.0 ), + ('tWttInterf', {'Singletop': ['Single top DS']}, 2, 1.0 ), + ('tWMEScale', {'Singletop': ['Single top me dn', 'Single top me up']}, + 2, 1.0 ), + #('tWQCDScale', {'tW': ['tW scale down','tW scale up']}, 1, 1.0 ) ] # prepare output directory - outDir='%s/hypotest_%3.1fvs%3.1f%s'%(opt.output, opt.mainHypo,opt.altHypo,'sim' if len(opt.altHypoFromSim)!=0 else '') + outDir='%s/hypotest_%.0fvs%.0f%s'%(opt.output, opt.mainHypo,opt.altHypo,'sim' if len(opt.altHypoFromSim)!=0 else '') if opt.pseudoData==-1 : outDir += '_data' else: - outDir += '_%3.1f'%opt.pseudoData + outDir += '_%.0f'%opt.pseudoData if len(opt.pseudoDataFromSim)!=0 : outDir+='sim_' elif len(opt.pseudoDataFromWgt)!=0 : outDir+='wgt_' outDir += 'pseudodata' @@ -220,36 +270,36 @@ def doDataCards(opt,args): #data and nominal shapes obs,exp=getDistsForHypoTest(cat,rawSignalList,opt,outDir) - - #recreate data if requested + + #recreate data if requested if opt.pseudoData!=-1: pseudoSignal=None print '\t pseudo-data is being generated', if len(opt.pseudoDataFromSim) and opt.systInput: print 'injecting signal from',opt.pseudoDataFromSim 
pseudoDataFromSim=opt.pseudoDataFromSim.replace('_',' ') - _,pseudoSignalRaw=getDistsFromDirIn(opt.systInput,'%s_%s_1.0w'%(cat,opt.dist),pseudoDataFromSim) + _,pseudoSignalRaw=getDistsFromDirIn(opt.systInput,'%s_%s_w100'%(cat,opt.dist),pseudoDataFromSim) pseudoSignal={} pseudoSignal['tbart']=pseudoSignalRaw.popitem()[1] elif len(opt.pseudoDataFromWgt): print 'injecting signal from',opt.pseudoDataFromWgt - _,pseudoSignal=getDistsFromDirIn(opt.input,'%s%s_%s_1.0w'%(opt.pseudoDataFromWgt,cat,opt.dist),'t#bar{t}') - print pseudoSignal,'%s%s_%s_1.0w'%(opt.pseudoDataFromWgt,cat,opt.dist) + _,pseudoSignal=getDistsFromDirIn(opt.input,'%s%s_%s_w100'%(opt.pseudoDataFromWgt,cat,opt.dist),'t#bar{t}') + print pseudoSignal,'%s%s_%s_w100'%(opt.pseudoDataFromWgt,cat,opt.dist) else: - print 'injecting signal from weighted',opt.pseudoData - _,pseudoSignal=getDistsFromDirIn(opt.input,'%s_%s_%3.1fw'%(cat,opt.dist,opt.pseudoData)) + print 'injecting signal from weighted',opt.pseudoData + _,pseudoSignal=getDistsFromDirIn(opt.input,'%s_%s_w%.0f'%(cat,opt.dist,opt.pseudoData)) obs.Reset('ICE') #build pseudo-expectations pseudoSignalAccept=[] for proc in pseudoSignal: accept=False - for sig in rawSignalList: + for sig in rawSignalList: if sig==proc: accept=True if not accept : continue - - newProc=('%s1.0w'%proc).replace('.','p') - pseudoSignalAccept.append(newProc) + + newProc=('%sw100'%proc).replace('.','p') + pseudoSignalAccept.append(newProc) sf=exp[newProc].Integral()/pseudoSignal[proc].Integral() pseudoSignal[proc].Scale(sf) obs.Add( pseudoSignal[proc] ) @@ -262,7 +312,7 @@ def doDataCards(opt,args): obs.Add( exp[proc] ) print pseudoSignalAccept for xbin in xrange(0,obs.GetNbinsX()+2): obs.SetBinContent(xbin,int(obs.GetBinContent(xbin))) - + #start the datacard header datacardname='%s/datacard_%s.dat'%(outDir,cat) dataCardList+='%s=%s '%(cat,os.path.basename(datacardname)) @@ -300,10 +350,10 @@ def doDataCards(opt,args): datacard.write('\n') datacard.write('\t\t\t %16s'%'process') procCtr=-len(mainSignalList)-len(altSignalList)+1 - for sig in mainSignalList: + for sig in mainSignalList: datacard.write('%15s'%str(procCtr)) procCtr+=1 - for sig in altSignalList: + for sig in altSignalList: datacard.write('%15s'%str(procCtr)) procCtr+=1 for proc in exp: @@ -313,7 +363,9 @@ def doDataCards(opt,args): datacard.write('\n') datacard.write('\t\t\t %16s'%'rate') for sig in mainSignalList: datacard.write('%15s'%('%3.2f'%(exp[sig].Integral()))) - for sig in altSignalList: datacard.write('%15s'%('%3.2f'%(exp[sig].Integral()))) + for sig in altSignalList: + #if 'Singletop' in sig : sig = 'Singletopw100' + datacard.write('%15s'%('%3.2f'%(exp[sig].Integral()))) for proc in exp: if proc in mainSignalList+altSignalList : continue datacard.write('%15s'%('%3.2f'%(exp[proc].Integral()))) @@ -348,20 +400,20 @@ def doDataCards(opt,args): binShapes[proc]=finalNomShape.Clone('%sDown'%systVar) binShapes[proc].SetBinContent(xbin,ROOT.TMath.Max(val-unc,1e-3)) saveToShapesFile(outFile,binShapes,binShapes[proc].GetName()) - + #write to datacard - datacard.write('%32s shape'%systVar) + datacard.write('%32s shape'%systVar) for sig in mainSignalList: if proc==sig: - datacard.write('%15s'%'1') + datacard.write('%15s'%'1') else: datacard.write('%15s'%'-') for sig in altSignalList: if proc==sig: - datacard.write('%15s'%'1') + datacard.write('%15s'%'1') else: datacard.write('%15s'%'-') - for iproc in exp: + for iproc in exp: if iproc in mainSignalList+altSignalList : continue if iproc==proc: datacard.write('%15s'%'1') @@ -387,7 +439,7 @@ def 
doDataCards(opt,args): datacard.write(entryTxt) else: datacard.write('%15s'%'-') - for sig in altSignalList: + for sig in altSignalList: if (len(whiteList)==0 and not sig in blackList) or sig in whiteList: datacard.write(entryTxt) else: @@ -406,18 +458,29 @@ def doDataCards(opt,args): for syst,weightList,whiteList,blackList,shapeTreatment,nsigma in weightingSysts: if '*CH*' in syst : syst=syst.replace('*CH*',lfs) + isGen = any("gen" in twght for twght in weightList) + isGen = isGen or 'toppt' in weightList + + # jes has annoying formatting different + + #get shapes and adapt them - iexpUp,iexpDn=None,None + iexpUp,iexpDn=None,None if len(weightList)==1: - _,iexpUp=getDistsForHypoTest(weightList[0]+"up"+cat,rawSignalList,opt) - _,iexpDn=getDistsForHypoTest(weightList[0]+"dn"+cat,rawSignalList,opt) + if 'jes' in weightList[0] : + jesNum=weightList[0].replace('jes','') + _,iexpUp=getDistsForHypoTest(cat,rawSignalList,opt,"","jesup_"+jesNum,isGen) + _,iexpDn=getDistsForHypoTest(cat,rawSignalList,opt,"","jesdn_"+jesNum,isGen) + else : + _,iexpUp=getDistsForHypoTest(cat,rawSignalList,opt,"",weightList[0]+"up",isGen) + _,iexpDn=getDistsForHypoTest(cat,rawSignalList,opt,"",weightList[0]+"dn",isGen) else: #put all the shapes in a 2D histogram - iexp2D={} + iexp2D={} for iw in xrange(0,len(weightList)): w=weightList[iw] - _,kexp=getDistsForHypoTest(w+cat,rawSignalList,opt) + _,kexp=getDistsForHypoTest(cat,rawSignalList,opt,"",w,isGen) for proc in kexp: nbins=kexp[proc].GetNbinsX() if not proc in iexp2D: @@ -434,7 +497,7 @@ def doDataCards(opt,args): #create the up/down variations iexpUp,iexpDn={},{} for proc in iexp2D: - + #create the base shape if not proc in iexpUp: tmp=iexp2D[proc].ProjectionX("tmp",1,1) @@ -450,14 +513,14 @@ def doDataCards(opt,args): #project each bin shape for the different variations for xbin in xrange(0,iexp2D[proc].GetNbinsX()+2): - tmp=iexp2D[proc].ProjectionY("tmp",xbin,xbin) + tmp=iexp2D[proc].ProjectionY("tmp",xbin,xbin) tvals=numpy.zeros(tmp.GetNbinsX()) for txbin in xrange(1,tmp.GetNbinsX()+1) : tvals[txbin-1]=tmp.GetBinContent(txbin) - + #mean and RMS based - if 'PDF' in syst: + if 'PDF' in syst: mean=numpy.mean(tvals) - rms=numpy.std(tvals) + rms=numpy.std(tvals) iexpUp[proc].SetBinContent(xbin,mean+rms) iexpDn[proc].SetBinContent(xbin,ROOT.TMath.Max(mean-rms,1.0e-4)) @@ -469,14 +532,14 @@ def doDataCards(opt,args): imin=numpy.min(tvals) if iexpDn[proc].GetBinContent(xbin)>0 : imin=ROOT.TMath.Min(iexpDn[proc].GetBinContent(xbin),imin) - iexpDn[proc].SetBinContent(xbin,imin) - + iexpDn[proc].SetBinContent(xbin,imin) + tmp.Delete() #all done, can remove the 2D histo from memory iexp2D[proc].Delete() - + #check the shapes iRateVars={} @@ -493,12 +556,12 @@ def doDataCards(opt,args): #save a rate systematic from the variation of the yields if n==0 : continue nvarUp=ROOT.TMath.Abs(1-nUp/n) - nvarDn=ROOT.TMath.Abs(1-nDn/n ) + nvarDn=ROOT.TMath.Abs(1-nDn/n ) iRateVars[proc]=1.0+0.5*(nvarUp+nvarDn) if iRateVars[proc]<1.001 : del iRateVars[proc] - - #write the shapes to the ROOT file + + #write the shapes to the ROOT file saveToShapesFile(outFile,iexpUp,('%s_%s_%sUp'%(cat,opt.dist,syst)),opt.rebin) saveToShapesFile(outFile,iexpDn,('%s_%s_%sDown'%(cat,opt.dist,syst)),opt.rebin) @@ -510,7 +573,7 @@ def doDataCards(opt,args): datacard.write(entryTxt) else: datacard.write('%15s'%'-') - for sig in altSignalList: + for sig in altSignalList: if (len(whiteList)==0 and not sig in blackList) or sig in whiteList: datacard.write(entryTxt) else: @@ -532,7 +595,7 @@ def 
doDataCards(opt,args): datacard.write('%15s'%('%3.3f'%iRateVars[sig])) else: datacard.write('%15s'%'-') - for sig in altSignalList: + for sig in altSignalList: if sig in iRateVars and ((len(whiteList)==0 and not sig in blackList) or sig in whiteList): datacard.write('%15s'%('%3.3f'%iRateVars[sig])) else: @@ -555,7 +618,7 @@ def doDataCards(opt,args): iexpUp,iexpDn={},{} for proc in procsAndSamples: samples=procsAndSamples[proc] - + hyposToGet=[opt.mainHypo] isSignal=False if proc in rawSignalList: @@ -565,14 +628,14 @@ def doDataCards(opt,args): jexpDn,jexpUp=None,None for hypo in hyposToGet: if len(samples)==2: - _,jexpDn=getDistsFromDirIn(opt.systInput,'%s_%s_%3.1fw'%(cat,opt.dist,hypo),samples[0]) - _,jexpUp=getDistsFromDirIn(opt.systInput,'%s_%s_%3.1fw'%(cat,opt.dist,hypo),samples[1]) + _,jexpDn=getDistsFromDirIn(opt.systInput,'%s_%s_w%.0f'%(cat,opt.dist,hypo),samples[0]) + _,jexpUp=getDistsFromDirIn(opt.systInput,'%s_%s_w%.0f'%(cat,opt.dist,hypo),samples[1]) else: - _,jexpUp=getDistsFromDirIn(opt.systInput,'%s_%s_%3.1fw'%(cat,opt.dist,hypo),samples[0]) - + _,jexpUp=getDistsFromDirIn(opt.systInput,'%s_%s_w%.0f'%(cat,opt.dist,hypo),samples[0]) + newProc=proc if isSignal: - newProc=('%s%3.1fw'%(proc,hypo)).replace('.','p') + newProc=('%sw%.0f'%(proc,hypo)).replace('.','p') jexpUp.values()[0].SetName(newProc) iexpUp[newProc]=jexpUp.values()[0] @@ -609,30 +672,32 @@ def doDataCards(opt,args): #save a rate systematic from the variation of the yields if n==0 : continue nvarUp=ROOT.TMath.Abs(1-nUp/n) - nvarDn=ROOT.TMath.Abs(1-nDn/n ) + nvarDn=ROOT.TMath.Abs(1-nDn/n ) iRateVars[proc]=1.0+0.5*(nvarUp+nvarDn) if iRateVars[proc]<1.001 : del iRateVars[proc] - #write the shapes to the ROOT file + #write the shapes to the ROOT file saveToShapesFile(outFile,iexpUp,('%s_%s_%sUp'%(cat,opt.dist,syst)),opt.rebin) saveToShapesFile(outFile,iexpDn,('%s_%s_%sDown'%(cat,opt.dist,syst)),opt.rebin) #fill in the datacard datacard.write('%32s %8s'%(syst,'shape')) - entryTxt='%15s'%('%3.3f'%nsigma) for sig in mainSignalList: if sig in iexpUp: + entryTxt='%15s'%('%3.3f'%(nsigma if not isinstance(nsigma,dict) else nsigma[sig.split('w')[0]])) datacard.write(entryTxt) else: datacard.write('%15s'%'-') for sig in altSignalList: if sig in iexpUp: + entryTxt='%15s'%('%3.3f'%(nsigma if not isinstance(nsigma,dict) else nsigma[sig.split('w')[0]])) datacard.write(entryTxt) else: datacard.write('%15s'%'-') for proc in exp: if proc in mainSignalList+altSignalList : continue if proc in iexpUp: + entryTxt='%15s'%('%3.3f'%(nsigma if not isinstance(nsigma,dict) else nsigma[proc.split('w')[0]])) datacard.write(entryTxt) else: datacard.write('%15s'%'-') @@ -647,7 +712,7 @@ def doDataCards(opt,args): datacard.write('%15s'%('%3.3f'%iRateVars[sig])) else: datacard.write('%15s'%'-') - for sig in altSignalList: + for sig in altSignalList: if sig in iRateVars : datacard.write('%15s'%('%3.3f'%iRateVars[sig])) else: @@ -662,20 +727,20 @@ def doDataCards(opt,args): print '\t ended datacard generation' datacard.close() - + if opt.doValidation: - print '\t running validation' + print '\t running validation' for proc in rawSignalList: - newProc=('%s%3.1fw'%(proc,opt.mainHypo)).replace('.','p') - altProc=('%s%3.1fw'%(proc,opt.altHypo)).replace('.','p') if proc=='tbart' else '' - for uncList in [ 'jes,jer,les_*CH*', + newProc=('%sw%.0f'%(proc,opt.mainHypo)).replace('.','p') + altProc=('%sw%.0f'%(proc,opt.altHypo)).replace('.','p') if proc=='tbart' else '' + for uncList in [ 'jes,jer,les_*CH*', 'btag,ltag,pu,trig_*CH*', 
'ttPSScale,ttMEqcdscale,ttPDF,tttoppt', 'ttGenerator,ttPartonShower', 'mtop', 'tWttInterf,tWQCDScale' ]: - if 'tW' in proc and ('ttPS' in uncList or 'ttGen' in uncList) : continue + if 'Singletop' in proc and ('ttPS' in uncList or 'ttGen' in uncList) : continue if 'tbart' in proc and 'tWttInterf' in uncList : continue uncList=uncList.replace('*CH*',lfs) plotter=Popen(['python', @@ -712,17 +777,17 @@ def main(): parser.add_option( '--nToys', dest='nToys', help='toys to through for CLs', default=2000, type=int) parser.add_option('--addBinByBin', dest='addBinByBin', help='add bin-by-bin stat uncertainties @ threshold', default=-1, type=float) parser.add_option( '--rebin', dest='rebin', help='histogram rebin factor', default=0, type=int) - parser.add_option( '--pseudoData', dest='pseudoData', help='pseudo data to use (-1=real data)', default=1.0, type=float) + parser.add_option( '--pseudoData', dest='pseudoData', help='pseudo data to use (-1=real data)', default=100, type=float) parser.add_option( '--replaceDYshape', dest='replaceDYshape', help='use DY shape from syst file', default=False, action='store_true') parser.add_option( '--doValidation', dest='doValidation', help='create validation plots', default=False, action='store_true') parser.add_option( '--pseudoDataFromSim', dest='pseudoDataFromSim', help='pseudo data from dedicated sample', default='', type='string') parser.add_option( '--pseudoDataFromWgt', dest='pseudoDataFromWgt', help='pseudo data from weighting', default='', type='string') - parser.add_option( '--mainHypo', dest='mainHypo', help='main hypothesis', default=1.0, type=float) - parser.add_option( '--altHypo', dest='altHypo', help='alternative hypothesis', default=4.0, type=float) + parser.add_option( '--mainHypo', dest='mainHypo', help='main hypothesis', default=100, type=float) + parser.add_option( '--altHypo', dest='altHypo', help='alternative hypothesis', default=400, type=float) parser.add_option( '--altHypoFromSim', dest='altHypoFromSim', help='alternative hypothesis from dedicated sample', default='', type='string') - parser.add_option('-s', '--signal', dest='signal', help='signal (csv)', default='tbart,tW', type='string') - parser.add_option('-c', '--cat', dest='cat', help='categories (csv)', - default='lowptEE1b,lowptEE2b,highptEE1b,highptEE2b,lowptEM1b,lowptEM2b,highptEM1b,highptEM2b,lowptMM1b,lowptMM2b,highptMM1b,highptMM2b', + parser.add_option('-s', '--signal', dest='signal', help='signal (csv)', default='tbart,Singletop', type='string') + parser.add_option('-c', '--cat', dest='cat', help='categories (csv)', + default='EE1blowpt,EE2blowpt,EE1bhighpt,EE2bhighpt,EM1blowpt,EM2blowpt,EM1bhighpt,EM2bhighpt,MM1blowpt,MM2blowpt,MM1bhighpt,MM2bhighpt', type='string') parser.add_option('-o', '--output', dest='output', help='output directory', default='datacards', type='string') (opt, args) = parser.parse_args() diff --git a/TopAnalysis/test/TopWidthAnalysis/steerTOPWidthAnalysis.sh b/TopAnalysis/test/TopWidthAnalysis/steerTOPWidthAnalysis.sh index 5f496bb2b8c9381f9c71fc3d08ade1d99318180c..a6aecec4067a166f7fe06efd691d8eedace28926 100755 --- a/TopAnalysis/test/TopWidthAnalysis/steerTOPWidthAnalysis.sh +++ b/TopAnalysis/test/TopWidthAnalysis/steerTOPWidthAnalysis.sh @@ -1,8 +1,8 @@ #!/bin/bash WHAT=$1; -ERA=$2; -if [ "$#" -ne 2 ]; then +EXT=$2; +if [ "$#" -lt 2 ]; then echo "steerTOPWidthAnalysis.sh <SEL/MERGESEL/PLOTSEL/WWWSEL/ANA/MERGE/PLOT/WWW>"; echo " SHAPES - create shapes files from plotter files"; echo " MERGE_SHAPES - merge output of shapes generation"; @@ -20,21 
+20,31 @@ fi export LSB_JOB_REPORT_MAIL=N -queue=2nw -outdir=/afs/cern.ch/work/e/ecoleman/TopWidth/ -cardsdir=${outdir}/datacards -wwwdir=~/www/TopWidth_${ERA}/ +queue=2nd +outdir=/afs/cern.ch/work/e/ecoleman/TOP-17-010/ +extdir=${outdir}/${EXT}/ +cardsdir=${extdir}/datacards +#wwwdir=~/www/TopWidth_${ERA}/ CMSSW_7_4_7dir=~/CMSSW_7_4_7/src/ CMSSW_7_6_3dir=~/CMSSW_8_0_26_patch1/src/ unblind=false nPseudo=1000 -wid=(0p2w 0p4w 0p6w 0p8w - 1p0w 1p2w 1p4w 1p6w - 1p8w 2p0w 2p2w 2p4w - 2p6w 2p8w 3p0w 3p5w 4p0w) +#(20 40 80 +# 100 140 +# 180 200 220 240 +# 260 280 300 350 400) +wid=(20 40 60 80 + 100 120 140 160 + 180 200 220 240 + 260 280 300 350 400) +#wid=(0p2w 0p4w 0p6w 0p8w +# 1p0w 1p2w 1p4w 1p6w +# 1p8w 2p0w 2p2w 2p4w +# 2p6w 2p8w 3p0w 3p5w 4p0w) lbCat=(highpt lowpt) +#lfs=(EM) lfs=(EE EM MM) cat=(1b 2b) dists=(incmlb) @@ -54,9 +64,14 @@ function join { local IFS=','; echo "$*"; } # Helpers: getting comma-separated lists of variables distStr="$(join ${dists[@]})" +lbcStr="$(join ${lbCat[@]})" lfsStr="$(join ${lfs[@]})" +catStr="$(join ${cat[@]})" widStr="$(join ${wid[@]})" + +mkdir -p $extdir + case $WHAT in ############################### SHAPES #################################### SHAPES ) # to get the shapes file / datacards for the full analysis @@ -73,16 +88,19 @@ case $WHAT in nohup python test/TopWidthAnalysis/createShapesFromPlotter.py \ -s tbart,Singletop \ --dists ${distStr} \ - -o ${outdir}/datacards/ \ + -o ${extdir}/datacards/ \ -n shapes$index \ - -i ${outdir}/analysis/plots/plotter.root \ - --systInput ${outdir}/analysis/plots/syst_plotter.root \ + --lbCat $lbcStr \ + --lfs $lfsStr \ + --cat $catStr \ + -i ${outdir}/old_analysis/plots/plotter.root \ + --systInput ${outdir}/old_analysis/plots/syst_plotter.root \ --min $min --max $max > shapes${index}.txt & done ;; ############################### MERGE_SHAPES ############################## MERGE_SHAPES ) # hadd the shapes files since they are split - hadd ${outdir}/datacards/shapes.root ${outdir}/datacards/shapes*.root + hadd ${extdir}/datacards/shapes.root ${extdir}/datacards/shapes*.root ;; ############################### MERGE_DATACARDS ########################### MERGE_DATACARDS ) # get all the datacards you could possibly desire for the analysis @@ -129,7 +147,6 @@ case $WHAT in WORKSPACE ) # generate combine workspaces cd ${CMSSW_7_4_7dir} eval `scramv1 runtime -sh` - mkdir ${outdir} for dist in ${dists[*]} ; do for twid in ${wid[*]} ; do @@ -138,7 +155,7 @@ case $WHAT in text2workspace.py ${cardsdir}/datacard__${twid}_${dist}.dat -P \ HiggsAnalysis.CombinedLimit.TopHypoTest:twoHypothesisTest \ -m 172.5 --PO verbose --PO altSignal=${twid} --PO muFloating \ - -o ${outdir}/${twid}_${dist}.root + -o ${extdir}/${twid}_${dist}.root done done ;; @@ -146,12 +163,12 @@ case $WHAT in SCAN ) # perform likelihood scans on Asimov datasets cd ${CMSSW_7_4_7dir} eval `scramv1 runtime -sh` - cd ${outdir} + cd ${extdir} for dist in ${dists[*]} ; do for twid in ${wid[*]} ; do # All datacards - cmd="combine ${outdir}/${twid}_${dist}.root -M MultiDimFit" + cmd="combine ${extdir}/${twid}_${dist}.root -M MultiDimFit" cmd="${cmd} -m 172.5 -P x --floatOtherPOI=1 --algo=grid --points=200" cmd="${cmd} --expectSignal=1 --setPhysicsModelParameters x=0,r=1" cmd="${cmd} -n x0_scan_Asimov_${twid}_${dist}" @@ -162,7 +179,7 @@ case $WHAT in bsub -q ${queue} \ ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \ - "${outdir}/" "${cmd}" + "${extdir}/" "${cmd}" done done ;; @@ -170,41 +187,41 @@ case $WHAT in CLs ) # 
get CLs statistics from combine cd ${CMSSW_7_4_7dir} eval `scramv1 runtime -sh` - cd ${outdir} + cd ${extdir} for dist in ${dists[*]} ; do for twid in ${wid[*]} ; do # pre-fit echo "Making CLs for ${twid} ${dist}" - cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult" - cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T 2000 -i 2 --fork 8" + cmd="combine ${extdir}/hypotest_100vs${twid}_100pseudodata/workspace.root -M HybridNew --seed 8192 --saveHybridResult" + cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6" cmd="${cmd} --clsAcc 0 --fullBToys --saveWorkspace --generateExt=1 --generateNuis=0" - cmd="${cmd} --expectedFromGrid 0.5 -n x_pre-fit_exp__${twid}_${dist}" - #cmd="${cmd} &> ${outdir}/x_pre-fit_exp__${twid}_${dist}.log" + cmd="${cmd} --expectedFromGrid 0.5 -n cls_prefit_exp" + #cmd="${cmd} &> ${extdir}/x_pre-fit_exp__${twid}_${dist}.log" - bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${outdir}/" "${cmd}" + bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${extdir}/hypotest_100vs${twid}_100pseudodata/" "${cmd}" # post-fit expected echo "Making CLs for ${twid} ${dist}" - cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult" - cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 8" + cmd="combine ${extdir}/hypotest_100vs${twid}_100pseudodata/workspace.root -M HybridNew --seed 8192 --saveHybridResult" + cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6" cmd="${cmd} --clsAcc 0 --fullBToys --saveWorkspace --saveToys --frequentist" - cmd="${cmd} --expectedFromGrid 0.5 -n x_post-fit_exp__${twid}_${dist}" - #cmd="${cmd} &> ${outdir}/x_post-fit_exp__${twid}_${dist}.log" + cmd="${cmd} --expectedFromGrid 0.5 -n cls_postfit_exp" + #cmd="${cmd} &> ${extdir}/x_post-fit_exp__${twid}_${dist}.log" - bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${outdir}/" "${cmd}" + bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${extdir}/hypotest_100vs${twid}_100pseudodata/" "${cmd}" # post-fit observed - if [[ ${unblind} == true ]] ; then - echo "Making CLs for ${twid} ${dist}" - cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult" - cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 8" - cmd="${cmd} --clsAcc 0 --fullBToys --saveWorkspace --saveToys --frequentist" - cmd="${cmd} -n x_post-fit_obs__${twid}_${dist}" - #cmd="${cmd} &> ${outdir}/x_post-fit_obs__${twid}_${dist}.log" - - bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${outdir}/" "${cmd}" - fi + #if [[ ${unblind} == true ]] ; then + # echo "Making CLs for ${twid} ${dist}" + # cmd="combine ${extdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult" + # cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 8" + # cmd="${cmd} --clsAcc 0 --fullBToys --saveWorkspace --saveToys --frequentist" + # cmd="${cmd} -n x_post-fit_obs__${twid}_${dist}" + # #cmd="${cmd} &> ${extdir}/x_post-fit_obs__${twid}_${dist}.log" + + # bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${extdir}/" "${cmd}" + #fi done done ;; @@ -212,86 +229,88 @@ case $WHAT in TOYS ) 
# get toys distributions from the pseudoexperiments cd ${CMSSW_7_4_7dir} eval `scramv1 runtime -sh` - cd ${outdir} + cd ${extdir} for dist in ${dists[*]} ; do for twid in ${wid[*]} ; do - ## pre-fit expected + # pre-fit expected #rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis" #rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_pre-fit_exp__${twid}_${dist}.qvals.root" - #rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"pre\\\"\)" + #rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",false,\\\"pre\\\"\)" #cmd="" #cmd="${cmd}root -l -q -b" - #cmd="${cmd} ${outdir}" + #cmd="${cmd} ${extdir}" #cmd="${cmd}/higgsCombinex_pre-fit_exp__${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root" #cmd="${cmd} ${rootcmds}" ##bsub -q ${queue} \ #sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \ - # "${outdir}/" "${cmd}" + # "${extdir}/" "${cmd}" ## post-fit expected rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis" rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_post-fit_exp__${twid}_${dist}.qvals.root" - rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"post\\\"\)" + rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",false,\\\"post\\\"\)" cmd="" cmd="${cmd}root -l -q -b" - cmd="${cmd} ${outdir}" - cmd="${cmd}/higgsCombinex_post-fit_exp__${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root" + cmd="${cmd} ${extdir}" + cmd="${cmd}/hypotest_100vs${twid}_100pseudodata/higgsCombinecls_postfit_exp.HybridNew.mH172.5.8192.quant0.500.root" cmd="${cmd} ${rootcmds}" #bsub -q ${queue} \ sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \ - "${outdir}/" "${cmd}" - - ## post-fit observed - if [[ ${unblind} == true ]] ; then - rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis" - rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_post-fit_obs__${twid}_${dist}.qvals.root" - rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"obs\\\"\)" - - cmd="" - cmd="${cmd}root -l -q -b" - cmd="${cmd} ${outdir}" - cmd="${cmd}/higgsCombinex_post-fit_obs__${twid}_${dist}.HybridNew.mH172.5.8192.root" - cmd="${cmd} ${rootcmds}" - - bsub -q ${queue} \ - sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \ - "${outdir}/" "${cmd}" - fi + "${extdir}/hypotest_100vs${twid}_100pseudodata/" "${cmd}" + cp ${extdir}/hypotest_100vs${twid}_100pseudodata/*post*${twid}*${dist}*.{pdf,png} ${extdir} + cp ${extdir}/hypotest_100vs${twid}_100pseudodata/*post*stats*.txt ${extdir} + + ### post-fit observed + #if [[ ${unblind} == true ]] ; then + # rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis" + # rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_post-fit_obs__${twid}_${dist}.qvals.root" + # rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",${nPseudo},\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"obs\\\"\)" + + # cmd="" + # cmd="${cmd}root -l -q -b" + # cmd="${cmd} ${extdir}" + # cmd="${cmd}/higgsCombinex_post-fit_obs__${twid}_${dist}.HybridNew.mH172.5.8192.root" + # cmd="${cmd} ${rootcmds}" + + # bsub -q ${queue} \ + # sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \ + # 
"${extdir}/" "${cmd}" + #fi done done ;; ############################### WWW ####################################### - WWW ) - mkdir -p ${wwwdir}/ana_${ERA} - cp ${outdir}/analysis/plots/*.{png,pdf} ${wwwdir}/ana_${ERA} - cp ${outdir}/*.{png,pdf} ${wwwdir}/ana_${ERA} - cp test/index.php ${wwwdir}/ana_${ERA} - ;; +# WWW ) +# mkdir -p ${wwwdir}/ana_${ERA} +# cp ${outdir}/analysis/plots/*.{png,pdf} ${wwwdir}/ana_${ERA} +# cp ${outdir}/*.{png,pdf} ${wwwdir}/ana_${ERA} +# cp test/index.php ${wwwdir}/ana_${ERA} +# ;; ############################### QUANTILES ################################# QUANTILES ) # plot quantiles distributions of all toys, get CLsPlot cd ${CMSSW_7_4_7dir} eval `scramv1 runtime -sh` - cd ${outdir} + cd ${extdir} rm statsPlots.root for dist in ${dists[*]} ; do # Quantiles plot with post-fit information - python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getQuantilesPlot.py \ - -i ${outdir}/ -o ${outdir}/ \ - --wid ${widStr} \ - --dist ${dist} \ - --prep pre + #python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getQuantilesPlot.py \ + # -i ${extdir}/ -o ${extdir}/ \ + # --wid ${widStr} \ + # --dist ${dist} \ + # --prep pre # Quantiles plot with post-fit information python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getQuantilesPlot.py \ - -i ${outdir}/ -o ${outdir}/ \ + -i ${extdir}/ -o ${extdir}/ \ --wid ${widStr} \ --dist ${dist} \ --prep post #\ @@ -299,43 +318,43 @@ case $WHAT in # Quantiles plot with post-fit information #python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getQuantilesPlot.py \ - # -i ${outdir}/ -o ${outdir}/ \ + # -i ${extdir}/ -o ${extdir}/ \ # --wid ${widStr} \ # --dist ${dist} \ # --prep obs #\ # #--unblind # Get CLs plots for pre-fit expectations - python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \ - -i ${outdir}/ -o ${outdir}/ \ - --wid ${widStr} \ - --prep pre \ - --dist ${dist} + #python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \ + # -i ${extdir}/ -o ${extdir}/ \ + # --wid ${widStr} \ + # --prep pre \ + # --dist ${dist} # Get CLs plots for post-fit expectations python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \ - -i ${outdir}/ -o ${outdir}/ \ + -i ${extdir}/ -o ${extdir}/ \ --wid ${widStr} \ --dist ${dist} \ - --prep post \ - --addPre #\ + --prep post #\ + #--addPre #\ #--unblind # Get CLs plots for post-fit expectations #python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \ - # -i ${outdir}/ -o ${outdir}/ \ + # -i ${extdir}/ -o ${extdir}/ \ # --wid ${widStr} \ # --dist ${dist} \ # --prep obs #\ # #--unblind - python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getCLsFromFit.py \ - -i ${outdir}/ \ - --dist ${dist} \ - --prep pre + #python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getCLsFromFit.py \ + # -i ${extdir}/ \ + # --dist ${dist} \ + # --prep pre python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getCLsFromFit.py \ - -i ${outdir}/ \ + -i ${extdir}/ \ --dist ${dist} \ --prep post #\ #--unblind @@ -343,7 +362,7 @@ case $WHAT in # # Get Separation plots for all dists # python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \ - # -i ${outdir}/ -o ${outdir}/ \ + # -i ${extdir}/ -o ${extdir}/ \ # --dist ${distStr} \ # --doAll \ # --unblind @@ -360,14 +379,14 @@ case $WHAT in 
 nwid=${nwid/w/}
 #python test/TopWidthAnalysis/getNuisances.py \
- # -i ${outdir}/higgsCombinex_pre-fit_exp__${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root \
- # -o ${outdir}/ \
+ # -i ${extdir}/higgsCombinex_pre-fit_exp__${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root \
+ # -o ${extdir}/ \
 # -n preNuisances_${twid}_${dist} \
 # --extraText "#Gamma_{Alt.} = ${nwid} #times #Gamma_{SM}"
 python test/TopWidthAnalysis/getNuisances.py \
- -i ${outdir}/higgsCombinex_post-fit_exp__${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root \
- -o ${outdir}/ \
+ -i ${extdir}/hypotest_100vs${twid}_100pseudodata/higgsCombinecls_postfit_exp.HybridNew.mH172.5.8192.quant0.500.root \
+ -o ${extdir}/ \
 -n postNuisances_${twid}_${dist} \
 --extraText "#Gamma_{Alt.} = ${nwid} #times #Gamma_{SM}"
 done
@@ -380,231 +399,38 @@ case $WHAT in
 catList="${catList}highptMM1b,highptMM2b,lowptMM1b,lowptMM2b"
 python test/TopWidthAnalysis/getShapeUncPlots.py \
- -i ${outdir}/withSel/datacards/shapes.root \
- -o ${outdir}/withSel/datacards/plots \
+ -i ${extdir}/datacards/shapes.root \
+ -o ${extdir}/datacards/plots \
 --uncs btag,ltag,pu \
 --altProc tbart2p0w \
 --cats ${catList}
 python test/TopWidthAnalysis/getShapeUncPlots.py \
- -i ${outdir}/withSel/datacards/shapes.root \
- -o ${outdir}/withSel/datacards/plots \
+ -i ${extdir}/datacards/shapes.root \
+ -o ${extdir}/datacards/plots \
 --uncs jes,les,jer \
 --altProc tbart2p0w \
 --cats ${catList}
 python test/TopWidthAnalysis/getShapeUncPlots.py \
- -i ${outdir}/withSel/datacards/shapes.root \
- -o ${outdir}/withSel/datacards/plots \
+ -i ${extdir}/datacards/shapes.root \
+ -o ${extdir}/datacards/plots \
 --uncs Mtop \
 --altProc tbart2p0w \
 --cats ${catList}
 python test/TopWidthAnalysis/getShapeUncPlots.py \
- -i ${outdir}/withSel/datacards/shapes.root \
- -o ${outdir}/withSel/datacards/plots \
+ -i ${extdir}/datacards/shapes.root \
+ -o ${extdir}/datacards/plots \
 --uncs hdamp,toppt \
 --altProc tbart2p0w \
 --cats ${catList}
 python test/TopWidthAnalysis/getShapeUncPlots.py \
- -i ${outdir}/withSel/datacards/shapes.root \
- -o ${outdir}/withSel/datacards/plots \
+ -i ${extdir}/datacards/shapes.root \
+ -o ${extdir}/datacards/plots \
 --uncs ISR,FSR,UE \
 --altProc tbart2p0w \
 --cats ${catList}
 ;;
-############################### NUISANCES #################################
- NUISANCES ) # run once for each nuisance, doing a full analysis to understand effects of systematics
- for dist in ${dists[*]} ; do
- systList=""
- i=0
- for syst in ${nuisances[*]} ; do
- if [[ "${syst}" != "${nuisances[0]}" ]]; then
- systList="${systList},"
- fi
- systList="${systList}${syst}"
-
- echo "Frozen systematics are now: ${systList[@]}"
-
- mkdir ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}
- cd ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}
-
- # start writing job
- for twid in ${wid[*]}; do
-
- # run likelihood scan
- cmd="combine ${outdir}/${twid}_${dist}.root -M MultiDimFit"
- cmd="${cmd} -m 172.5 -P x --floatOtherPOI=1 --algo=grid --points=200"
- cmd="${cmd} --expectSignal=1 --setPhysicsModelParameters x=0,r=1"
- cmd="${cmd} -n x0_scan_Asimov_${twid}_${dist}"
- cmd="${cmd} --freezeNuisances ${systList}"
- if [[ ${unblind} == false ]] ; then
- echo "Analysis is blinded"
- cmd="${cmd} -t -1"
- fi
-
- # launch job
- bsub -q ${queue} \
- ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
- # run CLs pre-fit
- cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult"
- cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6"
- cmd="${cmd} --clsAcc 0 --fullBToys --generateExt=1 --generateNuis=0"
- cmd="${cmd} --expectedFromGrid 0.5 -n x_pre-fit_exp_${twid}_${dist}"
- cmd="${cmd} --freezeNuisances ${systList}"
- if [[ ${unblind} == false ]] ; then
- echo "Analysis is blinded"
- cmd="${cmd} -t -1"
- fi
-
- # launch job
- bsub -q ${queue} \
- ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
- # run CLs post-fit exp
- cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult"
- cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6"
- cmd="${cmd} --clsAcc 0 --fullBToys --frequentist"
- cmd="${cmd} --expectedFromGrid 0.5 -n x_post-fit_exp_${twid}_${dist}"
- cmd="${cmd} --freezeNuisances ${systList}"
- if [[ ${unblind} == false ]] ; then
- echo "Analysis is blinded"
- cmd="${cmd} -t -1"
- fi
-
- # launch job
- bsub -q ${queue} \
- ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
- # run CLs post-fit obs
- cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult"
- cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6"
- cmd="${cmd} --clsAcc 0 --fullBToys --frequentist"
- cmd="${cmd} -n x_post-fit_obs_${twid}_${dist}"
- cmd="${cmd} --freezeNuisances ${systList}"
- if [[ ${unblind} == false ]] ; then
- echo "Analysis is blinded"
- cmd="${cmd} -t -1"
- fi
-
- # launch job
- bsub -q ${queue} \
- ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
- done
-
- let "i += 1"
- done
- done
- ;;
-############################### NUISANCES_TOYS ############################
- NUISANCES_TOYS )
- for dist in ${dists[*]} ; do
- i=0
- for syst in ${nuisances[*]} ; do
- cd ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}
-
- # start writing job
- for twid in ${wid[*]}; do
-
- ## pre-fit expected
- rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis"
- rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_pre-fit_exp_${twid}_${dist}.qvals.root"
- rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",1000,\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"pre\\\"\)"
-
- cmd=""
- cmd="${cmd}root -l -q -b"
- cmd="${cmd} ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}"
- cmd="${cmd}/higgsCombinex_pre-fit_exp_${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root"
- cmd="${cmd} ${rootcmds}"
-
- #bsub -q ${queue} \
- sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
-
- ## post-fit expected
- rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis"
- rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_post-fit_exp_${twid}_${dist}.qvals.root"
- rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",1000,\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"post\\\"\)"
-
- cmd=""
- cmd="${cmd}root -l -q -b"
- cmd="${cmd} ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}"
- cmd="${cmd}/higgsCombinex_post-fit_exp_${twid}_${dist}.HybridNew.mH172.5.8192.quant0.500.root"
- cmd="${cmd} ${rootcmds}"
-
- #bsub -q ${queue} \
- sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
-
- ## post-fit observed
- rootcmds="${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis"
- rootcmds="${rootcmds}/hypoTestResultTreeTopWid.cxx\(\\\"x_post-fit_obs_${twid}_${dist}.qvals.root"
- rootcmds="${rootcmds}\\\",172.5,1,\\\"x\\\",1000,\\\"\\\",\\\"${twid}\\\",\\\"${dist}\\\",${unblind},\\\"obs\\\"\)"
-
- cmd=""
- cmd="${cmd}root -l -q -b"
- cmd="${cmd} ${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}"
- cmd="${cmd}/higgsCombinex_post-fit_obs_${twid}_${dist}.HybridNew.mH172.5.8192.root"
- cmd="${cmd} ${rootcmds}"
-
- #bsub -q ${queue} \
- sh ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh \
- "${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}/" "${cmd}"
-
- done
-
- let "i += 1"
- done
- done
- ;;
-############################### NUISANCES_FITS ############################
- NUISANCE_FITS )
- cd ${CMSSW_7_4_7dir}/
- eval `scramv1 runtime -sh`
-
- for dist in ${dists[*]} ; do
- i=0
- for syst in ${nuisances[*]} ; do
- dirName=${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}
-
- python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getSeparationTables.py \
- -i ${dirName}/ \
- -o ${dirName}/ \
- --dist ${dist} \
- --wid ${widStr} \
- --unblind
-
- let "i += 1"
-
- done
- done
-
- for dist in ${dists[*]} ; do
- i=0
- for syst in ${nuisances[*]} ; do
- dirName=${outdir}/nuisanceTurnOn_${dist}_${syst}_${i}
-
-
- echo " "
- echo "$i ||| $dist $syst : "
-
- python ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/getCLsFromFit.py \
- -i $dirName \
- --dist ${dist} \
- --unblind
-
- let "i += 1"
-
- done
- done
- ;;
 esac
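
Note on the toy-extraction hunk above: the chained cmd= assignments only assemble one long string that wrapPseudoexperiments.sh later evaluates. For a single alternative-width point the expanded call is roughly equivalent to the sketch below; twid, dist and nPseudo are illustrative example values (not taken from the script), while CMSSW_7_6_3dir and extdir are assumed to be set earlier in steerTOP17010.sh.

    # sketch only: expanded form of the post-fit expected extraction for one point
    twid=220 ; dist=incmlb ; nPseudo=2000                     # hypothetical example values
    cd ${extdir}/hypotest_100vs${twid}_100pseudodata/
    root -l -q -b \
        higgsCombinecls_postfit_exp.HybridNew.mH172.5.8192.quant0.500.root \
        "${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/hypoTestResultTreeTopWid.cxx(\"x_post-fit_exp__${twid}_${dist}.qvals.root\",172.5,1,\"x\",${nPseudo},\"\",\"${twid}\",\"${dist}\",false,\"post\")"

ROOT opens the HybridNew toy file and then runs the macro, which writes the x_post-fit_exp__<wid>_<dist>.qvals.root output; the pdf/png and stats text files copied back to ${extdir} are what the QUANTILES step reads from there.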
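For reference, the higgsCombine*.HybridNew.mH172.5.8192.quant0.500.root inputs used in the surviving steps follow combine's usual output naming (higgsCombine<label>.<Method>.mH<mass>.<seed>.quant<quantile>.root); the NUISANCES case deleted above produced such a file per width point with a command of this shape (a sketch reusing the removed flags; queue and the <wid>_<dist>.root workspace name are as in the removed lines):

    # sketch: one post-fit expected CLs point, median expected quantile (0.5)
    cmd="combine ${outdir}/${twid}_${dist}.root -M HybridNew --seed 8192 --saveHybridResult"
    cmd="${cmd} -m 172.5 --testStat=TEV --singlePoint 1 -T ${nPseudo} -i 2 --fork 6"
    cmd="${cmd} --clsAcc 0 --fullBToys --frequentist"
    cmd="${cmd} --expectedFromGrid 0.5 -n x_post-fit_exp_${twid}_${dist}"
    bsub -q ${queue} ${CMSSW_7_6_3dir}/TopLJets2015/TopAnalysis/test/TopWidthAnalysis/wrapPseudoexperiments.sh "${outdir}/" "${cmd}"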