Commit 1ce973c2 authored by John Derek Chapman, committed by Graeme Stewart

python/overlayTransformUtils.py, share/skeleton.OverlayBS_tf.py - OverlayBS_tf should take inputBSFile, rather than inputBS_SKIMFile, as input. ATLASSIM-492.
scripts/OverlayChain_tf.py - set all subSteps to take the maxEvents value from the command-line, rather than just the first one. ATLASSIM-492.
EventOverlayJobTransforms-00-05-01

2014-12-17  John Chapman  <John.Chapman@cern.ch>

	* python/overlayTransformUtils.py, share/skeleton.OverlayBS_tf.py
	- OverlayBS_tf should take inputBSFile, rather than
	inputBS_SKIMFile as input. ATLASSIM-492.
	* scripts/OverlayChain_tf.py - set all subSteps to take the
	maxEvents value from the command-line, rather than just the first
	one. ATLASSIM-492.
	* EventOverlayJobTransforms-00-05-01

2014-12-09  John Chapman  <John.Chapman@cern.ch>

	* add python/overlayTrfArgs.py - to define all the
	overlay-specific arguments for the new framework transforms.
	* python/overlayTransformUtils.py - moved argument definitions to
	overlayTrfArgs.py and added all new framework overlay transform
	definitions here.
	* scripts/BSOverlayFilter_tf.py - tweak config to reflect new
	method names in overlayTransformUtils.py.
	* share/skeleton.BSOverlayFilter_tf.py - update to be able to use
...
(Long ChangeLog diff - truncated)
parent 39e5d395
@@ -2,10 +2,11 @@ package EventOverlayJobTransforms
use AtlasPolicy AtlasPolicy-*
use PyJobTransforms PyJobTransforms-* Tools
use PyJobTransformsCore PyJobTransformsCore-* Tools
apply_pattern declare_job_transforms tfs='*.py' jo='*.py'
apply_pattern declare_python_modules files="*.py"
apply_pattern declare_jobtransforms trfs='*_trf.py' jo='*.py'
#apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' prefix=share name=trf
apply_pattern declare_runtime_extras files='-s=../share *.db'
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## -*- python -*-
from PyJobTransformsCore.full_trfarg import *
from PATJobTransforms.Configuration import *
class DigiRndmSvcArg (StringChoicesArg):
    """random number service to use for digitization - AtRndmGenSvc uses Ranecu, AtRanluxGenSvc uses Ranlux64"""
    def __init__(self, choices, caseSensitive = False, help = 'default', name = 'digiRndmSvc'):
        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
    def isFullArgument(self):
        return True

def AddDigiRndmSvc(trf, inDic):
    trf.add( DigiRndmSvcArg( ['AtRndmGenSvc', 'AtRanluxGenSvc', 'AtDSFMTGenSvc']) )
    return
AddDigiRndmSvc.subSteps = ['h2r']
AddToConfigDic('digiRndmSvc', AddDigiRndmSvc)

class SamplingFractionDbTagArg (StringChoicesArg):
    """liquid argon calorimeter sampling fraction database tag, passed on in jobOptions to LArfSamplG4Phys"""
    def __init__(self, choices, caseSensitive = False, help = 'default', name = 'samplingFractionDbTag'):
        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
    def isFullArgument(self):
        return True

def AddSamplingFractionDbTagArg(trf, inDic):
    trf.add( SamplingFractionDbTagArg(['QGSP_BERT', 'QGSP_EMV', 'QGSP', 'QGSP_BERT_EMV', 'FTFP_BERT', 'QGSP_FTFP_BERT', 'FTF_BIC', 'QGSP_BERT_CHIPS', 'QGSP_BIC', 'CHIPS']) )
    return
AddSamplingFractionDbTagArg.subSteps = ['h2r']
AddToConfigDic('samplingFractionDbTag', AddSamplingFractionDbTagArg)

class NoiseControlArg (StringChoicesArg):
    """overall control of noise simulation - useful for overlay jobs, will set the doCaloNoise, doMuonNoise and doInDetNoise digitization job properties"""
    def __init__(self, choices, caseSensitive = False, help = 'override noise simulation flags in all subdetectors, [True/False]', name = 'doAllNoise'):
        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
    def isFullArgument(self):
        return True

def AddNoiseControlArg(trf, inDic):
    trf.add( NoiseControlArg(['True', 'False', 'NONE']) )
    return
AddNoiseControlArg.subSteps = ['h2r']
AddToConfigDic('doAllNoise', AddNoiseControlArg)

class AddCaloDigiArg(BoolArg):
    """Option to save Calo Digits too, not just RawChannels"""
    def __init__(self, help, name = 'AddCaloDigi'):
        BoolArg.__init__(self, help, name)
    def isFullArgument(self):
        return True
    def jobOrTask(self):
        return 'job'

def AddAddCaloDigiArg(trf, inDic):
    trf.add( AddCaloDigiArg ("True/False: Save CaloDigits as well, not just RawChannels") )
    return
AddAddCaloDigiArg.subSteps = ['h2r']
AddToConfigDic('AddCaloDigi', AddAddCaloDigiArg)
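
Each argument above follows the same old-style recipe: an *Arg class wrapping one of the full_trfarg types, an Add* hook that attaches it to the transform, a subSteps annotation, and an AddToConfigDic registration. A minimal sketch of a hypothetical extra flag following that recipe ('flipNoise' is invented purely for illustration; BoolArg and AddToConfigDic are the names already imported above):

class FlipNoiseArg(BoolArg):
    """illustration only - a hypothetical flag, not part of this package"""
    def __init__(self, help, name = 'flipNoise'):
        BoolArg.__init__(self, help, name)
    def isFullArgument(self):
        return True

def AddFlipNoiseArg(trf, inDic):
    trf.add( FlipNoiseArg("True/False: hypothetical example flag") )
    return
AddFlipNoiseArg.subSteps = ['h2r']   # visible to the HITS->RDO substep only
AddToConfigDic('flipNoise', AddFlipNoiseArg)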
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## @brief Specialist reconstruction and bytestream transforms
# @author atlas-comp-jt-dev@cern.ch
# @version $Id: overlayTransformUtils.py 636536 2014-12-17 16:49:33Z jchapman $
import os
import re
import subprocess
import sys
import tarfile
import logging
# Logging needs to be in the PyJobTransforms "namespace"
msg = logging.getLogger('PyJobTransforms.'+__name__.split('.')[-1])
import PyJobTransforms.trfExceptions as trfExceptions
import PyJobTransforms.trfArgClasses as trfArgClasses
import PyJobTransforms.trfValidation as trfValidation
from PyJobTransforms.trfExitCodes import trfExit
from PyJobTransforms.trfExe import athenaExecutor
## @brief Executor for the bytestream overlay filter step, able to split
#  the job into athena subjobs when many input files are given
class BSJobSplitterExecutor(athenaExecutor):

    def preExecute(self, input = set(), output = set()):
        msg.debug('Preparing for execution of {0} with inputs {1} and outputs {2}'.format(self.name, input, output))

        # There are two ways to configure this transform:
        # - Give an inputBSFile argument directly
        # - Give an overlayConfigFile and jobNumber argument
        # Check now that we have a configuration that works
        if 'inputBSFile' in self.conf.argdict and 'overlayConfigFile' in self.conf.argdict:
            raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'Both inputBSFile and overlayConfigFile have been specified - please use only one.')

        if 'overlayConfigFile' in self.conf.argdict:
            if 'jobNumber' not in self.conf.argdict:
                raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'overlayConfigFile is specified, but no jobNumber was given.')

            # Job number has to wrap around from 500, dropping back to 1
            wrappedJobNumber = (self.conf.argdict['jobNumber'].value-1)%500 + 1

            self._inputFilelist = 'filelist_{0}.txt'.format(wrappedJobNumber)
            self._lblList = 'lbn_anal_map_{0}.txt'.format(wrappedJobNumber)

            try:
                f=tarfile.open(name=self.conf.argdict['overlayConfigFile'].value)
                f.extract('filelist_{0}.txt'.format(wrappedJobNumber))
                f.extract('lbn_anal_map_{0}.txt'.format(wrappedJobNumber))
                f.close()
                bsInputs = open(self._inputFilelist).readline().rstrip().split(',')
                self.conf.addToArgdict('inputBSFile', trfArgClasses.argBSFile(bsInputs, io='input', type='BS', subtype='BS'))
                self.conf.addToDataDictionary('BS', self.conf.argdict['inputBSFile'])
                input.add('BS')
                msg.info('Validating resolved input bytestream files')
                trfValidation.performStandardFileValidation({'BS': self.conf.argdict['inputBSFile']}, io='input')
            except Exception, e:
                raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'Error while unpacking and extracting input files for transform: {0}'.format(e))

            # Now setup correct input arguments
            self.conf.argdict['InputLbnMapFile'] = trfArgClasses.argString(self._lblList)
            self.conf.argdict['InputFileMapFile'] = trfArgClasses.argString(self._inputFilelist)
        else:
            #if 'lumiBlockMapFile' not in self.conf.argdict:
            #    raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'inputBSFile is specified, but no lumiBlockMapFile was given.')
            #self.conf.argdict['InputLbnMapFile'] = self.conf.argdict['lumiBlockMapFile']
            if 'lumiBlockMapFile' in self.conf.argdict:
                self.conf.argdict['InputLbnMapFile'] = self.conf.argdict['lumiBlockMapFile']

        super(BSJobSplitterExecutor, self).preExecute(input=input, output=output)

    def execute(self):
        if 'maxFilesPerSubjob' in self.conf.argdict:
            self._hasExecuted = True
            msg.info('Activating job splitting with {0} files per subjob'.format(self.conf.argdict['maxFilesPerSubjob'].value))

            tmpFiles = self.conf.argdict['inputBSFile'].value
            self._subJobInputs = []
            while len(tmpFiles) > 0:
                self._subJobInputs.append(tmpFiles[0:self.conf.argdict['maxFilesPerSubjob'].value-1])
                del tmpFiles[0:self.conf.argdict['maxFilesPerSubjob'].value-1]

            #### TESTING TESTING!
            # self._subJobInputs = self._subJobInputs[0:2]
            ####

            msg.info('Job will be split into {0} pieces: {1}'.format(len(self._subJobInputs), self._subJobInputs))

            # Now loop over each subjob
            from PyJobTransforms.trfExe import athenaExecutor, executorConfig, bsMergeExecutor
            jobCounter = 0
            outputBSFileList = []
            for sjInputs in self._subJobInputs:
                sjobArgdict = {'triggerBit': self.conf.argdict['triggerBit'],
                               'InputLbnMapFile': self.conf.argdict['InputLbnMapFile'],
                               'checkEventCount': trfArgClasses.argSubstepBool('False', runarg=False)}

                sjOutput = self.conf.argdict['outputBS_SKIMFile'].value[0] + '_sub{0:03d}.tmp'.format(jobCounter)
                outputBSFileList.append(sjOutput)
                sjDataDictionary = {'BS': trfArgClasses.argBSFile(sjInputs, io='input', type='BS', subtype='BS'),
                                    'BS_SKIM': trfArgClasses.argBSFile(sjOutput, io='output', type='BS', subtype='BS_SKIM')}

                sjconf = executorConfig(sjobArgdict, sjDataDictionary)
                sjexe = athenaExecutor(name='BSOverlayFilter_{0:03d}'.format(jobCounter), skeletonFile = 'EventOverlayJobTransforms/skeleton.BSOverlayFilter_tf.py',
                                       conf=sjconf, inData=set(['BS']), outData=set(['BS_SKIM']))
                sjexe.doAll(input=set(['BS']), output=set(['BS_SKIM']))
                jobCounter += 1

            # Now do the merging
            mrgArgdict = {'maskEmptyInputs': trfArgClasses.argBool(True, runarg=False),
                          'allowRename': trfArgClasses.argBool(True, runarg=False)}
            mrgDataDictionary = {'BS': trfArgClasses.argBSFile(outputBSFileList, io='input', type='BS', subtype='BS_SKIM'),
                                 'BS_MRG': self.conf.argdict['outputBS_SKIMFile']}
            mrgConf = executorConfig(mrgArgdict, mrgDataDictionary)
            mrgExe = bsMergeExecutor(name = 'RAWFileMerge', exe = 'file_merging', conf=mrgConf,
                                     inData = set(['BS']), outData = set(['BS_MRG']))
            mrgExe.doAll(input=set(['BS']), output=set(['BS_MRG']))

            # If we got here then all must be well, so we have executed successfully
            self._rc = 0
        else:
            # Ordinary run
            super(BSJobSplitterExecutor, self).execute()

### Add Argument Methods
def addOverlayBSFilterArguments(parser):
    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayBSFilterArgs
    addOverlayBSFilterArgs(parser)

def addOverlay_PoolArguments(parser):
    from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs
    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs
    addBasicDigiArgs(parser)
    addForwardDetTrfArgs(parser)
    addOverlayTrfArgs(parser)
    addOverlayPoolTrfArgs(parser)

def addOverlay_BSArguments(parser):
    from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs
    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayBSTrfArgs
    addBasicDigiArgs(parser)
    addForwardDetTrfArgs(parser)
    addOverlayTrfArgs(parser)
    addOverlayBSTrfArgs(parser)

### Add Sub-step Methods
def addOverlayBSFilterSubstep(executorSet):
    executorSet.add(BSJobSplitterExecutor(name = 'BSFilter', skeletonFile = 'EventOverlayJobTransforms/skeleton.BSOverlayFilter_tf.py', substep='overlayBSFilt',
                                          perfMonFile = 'ntuple.pmon.gz', inData = ['BS'], outData = ['BS_SKIM']))

def addOverlay_PoolSubstep(executorSet):
    executorSet.add(athenaExecutor(name = 'OverlayPool', skeletonFile = 'EventOverlayJobTransforms/skeleton.OverlayPool_tf.py',
                                   substep = 'overlayPOOL', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
                                   inData = ['HITS', 'RDO_BKG'], outData = ['RDO', 'RDO_SGNL']))

def addOverlay_BSSubstep(executorSet):
    executorSet.add(athenaExecutor(name = 'OverlayBS', skeletonFile = 'EventOverlayJobTransforms/skeleton.OverlayBS_tf.py',
                                   substep = 'overlayBS', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
                                   inData = [('HITS', 'BS')], outData = ['RDO', 'RDO_SGNL']))

### Append Sub-step Methods
def appendOverlayBSFilterSubstep(trf):
    executor = set()
    addOverlayBSFilterSubstep(executor)
    trf.appendToExecutorSet(executor)

def appendOverlay_PoolSubstep(trf):
    executor = set()
    addOverlay_PoolSubstep(executor)
    trf.appendToExecutorSet(executor)

def appendOverlay_BSSubstep(trf):
    executor = set()
    addOverlay_BSSubstep(executor)
    trf.appendToExecutorSet(executor)
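
For orientation, a hedged sketch of how the helpers above compose into a transform entry point. The transform constructor keywords follow common PyJobTransforms usage; this is illustrative, not a copy of any script in the package:

from PyJobTransforms.transform import transform
from PyJobTransforms.trfArgs import addAthenaArguments

def getTransform():
    # assemble the executor set and argument groups defined above
    executorSet = set()
    addOverlayBSFilterSubstep(executorSet)
    trf = transform(executor=executorSet, description='BS overlay filter (sketch)')
    addAthenaArguments(trf.parser)
    addOverlayBSFilterArguments(trf.parser)
    return trf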
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
import argparse
import logging
msg = logging.getLogger(__name__)
import unittest
import pickle
import os
from PyJobTransforms.trfArgClasses import argFactory, argFile, argInt, argFloat, argString, argSubstep, trfArgParser, argList, argBool, argBSFile, argPOOLFile, argHITSFile, argRDOFile, argSubstepInt, argSubstepBool
## Add Overlay transform arguments to an argparse ArgumentParser
def addOverlayBSFilterArgs(parser):
    parser.defineArgGroup('Overlay Filter', 'Overlay filter transform arguments')
    parser.add_argument('--overlayConfigFile',
                        type=argFactory(argString),
                        help='Input overlay configuration tarball file', group='Overlay Filter')
    parser.add_argument('--inputBSFile', nargs='+',
                        type=argFactory(argBSFile, io='input'),
                        help='Input overlay BS file(s)', group='Overlay Filter')
    parser.add_argument('--lumiBlockMapFile',
                        type=argFactory(argString),
                        help='Lumi block information file', group='Overlay Filter')
    parser.add_argument('--outputBS_SKIMFile', '--outputBSFile',
                        type=argFactory(argBSFile, io='output'),
                        help='Output skimmed BS file', group='Overlay Filter')
    parser.add_argument('--eventIdFile',
                        type=argFactory(argString),
                        help='The name of the file to write to for EventIdModifierSvc lines', group='Overlay Filter')
    parser.add_argument('--jobNumber',
                        type=argFactory(argInt),
                        help='Job number', group='Overlay Filter')
    parser.add_argument('--triggerBit',
                        type=argFactory(argInt),
                        help='Trigger bit', group='Overlay Filter')
    parser.add_argument('--maxFilesPerSubjob',
                        type=argFactory(argInt),
                        help='Number of bytestream input files for each athena subjob', group='Overlay Filter')

# jobNumber=102
# InputDataTarFile=/afs/cern.ch/work/e/efeld/overlay/prep/mytar.tar.gz
# InputLbnMapFile=lbn_anal_map.txt
# InputFileMapFile=filelist.txt
# TriggerBit=240

def addOverlayTrfArgs(parser):
    parser.defineArgGroup('EventOverlay', 'Event Overlay Options')
    parser.add_argument('--ReadByteStream',
                        type=argFactory(argBool),
                        help='Are the real data input files byte stream or POOL?', group='EventOverlay')
    parser.add_argument('--muonForceUse',
                        type=argFactory(argBool),
                        help='A hack for muon 8-fold versus 12-fold problems?', group='EventOverlay')
    parser.add_argument('--outputRDO_SGNLFile', nargs='+',
                        type=argFactory(argRDOFile, io='output'),
                        help='The RDO file of the MC signal alone', group='EventOverlay')
    parser.add_argument('--fSampltag',
                        type=argFactory(argString),
                        help='The cool tag for /LAR/ElecCalib/fSampl/Symmetry, see https://twiki.cern.ch/twiki/bin/viewauth/Atlas/LArCalibMCPoolCool', group='EventOverlay')
    parser.add_argument('--cscCondOverride',
                        type=argFactory(argBool),
                        help='A hack to override CSC conditions folder', group='EventOverlay')

def addOverlayBSTrfArgs(parser):
    parser.defineArgGroup('EventOverlayBS', 'Event Overlay Options')
    parser.add_argument('--inputBS_SKIMFile', nargs='+',
                        type=argFactory(argBSFile, io='input'),
                        help='Input RAW BS for pileup overlay', group='EventOverlayBS')

def addOverlayPoolTrfArgs(parser):
    parser.defineArgGroup('EventOverlayPool', 'Event Overlay Options')
    parser.add_argument('--inputRDO_BKGFile', nargs='+',
                        type=argFactory(argRDOFile, io='input'),
                        help='Input RAW RDO for pileup overlay', group='EventOverlayPool')
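
A short usage sketch of the adders above, assuming only that trfArgParser behaves as the argparse subclass imported at the top of this module and that parsed values are the arg objects produced by argFactory:

if __name__ == '__main__':
    # illustration only: attach one argument group and parse a toy command line
    parser = trfArgParser(description='overlay argument demo')
    addOverlayBSFilterArgs(parser)
    args = vars(parser.parse_args(['--jobNumber', '7', '--triggerBit', '240']))
    print args['jobNumber'].value, args['triggerBit'].value   # -> 7 240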
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
#get the list of special run,lbn with more than 1 file
special_run_lbn = {}
with open("runs_lbn_files_lb.txt") as f:
for line in f:
#print line
linfo=line.split("\t")
#print linfo[1]
finfo=linfo[1].split(".")
run=finfo[1]
lbn=finfo[5].replace("_lb","")
#print run+"_"+lbn
if run+"_"+lbn in special_run_lbn:
if linfo[1] in special_run_lbn[run+"_"+lbn]:
print linfo[1], "already in map for ",run,lbn
else:
special_run_lbn[run+"_"+lbn].append(linfo[1])
else:
special_run_lbn[run+"_"+lbn]=[linfo[1]]
for key in special_run_lbn.keys():
#if len(special_run_lbn[key])>1: print special_run_lbn[key]
pass
print "done with special_run_lbn"
#do the work
for s in range(0,50): #50
with open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted.txt") as f:
tot=0
nfile=1
of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
for line in f:
linfo=line.split(" ")
#print line
#print linfo[16]
tot+=int(linfo[16])
run=linfo[3].replace(",","").zfill(8)
lbn=linfo[5].replace(",","").zfill(4)
if tot>100:
#replace last bit of line and also write it to the new file
extra=tot-100
forthisone=int(linfo[16])-extra
rline=line.replace(linfo[16]+" wanted",str(forthisone)+" wanted")
of.write(rline)
if run+"_"+lbn in special_run_lbn:
for f in special_run_lbn[run+"_"+lbn]:
#print of2.tell()
if of2.tell()>0: of2.write(",")
of2.write(f)
else:
print run,lbn," not in files map!"
#print line, rline
#print run, lbn
of.close(); of2.close(); nfile+=1
of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
tot=extra #the number left over from the last set
rline=line.replace(linfo[16]+" wanted",str(extra)+" wanted")
of.write(rline)
if run+"_"+lbn in special_run_lbn:
for f in special_run_lbn[run+"_"+lbn]:
if of2.tell()>0: of2.write(",")
of2.write(f)
else:
print run,lbn," not in files map!"
elif tot==100:
of.write(line)
if run+"_"+lbn in special_run_lbn:
for f in special_run_lbn[run+"_"+lbn]:
if of2.tell()>0: of2.write(",")
of2.write(f)
else:
print run,lbn," not in files map!"
of.close(); of2.close(); nfile+=1
of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
tot=0
else:
of.write(line)
if run+"_"+lbn in special_run_lbn:
for f in special_run_lbn[run+"_"+lbn]:
#print of2.tell()
if of2.tell()>0: of2.write(",")
of2.write(f)
else:
print run,lbn," not in files map!"
print "done ",s
of.close(); of2.close()
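
The tot/extra/forthisone bookkeeping above packs event counts into files of exactly 100 events, splitting a (run, lbn) entry across two files when it overflows. The same arithmetic as a self-contained sketch (illustration only, not used by the script; the function name is invented):

def pack_into_jobs(counts, per_job=100):
    """Pack integer event counts into jobs of exactly per_job events,
    splitting an entry across jobs when it overflows."""
    jobs = []       # completed jobs, each summing to per_job
    current = []    # counts assigned to the job being filled
    tot = 0
    for n in counts:
        while tot + n >= per_job:   # this entry (partially) fills the job
            take = per_job - tot
            current.append(take)
            jobs.append(current)
            current, tot = [], 0
            n -= take
        if n > 0:
            current.append(n)
            tot += n
    if current:
        jobs.append(current)        # leftover partial job
    return jobs

# pack_into_jobs([60, 60, 90]) -> [[60, 40], [20, 80], [10]]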
@@ -90,7 +90,7 @@ void lbn_analyze(int stream, int nwanted)
//runmap[grun].nevt=0;
}
grun=run;//save the run number
if (debug) printf("Setting grun to %d\n",run);
printf("Setting grun to %d\n",run);
//read in event counts from runquery file for run
readeventcounts(run);
#!/bin/bash
#check the bit that L1_ZB is on
#cool_read_lvl1rates.py --item L1_ZB --run $run --printSummary --printNumbers --enabledItems
#use GRL instead, see http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/
#wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/data11_7TeV.periodAllYear_DetStatus-v35-pro09-03_CoolRunQuery-00-04-00_Atlas_Ready.xml
wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml
#use GRL
wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/All_Good/data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml
#See https://atlas-datasummary.cern.ch/lumicalc/, https://atlas-lumicalc.cern.ch/
#iLumiCalc.exe -t L1_ZB -V -x data/MyLBCollection.xml --lumitag=OflLumi-7TeV-000 --livetrigger=L1_EM14 >lbn
iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml --lumitag=OflLumi-8TeV-002 --livetrigger=L1_EM30 > lbn
#iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml --lumitag=OflLumi-8TeV-003 --livetrigger=L1_EM30 --lar --lartag=LARBadChannelsOflEventVeto-UPD4-04 > lbn
iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml --lumitag=OflLumi-8TeV-003 --livetrigger=L1_EM30 > lbn
grep Total lbn
#get info on number of events per lumi block
rm lbnevents*
for runn in `grep "TGoodRunsListReader : Metadata value:" lbn | grep -v "find run "|head -1|sed -e "s%--- <DEBUG> TGoodRunsListReader : Metadata value: %%g" | sed 's/[\x01-\x1F\x7F]//g' | sed "s/\[34m//g" | sed "s/\[0m//g" | sed "s/,/ /g" ` ; do
#get info on number of events and files per lumi block in each run
rm -f lbnevents*
rm -f runs_lbn_files.txt
sstream="physics_ZeroBiasOverlay"
#sstream="physics_ZeroBias"
for runn in `echo "200842,200863,200913,200926,200965,200967,200982,200987,201006,201052,201113,201120,201138,201190,201191,201257,201269,201280,201289,201489,201494,201555,201556,202660,202668,202712,202740,202798,202965,202991,203027,203169,203191,203195,203228,203256,203258,203277,203335,203336,203353,203432,203454,203456,203523,203524,203602,203605,203636,203680,203719,203739,203745,203760,203779,203792,203875,203876,203934,204025,204026,204071,204073,204134,204153,204158,204240,204265,204416,204442,204474,204564,204633,204668,204726,204763,204769,204772,204796,204853,204857,204910,204932,204954,204955,204976,205010,205016,205017,205055,205071,205112,205113,206368,206369,206409,206497,206573,206614,206955,206962,206971,207044,207046,207221,207262,207304,207306,207332,207397,207447,207490,207528,207531,207532,207582,207589,207620,207664,207696,207749,207772,207800,207809,207845,207864,207865,207931,207934,207975,207982,208123,208126,208179,208184,208189,208258,208261,208354,208485,208631,208642,208662,208705,208717,208720,208780,208781,208811,208870,208930,208931,208970,208982,209024,209025,209074,209084,209109,209161,209183,209214,209254,209265,209269,209353,209381,209550,209580,209608,209628,209629,209736,209776,209787,209812,209864,209866,209899,209980,209995,210302,210308,211620,211670,211697,211772,211787,211867,211902,211937,212034,212103,212142,212144,212172,212199,212272,212619,212663,212687,212721,212742,212809,212815,212858,212967,212993,213039,213079,213092,213130,213155,213157,213204,213250,213359,213431,213479,213486,213539,213627,213640,213684,213695,213702,213754,213796,213816,213819,213900,213951,213964,213968,214021,214086,214160,214176,214216,214388,214390,214494,214523,214544,214553,214618,214651,214680,214714,214721,214758,214777,215027,215061,215063,215091,215414,215433,215456,215464,215473,215541,215571,215589,215643"|sed "s%,% %g"` ; do
echo "Getting event info for run $runn "
python GetNEventsLB.py -r $runn -s physics_ZeroBiasOverlay |grep "... Run" > lbnevents_${runn}.txt
python GetNEventsLB.py -r $runn -s $sstream |grep "... Run" > lbnevents_${runn}.txt
dq2-ls -f -H data12_8TeV.00${runn}.physics_ZeroBiasOverlay.merge.RAW >> runs_lbn_files.txt
done
grep "_lb" runs_lbn_files.txt > runs_lbn_files_lb.txt
#make lbn map
root -l -b -q run_lbn_analyze.C > log_lbn_analyze.txt
#make lbn_anal_map.txt
root -l -b -q run_lbn_analyze.C > ~/nfs3/zerobias_skim/log_lbn_analyze.txt
echo -n "Total events in dataset: "
echo -n "Total events in dataset before GRL: "
cat lbnevents_*.txt |cut -d ' ' -f 8 |awk '{total = total + $1}END{print total}'
echo -n "Total events in dataset from log_lbn_analyze.txt: "
grep "stream 0" ~/nfs3/zerobias_skim/log_lbn_analyze.txt|cut -d ' ' -f 6 |awk '{total = total + $1}END{print total}'
echo -n "Total events in dataset from lbn_anal_map: "
grep "stream 1," lbn_anal_map.txt |cut -d ' ' -f 8 |awk '{total = total + $1}END{print total}'
echo -n "Selected events per stream: "
grep "stream 8," lbn_anal_map.txt |cut -d ' ' -f 17 |awk '{total = total + $1}END{print total}'
#do this for all 50 streams
for s in {0..49}; do grep "stream ${s}," lbn_anal_map.txt | grep -v "0 want" > ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted.txt ; done
rm lbn_anal_map.txt
#split into jobs for each stream
python lbn_anal_map_splitter.py
rm ~/nfs3/zerobias_skim/*501.txt
for s in {0..49}; do
echo "renaming stream $s to 0"
for f in {1..500}; do sed -i -e "s%stream $s,%stream 0,%g" ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted_${f}.txt; done
done
cd ~/nfs3/zerobias_skim/
it=2
for s in {0..49}; do
tar cfz ${s}wanted${it}.tar.gz *_${s}wanted_*.txt
done
cd -
#send to afs
scp ~/nfs3/zerobias_skim/*wanted${it}.tar.gz ahaas@lxplus.cern.ch:public/overlay/lists/
#to check the events in each map:
for s in {0..49}; do
echo -n "files with 100 events in stream $s :"
for f in {1..500}; do cat ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted_$f.txt | cut -d ' ' -f 17 |awk '{total = total + $1}END{print total}'; done |grep -c 100
done
#to show number of files in each filelist:
for f in ~/nfs3/zerobias_skim/filelist_0wanted_*.txt; do echo -n `grep -o "data12" $f | wc -l`; echo " : $f"; done |sort -n
#! /usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## Transform for preparation of BS overlay
# @version $Id: BSOverlayFilter_tf.py 634748 2014-12-09 14:59:16Z jchapman $
import os.path
import sys
import time
import logging
# Setup core logging here
from PyJobTransforms.trfLogger import msg
msg.info('logging set in %s' % sys.argv[0])
from PyJobTransforms.transform import transform
from EventOverlayJobTransforms.overlayTransformUtils import BSJobSplitterExecutor
from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments
from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
ListOfDefaultPositionalKeys=['--amiConfig', '--amiMetadataTag', '--asetup', '--athena', '--athenaopts', '--checkEventCount', '--command', '--env', '--eventAcceptanceEfficiency', '--execOnly', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--inputBSFile', '--jobNumber', '--lumiBlockMapFile', '--maxEvents', '--maxFilesPerSubjob', '--noimf', '--notcmalloc', '--outputBS_SKIMFile', '--overlayConfigFile', '--postExec', '--postInclude', '--preExec', '--preInclude', '--reportName', '--reportType', '--showGraph', '--showPath', '--showSteps', '--skipEvents', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--triggerBit']
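# A hypothetical invocation built only from the keys listed above (file
# names and values are placeholders, not a tested command):
#
#   BSOverlayFilter_tf.py --inputBSFile zerobias.0001.RAW zerobias.0002.RAW \
#       --outputBS_SKIMFile skimmed.RAW --triggerBit 240 \
#       --lumiBlockMapFile lbn_anal_map.txt --maxFilesPerSubjob 20 --maxEvents 100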
@stdTrfExceptionHandler
@sigUsrStackTrace
def main():

    msg.info('This is %s' % sys.argv[0])

    trf = getTransform()
    trf.parseCmdLineArgs(sys.argv[1:])
    trf.execute()
    trf.generateReport()

    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))