Skip to content
Snippets Groups Projects
Commit abe2fcee authored by John Derek Chapman's avatar John Derek Chapman Committed by Graeme Stewart
Browse files

share/custom.py, share/magfield.py - fixes for running in devval, as described...

share/custom.py, share/magfield.py - fixes for running in devval, as described in ATLASSIM-506. Tagging EventOverlayJobTransforms-00-03-46-11 (EventOverlayJobTransforms-00-03-46-11)
parent 93d63525
No related merge requests found
Showing
with 3149 additions and 0 deletions
package EventOverlayJobTransforms
use AtlasPolicy AtlasPolicy-*
use AtlasPython AtlasPython-* External
use PyJobTransformsCore PyJobTransformsCore-* Tools
apply_pattern declare_python_modules files="*.py"
apply_pattern declare_jobtransforms trfs='*_trf.py' jo='*.py'
#apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' prefix=share name=trf
apply_pattern declare_runtime_extras extras="../test/EventOverlayJobTransforms_TestConfiguration.xml" files='-s=../share *.db'
#macro EventOverlayJobTransforms_TestConfiguration "../test/EventOverlayJobTransforms_TestConfiguration.xml"
#private
#use TestPolicy TestPolicy-*
#apply_pattern validate_xml
#public
package EventOverlayJobTransforms
use AtlasPolicy AtlasPolicy-*
use PyJobTransformsCore PyJobTransformsCore-* Tools
apply_pattern declare_python_modules files="*.py"
apply_pattern declare_jobtransforms trfs='*_trf.py' jo='*.py'
#apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' prefix=share name=trf
apply_pattern declare_runtime_extras files='-s=../share *.db'
#apply_pattern declare_runtime_extras extras="../test/EventOverlayJobTransforms_TestConfiguration.xml" files='-s=../share *.db'
#macro EventOverlayJobTransforms_TestConfiguration "../test/EventOverlayJobTransforms_TestConfiguration.xml"
#private
#use TestPolicy TestPolicy-*
#apply_pattern validate_xml
#public
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
#####################
## OverlayConfig.py
#####################
import os
__all__ = []
from PyJobTransformsCore.TransformConfig import *
import PyJobTransformsCore.basic_trfarg as trfarg
# define configuration properties
class OverlayConfig(TransformConfig):
    """Transform configuration for event-overlay jobs."""
    # No dynamic attributes: a mistyped property name raises instead of
    # silently creating a new member.
    __slots__ = ()

    def __init__(self, name='overlayConfig', metaData=None):
        # Avoid a shared mutable default for the metadata list.
        TransformConfig.__init__(self, name, [] if metaData is None else metaData)
        self.maxeventsstrategy = 'INPUTEVENTS'


# Shared configuration instance used by the overlay transforms.
overlayConfig = OverlayConfig()
# EOF
##########################
#!/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
#
# ----------------------------------------------------------------
# Script : GetNEventsLB.py
# Purpose: Utility to retrieve number of events per LB from SFO DB
# Authors: Andreas Hoecker (CERN)
# Created: Aug 17, 2011
# ----------------------------------------------------------------
import sys, getopt
def GetSFO_LBNEvents( cursor, runno, streamname ):
    """returns nfiles, fsize, nevents"""
    # A full stream name looks like "physics_ZeroBiasOverlay":
    # everything before the first '_' is the type, the rest is the stream.
    stype, _, sname = streamname.partition('_')
    query = ("SELECT SUM(NREVENTS),LUMIBLOCKNR,COUNT(FILESIZE),SUM(FILESIZE) "
             "FROM SFO_TZ_File WHERE RUNNR=:arg_1 and STREAMTYPE=:arg_2 and STREAM=:arg_3 "
             "GROUP BY LUMIBLOCKNR ORDER BY LUMIBLOCKNR")
    cursor.execute(query, arg_1=runno, arg_2=stype, arg_3=sname)
    return cursor.fetchall()
def GetSFO_LBs( cursor, runno ):
    """Return the (min, max) lumiblock numbers recorded for the run."""
    cursor.execute( "SELECT MIN(LUMIBLOCKNR), MAX(LUMIBLOCKNR) FROM SFO_TZ_Lumiblock WHERE RUNNR=:arg_1", arg_1=runno )
    row = cursor.fetchone()
    return row[0:2]
def OpenSFOConnection():
    """Open a read-only connection to the SFO Tier-0 Oracle account."""
    import cx_Oracle
    connect_string = "ATLAS_SFO_T0_R/readmesfotz2008@atlr"
    return cx_Oracle.connect(connect_string)
def usage():
print ' '
print 'Usage: python %s [options]' % sys.argv[0]
print ' -r | --run : run number (REQUIRED)'
print ' -s | --stream : full stream name (REQUIRED)'
print ' -h | --help : print this usage message'
print ' '
exit(1)
def main():
"""Parse options, query the SFO DB and print per-LB event counts,
file counts and RAW sizes for the requested run/stream."""
try:
shortopts = "r:s:h?"
longopts = ["run=", "stream=", "help", "usage"]
opts, args = getopt.getopt( sys.argv[1:], shortopts, longopts )
except getopt.GetoptError:
# print help information and exit:
print >> sys.stderr, 'ERROR: unknown options in argument %s' % sys.argv[1:]
usage()
runno = None
stream = None
for o, a in opts:
if o in ("-?", "-h", "--help", "--usage"):
usage()
elif o in ("-r", "--run"):
runno = a
elif o in ("-s", "--stream"):
stream = a
# both the run number and the stream name are mandatory
if not runno or not stream: usage()
runno = int(runno)
connection = OpenSFOConnection()
cursor = connection.cursor()
print 'Results for run: %i, stream: "%s"' % (runno, stream)
# min/max LB number for given run/stream
# --> this command is slow... don't know why
# minLB, maxLB = GetSFO_LBs( cursor, runno )
# print 'LB range: %i -- %i' % (minLB, maxLB)
# list with
# per-LB rows of (nevents, lbn, nfiles, filesize); only LBs with data appear
lblist = GetSFO_LBNEvents( cursor, runno, stream )
print 'First non-zero LB: ',lblist[0][1]
print 'Last non-zero LB: ',lblist[-1][1]
# access to all LBs
sumnev = 0
sumnfiles = 0
sumfsize = 0
for (nev,lb,nfiles,fsize) in lblist:
# convert bytes to MB for printout and for the running total
fsize /= 1.e6
print ' ... Run %i, LB %i has %i events, %i RAW files and %f MB' % (runno,lb,nev,nfiles,fsize)
sumnev += nev
sumnfiles += nfiles
sumfsize += fsize
print '--------------------------------------------------'
print 'Total #events : ', sumnev
print 'Total #files : ', sumnfiles
print 'Total RAW size: ', sumfsize/1000.0, ' GB'
cursor.close()
connection.close()
if __name__ == '__main__':
main()
#Each line contains 3 fields, separated by comma's:
#atlas_release_regexp, who_prints_it, error_message_regexp
# For the regular expression syntax that can be used in <error_message_regexp>, see:
# http://docs.python.org/lib/re-syntax.html
# Note in particular the special regexp characters that need to be backslashed if meant literal: ()[]{}^$.*+?
# In constructing the total regular expression used to match the lines:
# - whitespace is stripped of both ends of the fields <atlas_release_regexp> and <who_prints_it>,
# and from the right end of <error_message_regexp>
# - zero or more whitespace characters are allowed between <who_prints_it> and <error_message_regexp>
# - if the <who_prints_it> field is empty, the <error_message_regexp> is the total regexp.
# error detection can be tested by running on a relevant log file:
# checklog.py someLogFile
## Errors to ignore for ALL releases
## =================================
ALL ,.*?, INFO .+
ALL ,ByteStreamInputSvc,ERROR Skipping bad event
ALL ,ToolSvc.CscSplitClusterFitter,ERROR Peak-to-Val dist is [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)? Val-to-Peak dist is [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)? Shouldnot be negative value :[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)? [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)? [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?
ALL ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm CscThresholdClusterBuilder reported an ERROR, but returned a StatusCode "SUCCESS"
ALL ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm InDetSCTRawDataProvider reported an ERROR, but returned a StatusCode "SUCCESS"
ALL ,(?:Py:)?Athena , ERROR inconsistent case used in property name ".*?" of ApplicationMgr
ALL ,(?:Py:)?Athena , ERROR Algorithm ".*?": not in TopAlg or other known list, no properties set
ALL ,(?:Py:)?Athena , ERROR Algorithm ".*?": type missing, no properties set
ALL ,(?:Py:)?Athena , ERROR attempt to add .* to non-existent property .*?
ALL ,(?:Py:)?Configurable, ERROR .* undeclared or uses a backdoor
ALL ,(?:Py:)?Configurable, ERROR children\(\) is deprecated
ALL ,(?:Py:)?Configurable, ERROR getChildren\(\) returns a copy
ALL ,(?:Py:)?Configurable, ERROR jobOptName\(\) is deprecated
# Reco
ALL ,(?:Py:)?Configurable, ERROR attempt to add a duplicate \(CellCalibrator.CellCalibrator.H1WeightCone7H1Tower\)
ALL ,(?:Py:)?ResourceLimits,ERROR failed to set max resource limits
ALL ,AlgErrorAuditor, ERROR Illegal Return Code: Algorithm StreamESD reported an ERROR, but returned a StatusCode "SUCCESS"
# Trigger BStoRDO
ALL ,AthenaRefIOHandler, ERROR Failed to set ElementLink
ALL ,ElementLink, ERROR toPersistent: the internal state of link
ALL ,StoreGateSvc, ERROR record: object not added to store
ALL ,StoreGateSvc, ERROR setupProxy:: error setting up proxy
ALL ,AlgErrorAuditor, ERROR Illegal Return Code: Algorithm MooHLTAlgo
ALL ,AlgErrorAuditor, ERROR Illegal Return Code: Algorithm TrigSteer_EF
ALL ,AlgErrorAuditor, ERROR Illegal Return Code: Algorithm muFast_(?:Muon|900GeV)
# Trigger reco_ESD
ALL ,THistSvc, ERROR already registered an object with identifier "/EXPERT/
ALL ,RpcRawDataNtuple , ERROR .*
ALL ,CBNT_L1CaloROD\S+ , ERROR .*
ALL ,CBNTAA_Tile\S+ , ERROR .*
ALL ,TileDigitsMaker , ERROR .*
ALL ,MdtDigitToMdtRDO , ERROR .*
ALL ,HelloWorld , ERROR .*
ALL ,HelloWorld , FATAL .*
ALL ,PythiaB , ERROR ERROR in PYTHIA PARAMETERS
ALL ,ToolSvc , ERROR Tool .* not found and creation not requested
ALL ,ToolSvc , ERROR Unable to finalize the following tools
ALL ,ToolSvc , ERROR Factory for Tool .* not found
ALL ,CBNT_Audit , ERROR Memory leak!.*
ALL ,ToolSvc.InDetSCTRodDecoder , ERROR Unknown offlineId for OnlineId*
ALL ,THistSvc.sysFinali, FATAL Standard std::exception is caught
ALL ,,.*Message limit reached for .*
ALL ,,\s+ERROR IN C-S .*=.*
ALL ,,.*ERROR\s+\|.*
ALL ,,^\s*FATAL ERROR\s*$
ALL ,,ERROR \(poolDb\):
ALL ,,ERROR \(pool\):
ALL ,,ERROR - G4Navigator::ComputeStep\(\)
ALL ,,.*ERROR OCCURED DURING A SECONDARY SCATTER AND WAS
ALL ,THistSvc , ERROR already registered an object with identifier .*
ALL ,,ERROR MuonDetectorManager::getCscReadoutElement stNameindex out of range .*
ALL ,muFast_\S+ , ERROR CSM for Subsystem \d+, MrodId \d+, LinkId \d+ not found
ALL ,TRTDetectorManager , FATAL Unable to apply Inner Detector alignments
ALL ,TRTDetectorManager , ERROR AlignableTransformContainer for key \/TRT\/Align is empty
ALL ,,ERROR in Single_Process::CalculateTotalXSec
ALL ,,.*ERROR WITH DELM.*
#ALL ,ToolSvc.TrigTSerializer,ERROR Errors while decoding
ALL ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm
## Errors to ignore for specific releases
## ======================================
# bug #47761
15.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],PoolSvc, ERROR Failed to get ContainerHandle (for: POOLContainer_DataHeader_p3|to set POOL property\.)
# Cosmic Reco
14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],(?:Py:)?Configurable, ERROR attempt to add a duplicate \(.*\) \.\.\. dupe ignored
14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],ToolSvc, ERROR Cannot create tool ICalorimeterNoiseTool .*
14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],ToolSvc.MdtPrepDataProviderTool, ERROR Error in MDT RDO decoder for subdetId/mrodId/csmId*
14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],AlgErrorAuditor, ERROR Illegal Return Code: Algorithm MdtRdoToMdtPrepData reported an ERROR*
14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],\S+::fillBranch, ERROR <index out of range, (pat|hit) not added to ntuple> 100
# csc_recoESD, 14.2.21.1
14.2.[0-99]|(?:rel|dev|bug)_[0-6],TrigSteer_EF, ERROR std exception thrown from algorithm \(basic error\)To many objects in trigger collection of one type, end: 65535 >= 65535
14.2.[0-99]|(?:rel|dev|bug)_[0-6],AlgErrorAuditor, ERROR Illegal Return Code: Algorithm TrigCaloCellMaker_jet reported an ERROR, but returned a StatusCode "SUCCESS"
/*
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
#include <map>
#include "TRandom3.h"
// Per-lumiblock bookkeeping: events recorded, integrated luminosity, and
// how many events the sampler has requested from this lumiblock.
class lbn_info{
public:
  lbn_info() : nevt(0), intlumi(0), nevtwanted(0) {}
  int nevt;        // events recorded in this lumiblock
  float intlumi;   // integrated luminosity of this lumiblock
  int nevtwanted;  // events chosen from this lumiblock
};
// Per-run bookkeeping: run-wide totals plus the per-lumiblock breakdown.
class run_info{
public:
  run_info() : nevt(0), intlumi(0), nevtwanted(0) {}
  int nevt;                       // total events recorded in the run
  float intlumi;                  // integrated luminosity of the run
  std::map<int,lbn_info> lbnmap;  // per-LB details, keyed by LB number
  int nevtwanted;                 // events chosen from this run
};
// Output map file, global debug switch, and the LB -> #events map for the
// run currently being processed (filled by readeventcounts()).
FILE *outfile=fopen("lbn_anal_map.txt","w");
int debug=0;
std::map< int, int > eventcounts;
// Fill the global 'eventcounts' map (LB -> #events) for the given run by
// parsing "lbnevents_<run>.txt", the per-LB listing written by GetNEventsLB.py.
// Aborts (assert) on a malformed line; leaves the map empty if the file is missing.
void readeventcounts(int run){
  eventcounts.clear();
  char buf[50];
  sprintf(buf,"lbnevents_%d.txt",run);
  if (debug) printf("Opening %s, eventcounts size is %d\n",buf,(int)eventcounts.size());
  FILE *fp = fopen(buf,"r");
  if (!fp) {
    // BUGFIX: bail out instead of falling through and handing a null
    // FILE* to fgets below.
    printf("Failed to open %s!!!\n",buf);
    return;
  }
  char line[500];  // stack buffer: the original 'new char[500]' was never freed
  while (fgets(line,480,fp)) {
    int lbn,ne,nf,runn;
    float mb;
    int s=sscanf(line," ... Run %d, LB %d has %d events, %d RAW files and %f MB",&runn,&lbn,&ne,&nf,&mb);
    if (s>4){
      // every line in the file must belong to the requested run
      assert(run==runn);
      if (debug) printf("run %d lbn %d has %d events\n",run,lbn,ne);
      eventcounts[lbn]=ne;
    }
    else {
      printf("s=%d, bad read?\n",s);
      assert(false);
    }
  }
  if (debug) printf("Closing %s, eventcounts size is %d\n",buf,(int)eventcounts.size());
  fclose(fp);
}
// Sample 'nwanted' zero-bias events for output stream 'stream': runs are
// chosen in proportion to their integrated luminosity, and within a run
// lumiblocks in proportion to theirs.  Reads per-LB luminosity from the
// iLumiCalc output file "lbn", per-LB event counts via readeventcounts(),
// and writes the chosen per-LB counts to the global 'outfile'.
void lbn_analyze(int stream, int nwanted)
{
FILE *fp = fopen("lbn","r");
// NOTE(review): on failure this only prints; fgets below would then see a
// null FILE* -- confirm the "lbn" file is guaranteed to exist.
if (!fp) printf("Failed to open lbn input file!!!\n");
char *line=new char[500];
int run,lbn,L1Acc,beforeps,afterps,L1p,L2p,L3p,valid;
int liveL1,livebp,liveap;
float instlumi,dt,avevtperbx,live;
//float intlumi,intlumitrig;
int grun=0;
// grand totals over all runs, plain and prescale-corrected
float gtotaltotallumi=0,gtotaltotallumiprescaled=0;
// running totals for the run currently being read
float gtotallumi=0, gtotallumiprescaled=0;
// seed from the clock first, then re-seed with a stream-dependent value so
// each output stream draws an independent sequence
TRandom myrand;
myrand.SetSeed(0);
printf("- myrand seed is %u\n",myrand.GetSeed());
myrand.SetSeed(myrand.Integer(10000000)+stream);
printf("- myrand seed is now %u\n",myrand.GetSeed());
std::map<int,run_info> runmap;
while (fgets(line,480,fp)) {
// only LumiCalculator summary lines (starting with '-') are of interest
if (line[0]!='-') continue;
int s=sscanf(&line[0],"--- LumiCalculator : %d[%d]: L1Acc: %d, Livetime trigger L1Acc: %d, InstLumi: %f, deltaT: %f, AvEvtsPerBX: %f, BeforePrescale: %d, AfterPrescale: %d, Livetime trigger BeforePrescale: %d Livetime trigger AfterPrescale: %d, Livefrac: %f, L1Presc: %d, L2Presc: %d, L3Presc: %d, Valid: %d", &run,&lbn,&L1Acc,&liveL1,&instlumi,&dt,&avevtperbx,&beforeps,&afterps,&livebp,&liveap,&live,&L1p,&L2p,&L3p,&valid);
if (s>8){
if (debug) printf("- run=%d, lbn=%d, L1Acc=%d, instlumi=%f,L1p=%d, dt=%f, afterps=%d",run,lbn,L1Acc,instlumi,L1p,dt,afterps);
if (run!=grun){
if (grun>0){//change of run
// close out the previous run before starting the new one
runmap[grun].intlumi=gtotallumi; printf("Setting lumi for run %d to %f\n",grun,gtotallumi);
if (debug) printf("Adding to totaltotal lumis\n");
gtotaltotallumi+=gtotallumi; gtotaltotallumiprescaled+=gtotallumiprescaled;
gtotallumi=0;gtotallumiprescaled=0;//reset the int lumi calculation for this run
//runmap[grun].nevt=0;
}
grun=run;//save the run number
if (debug) printf("Setting grun to %d\n",run);
//read in event counts from runquery file for run
readeventcounts(run);
}//new run
// negative L1 prescale means the trigger was disabled for this LB
if (L1p<0) {
runmap[run].lbnmap[lbn].nevt=0;
if (eventcounts[lbn]!=0) printf("For lbn %d, L1p<0 but eventcounts is %d\n",lbn,eventcounts[lbn]);
}
else{
int ne = L1Acc;//afterps
// prefer the runquery event count over the lumicalc one when they differ
if (ne!=eventcounts[lbn]){
if (0==stream) printf("For lbn %d, ne from lumicalc is %d but we will trust runquery value of %d\n",lbn,ne,eventcounts[lbn]);
ne=eventcounts[lbn];
}
if (0==ne){dt=0; printf("For lbn %d, setting lumi to 0 since 0 events were recorded\n",lbn);}
runmap[run].lbnmap[lbn].nevt=ne;
runmap[run].lbnmap[lbn].intlumi=instlumi*dt*live;
runmap[run].nevt+=ne;
gtotallumiprescaled+= instlumi*dt*live/L1p;
gtotallumi+= instlumi*dt*live;
}
if (debug) printf(", s=%d, grun=%d, gtotallumi=%f, gtotallumiprescaled=%f\n",s,grun,gtotallumi,gtotallumiprescaled);
}//good line
}//loop over lines in file
//after last run
runmap[grun].intlumi=gtotallumi; printf("Setting lumi for run %d to %f\n",grun,gtotallumi);
if (debug) printf("Adding to totaltotal lumis for last run\n");
gtotaltotallumi+=gtotallumi; gtotaltotallumiprescaled+=gtotallumiprescaled;
fclose(fp);
// NOTE(review): runmap.size() is a size_t printed with %d; also 'line' is
// never deleted -- harmless in a one-shot ROOT macro but worth fixing.
printf("- %d runs, gtotaltotallumi=%f, gtotaltotallumiprescaled=%f\n",runmap.size(),gtotaltotallumi,gtotaltotallumiprescaled);
if (runmap.size()<1) {printf("- runmap size is %d, quitting!\n",runmap.size()); return;}
//return;
//check the total lumi...
// cross-check: per-run luminosities must sum to the grand total (0.1% tol)
double tempr=0;
for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
tempr += r->second.intlumi;
}
if (fabs(tempr-gtotaltotallumi)/tempr>0.001){
printf("tempr=%f and gtotaltotallumi=%f\n",tempr,gtotaltotallumi);
assert(false);
}
for (int e=0;e<nwanted;++e){
//pick a random run, proportional to intlumi
double rnd = myrand.Uniform(tempr);
run=-1;
for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
if (rnd<r->second.intlumi) {run=r->first; break;}
rnd -= r->second.intlumi;
}
assert(run>=0);
//check the total run lumi...
double tempt=0;
for (std::map<int,lbn_info>::const_iterator l=runmap[run].lbnmap.begin(); l!=runmap[run].lbnmap.end(); ++l){
tempt += l->second.intlumi;
}
if (fabs(tempt-runmap[run].intlumi)/tempt>0.001){
printf("tempt=%f and runmap[%d].intlumi=%f\n",tempt,run,runmap[run].intlumi);
assert(false);
}
//pick a random lbn, proportional to intlumi
rnd = myrand.Uniform(tempt);
lbn=-1;
for (std::map<int,lbn_info>::const_iterator l=runmap[run].lbnmap.begin(); l!=runmap[run].lbnmap.end(); ++l){
if (rnd<l->second.intlumi) {lbn=l->first; break;}
rnd -= l->second.intlumi;
}
assert(lbn>=0);
runmap[run].nevtwanted++; runmap[run].lbnmap[lbn].nevtwanted++;
if (debug) printf("- stream %d, run %d, lbn %d, choose %d of out %d\n",stream,run,lbn,runmap[run].lbnmap[lbn].nevtwanted,runmap[run].lbnmap[lbn].nevt);
}//loop over nwanted
// final report: per-run and per-LB selections, with consistency checks
for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
int totnevt=0; float totintlumi=0;
printf("stream %d, run %d, has %d events and %f/ub, %f intlumi of total, and %d wanted\n", stream,
r->first,r->second.nevt,r->second.intlumi,r->second.intlumi/gtotaltotallumi,r->second.nevtwanted);
for (std::map<int,lbn_info>::const_iterator l=r->second.lbnmap.begin(); l!=r->second.lbnmap.end(); ++l){
fprintf(outfile,"stream %d, run %d, lbn %d, has %d events and %f/ub, %f intlumi of run, and %d wanted", stream, r->first,
l->first,l->second.nevt,l->second.intlumi,l->second.intlumi/r->second.intlumi,l->second.nevtwanted);
if (l->second.nevtwanted>l->second.nevt) fprintf(outfile," : WARNING, more than available, will be duplicates!\n"); else fprintf(outfile,"\n");
totnevt+=l->second.nevt; totintlumi+=l->second.intlumi;
}
if (totnevt!=r->second.nevt) printf(" XXX events do not agree !!! \n");
if (totintlumi>0.0 && fabs(totintlumi - r->second.intlumi)/totintlumi>.001) printf(" XXX intlumi does not agree !!! %f %f \n",totintlumi,r->second.intlumi);
}
printf("--\n\n");
}
#!/bin/bash
# Build the zero-bias lumiblock map:
#  1. fetch the Atlas-Ready good-runs list (GRL),
#  2. run iLumiCalc to get per-LB luminosity for the L1_ZB trigger,
#  3. query the SFO DB for per-LB event counts of each run in the GRL,
#  4. run the ROOT macro that samples events proportional to luminosity.
#check the bit that L1_ZB is on
#cool_read_lvl1rates.py --item L1_ZB --run $run --printSummary --printNumbers --enabledItems
#use GRL instead, see http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/
#wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/data11_7TeV.periodAllYear_DetStatus-v35-pro09-03_CoolRunQuery-00-04-00_Atlas_Ready.xml
wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/StandardGRL/Atlas_Ready_v1/data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml
#See https://atlas-datasummary.cern.ch/lumicalc/, https://atlas-lumicalc.cern.ch/
#iLumiCalc.exe -t L1_ZB -V -x data/MyLBCollection.xml --lumitag=OflLumi-7TeV-000 --livetrigger=L1_EM14 >lbn
iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml --lumitag=OflLumi-8TeV-002 --livetrigger=L1_EM30 > lbn
grep Total lbn
#get info on number of events per lumi block
rm lbnevents*
# extract the run list from the GRL-reader debug output (stripping ANSI
# colour codes), then write one lbnevents_<run>.txt file per run
for runn in `grep "TGoodRunsListReader : Metadata value:" lbn | grep -v "find run "|head -1|sed -e "s%--- <DEBUG> TGoodRunsListReader : Metadata value: %%g" | sed 's/[\x01-\x1F\x7F]//g' | sed "s/\[34m//g" | sed "s/\[0m//g" | sed "s/,/ /g" ` ; do
echo "Getting event info for run $runn "
python GetNEventsLB.py -r $runn -s physics_ZeroBiasOverlay |grep "... Run" > lbnevents_${runn}.txt
done
#make lbn map
root -l -b -q run_lbn_analyze.C > log_lbn_analyze.txt
# summary counts parsed out of the map file written by the macro
echo -n "Total events in dataset: "
grep "stream 1," lbn_anal_map.txt |cut -d ' ' -f 8 |awk '{total = total + $1}END{print total}'
echo -n "Selected events per stream: "
grep "stream 8," lbn_anal_map.txt |cut -d ' ' -f 17 |awk '{total = total + $1}END{print total}'
/*
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
// Driver macro: compile lbn_analyze.C and run the sampler once per
// zero-bias output stream.
{
gROOT->ProcessLine(".L lbn_analyze.C+");
for (int i=0; i<50; ++i){//number of zerobias streams to make
//gSystem->Sleep(2000);//to get new random number seed
lbn_analyze(i,50000);//number of events per zerobias stream
}
}
#!/usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
__doc__ = """Filter BS data based on trigger bit"""
from PyJobTransformsCore.trf import *
from PyJobTransformsCore.full_trfarg import *
from PyJobTransformsCore.trfutil import *
from PyJobTransformsCore.TransformConfig import *
import PyJobTransformsCore.basic_trfarg as trfarg
from RecJobTransforms.RecConfig import recConfig
class OutputBSFilterFileArg(StringArg):
    """The output file prefix"""

    def __init__(self, help='default', name='OutputBSFilterFile'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        # A full argument may appear on the command line in its own right.
        return True
class EventIdFileArg(StringArg):
    """The output text file for EventIdModifierSvc lines"""

    def __init__(self, help='default', name='EventIdFile'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class FilterFileArg(StringArg):
    """The name of the text file for filtering (HI) events"""

    def __init__(self, help='default', name='FilterFile'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class TriggerBitArg(IntegerArg):
    """The trigger bit to select"""

    def __init__(self, help='default', name='TriggerBit'):
        IntegerArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class PostIncludeArg(JobOptionsArg):
    """Joboptions file with user settings, to run after the job itself"""

    def __init__(self, help='default', package='', name='default'):
        # Accept either a list of packages or a comma-separated string.
        # isinstance is the idiomatic (and subclass-safe) type check.
        if isinstance(package, str):
            package = package.split(',')
        # Always search 'EventOverlayJobTransforms' too, which carries the
        # common postInclude configuration files.
        commonPack = 'EventOverlayJobTransforms'
        if commonPack not in package:
            package.append(commonPack)
        JobOptionsArg.__init__(self, help=help, package=package, name=name)
        self.__config = None

    def isFullArgument(self):
        return True
class BSFilterJobTransform( JobTransform ):
    """Transform that filters a bytestream file on a trigger bit."""

    def __init__(self):
        JobTransform.__init__(self,
                              authors = [ Author('Andrew Haas', 'ahaas@cern.ch') ] ,
                              skeleton='EventOverlayJobTransforms/skeleton.BSFilter.py' ,
                              help = __doc__,
                              config = recConfig )
        # Arguments accepted by the transform.
        self.add( InputBSFileArg() )
        self.add( OutputBSFilterFileArg() )
        self.add( EventIdFileArg(), default="" )
        self.add( FilterFileArg(), default="" )
        self.add( TriggerBitArg(), default=-1 )
        self.add( MaxEventsArg(), default=-1 )
        self.add( SkipEventsArg(), default=0 )
        self.add( PostIncludeArg(), default='NONE' )
        # Extra features.
        self.add( SQLiteSupport() )
# execute it if not imported
if __name__ == '__main__':
    # Run the transform on the command-line arguments when invoked directly.
    trf = BSFilterJobTransform()
    sys.exit(trf.exeSysArgs().exitCode())
#!/usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
__doc__ = """Wrapper for BSFilter for ZeroBias overlay"""
import sys
import re
import os, commands
#ListOfDefaultPositionalKeys = ['inputEvgenFile','EventIdFile','OutputBSFilterFile','PostInclude','TriggerBit','ZeroBiasFile','outputRAWFile']
# Keys recognised by the production system for this transform wrapper.
ListOfDefaultPositionalKeys = ['inputEvgenFile','PostInclude','TriggerBit','pileupBSFile','outputEvgenFile']
# execute it if not imported
if __name__ == '__main__':
# Parse "key=value" command-line arguments into argMap.
argMap = {}
for tmpKeyVal in sys.argv[1:]:
try:
tmpMatch = re.search('^([^=]+)=(.+)$',tmpKeyVal)
if tmpMatch != None:
mapKey = tmpMatch.group(1)
mapVal = tmpMatch.group(2)
# NOTE(review): 'ipileupBSFile' looks like a typo for 'pileupBSFile' --
# as written this list branch can never match the declared keys; confirm.
if mapKey in ['ipileupBSFile']:
# convert to list
argMap[mapKey] = mapVal.split(',')
else:
# convert to int
# NOTE(review): eval on a command-line value is unsafe for untrusted
# input; int()/ast.literal_eval would be the safe equivalent.
try:
argMap[mapKey] = eval(mapVal)
except:
# use string
argMap[mapKey] = mapVal
except:
pass
print "arguments : " + str(sys.argv[1:])
print "arg map : " + str(argMap)
#print "zeroBiasFileName", argMap['ZeroBiasFile']
zeroBiasFileName=argMap['pileupBSFile']
triggerBit=argMap['TriggerBit']
# execute original trf
# EventIdFile=events.txt
com = "BSFilter_trf.py inputBSFile=%s EventIdFile=events.txt OutputBSFilterFile=simpledummy.data.RAW TriggerBit=%d skipEvents=0 maxEvents=100" % (zeroBiasFileName,triggerBit)
retStat = os.system(com)
# Pass the evgen file through unchanged under the requested output name.
inputEvgenFile=argMap['inputEvgenFile']
outputEvgenFile=argMap['outputEvgenFile']
try:
print "renaming %s to %s" % (inputEvgenFile, outputEvgenFile)
os.rename(inputEvgenFile, outputEvgenFile)
except:
pass
# NOTE(review): os.system returns a wait status, not an exit code; the
# usual reduction is (retStat >> 8) or (retStat & 255) -- confirm '% 255'
# is intentional here.
sys.exit(retStat % 255)
#!/usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
__doc__ = """Wrapper for BSFilter for ZeroBias overlay on Heavy Ions"""
import sys
import re
import os, commands
# Keys recognised by the production system for this transform wrapper.
ListOfDefaultPositionalKeys = ['inputEvgenFile','PostInclude','TriggerBit','pileupBSFile','extraBSFile','outputEvgenFile','filterFile']
# execute it if not imported
if __name__ == '__main__':
# Parse "key=value" command-line arguments into argMap.
argMap = {}
for tmpKeyVal in sys.argv[1:]:
try:
tmpMatch = re.search('^([^=]+)=(.+)$',tmpKeyVal)
if tmpMatch != None:
mapKey = tmpMatch.group(1)
mapVal = tmpMatch.group(2)
# NOTE(review): 'ipileupBSFile' looks like a typo for 'pileupBSFile' --
# as written this list branch can never match the declared keys; confirm.
if mapKey in ['ipileupBSFile']:
# convert to list
argMap[mapKey] = mapVal.split(',')
else:
# convert to int
# NOTE(review): eval on a command-line value is unsafe for untrusted
# input; int()/ast.literal_eval would be the safe equivalent.
try:
argMap[mapKey] = eval(mapVal)
except:
# use string
argMap[mapKey] = mapVal
except:
pass
print "arguments : " + str(sys.argv[1:])
print "arg map : " + str(argMap)
zeroBiasFileName=argMap['pileupBSFile']
triggerBit=argMap['TriggerBit']
#unpack the tarball with vtx and filter text files.
extraBSTarball=argMap['extraBSFile']
comt="tar xvzf %s" %(extraBSTarball)
retStat = os.system(comt)
# NOTE(review): os.system returns a wait status; '% 255' is an unusual
# reduction -- (retStat >> 8) or (retStat & 255) is customary. Confirm.
if retStat:
sys.exit(retStat % 255)
#BSFilter_trf.py inputBSFile=HIMinBiasOverlay.RAW OutputBSFilterFile=simplefiltered.RAW EventIdFile=events_orig.txt filterfile=filter.txt ; BSFilter_trf.py inputBSFile=simplefiltered.RAW OutputBSFilterFile=simpledummy.RAW EventIdFile=events.txt TriggerBit=5 skipEvents=0 maxEvents=100 ;
# execute original trf
# EventIdFile=events.txt
# Step 1: filter the HI zero-bias file on the event list from the tarball.
com1 = "BSFilter_trf.py inputBSFile=%s EventIdFile=events_orig.txt OutputBSFilterFile=simplefiltered.data.RAW filterfile=%s" % (zeroBiasFileName,argMap['filterFile'])
retStat = os.system(com1)
if retStat:
sys.exit(retStat % 255)
# Step 2: select the trigger bit from the filtered file.
com2 = "BSFilter_trf.py inputBSFile=simplefiltered.data.RAW EventIdFile=events.txt OutputBSFilterFile=simpledummy.data.RAW TriggerBit=%d skipEvents=0 maxevents=100" % (triggerBit)
retStat = os.system(com2)
if retStat:
sys.exit(retStat % 255)
# Pass the evgen file through unchanged under the requested output name.
inputEvgenFile=argMap['inputEvgenFile']
outputEvgenFile=argMap['outputEvgenFile']
try:
print "renaming %s to %s" % (inputEvgenFile, outputEvgenFile)
os.rename(inputEvgenFile, outputEvgenFile)
except:
pass
sys.exit(retStat % 255)
#!/usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
__doc__ = """Merge two G4 HITS files into one file, adding the HITS from event 1 in one file to those from event 1 in the other file, 2 to 2, etc."""
from PyJobTransformsCore.trf import *
from PyJobTransformsCore.full_trfarg import *
from PyJobTransformsCore.trfutil import *
from PyJobTransformsCore.TransformConfig import *
import PyJobTransformsCore.basic_trfarg as trfarg
from RecJobTransforms.RecConfig import recConfig
class InputHITS1FileArg(StringArg):
    """The first input HITS file"""

    def __init__(self, help='default', name='inputHITS1File'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class InputHITS2FileArg(StringArg):
    """The second input HITS file"""

    def __init__(self, help='default', name='inputHITS2File'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class OutputHITSFileArg(StringArg):
    """The output HITS file"""

    def __init__(self, help='default', name='outputHITSFile'):
        StringArg.__init__(self, help, name)

    def isFullArgument(self):
        return True
class PostIncludeArg(JobOptionsArg):
    """Joboptions file with user settings, to run after the job itself"""

    def __init__(self, help='default', package='', name='default'):
        # Accept either a list of packages or a comma-separated string.
        # isinstance is the idiomatic (and subclass-safe) type check.
        if isinstance(package, str):
            package = package.split(',')
        # Always search 'EventOverlayJobTransforms' too, which carries the
        # common postInclude configuration files.
        commonPack = 'EventOverlayJobTransforms'
        if commonPack not in package:
            package.append(commonPack)
        JobOptionsArg.__init__(self, help=help, package=package, name=name)
        self.__config = None

    def isFullArgument(self):
        return True
class G4HitMergeJobTransform( JobTransform ):
    """Transform that merges two HITS files event-by-event."""

    def __init__(self):
        JobTransform.__init__(self,
                              authors = [ Author('Andrew Haas', 'ahaas@cern.ch'), Author('William Lockman','William.Lockman@cern.ch') ] ,
                              skeleton='EventOverlayJobTransforms/skeleton.G4HitMerge.py' ,
                              help = __doc__,
                              config = recConfig )
        # Arguments accepted by the transform.
        self.add( InputHITS1FileArg() )
        self.add( InputHITS2FileArg() )
        self.add( OutputHITSFileArg() )
        self.add( MaxEventsArg() )
        self.add( SkipEventsArg() )
        self.add( GeometryVersionArg() )
        self.add( DBReleaseArg(), default='NONE' )
        self.add( ConditionsTagArg(), default='NONE' )
        self.add( PostIncludeArg(), default='NONE' )
        # Extra features.
        self.add( SQLiteSupport() )
# execute it if not imported
if __name__ == '__main__':
    # Run the transform on the command-line arguments when invoked directly.
    trf = G4HitMergeJobTransform()
    sys.exit(trf.exeSysArgs().exitCode())
/*
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
#define HITAGprinter_cxx
#include "HITAGprinter.h"
#include <TH2.h>
#include <TStyle.h>
#include <TCanvas.h>
#include <map>
// Map of run -> (event -> marker) used to skip duplicate run/event pairs;
// the value 7744 marks an event as already written.
std::map< int , std::map< int, int> > runeventmap;

// Loop over the TAG entries (standard TTree::MakeClass-style loop; see
// HITAGprinter.h), writing per-event vertex info to "HI_vtx.txt" and
// filter info to "HI_filter.txt", and report how many events pass the
// minimum-bias selection (L1 bit 29 set, exactly one vertex, |MBTS dt|<3).
void HITAGprinter::Loop()
{
if (fChain == 0) return;
FILE *vfile=fopen("HI_vtx.txt","w");
FILE *ffile=fopen("HI_filter.txt","w");
Long64_t nentries = fChain->GetEntriesFast();
Long64_t nbytes = 0, nb = 0, passed=0,total=0,duplicate=0;
for (Long64_t jentry=0; jentry<nentries;jentry++) {
Long64_t ientry = LoadTree(jentry);
if (ientry < 0) break;
MBTSTimeDiff=0;//default in case it's not there
L1PassedTrigMaskTBP5 = 1<<29;//otherwise no events pass
nb = fChain->GetEntry(jentry); nbytes += nb;
// if (Cut(ientry) < 0) continue;
//run event vx vy vz
// events without a reconstructed vertex get (0,0,0)
if (NVtx<1) {
//printf("Warning: %d %d NVtx=%d, setting to 0 VtxX,VtxY,VtxZ = %f %f %f\n",RunNumber,EventNumber,NVtx,VtxX,VtxY,VtxZ);
VtxX=0;VtxY=0;VtxZ=0;
}
//check if we already have this event
if (runeventmap[RunNumber][EventNumber]==7744) {
++duplicate;
continue;
}
runeventmap[RunNumber][EventNumber]=7744;
fprintf(vfile,"%d %d %f %f %f\n",RunNumber,EventNumber,VtxX,VtxY,VtxZ);
fprintf(ffile,"%d %d %d %d %f\n",RunNumber,EventNumber,(L1PassedTrigMaskTBP5>>29)&0x1,NVtx,MBTSTimeDiff);
++total;
// BUGFIX: '==' binds tighter than '&', so the original expression parsed
// as (mask>>29) & (0x1==1); make the intended grouping explicit.
if ( ((L1PassedTrigMaskTBP5>>29)&0x1)==1 && NVtx==1 && fabs(MBTSTimeDiff)<3 ) ++passed;
}//Loop over events
// BUGFIX: the counters are Long64_t, so printing them with %d was
// undefined behaviour; use %lld.
printf("passed %lld out of %lld events, with %lld duplicates skipped\n",passed,total,duplicate);
fclose(vfile);
fclose(ffile);
}//Loop()
This diff is collapsed.
/*
Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
// Driver macro: chain all TAG files from the local HITAG area, load the
// HITAGprinter class and run its event loop.
{
TChain c("POOLCollectionTree");
int n=0;
n=c.Add("/datadisk1/temp2/HITAG/*"); if (n<1) {printf("Added %d files!\n",n); return;} else {printf("Added %d files.\n",n);}
gROOT->ProcessLine(".L HITAGprinter.C");
HITAGprinter t(&c);
t.Loop();
}
#!/bin/bash
# Run the HITAG printer macro over the linked TAG files, then (manually)
# rename and publish the resulting vertex/filter text files.
#mkdir /datadisk1/temp2/HITAG; cd /datadisk1/temp2/HITAG
#set +o noglob
#for f in ~/nfs3/zerobias_skim/data11_hi.*MinBiasOverlay*TAG*/*; do ln -s $f; done
#cd -
root -l -b -q HITAGprinter_run.C
#mkdir HI_files
#mv HI_filter.txt HI_files/data11_hi.metadata.physics_MinBiasOverlay.filter.txt
#mv HI_vtx.txt HI_files/data11_hi.metadata.physics_MinBiasOverlay.vtx.txt
#dq2-put -L CERN-PROD_SCRATCHDISK -s HI_files group.dataprep.data11_hi.metadata.physics_MinBiasOverlay.test15
#!/bin/bash
# Download the pilot tarballs of the merge jobs for one test/stream/outset,
# drop failed jobs, and summarise the event/file counts from the logs.
if [ $# -lt 3 ]
then
echo "Usage example: `basename $0` test stream outset <skipdq2>"
exit 65
fi
testn=$1
stream=$2
outset=$3
skipdq2=$4
mkdir -p ~/nfs3/zerobias_skim_${testn}/merge/
cd ~/nfs3/zerobias_skim_${testn}/merge/
# Optional 4th argument skips the (slow) dq2-get download step.
if [ "$skipdq2" != "" ]; then echo "skipping dq2"; else dq2-get -f "*tgz*" group.dataprep.BSmultipleselector.GRL.${testn}_EXT${stream}.merge.*.test${outset}/ ; fi
rm -rf tarball_PandaJob_*
for f in `ls group.dataprep.BSmultipleselector.GRL.${testn}_EXT${stream}.merge.*.test${outset}.*/*.tgz* `; do echo $f; tar xzf $f; done
# Keep only tarballs whose pilot log reports success.
for p in `ls -d tarball_PandaJob_* `; do
grep "Job successfully completed" ${p}/pilotlog.txt > /dev/null
if [ $? -eq 0 ]; then echo "Good job";
else
echo "Failed job";
rm -rf $p
fi
done
# Summaries parsed from the athena stdout of the surviving jobs.
echo -n "total events: "
grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | cut -d ':' -f 3 | awk '{total = total + $1}END{print total}'
echo -n "total files: "
grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | grep -c number
echo -n "files with 100 events: "
grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | grep -c "100"
echo -n "total jobs: "
ls tarball_PandaJob_*/athena_stdout.txt | grep -c athena
#!/bin/bash
# For each output container stream, list its datasets, find leftover
# "Zerobias" files and delete them from the dataset by GUID.
if [ $# -lt 3 ]
then
echo "Usage example: `basename $0` test (test12p4) stream (\"19 24 45\") outset (3) "
exit 65
fi
testn=$1
stream=$2
outset=$3
rm -f progress77.txt ; touch progress77.txt
for s in {0..9}; do echo "EXT${s}"
for d in `dq2-list-datasets-container group.dataprep.data12_hip.HImerge.physics_MinBiasOverlay_EXT${s}.test${outset}/`; do
#for s in ${stream}; do echo "EXT${s}"; #pass in the list of bad EXT numbers from above step, i.e. "10 42 44"
# for d in `dq2-list-datasets-container group.dataprep.BSmultipleselector.GRL.${testn}_EXT${s}.merge.test${outset}/`; do
# temp192.txt: files in this dataset matching "Zerobias"
dq2-ls -f -H $d |grep Zerobias > temp192.txt;
cat temp192.txt >> progress77.txt
# temp193.txt: only the entries that carry a size (i.e. real files)
grep "bytes" temp192.txt > temp193.txt
if [ $? -eq 0 ]; then echo "Dataset $d has "; cat temp193.txt;
echo
# third tab-separated field is the file GUID
cat temp193.txt | cut -f 3
echo
for gg in `cat temp193.txt | cut -f 3`; do
echo "Deleting file with guid $gg from dataset $d"
dq2-delete-files $d $gg
done
fi
done
rm temp*.txt
done
rm progress77.txt
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment