diff --git a/Tools/PyUtils/bin/checkMetaSG.py b/Tools/PyUtils/bin/checkMetaSG.py
new file mode 100755
index 0000000000000000000000000000000000000000..b50a4ec71797c42646c81e3620f8ac88e1ded103
--- /dev/null
+++ b/Tools/PyUtils/bin/checkMetaSG.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @file: checkMetaSG.py
+# @purpose: Print the metadata that would be accessible via the IOVMetaDataContainers of the MetaDataStore
+# @author: Will Buttinger <will@cern.ch>
+# @date: Oct 2014
+#
+# @example:
+# @code
+# checkMetaSG.py aod.pool.root
+# @endcode
+#
+
+__version__ = "$Revision: 621253 $"
+__author__ = "Will Buttinger <will@cern.ch>"
+
+import sys
+import os
+
+from optparse import OptionParser
+
+if __name__ == "__main__":
+
+    parser = OptionParser(usage="usage: %prog [options] [-f] my.file.pool")
+    parser.add_option( "-f",
+                       "--file",
+                       dest = "fileName",
+                       help = "The path to the POOL file to analyze" )
+    parser.add_option( "-o",
+                       "--output",
+                       dest = "outFileName",
+                       default = None,
+                       help = "Name of the output file which will contain the information gathered during checkMetaSG processing. This information will be stored into a python-shelve or an ASCII/py file (depending on the extension: .pkl,.dat -> shelve; everything else -> ASCII/py)" )
+
+    (options, args) = parser.parse_args()
+
+    fileNames = []
+
+    if len(args) > 0:
+        fileNames = [ arg for arg in args if arg[0] != "-" ]
+        pass
+
+    if options.fileName is None and len(fileNames) == 0:
+        str(parser.print_help() or "")
+        sys.exit(1)
+
+    if not (options.fileName is None):
+        fileName = os.path.expandvars(os.path.expanduser(options.fileName))
+        fileNames.append(fileName)
+
+    fileNames = set( fileNames )
+    sc = 0
+    for fileName in fileNames:
+        try:
+            from PyUtils import AthFile
+            print "## checking [%s]..."%fileName
+            metadata = AthFile.fopen(fileName).fileinfos['metadata']
+            print "="*91
+            print "%30s%-28s%-10s%-30s" % ("folder", " | key "," | type "," | value")
+            print "%30s%s%-25s%s%-7s%s%-30s" % ("-"*30, "-+-", "-"*(28-3),"-+-","-"*(10-3),"-+-","-"*(20))
+            for metaFolder,metaObj in metadata.items(): #metaObj may be dict, list (occurs with multi IOV), or none... so far only support dict FIXME
+                first=True
+                if isinstance(metaObj,dict):
+                    for metaKey,metaValue in metaObj.items():
+                        if first: print "%30s%s%-25s%s%-7s%s%-30s" % (metaFolder, " | ", metaKey," | ",type(metaValue).__name__," | ",metaValue) #print "%30s%s%-30s" % (metaFolder, " | ",metaKey+" = "+str(metaValue) )
+                        else: print "%30s%s%-25s%s%-7s%s%-30s" % ("", " | ", metaKey," | ",type(metaValue).__name__," | ",metaValue)
+                        first=False
+            print "="*91
+            if options.outFileName:
+                osp = os.path
+                outFileName = options.outFileName
+                outFileName = osp.expanduser(outFileName)
+                outFileName = osp.expandvars(outFileName)
+                print "## saving checkMetaSG report into [%s]..." % outFileName
+                if os.path.splitext(outFileName)[1] in ('.pkl', '.dat'):
+                    # we explicitly import 'bsddb' to try to always
+                    # get that particular backend for the shelve...
+                    import bsddb
+                    import shelve
+                    if os.path.exists(outFileName):
+                        os.remove(outFileName)
+                    db = shelve.open(outFileName)
+                    db['metadata'] = metadata # save the gathered metadata dict
+                    db.close()
+        except Exception, e:
+            print "## Caught exception [%s] !!" % str(e.__class__)
+            print "## What:",e
+            print sys.exc_info()[0]
+            print sys.exc_info()[1]
+            sc = 1
+            pass
+
+        except :
+            print "## Caught something !! 
(don't know what)" + print sys.exc_info()[0] + print sys.exc_info()[1] + sc = 10 + pass + if len(fileNames) > 1: + print "" + pass # loop over fileNames + + print "## Bye." + sys.exit(sc) diff --git a/Tools/PyUtils/bin/checkxAOD.py b/Tools/PyUtils/bin/checkxAOD.py index 870d19a1894c75eef6150ec56485cf5a33783c27..2a2bcb014d60d28a10d850e4270c3a3832a03bfe 100755 --- a/Tools/PyUtils/bin/checkxAOD.py +++ b/Tools/PyUtils/bin/checkxAOD.py @@ -2,14 +2,14 @@ # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration # -# $Id: checkxAOD.py 592348 2014-04-10 12:06:41Z krasznaa $ +# $Id: checkxAOD.py 619905 2014-10-03 16:11:12Z krasznaa $ # # This is a modified version of PyUtils/bin/checkFile.py. It has been taught # how to sum up the sizes of all the branches belonging to a single xAOD # object/container. # -__version__ = "$Revision: 592348 $" +__version__ = "$Revision: 619905 $" __author__ = "Sebastien Binet <binet@cern.ch>, " \ "Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>" @@ -27,6 +27,10 @@ if __name__ == "__main__": "--file", dest = "fileName", help = "The path to the POOL file to analyze" ) + p( "-c", + "--csv", + dest = "csvFileName", + help = "Output CSV file name, to use with spreadsheets" ) ( options, args ) = parser.parse_args() fileNames = [] @@ -46,6 +50,12 @@ if __name__ == "__main__": fileNames = set( fileNames ) + # Check the consistency with the CSV output: + if len( fileNames ) > 1 and options.csvFileName: + print( "WARNING CSV output is only available when processing a single " + "input file" ) + pass + # Loop over the specified file(s): for fileName in fileNames: @@ -159,6 +169,27 @@ if __name__ == "__main__": ( memSize, diskSize, "Total" ) ) print( "=" * 80 ) + # Write out a CSV file if one was requested: + if options.csvFileName and ( len( fileNames ) == 1 ): + # Open the output file: + import csv + with open( options.csvFileName, "wb" ) as f: + writer = csv.writer( f ) + # Set up the formatting of the file: + writer.writerow( [ "Name (Type)", "Size/Evt" ] ) + # Write all entries to it: + for d in orderedData: + # Skip metadata items: + if d.nEntries != poolFile.dataHeader.nEntries: continue + # Construct the name of the entry: + nameType = "%s (%s)" % \ + ( d.name, ttree.GetBranch( d.name ).GetClassName() ) + # Write the entry: + writer.writerow( [ nameType, d.diskSize / d.nEntries ] ) + pass + pass + pass + if len(fileNames) > 1: print "" pass # loop over fileNames diff --git a/Tools/PyUtils/bin/dumpAthfilelite.py b/Tools/PyUtils/bin/dumpAthfilelite.py new file mode 100755 index 0000000000000000000000000000000000000000..29d228d9fe47c25d722373831fead0cdadc61b3e --- /dev/null +++ b/Tools/PyUtils/bin/dumpAthfilelite.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python + +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# +## Simple wrapper to invoke AthFileLite metadata grabber and +# produce AthFile-like text output +# +# $Id: dumpAthfilelite.py 618684 2014-09-26 11:46:14Z graemes $ + +import argparse +import os +import pprint +import sys + +import PyUtils.AthFileLite as AthFileLite + +def main(): + parser = argparse.ArgumentParser(description="Use AthFileLite interface to retrieve file metadata. 
" + "Note that the '--type' argument is mandatory as AthFileLite will " + "not even try to guess the type of file.") + parser.add_argument('--type', '-t', metavar='FILETYPE', + help="Specifie filetype: POOL, BS or TAG", required=True, + choices=("POOL", "BS", "TAG")) + parser.add_argument('input_files', nargs="+", help="Input files") + + args = vars(parser.parse_args(sys.argv[1:])) + + for filename in args['input_files']: + if args["type"] == "POOL": + afl = AthFileLite.AthPoolFile(filename) + elif args["type"] == "BS": + afl = AthFileLite.AthBSFile(filename) + elif args["type"] == "TAG": + afl = AthFileLite.AthTagFile(filename) + + metadata = afl.fileinfo + + print "="*80 + print filename + print "="*80 + pprint.pprint(metadata) + print "="*80 + +if __name__ == "__main__": + main() diff --git a/Tools/PyUtils/cmt/requirements b/Tools/PyUtils/cmt/requirements index 181a8bc047d670665cdac1195fa97fa4bee8ec00..bc16bca082862d7d41ba4362274651016f302424 100755 --- a/Tools/PyUtils/cmt/requirements +++ b/Tools/PyUtils/cmt/requirements @@ -18,6 +18,7 @@ alias checkxAOD checkxAOD.py alias diffPoolFiles diffPoolFiles.py alias merge-poolfiles merge-poolfiles.py alias checkTag checkTag.py +alias checkMetaSG checkMetaSG.py alias setupWorkArea setupWorkArea.py alias pyroot pyroot.py alias print_auditor_callgraph print_auditor_callgraph.py @@ -53,6 +54,7 @@ apply_pattern declare_scripts files="\ checkFile.py \ checkPlugins.py \ checkSG.py \ + checkMetaSG.py \ checkTP.py \ checkTag.py \ checkxAOD.py \ @@ -65,7 +67,8 @@ apply_pattern declare_scripts files="\ dlldep.py \ dso-stats.py \ dump-athfile.py \ - filter-and-merge-d3pd.py \ + dumpAthfilelite.py \ + filter-and-merge-d3pd.py \ gen-typereg-dso.py \ gen_klass.py \ get-tag-diff.py \ diff --git a/Tools/PyUtils/python/AmiLib.py b/Tools/PyUtils/python/AmiLib.py index bbaa2e3efa016cac05ae532ec37f82762e74b2ef..5e548806a5e914b0a4f69617d34bf3d2e381a389 100644 --- a/Tools/PyUtils/python/AmiLib.py +++ b/Tools/PyUtils/python/AmiLib.py @@ -1,162 +1,196 @@ # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -# @file PyUtils.AmiLib -# @purpose a set of functions and helpers to talk to the TagCollector - -__version__ = "$Revision: 538932 $" -__author__ = "Sebastien Binet" -__doc__ = "a set of functions and helpers to talk to AMI and the TagCollector" +## @file PyUtils.AmiLib +# @brief a set of functions and helpers to talk to the TagCollector +# @version $Id$ +# @author Originally Sebastien Binet, substantial changes by Graeme Stewart __all__ = [ - 'ami_todict', 'Client', + 'PyUtilsAMIException' ] -if 0: - import xml.etree.cElementTree as ET - from pyAMI.pyAMI import * - amiclient = AMI(certAuth = True) - - import PyUtils.xmldict as _x - - clients_cmd = 'TCListPackageVersionClient -processingStep=production -project=TagCollector -groupName=AtlasOffline -releaseName=17.0.1 -fullPackageName=/AtlasTest/AthenaMPTest -repositoryName=AtlasOfflineRepository'.split(' ') - - rec_cmd = 'TCFormGetDependencyPackageVersionTree -expandedPackageID="*" -expandedTopContainerPackage="*" -groupName="AtlasProduction" -processingStep="production" -project="TagCollector" -releaseName="15.7.0"'.replace('"','').split(' ') - res = amiclient.execute(rec_cmd) - dd = _x.xml2dict(ET.fromstring(res.output('xml'))) - dd['AMIMessage']['Result']['tree'] - - - # all the leaf packages in AtlasProduction and its dependencies - cmd = """ - TCFormGetDependencyPackageVersionTree -expandedPackageID=* - -expandedTopContainerPackage=* -groupName=AtlasProduction - -processingStep=production 
-project=TagCollector -releaseName=15.7.0 - """.replace("\n","").split() - res = amiclient.execute(cmd) - d = _x.xml2dict(ET.fromstring(res.output('xml'))) - - # only the leaf packages in groupName="AtlasProduction" - cmd = """ - TCFormGetPackageVersionTree -expandedPackageID='*' - -expandedTopContainerPackage='*' -groupName='AtlasProduction' - -processingStep='production' -project='TagCollector' -releaseName='15.7.0' - """.replace("\n","").split() - res = amiclient.execute(cmd) - d = _x.xml2dict(ET.fromstring(res.output('xml'))) - - - - # all the leaf packages in AtlasCore and its dependencies - cmd = """ - TCFormGetDependencyPackageVersionTree - -expandedPackageID=* - -expandedTopContainerPackage=* - -groupName=AtlasCore - -processingStep=production - -project=TagCollector - -releaseName=15.7.0 - """.replace("\n","").split() - res = amiclient.execute(cmd) - d = _x.xml2dict(ET.fromstring(res.output('xml'))) - - # ami dataset: - cmd = """ - GetDatasetInfo - -logicalDatasetName=data09_900GeV.00142404.physics_RNDM.merge.AOD.f193_m320 - """.replace("\n","").split() - res = amiclient.execute(cmt) - d = _x.xml2dict(ET.fromstring(res.output('xml'))) - - """ - [amiCommand] - -logicalFileName=data09_1beam.00140536.physics_L1Calo.merge.HIST.f170_m255._0001.1 - - [amiCommand] GetDatasetInfo - -logicalDatasetName=mc08.105609.Pythia_Zprime_tt2000.merge.AOD.e393_s462_s520_r635_t53 - - amiCommand=["GetDatasetInfo","logicalDatasetName=adatasetname"] - result=amiclient.execute(amiCommand) - - - or - amiCommand=["GetDatasetInfo","logicalFileName=aFilename"] - result=amiclient.execute(amiCommand) - """ - def dsinfo(n): - import PyUtils.AmiLib as A - import xml.etree.cElementTree as ET - import PyUtils.xmldict as _x - c = A.Client() - try: - res = c.exec_cmd(cmd="GetDatasetInfo", logicalFileName=n) - dd = _x.xml2dict(ET.fromstring(res.output('xml'))) - return dd['AMIMessage']['Result'] - except PyAmi.AMI_Error: - # maybe a logical dataset name ? - res = c.exec_cmd(cmd="GetDatasetInfo", logicalDatasetName=n) - dd = _x.xml2dict(ET.fromstring(res.output('xml'))) - return dd['AMIMessage']['Result'] - +# Not sure what's happening here - some testing that's off by default? 
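+# For reference, the pyAMI calls used by the rewritten module follow this
+# pattern - a minimal sketch built only from calls that appear elsewhere in
+# this patch (the command name and argument values are illustrative):
+#
+#     import json
+#     import pyAMI.client as PyAmi
+#
+#     amiclient = PyAmi.Client('atlas')
+#     result = json.loads(amiclient.execute(
+#         ['TCGetPackageVersionHistory',
+#          '-fullPackageName=/Tools/PyUtils',
+#          '-releaseName=19.0.0'], format='json'))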
+# if 0: +# import xml.etree.cElementTree as ET +# import pyAMI.client as PyAmi +# amiclient = pyAmi.Client('atlas') +# +# import PyUtils.xmldict as _x +# +# clients_cmd = 'TCListPackageVersionClient -processingStep=production -project=TagCollector -groupName=AtlasOffline -releaseName=17.0.1 -fullPackageName=/AtlasTest/AthenaMPTest -repositoryName=AtlasOfflineRepository'.split(' ') +# +# rec_cmd = 'TCFormGetDependencyPackageVersionTree -expandedPackageID="*" -expandedTopContainerPackage="*" -groupName="AtlasProduction" -processingStep="production" -project="TagCollector" -releaseName="15.7.0"'.replace('"','').split(' ') +# res = amiclient.execute(rec_cmd) +# dd = _x.xml2dict(ET.fromstring(res.output('xml'))) +# dd['AMIMessage']['Result']['tree'] +# +# +# # all the leaf packages in AtlasProduction and its dependencies +# cmd = """ +# TCFormGetDependencyPackageVersionTree -expandedPackageID=* +# -expandedTopContainerPackage=* -groupName=AtlasProduction +# -processingStep=production -project=TagCollector -releaseName=15.7.0 +# """.replace("\n","").split() +# res = amiclient.execute(cmd) +# d = _x.xml2dict(ET.fromstring(res.output('xml'))) +# +# # only the leaf packages in groupName="AtlasProduction" +# cmd = """ +# TCFormGetPackageVersionTree -expandedPackageID='*' +# -expandedTopContainerPackage='*' -groupName='AtlasProduction' +# -processingStep='production' -project='TagCollector' -releaseName='15.7.0' +# """.replace("\n","").split() +# res = amiclient.execute(cmd) +# d = _x.xml2dict(ET.fromstring(res.output('xml'))) +# +# +# +# # all the leaf packages in AtlasCore and its dependencies +# cmd = """ +# TCFormGetDependencyPackageVersionTree +# -expandedPackageID=* +# -expandedTopContainerPackage=* +# -groupName=AtlasCore +# -processingStep=production +# -project=TagCollector +# -releaseName=15.7.0 +# """.replace("\n","").split() +# res = amiclient.execute(cmd) +# d = _x.xml2dict(ET.fromstring(res.output('xml'))) +# +# # ami dataset: +# cmd = """ +# GetDatasetInfo +# -logicalDatasetName=data09_900GeV.00142404.physics_RNDM.merge.AOD.f193_m320 +# """.replace("\n","").split() +# res = amiclient.execute(cmt) +# d = _x.xml2dict(ET.fromstring(res.output('xml'))) +# +# """ +# [amiCommand] +# -logicalFileName=data09_1beam.00140536.physics_L1Calo.merge.HIST.f170_m255._0001.1 +# +# [amiCommand] GetDatasetInfo +# -logicalDatasetName=mc08.105609.Pythia_Zprime_tt2000.merge.AOD.e393_s462_s520_r635_t53 +# +# amiCommand=["GetDatasetInfo","logicalDatasetName=adatasetname"] +# result=amiclient.execute(amiCommand) +# +# +# or +# amiCommand=["GetDatasetInfo","logicalFileName=aFilename"] +# result=amiclient.execute(amiCommand) +# """ +# def dsinfo(n): +# import PyUtils.AmiLib as A +# import xml.etree.cElementTree as ET +# import PyUtils.xmldict as _x +# c = A.Client() +# try: +# res = c.exec_cmd(cmd="GetDatasetInfo", logicalFileName=n) +# dd = _x.xml2dict(ET.fromstring(res.output('xml'))) +# return dd['AMIMessage']['Result'] +# except PyAmi.AMI_Error: +# # maybe a logical dataset name ? 
+#            res = c.exec_cmd(cmd="GetDatasetInfo", logicalDatasetName=n)
+#            dd = _x.xml2dict(ET.fromstring(res.output('xml')))
+#            return dd['AMIMessage']['Result']
+#

 ### imports -------------------------------------------------------------------
+import json
 import os
+import pprint
 import sys

 import pyAMI.client as PyAmi
-import pyAMI.auth as PyAmiAuth
+import pyAMI.exception

 from PyUtils.xmldict import xml2dict

 ### globals -------------------------------------------------------------------

-### functions -----------------------------------------------------------------
-def ami_todict(res):
-    return res.to_dict()
-
+### functions -----------------------------------------------------------------
 def xmlstr_todict(s):
     import PyUtils.xmldict as _x
     import xml.etree.cElementTree as ET
     return _x.xml2dict(ET.fromstring(s))
+
+## @brief Do some basic checks on an AMI JSON result
+# @return bool, errmsg tuple
+def badresult(result, checkForValidRows=True, expectCommandStatus=True):
+    try:
+        if type(result) is not dict:
+            return True, "Result is not a dictionary"
+        if expectCommandStatus and 'commandStatus' not in result['AMIMessage'][0]:
+            return True, "No results found (command probably didn't execute - do you have valid AMI authentication?)"
+        if expectCommandStatus and result['AMIMessage'][0]['commandStatus'][0]['$'] != "successful":
+            return True, "Command execution appears to have failed"
+        if checkForValidRows:
+            if 'Result' not in result['AMIMessage'][0]:
+                return True, "No result found in query"
+            if len(result['AMIMessage'][0]['Result']) == 0:
+                return True, "No results found from query"
+            if 'rowset' not in result['AMIMessage'][0]['Result'][0]:
+                return True, "No rowsets found from query"
+            if 'row' not in result['AMIMessage'][0]['Result'][0]['rowset'][0]:
+                return True, "No rows found in rowset"
+    except Exception, e:
+        print >>sys.stderr, "Unexpected exception when querying result '{0}': {1}".format(pprint.pformat(result), e)
+        return True, "Unexpected result structure - unable to interpret the AMI response"
+    return False, ""
+
+## @brief Convert the AMI result rows into a list of dictionaries
+# @param take_rowsets List of rowsets to use:
+#        None=use all
+#        string=take rowset if type matches the string
+# @note The rowset type is added to each element dictionary
+def amijsontodict(result, take_rowsets=None):
+    take_rowset_indexes = []
+    if take_rowsets is None:
+        take_rowset_indexes = range(len(result['AMIMessage'][0]['Result'][0]['rowset']))
+    else:
+        for idx, rowset in enumerate(result['AMIMessage'][0]['Result'][0]['rowset']):
+            for rowset_id in take_rowsets:
+                if rowset_id == rowset['@type']:
+                    take_rowset_indexes.append(idx)
+
+    answer = []
+    for rowset_index in take_rowset_indexes:
+        for row in result['AMIMessage'][0]['Result'][0]['rowset'][rowset_index]['row']:
+            answer_dict = {'rowset': result['AMIMessage'][0]['Result'][0]['rowset'][rowset_index]['@type']}
+            for element in row['field']:
+                if '$' in element and '@name' in element:
+                    answer_dict[element['@name']] = element['$']
+            answer.append(answer_dict)
+    return answer
+
 ### classes -------------------------------------------------------------------
-class Client(object):
-    _instance = None
-
-    @property
-    @staticmethod
-    def instance(self):
-        if Client._instance is None:
-            c = PyAmi.AMI()
-            import os.path as osp
-            if not osp.exists(PyAmiAuth.AMI_CONFIG):
-                PyAmiAuth.create_auth_config()
-                pass
-            c.read_config(PyAmiAuth.AMI_CONFIG)
-            Client._instance = c
-        return Client._instance
-
-    def __init__(self, certAuth=True, 
dry_run=False):
-        self._client = PyAmi.AMI()
-        import os.path as osp
-        if not osp.exists(PyAmiAuth.AMI_CONFIG):
-            PyAmiAuth.create_auth_config()
-            pass
-        self._client.read_config(PyAmiAuth.AMI_CONFIG)
+class PyUtilsAMIException(Exception):
+    pass
+
+class Client(object):
+    def __init__(self, certAuth=True, dryrun=False):
+        self._client = PyAmi.Client('atlas')
         import PyUtils.Logging as L
         self.msg = L.logging.getLogger('ami-client')
-        self.msg.setLevel(L.logging.INFO)
-        self.dry_run = dry_run
+        if 'PYUTILS_DEBUG' in os.environ:
+            self.msg.setLevel(L.logging.DEBUG)
+        else:
+            self.msg.setLevel(L.logging.INFO)
+        self.dryrun = dryrun
         return

-    def exec_cmd(self, cmd, **args):
-        """execute an AMI command"""
-        if 'args' in args and len(args)==1:
-            args = args['args']
-        # add some defaults
-        args.setdefault('project', 'TagCollector')
-        args.setdefault('processingStep', 'production')
-        args.setdefault('repositoryName', 'AtlasOfflineRepository')
+    ## @brief Accept an AMI command with a set of arguments given as a dictionary,
+    # execute it and return the JSON result
+    # @note No longer accepts arbitrary keyword:value pairs
+    def exec_cmd(self, cmd, args={}, defaults=True, dryrun = None):
+        if defaults:
+            args.setdefault('-project', 'TagCollector')
+            args.setdefault('-processingStep', 'production')
+            args.setdefault('-repositoryName', 'AtlasOfflineRepository')

         # transform into an AMI command string
         ami_cmd = map(
@@ -166,32 +200,32 @@ class Client(object):
             )
         ami_cmd.insert(0, cmd)

-        self.msg.debug('ami_cmd: %s', ami_cmd)
-        if self.dry_run:
+        if dryrun is not None:
+            my_dryrun = dryrun
+        else:
+            my_dryrun = self.dryrun
+
+        self.msg.debug('ami_cmd: {0}'.format(ami_cmd))
+        self.msg.debug('Dry run setting is {0}'.format(my_dryrun))
+        if my_dryrun:
+            self.msg.info('Dry run detected - actual AMI command execution is suppressed')
             return True

-        # execute
-        ## try:
-        ##     result = self._client.execute(ami_cmd)
-        ##     return result
-        ## except Exception, err:
-        ##     if self.reraise:
-        ##         raise
-        ##     self.msg.error('caught an exception:\n%s', err)
-        ##     return
-        return self._client.execute(ami_cmd)
-
-    def find_pkg(self, pkg, check_tag=True, cbk_fct=None):
-        """Find the full path name of a package.
-        @return (pkg,tag) tuple
-        """
+        result = self._client.execute(ami_cmd, format = 'json')
+        self.msg.debug(pprint.pformat(result))
+        return json.loads(result)
+
+    ## @brief Find the full path name of a package.
+    # @return list of dictionaries with all AMI information by key:value
+    def find_pkg(self, pkg, check_tag=True):
+        self.msg.debug("Finding package {0}".format(pkg))
         # if '-' in name, a tag was given. 
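        # (e.g. a hypothetical 'Tools/PyUtils-00-01-23' yields
        #  pkg='Tools/PyUtils' and tag='PyUtils-00-01-23')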
if '-' in pkg: tag = pkg.split('/')[-1] pkg = pkg.split('-',1)[0] elif check_tag: - raise ValueError('no tag was given for [%s]' % (pkg,)) + raise PyUtilsAMIException('No tag was given for {0}'.format(pkg)) else: tag = None @@ -201,40 +235,37 @@ class Client(object): pkg = pkg.split('/')[-1] args = { - 'glite': ( - "select packages.path,packages.packageName,packages.archive " - "where repositories.repositoryName='AtlasOfflineRepository' " - "and packages.packageName='%s' and packages.archive=0" % pkg + '-glite': ('"select packages.path,packages.packageName,packages.archive' + ' where repositories.repositoryName=\'AtlasOfflineRepository\'' + ' and packages.packageName=\'{0}\' and packages.archive=0"'.format(pkg) ), } result = self.exec_cmd(cmd='SearchQuery', args=args) - if not result: - raise RuntimeError( - 'could not resolve [%s] to full package path' % - (pkg,) - ) - res_dict = result.to_dict() - if not 'Element_Info' in res_dict: - raise RuntimeError( - 'could not resolve [%s] to full package path' % - (pkg,) - ) - - pkg_list = [] - for v in res_dict['Element_Info'].values(): - pkg_list.append(v) # += [v['path'] + v['packageName']] + self.msg.debug(pprint.pformat(result)) + + bad, msg = badresult(result) + if bad: + errmsg = 'Could not resolve [{0}] to full package path: {1}'.format(pkg, msg) + self.msg.error(errmsg) + raise PyUtilsAMIException(errmsg) + + pkg_list=amijsontodict(result) idx = 0 if len(pkg_list) == 0: - raise RuntimeError('package [%s] does not exist' % pkg) + raise PyUtilsAMIException('package [{0}] does not exist'.format(pkg)) elif len(pkg_list)>1: + # Multiple matches - try and use some resolution, + # like searching the path+name + # (Somehow this all stinks - there must be a way to get more reliable + # information back from AMI with a better query) ambiguous = True if '/' in orig_pkg: pkg_candidates = [] - for i,v in enumerate(pkg_list): - if orig_pkg in v['path']+v['packageName']: + for i, pkg in enumerate(pkg_list): + if orig_pkg in pkg: pkg_candidates.append(i) if len(pkg_candidates) == 1: idx = pkg_candidates[0] @@ -246,17 +277,7 @@ class Client(object): self.msg.info('multiple packages found for [%s]:', pkg) for i,v in enumerate(pkg_list): self.msg.info(' %i) %s', i, v['path']+v['packageName']) - if cbk_fct: - try: - n = cbk_fct() - except StopIteration: - raise RuntimeError( - 'multiple packages found for [%s]' % pkg - ) - idx = n - pkg = pkg_list[n] - else: - raise RuntimeError('multiple packages found for [%s]' % pkg) + raise PyUtilsAMIException('multiple packages found for [{0}]'.format(pkg)) else: idx = 0 @@ -281,115 +302,76 @@ class Client(object): if isinstance(v, basestring): v = str(v) pkg[str(k)] = v - - - ## if tag is None: - ## tag = tag_list[idx] - - ## print "-"*80 - ## print res_dict - ## print "-"*80 + return pkg - def get_project_of_pkg(self, pkg, release): - """ - retrieve the list of projects from AMI for a given release and package - """ - pkg = self.find_pkg(pkg,check_tag=False) + ## @brief retrieve the tag collector information for a given release and package + # optional arguments control the type of information returned + def get_pkg_info(self, package, release, resultKey="groupName", filterRelease=False): + + pkg = self.find_pkg(package, check_tag=False) - projects = [] - full_pkg_name = pkg['packagePath']+pkg['packageName'] # pkg['packageTag'] - try: - res = self.exec_cmd(cmd='TCGetPackageVersionHistory', - fullPackageName=full_pkg_name, - releaseName=release) - rows = res.rows() - if isinstance(rows, dict): - rows = [rows] - # print 
"---" - # print list(rows) - # print "---" - for row in rows: - projects.append(row.get('groupName')) - if not projects: - self.msg.error( - "no project found for package [%s] and release [%s]", - full_pkg_name, - release) - except PyAmi.AMI_Error, err: - pass - return projects - - def get_version_of_pkg(self, pkg, release): - """ - retrieve the list of versions from AMI for a given release and package - """ - pkg = self.find_pkg(pkg,check_tag=False) + full_pkg_name = pkg['packagePath']+pkg['packageName'] + result = self.exec_cmd(cmd='TCGetPackageVersionHistory', + args={'-fullPackageName': '"' + full_pkg_name + '"', + '-releaseName': release}) + bad, msg = badresult(result) + if bad: + errmsg = "Bad AMI result for projects of package {0}: {1}".format(full_pkg_name, msg) + self.msg.error(errmsg) + raw_result_list = amijsontodict(result) + self.msg.debug(pprint.pformat(raw_result_list)) + + #import code + #code.interact(local=locals()) + + results = [] + for res in raw_result_list: + if filterRelease: + if res.get("releaseName") != release: + continue + if resultKey in res: + results.append(res[resultKey]) + + return results + + ## @brief retrieve the package version from AMI taking into account project dependencies + def get_version_of_pkg_with_deps(self, pkg, project, release): versions = [] - full_pkg_name = pkg['packagePath']+pkg['packageName'] # pkg['packageTag'] - try: - res = self.exec_cmd(cmd='TCGetPackageVersionHistory', - fullPackageName=full_pkg_name, - releaseName=release) - rows = res.rows() - if isinstance(rows, dict): - rows = [rows] - ## print "---" - ## print list(rows) - ## print "---" - for row in rows: - versions.append(row.get('packageTag')) - if not versions: - self.msg.error( - "no version found for package [%s] and release [%s]", - full_pkg_name, - release) - except PyAmi.AMI_Error, err: - pass - return versions + result = self.exec_cmd(cmd='TCSearchPackageVersion', + args = { + '-keyword': pkg, + '-groupName': project, + '-withDep': "True", + '-releaseName': release} + ) + self.msg.debug(pprint.pformat(result)) + bad, msg = badresult(result, expectCommandStatus=False) + if bad: + errmsg = "Failed to find package {0} in release {1}, project {2}: {3}".format(pkg, release, project, msg) + raise PyUtilsAMIException(errmsg) - def get_version_of_pkg_with_deps(self, pkg, project, release): - """ - retrieve the package version from AMI taken into account project dependencies - """ + results_list = amijsontodict(result) + for res in results_list: + versions.append((res.get('groupName'), res.get('releaseName'), res.get('fullPackageName'), res.get('packageTag'))) - versions = [] - try: - res = self.exec_cmd(cmd='TCSearchPackageVersion', - keyword=pkg, - groupName=project, - withDep=True, - releaseName=release) - rows = res.rows() - if isinstance(rows, dict): - rows = [rows] - - for row in rows: - packageTag = row.get('packageTag', None) - fullPackageName = row.get('fullPackageName', None) - groupName = row.get('groupName', None) - releaseName = row.get('releaseName', None) - versions.append((groupName,releaseName,fullPackageName,packageTag)) - - # If more than one result, match full package name - if len(versions)>1: - pkg = self.find_pkg(pkg, check_tag=False) - full_pkg_name = pkg['packagePath']+pkg['packageName'] - versions = filter(lambda v:v[2]==full_pkg_name, versions) - - if len(versions)==0: - self.msg.error( - "no version found for package [%s] and release [%s]", - pkg, - release) - - except PyAmi.AMI_Error, err: - pass + # If more than one result, match full package 
name + self.msg.debug(pprint.pformat(versions)) + if len(versions)>1: + pkg = self.find_pkg(pkg, check_tag=False) + full_pkg_name = pkg['packagePath']+pkg['packageName'] + self.msg.debug(pprint.pformat(full_pkg_name)) + versions = [ v for v in versions if v[2] == full_pkg_name ] + + if len(versions)==0: + errmsg = "No version found for package {0} in release {1}".format(pkg, release) + raise PyUtilsAMIException(errmsg) return versions - + + def get_project_tree(self, project, release, recursive=False): """return the dependency tree of packages for a given project and a given release @@ -399,126 +381,89 @@ class Client(object): cmd = 'TCFormGetPackageVersionTree' if recursive: cmd = 'TCFormGetDependencyPackageVersionTree' - result = self.exec_cmd( - cmd=cmd, - expandedPackageID='*', - expandedTopContainerPackage='*', - groupName=project, - processingStep='production', - project='TagCollector', - releaseName=release, - ) - if not result: - raise RuntimeError( - "Could not retrieve the dependency tree for project [%s]" - " and release [%s]" % (project, release,) - ) - import xml.etree.cElementTree as ET - d = result.to_dict() - - out = d - abs_path = ('AMIMessage', 'Result', 'tree', 'treeBranch',) - for i,k in enumerate(abs_path): - if not k in out: - raise RuntimeError( - 'malformated answer from AMI (no [%s] key)' % k - ) - out = out[k] - return out + result = self.exec_cmd(cmd=cmd, + args = {'-expandedPackageID': '*', + '-expandedTopContainerPackage': '*', + '-groupName': project, + '-processingStep': 'production', + '-project': 'TagCollector', + '-releaseName': release, + }, + defaults=False + ) + + bad, msg = badresult(result) + if bad: + errmsg = "Bad AMI result for project {0} in release {1}: {2}".format(project, release, msg) + self.msg.error(errmsg) + raise PyUtilsAMIException(errmsg) + + result_list = amijsontodict(result) + self.msg.debug(pprint.pformat(result_list)) + + # Results here seem to be out of kilter with what the following code + # was trying to parse. This is also true in pyAMI4, so probably this is + # a dead function... + +# out = d +# abs_path = ('AMIMessage', 'Result', 'tree', 'treeBranch',) +# for i,k in enumerate(abs_path): +# if not k in out: +# raise RuntimeError( +# 'malformated answer from AMI (no [%s] key)' % k +# ) +# out = out[k] + + return False def get_open_releases(self, project): return self.get_releases(project, lambda x : x!='terminated') def get_releases(self, project, relStatusCond=lambda x : True): """return the list of open releases for a given ``project``""" - args = { - 'groupName' : project, - 'expandedRelease': '*', - } - - result = self.exec_cmd(cmd='TCFormGetReleaseTreeDevView', args=args) - if not result: - raise RuntimeError( - "Could not find open releases in project %s" % project - ) - - rxml = result.output('xml') - import xml.etree.cElementTree as ET - - try: - reltree = ET.fromstring( - rxml - ).find("Result").find("tree") - releases = [ r.get("releaseName") - for r in reltree.getiterator("treeBranch") - if relStatusCond(r.get("status")) ] - - # Filter all special purpose releases (e.g. 
-MIG, -SLHC) - releases = filter(lambda x: x.count("-")==0, releases) - except Exception, e: - self.msg.error(e.message) - raise RuntimeError( - 'Could not parse result of TCFormGetReleaseTreeDevView:\n%s' % rxml - ) - - # Sort by release number + + result = self.exec_cmd(cmd='SearchQuery', + args={'-sql': '"select * from releases r,groups g where g.identifier=r.groupFK and g.groupName=\'{0}\'"'.format(project), + '-project': 'TagCollector', + '-processingStep': 'production'}, + defaults=False + ) + + bad, msg = badresult(result) + if bad: + errmsg = "Got bad result back from AMI for {0} releases: {1}".format(project, msg) + self.msg.error(errmsg) + raise PyUtilsAMIException(errmsg) + + result_list = amijsontodict(result) + releases = [] + for release in result_list: + if 'releaseName' in release and '-' not in release['releaseName']: + releases.append(release['releaseName']) releases.sort(key=lambda x: [int(y) if y.isdigit() else 0 for y in x.split('.')]) + self.msg.debug(pprint.pformat(releases)) return releases + def get_clients(self, project, release, full_pkg_name): """return the list of clients (full-pkg-name, version) of `full_pkg_name` for project `project` and release `release` + + Currently this query is broken - doesn't work in pyAMI4 either """ args = { - 'groupName': project, # AtlasOffline, AtlasEvent, ... - 'releaseName': release, + '-groupName': project, # AtlasOffline, AtlasEvent, ... + '-releaseName': release, } if full_pkg_name[0] != "/": full_pkg_name = "/"+full_pkg_name - args['fullPackageName'] = full_pkg_name + args['fullPackageName'] = '"'+full_pkg_name+'"' result = self.exec_cmd(cmd="TCListPackageVersionClient", args=args) - if not result: - raise RuntimeError( - 'error executing TCListPackageVersionClient' - ) - - rxml = result.output('xml') - import xml.etree.cElementTree as ET - try: - rows = xml2dict(ET.fromstring(rxml))['AMIMessage']["Result"]["rowset"]['row'] - except Exception, e: - self.msg.error(e.message) - raise RuntimeError( - 'could not parse result of TCListPackageVersionClient:\n%s' % rxml - ) - - if not isinstance(rows, (tuple,list)): - rows = [rows] + if badresult(result): + self.msg.error("Got bad result back from AMI for clients of {0} in {1} and {2}".format(full_pkg_name, project, release)) - clients = [] - for row in rows: - fields = row['field'] - client_name = None - client_vers = None - release_vers = None - group_name = None - for f in fields: - if f['name'] == 'fullPackageName': - client_name = f['_text'] - elif f['name'] == 'packageTag': - client_vers = f['_text'] - elif f['name'] == 'releaseName': - release_vers = f['_text'] - elif f['name'] == 'groupName': - group_name = f['_text'] - if client_name is None or client_vers is None: - self.msg.warning("could not find client-info for:\n%s", fields) - else: - if client_name[0] == '/': - client_name = client_name[1:] - clients.append((client_name, client_vers, release_vers, group_name)) - return clients - - pass # Client - + result_list = amijsontodict(result) + + return None + diff --git a/Tools/PyUtils/python/AthFile/impl.py b/Tools/PyUtils/python/AthFile/impl.py index fec6683c034897feefa537539c170b14c6363228..0f0b2abf3e482e039a74ec52c95a5d956b5233d0 100644 --- a/Tools/PyUtils/python/AthFile/impl.py +++ b/Tools/PyUtils/python/AthFile/impl.py @@ -7,7 +7,7 @@ from __future__ import with_statement -__version__ = "$Revision: 588873 $" +__version__ = "$Revision: 635800 $" __author__ = "Sebastien Binet" __doc__ = "implementation of AthFile-server behind a set of proxies to isolate environments" 
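A short usage sketch for the reworked AmiLib client - the method names and
signatures are those defined in the AmiLib.py changes above, while the
package, project and release values are purely illustrative:

    import PyUtils.AmiLib as amilib

    client = amilib.Client()
    # resolve a bare package name to its full repository path
    pkg = client.find_pkg('PyUtils', check_tag=False)
    # tag collector info (here: the projects a package appears in) for a release
    projects = client.get_pkg_info('PyUtils', '19.0.0', resultKey='groupName')
    # resolve the package tag, following project dependencies
    versions = client.get_version_of_pkg_with_deps('PyUtils', 'AtlasOffline', '19.0.0')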
@@ -393,8 +393,10 @@ class AthFileServer(object): def _root_open(self, fname): import PyUtils.Helpers as H # speed-up by tampering LD_LIBRARY_PATH to not load reflex-dicts - import re - with H.restricted_ldenviron(projects=['AtlasCore']): + import re, os + restrictedProjects = ['AtlasCore'] + if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase + with H.restricted_ldenviron(projects=restrictedProjects): with H.ShutUp(filters=[ re.compile( 'TClass::TClass:0: RuntimeWarning: no dictionary for.*'), @@ -482,7 +484,7 @@ class AthFileServer(object): use_cache = False sync_cache = True - if protocol in ('', 'file'): + if protocol in ('', 'file') : fid = self.md5sum(fname) fid_in_cache = fid in cache # also check the cached name in case 2 identical files @@ -1023,7 +1025,10 @@ class FilePeeker(object): def _root_open(self, fname, raw=False): import PyUtils.Helpers as H - with H.restricted_ldenviron(projects=['AtlasCore']): + restrictedProjects = ['AtlasCore'] + import os + if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase + with H.restricted_ldenviron(projects=restrictedProjects): root = self.pyroot import re with H.ShutUp(filters=[ @@ -1061,7 +1066,10 @@ class FilePeeker(object): runs=[] evts=[] import PyUtils.Helpers as H - with H.restricted_ldenviron(projects=['AtlasCore']): + restrictedProjects = ['AtlasCore'] + import os + if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase + with H.restricted_ldenviron(projects=restrictedProjects): root = self.pyroot do_close = True if isinstance(fname, basestring): @@ -1106,9 +1114,15 @@ class FilePeeker(object): for row in xrange(evtmax): if coll_tree.GetEntry(row) < 0: break - runnbr = coll_tree.RunNumber + # With root 5.34.22, trying to access leaves of a + # fundamental type like this gives an error: + # TypeError: attempt to bind ROOT object w/o class + # Rewrite like this for now to work around the problem. 
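+            # (The same leaf-based access works for any fundamental-type
+            #  branch, e.g. reading a Long64_t leaf from a branch 'bname':
+            #      val = tree.GetBranch(bname).GetListOfLeaves()[0].GetValueLong64()
+            #  where 'tree' and 'bname' stand for any TTree and branch name.)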
+            #runnbr = coll_tree.RunNumber
+            runnbr = coll_tree.GetBranch('RunNumber').GetListOfLeaves()[0].GetValueLong64()
             runs.append(runnbr)
-            evtnbr = coll_tree.EventNumber
+            #evtnbr = coll_tree.EventNumber
+            evtnbr = coll_tree.GetBranch('EventNumber').GetListOfLeaves()[0].GetValueLong64()
             evts.append(evtnbr)
         del coll_tree
         if f and do_close:
@@ -1119,7 +1133,10 @@ class FilePeeker(object):
     def _is_empty_pool_file(self, fname):
         is_empty = False
         import PyUtils.Helpers as H
-        with H.restricted_ldenviron(projects=['AtlasCore']):
+        restrictedProjects = ['AtlasCore']
+        import os
+        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase
+        with H.restricted_ldenviron(projects=restrictedProjects):
             root = self.pyroot
             do_close = True
             if isinstance(fname, basestring):
@@ -1140,6 +1157,8 @@ class FilePeeker(object):
         return is_empty

     def _process_call(self, fname, evtmax, projects=['AtlasCore']):
+        import os
+        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): projects=[] #special case for athanalysisbase
         msg = self.msg()
         import PyUtils.Helpers as H
         f = _create_file_infos()
@@ -1170,7 +1189,9 @@ class FilePeeker(object):
                    file_name,]
             subprocess.call(cmd, env=self._sub_env)
             #
-            with H.restricted_ldenviron(projects=None):
+            #with H.restricted_ldenviron(projects=None):
+            # MN: disabled clean environ to let ROOT6 find headers
+            if True:
                 is_tag, tag_ref, tag_guid, nentries, runs, evts = self._is_tag_file(f_root, evtmax)
                 if is_tag:
                     f['stream_names'] = ['TAG']
@@ -1187,6 +1208,9 @@ class FilePeeker(object):
             os.close(fd_pkl)
             if os.path.exists(out_pkl_fname):
                 os.remove(out_pkl_fname)
+            print "\n --------- running Athena peeker"
+            print os.environ['CMTPATH']
+
             import AthenaCommon.ChapPy as api
             app = api.AthenaApp(cmdlineargs=["--nprocs=0"])
             app << """
diff --git a/Tools/PyUtils/python/AthFileLite.py b/Tools/PyUtils/python/AthFileLite.py
new file mode 100755
index 0000000000000000000000000000000000000000..ecfe2a61836a6d8f88a2bbeb9dddfccd37c3025e
--- /dev/null
+++ b/Tools/PyUtils/python/AthFileLite.py
@@ -0,0 +1,357 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# Lightweight and simplified version of AthFile
+# As the transform knows which files are bytestream and which are
+# POOL files we just have two simple classes and definitely avoid
+# doing anything fancy here
+
+import os
+import os.path
+import re
+import subprocess
+import sys
+import uuid
+
+import PyUtils.dbsqlite as dbsqlite
+from PyCmt.Logging import msg, logging
+
+def _create_file_info_template():
+    """simple helper function to create consistent dicts for the
+    fileinfos attribute of AthFile
+    """
+    d = {
+        'file_md5sum': None,
+        'file_name': None,
+        'file_size': None,
+        'file_type': None,
+        'file_guid': None,
+        'nentries' : 0, # to handle empty files
+        'run_number': [],
+        'run_type': [],
+        'evt_type': [],
+        'evt_number': [],
+        'lumi_block': [],
+        'beam_energy': [],
+        'beam_type': [],
+        'stream_tags': [],
+        'metadata_items': None,
+        'eventdata_items': None,
+        'stream_names': None,
+        'geometry': None,
+        'conditions_tag': None,
+        'det_descr_tags': None,
+        ##
+        'metadata': None,
+        'tag_info': None,
+        }
+    return d
+
+
+def _urlType(filename):
+    if filename.startswith('dcap:'):
+        return 'dcap'
+    if filename.startswith('root:'):
+        return 'root'
+    if filename.startswith('rfio:'):
+        return 'rfio'
+    if filename.startswith('file:'):
+        return 'posix'
+    return 'posix'
+
+
+def _get_file_size(filename):
+    if _urlType(filename) == 'posix':
+        try:
+            fsize = 
os.stat(filename)[6] + except IOError: + fsize = None + else: + from PyUtils.RootUtils import import_root + root = import_root() + try: + msg.debug('Calling TFile.Open for {0}'.format(filename)) + file = root.TFile.Open(filename + '?filetype=raw', 'READ') + fsize = file.GetSize() + msg.debug('Got size {0} from TFile.GetSize'.format(fsize)) + except ReferenceError: + msg.error('Failed to get size of {0}'.format(filename)) + fsize = None + + file.Close() + del root + return fsize + + +class AthPoolFile(object): + def __init__(self, filename): + self._filename = filename + if self._filename: + self._stub = os.path.basename(self._filename) + '-' + str(uuid.uuid4()) + else: + self._stub = str(uuid.uuid4()) + self._jobOptionsFile = self._stub + '-miniAthFile.py' + self._infoOutputFile = self._stub + '-miniAthFile.db' + self._logFile = self._stub + '-miniAthFile.log' + + self._metadata = _create_file_info_template() + self._error = False + self.fopen() + + + def fopen(self): + self._writeMiniJobOptions() + self._runMiniAthena() + self._loadFileInfo() + self._metadata['file_type'] = 'pool' + self._metadata['file_size'] = _get_file_size(self._filename) + + + @property + def fileinfo(self): + return self._metadata + + def _writeMiniJobOptions(self): + try: + jo = open(self._jobOptionsFile, "w") + + print >>jo, os.linesep.join(("FNAME=['{filename}']", + "import os", + "os.environ['ATHENA_PROC_NUMBER'] = '0'", + "os.environ.pop('PYTHONINSPECT', None)", + "include('AthenaPython/athfile_peeker.py')", + "from AthenaCommon.AlgSequence import AlgSequence", + "job = AlgSequence()", + "job.peeker.outfname='{picklename}'", + "job.peeker.infname=FNAME[0]", + "import IOVDbSvc.IOVDb", + "theApp.EvtMax = 1")).format(filename=self._filename, picklename=self._infoOutputFile) + + except Exception, e: + print >>sys.stderr, "Exception raised when writing JO file: {0}".format(e) + self._error = True + raise + + def _runMiniAthena(self): + out = open(self._logFile, 'wb') + try: + athenv = os.environ.copy() + athenv["ATHENA_PROC_NUMBER"] = "0" # Suppress AthenaMP running + subprocess.check_call(['athena.py', self._jobOptionsFile], stdout=out, stderr=out, env=athenv) + except subprocess.CalledProcessError: + # Don't delete log files if errors occured + self._error = True + raise + + + def _loadFileInfo(self): + db = dbsqlite.open(self._infoOutputFile) + self._metadata = db['fileinfos'] + + def _getSize(self): + # FIXME Probably need to use ROOT for non-posix fs + try: + self._metadata['file_size'] = os.stat(self._filename)[6] + except IOError: + self._metadata['file_size'] = None + + def __del__(self): + if ('AFDEBUG' not in os.environ) and (not self._error): + for fname in (self._jobOptionsFile, self._infoOutputFile, self._logFile): + try: + os.unlink(fname) + except (OSError, IOError): + pass + + +class AthBSFile(object): + def __init__(self, filename): + self._filename = filename + self._metadata = _create_file_info_template() + self.fopen() + + def fopen(self): + self._process_bs_file(self._filename) + self._metadata['file_type'] = 'bs' + self._metadata['file_size'] = _get_file_size(self._filename) + + @property + def fileinfo(self): + return self._metadata + + + def _process_bs_file (self, fname, evtmax=1, full_details=True): + import eformat as ef + + data_reader = ef.EventStorage.pickDataReader(fname) + assert data_reader, \ + 'problem picking a data reader for file [%s]'%fname + + beam_type = '<beam-type N/A>' + try: + beam_type = data_reader.beamType() + except Exception,err: + msg.warning ("problem while 
extracting beam-type information") + + beam_energy = '<beam-energy N/A>' + try: + beam_energy = data_reader.beamEnergy() + except Exception,err: + msg.warning ("problem while extracting beam-type information") + + bs = ef.istream(fname) + + self._metadata['nentries'] = bs.total_events + + bs_metadata = {} + + for md in data_reader.freeMetaDataStrings(): + if md.startswith('Event type:'): + k = 'evt_type' + v = [] + if 'is sim' in md: v.append('IS_SIMULATION') + else: v.append('IS_DATA') + if 'is atlas' in md: v.append('IS_ATLAS') + else: v.append('IS_TESTBEAM') + if 'is physics' in md: v.append('IS_PHYSICS') + else: v.append('IS_CALIBRATION') + bs_metadata[k] = tuple(v) + elif md.startswith('GeoAtlas:'): + k = 'geometry' + v = md.split('GeoAtlas:')[1].strip() + bs_metadata[k] = v + elif md.startswith('IOVDbGlobalTag:'): + k = 'conditions_tag' + v = md.split('IOVDbGlobalTag:')[1].strip() + bs_metadata[k] = v + elif '=' in md: + k,v = md.split('=') + bs_metadata[k] = v + + # for bwd/fwd compat... + # see: https://savannah.cern.ch/bugs/?73208 + # needed for very old BS + for key_name,fn_name in ( + ('GUID','GUID'), + ('Stream','stream'), + ('Project', 'projectTag'), + ('LumiBlock', 'lumiblockNumber'), + ('run_number', 'runNumber'), + ): + if key_name in bs_metadata: + # no need: already in bs metadata dict + continue + if hasattr(data_reader, fn_name): + bs_metadata[key_name] = getattr(data_reader, fn_name)() + + self._metadata['file_guid'] = bs_metadata.get('GUID', None) + self._metadata['evt_type'] = bs_metadata.get('evt_type', []) + self._metadata['geometry'] = bs_metadata.get('geometry', None) + self._metadata['conditions_tag'] = bs_metadata.get('conditions_tag', None) + self._metadata['bs_metadata'] = bs_metadata + + if not data_reader.good(): + # event-less file... 
+ self._metadata['run_number'].append(bs_metadata.get('run_number', 0)) + self._metadata['lumi_block'].append(bs_metadata.get('LumiBlock', 0)) + return + + if evtmax == -1: + evtmax = nentries + + ievt = iter(bs) + for i in xrange(evtmax): + try: + evt = ievt.next() + evt.check() # may raise a RuntimeError + stream_tags = [dict(stream_type=tag.type, + stream_name=tag.name, + obeys_lbk=bool(tag.obeys_lumiblock)) + for tag in evt.stream_tag()] + self._metadata['run_number'].append(evt.run_no()) + self._metadata['evt_number'].append(evt.global_id()) + self._metadata['lumi_block'].append(evt.lumi_block()) + self._metadata['run_type'].append(ef.helper.run_type2string(evt.run_type())) + self._metadata['beam_type'].append(beam_type) + self._metadata['beam_energy'].append(beam_energy) + self._metadata['stream_tags'].extend(stream_tags) + + except RuntimeError, err: + print "** WARNING ** detected a corrupted bs-file:\n",err + + +class AthTagFile(object): + def __init__(self, filename): + self._filename = filename + self._metadata = _create_file_info_template() + + self.fopen() + + def fopen(self): + self._process_tag_file() + self._metadata['file_type'] = 'tag' + self._metadata['file_size'] = _get_file_size(self._filename) + + @property + def fileinfo(self): + return self._metadata + + def _process_tag_file(self, evtmax=1): + tag_ref= None + tag_guid=None + nentries = 0 + runs=[] + evts=[] + + try: + from PyUtils.RootUtils import import_root + root = import_root() + f = root.TFile.Open(self._filename, 'READ') + + metadata= f.Get('CollectionMetadata') if f else None + if metadata: + nbytes = metadata.GetEntry(0) + # note: we used to use .rstrip('\0') b/c of the change in + # semantics in PyROOT (char[] and const char* may not mean + # the same thing) + # see https://savannah.cern.ch/bugs/?100920 for the gory details + # but in the end, we use ctypes... 
+ # see https://savannah.cern.ch/bugs/?101200 for the gory details + # + # make sure it is what we think it is + import ctypes + key_name = str(ctypes.c_char_p(metadata.Key).value) + assert key_name == 'POOLCollectionID' + tag_guid = str(ctypes.c_char_p(metadata.Value).value) + del metadata + coll_tree = f.Get('POOLCollectionTree') if f else None + if coll_tree: + nentries = coll_tree.GetEntries() + if evtmax in (-1, None): + evtmax = nentries + evtmax = int(evtmax) + for row in xrange(evtmax): + if coll_tree.GetEntry(row) < 0: + break + runnbr = coll_tree.RunNumber + runs.append(runnbr) + evtnbr = coll_tree.EventNumber + evts.append(evtnbr) + del coll_tree + f.Close() + del f + + self._metadata['stream_names'] = ['TAG'] + self._metadata['file_guid'] = tag_guid + self._metadata['nentries'] = nentries + self._metadata['run_number'] = runs + self._metadata['evt_number'] = evts + except Exception, e: + print >>sys.stderr, "Exception raised when processing TAG file {0}: {1}".format(self._filename, e) + raise + + def _getSize(self): + # FIXME Probably need to use ROOT for non-posix fs + try: + self._metadata['file_size'] = os.stat(self._filename)[6] + except IOError: + self._metadata['file_size'] = None diff --git a/Tools/PyUtils/python/Dso.py b/Tools/PyUtils/python/Dso.py index e687339724c652f7fef8eb3935fdfc735c9c8006..bfdf80057e1b1c8e10de9a5bd8da13aae643fbe1 100755 --- a/Tools/PyUtils/python/Dso.py +++ b/Tools/PyUtils/python/Dso.py @@ -158,8 +158,8 @@ def gen_typeregistry_dso(oname=_dflt_typereg_fname): import PyUtils.Logging as _L msg = _L.logging.getLogger('typereg-dso') - #msg.setLevel(_L.logging.INFO) - msg.setLevel(_L.logging.VERBOSE) + msg.setLevel(_L.logging.INFO) + #msg.setLevel(_L.logging.VERBOSE) #MN del _L msg.info("installing registry in [%s]...", oname) @@ -169,18 +169,18 @@ def gen_typeregistry_dso(oname=_dflt_typereg_fname): reg = PyDsoDb() cls_names = reg.db.keys() - msg.debug("::: loading reflex") - import PyCintex - PyCintex.Cintex.Enable() - PyCintex.loadDict('libReflexRflx.so') - rflx = PyCintex.makeNamespace('Reflex') - if not rflx: - rflx = PyCintex.makeNamespace('ROOT::Reflex') - rflx = rflx.Type - assert(rflx) - - import PyCintex - _load_lib = PyCintex.loadDict + import cppyy + _load_lib = cppyy.loadDict + + if not hasattr(cppyy, 'hasFakeCintex '): + msg.debug("::: loading reflex") + _load_lib('libReflexRflx.so') + rflx = cppyy.makeNamespace('Reflex') + if not rflx: + rflx = cppyy.makeNamespace('ROOT::Reflex') + rflx = rflx.Type + assert(rflx) + def _load_dict(libname,retry=10): msg.debug("::: loading [%s]...", libname) try: @@ -191,8 +191,8 @@ def gen_typeregistry_dso(oname=_dflt_typereg_fname): # we need to pre-load these guys as HepPDT is missing a linkopts # against HepPID. 
see bug #46551 - hep_pid = PyCintex.loadDict('libHepPID.so') - hep_pdt = PyCintex.loadDict('libHepPDT.so') + hep_pid = _load_lib('libHepPID.so') + hep_pdt = _load_lib('libHepPDT.so') from PyUtils.Decorators import forking @@ -318,18 +318,20 @@ class CxxDsoDb(object): """ def __init__(self): # import cintex - import PyCintex; PyCintex.Cintex.Enable() + # import PyCintex; PyCintex.Cintex.Enable() + import cppyy # import root import PyUtils.RootUtils as ru ROOT = ru.import_root() self._cxx = ROOT.Ath.DsoDb.instance() - # load reflex - _load_dict = PyCintex.loadDict - _load_dict('ReflexRflx') - self._rflx = PyCintex.makeNamespace('Reflex') - if not self._rflx: - self._rflx = PyCintex.makeNamespace('ROOT::Reflex') - return + if not hasattr(cppyy, 'hasFakeCintex '): + # load reflex + _load_dict = cppyy.loadDict + _load_dict('ReflexRflx') + self._rflx = cppyy.makeNamespace('Reflex') + if not self._rflx: + self._rflx = cppyy.makeNamespace('ROOT::Reflex') + return def _to_py(self, cxx): dd = {} diff --git a/Tools/PyUtils/python/Helpers.py b/Tools/PyUtils/python/Helpers.py index ef58d017e5c36114793dc124d2eeb7159a3647cc..5a86ebf0c2d97d8f46a58bc8ac28357946bcc58f 100755 --- a/Tools/PyUtils/python/Helpers.py +++ b/Tools/PyUtils/python/Helpers.py @@ -12,6 +12,107 @@ __author__ = "Sebastien Binet <binet@cern.ch>" import sys import os +from AthenaCommon.Logging import log + + +def ROOT6Setup(): + + def addROOTIncludePaths(): + """ + Fill ROOT include path list for entries for all packages found in CMTPATH + """ + log.debug( "\n --------- addROOTIncludePaths!" ) + log.debug( "Expanding CMTPATH:\n" + str( os.environ['CMTPATH'] ) + '\n' ) + import glob + import PyUtils.RootUtils as ru + interp = ru.import_root().gInterpreter + plist = os.environ['CMTPATH'].split(':') + for p in plist: + if p.find('AtlasCore')>0: + path_for_eigen = os.path.join (p, 'InstallArea', os.environ['CMTCONFIG'],'include') + interp.AddIncludePath( path_for_eigen ) + # MN disabling to use the patchs from release setup now + # if p.find('GAUDI')<0: + # idir = os.path.join (p, 'InstallArea', 'include') + # for ii in glob.glob (os.path.join (idir, '*')): + # interp.AddIncludePath (ii) + try: + interp.AddIncludePath( os.environ['G4INCLUDE'] ) + except KeyError: + pass + + + def cppyyFakeCintex(): + class Cintex: + def Enable(self): + pass + + _load = cppyy.loadDict + def loadDict(dict): + if dict.find('Reflex') >= 0: + log.debug(" LoadDict: ignoring dict " + dict ) + else: + log.debug(" LoadDict: loading dict " + dict ) + return _load(dict) + + cppyy.Cintex = Cintex() + cppyy.hasFakeCintex = True + cppyy.loadDict = loadDict + + + def install_root6_importhook(): + import __builtin__ + oldimporthook = __builtin__.__import__ + autoload_var_name = 'ROOT6_NamespaceAutoloadHook' + + def root6_importhook(name, globals={}, locals={}, fromlist=[], level=-1): + if name == 'PyCintex': + import sys, traceback + source, line, f, t = traceback.extract_stack( sys._getframe(1) )[-1] + log.warning( 'PyCintex imported (replace with import cppyy) from: %s:%d'%(source,line) ) + m = oldimporthook(name, globals, locals, fromlist, level) + if m and m.__name__== 'ROOT': + log.debug('Python import module=%s fromlist=%s'%(name, str(fromlist))) + if fromlist: + vars = [ '.'.join([name, fl, autoload_var_name]) for fl in fromlist] + else: + vars = [ '.'.join([name, autoload_var_name]) ] + for v in vars: + mm = m + try: + #MN: walk the module chain and try to touch 'autoload_var_name' to trigger ROOT autoloading of namespaces + for comp in v.split('.')[1:]: + mm = 
getattr(mm, comp) + except: + pass + return m + + __builtin__.__import__ = root6_importhook + + + try: + import cppyy + # let cppyy pretend to be PyCintex (and prevent subsequent imports of PyCintex) + sys.modules['PyCintex'] = PyCintex = cppyy + except ImportError, e: + # handle a somewhat common mistake + import traceback + traceback.print_exception( sys.exc_type, + '%s, ROOT version or setup problem?' % str(e), sys.exc_traceback ) + sys.exit( 1 ) + + try: + # test if we have Cintex (ROOT5) + PyCintex.Cintex.Debug + except AttributeError: + # no Cintex! do ROOT6 stuff + # but don't initialize more than once + if not hasattr('cppyy','hasFakeCintex'): + log.info('executing ROOT6Setup') + cppyyFakeCintex() + addROOTIncludePaths() + install_root6_importhook() + import re from tempfile import NamedTemporaryFile class ShutUp(object): diff --git a/Tools/PyUtils/python/PoolFile.py b/Tools/PyUtils/python/PoolFile.py index 3e8881554bdbf7f516096b1ea7d7040ec6112c91..c5e487d2cb404f8214d5997955b826505db23148 100755 --- a/Tools/PyUtils/python/PoolFile.py +++ b/Tools/PyUtils/python/PoolFile.py @@ -29,7 +29,8 @@ import os import shelve import whichdb -from Helpers import ShutUp +from Helpers import ShutUp, ROOT6Setup +ROOT6Setup() from Decorators import forking ### --- data ------------------------------------------------------------------ @@ -298,7 +299,7 @@ def extract_streams_from_tag (fname, import sys import PyUtils.RootUtils as ru ROOT = ru.import_root() - import PyCintex; PyCintex.Cintex.Enable() + #import PyCintex; PyCintex.Cintex.Enable() print "::: opening file [%s]..." % fname @@ -546,6 +547,8 @@ class PoolFile(object): protocol, fileName = af.server.fname(fileName) except Exception,err: print "## warning: problem opening PoolFileCatalog:\n%s"%err + import traceback + traceback.print_exc(err) pass self.poolFile = None @@ -588,8 +591,7 @@ class PoolFile(object): ROOT.gErrorIgnoreLevel = ROOT.kFatal rootMsg.unMute() - import PyCintex - PyCintex.Cintex.Enable() + #import PyCintex; PyCintex.Cintex.Enable() rootMsg.mute() poolFile = None diff --git a/Tools/PyUtils/python/RootUtils.py b/Tools/PyUtils/python/RootUtils.py index dd71fae35d96e7e4467032e8dfe65bdaa7a0d265..77cef093063aec62f2de6273189ae5131bcd9615 100644 --- a/Tools/PyUtils/python/RootUtils.py +++ b/Tools/PyUtils/python/RootUtils.py @@ -8,7 +8,7 @@ from __future__ import with_statement __doc__ = "a few utils to ease the day-to-day work with ROOT" -__version__ = "$Revision: 543921 $" +__version__ = "$Revision: 632456 $" __author__ = "Sebastien Binet" __all__ = [ @@ -38,8 +38,7 @@ def import_root(batch=True): ROOT.gROOT.SetBatch(batch) if batch: ROOT.PyConfig.IgnoreCommandLineOptions = True - import PyCintex - PyCintex.Cintex.Enable() + #import PyCintex; PyCintex.Cintex.Enable() return ROOT def root_compile(src=None, fname=None, batch=True): @@ -95,7 +94,8 @@ def root_compile(src=None, fname=None, batch=True): @memoize def _pythonize_tfile(): - import PyCintex; PyCintex.Cintex.Enable() + #import PyCintex; PyCintex.Cintex.Enable() + import cppyy root = import_root() import PyUtils.Helpers as H with H.ShutUp(filters=[ @@ -105,9 +105,10 @@ def _pythonize_tfile(): 'Warning in <TEnvRec::ChangeValue>: duplicate entry.*' ), ]): - PyCintex.loadDict("RootUtilsPyROOTDict") + cppyy.loadDict("RootUtilsPyROOTDict") rootutils = getattr(root, "RootUtils") - pybytes = getattr(rootutils, "PyBytes") + pybytes = getattr(rootutils, "PyBytes") + #MN: lines below fail in ROOT6 if PCM from RootUtils is not found read_root_file = getattr(rootutils, 
"_pythonize_read_root_file") tell_root_file = getattr(rootutils, "_pythonize_tell_root_file") pass @@ -306,6 +307,7 @@ def _test_main(): fct=root_compile, fname=tmp.name) print "OK" + return True if __name__ == "__main__": _test_main() diff --git a/Tools/PyUtils/python/scripts/__init__.py b/Tools/PyUtils/python/scripts/__init__.py index fff0beffcf74936a6f0f976891adfb3bcfc9ef4f..5d846ba39015a3eb7d25d29fc9b6c7cdb643ed34 100644 --- a/Tools/PyUtils/python/scripts/__init__.py +++ b/Tools/PyUtils/python/scripts/__init__.py @@ -12,14 +12,10 @@ acmdlib.register('chk-sg', 'PyUtils.scripts.check_sg:main') acmdlib.register('ath-dump', 'PyUtils.scripts.ath_dump:main') acmdlib.register('chk-rflx', 'PyUtils.scripts.check_reflex:main') acmdlib.register('gen-klass', 'PyUtils.scripts.gen_klass:main') -#acmdlib.register('tc.submit', 'PyUtils.AmiLib:tc_submit') -#acmdlib.register('tc.pkg-tree', 'PyUtils.AmiLib:tc_pkg_tree') -#acmdlib.register('ami-dset', 'PyUtils.AmiLib:ami_dset') acmdlib.register('tc.find-pkg', 'PyUtils.scripts.tc_find_pkg:main') acmdlib.register('tc.find-tag', 'PyUtils.scripts.tc_find_tag:main') acmdlib.register('tc.submit-tag', 'PyUtils.scripts.tc_submit_tag:main') -acmdlib.register('tc.show-clients', 'PyUtils.scripts.tc_show_clients:main') acmdlib.register('get-tag-diff', 'PyUtils.scripts.get_tag_diff:main') @@ -27,5 +23,8 @@ acmdlib.register('merge-files', 'PyUtils.scripts.merge_files:main') acmdlib.register('filter-files', 'PyUtils.scripts.filter_files:main') acmdlib.register('cmt.new-pkg', 'PyUtils.scripts.cmt_newpkg:main') +acmdlib.register('cmt.new-alg', 'PyUtils.scripts.cmt_newalg:main') +acmdlib.register('cmt.new-metadataalg', 'PyUtils.scripts.cmt_newmetadataalg:main') +acmdlib.register('cmt.new-pyalg', 'PyUtils.scripts.cmt_newpyalg:main') ## diff --git a/Tools/PyUtils/python/scripts/cmt_newalg.py b/Tools/PyUtils/python/scripts/cmt_newalg.py new file mode 100644 index 0000000000000000000000000000000000000000..81f528e5955711c1d113dcec4a712856986b786d --- /dev/null +++ b/Tools/PyUtils/python/scripts/cmt_newalg.py @@ -0,0 +1,264 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + +# @file PyUtils.scripts.cmt_newalg +# @purpose streamline and ease the creation of new athena algs +# @author Will Buttinger +# @date September 2014 + +#Note - this code could use a serious rewrite, I just hacked it together to get something working + +from __future__ import with_statement + +__version__ = "$Revision: 279982 $" +__author__ = "Will Buttinger" +__doc__ = "streamline and ease the creation of new algorithms" + +### imports ------------------------------------------------------------------- +import os +import textwrap +import commands +import PyUtils.acmdlib as acmdlib +import fileinput + +class Templates: + alg_hdr_template = """\ +#ifndef %(guard)s +#define %(guard)s 1 + +#include "AthenaBaseComps/AthAlgorithm.h" + +%(namespace_begin)s + +class %(klass)s: public ::AthAlgorithm { + public: + %(klass)s( const std::string& name, ISvcLocator* pSvcLocator ); + virtual ~%(klass)s(); + + virtual StatusCode initialize(); + virtual StatusCode execute(); + virtual StatusCode finalize(); + + private: + +}; +%(namespace_end)s +#endif //> !%(guard)s +""" + + alg_cxx_template = """\ +// %(pkg)s includes +#include "%(namespace_klass)s.h" + +%(namespace_begin)s + +%(klass)s::%(klass)s( const std::string& name, ISvcLocator* pSvcLocator ) : AthAlgorithm( name, pSvcLocator ){ + + //declareProperty( "Property", m_nProperty ); //example property declaration + +} + + 
+%(klass)s::~%(klass)s() {} + + +StatusCode %(klass)s::initialize() { + ATH_MSG_INFO ("Initializing " << name() << "..."); + + return StatusCode::SUCCESS; +} + +StatusCode %(klass)s::finalize() { + ATH_MSG_INFO ("Finalizing " << name() << "..."); + + return StatusCode::SUCCESS; +} + +StatusCode %(klass)s::execute() { + ATH_MSG_DEBUG ("Executing " << name() << "..."); + + return StatusCode::SUCCESS; +} + +%(namespace_end)s +""" + + +### functions ----------------------------------------------------------------- +@acmdlib.command( + name='cmt.new-alg' + ) +@acmdlib.argument( + 'algname', + help="name of the new alg" + ) +def main(args): + """create a new algorithm inside the current package. Call from within the package directory + + ex: + $ acmd cmt new-alg MyAlg + """ + sc = 0 + + full_alg_name = args.algname + + #determine the package from the cwd + cwd = os.getcwd() + #check that cmt dir exists (i.e. this is a package) + if not os.path.isdir(cwd+"/cmt"): + print "ERROR you must call new-alg from within the package you want to add the algorithm to" + return -1 + full_pkg_name = os.path.basename(cwd) + print textwrap.dedent("""\ + ::: create alg [%(full_alg_name)s] in pkg [%(full_pkg_name)s]""" %locals()) + + + #first we must check that requirements file has the AthenaBaseComps use statement in it + foundBaseComps=False + lastUse=0 + lineCount=0 + for line in open('cmt/requirements'): + lineCount +=1 + if not line.startswith("use "): continue + lastUse=lineCount + uu = line.split(" ") + if uu[1].startswith("AthenaBaseComps"): foundBaseComps=True + + if not foundBaseComps: + print "::: INFO Adding AthenaBaseComps to requirements file" + #must add a use statement to the requirements file + #put inside private blocks + lineCount=0 + inPrivate=False + for line in fileinput.input('cmt/requirements', inplace=1): + lineCount+=1 + if lineCount==lastUse+1: + if not inPrivate: print "private" + print "use AthenaBaseComps AthenaBaseComps-* Control" + if not inPrivate: print "end_private" + if line.startswith("private"): inPrivate=True + elif line.startswith("end_private"): inPrivate=False + print line, + + + #following code borrowed from gen_klass + hdr = getattr(Templates, 'alg_hdr_template') + cxx = getattr(Templates, 'alg_cxx_template') + + namespace_klass = full_alg_name.replace('::','__') + namespace_begin,namespace_end = "","" + namespace = "" + if full_alg_name.count("::")>0: + namespace = full_alg_name.split("::")[0] + full_alg_name = full_alg_name.split("::")[1] + namespace_begin = "namespace %s {" % namespace + namespace_end = "} //> end namespace %s" % namespace + pass + + guard = "%s_%s_H" % (full_pkg_name.upper(), namespace_klass.upper()) + + d = dict( pkg=full_pkg_name, + klass=full_alg_name, + guard=guard, + namespace_begin=namespace_begin, + namespace_end=namespace_end,namespace_klass=namespace_klass,namespace=namespace + ) + fname = os.path.splitext("src/%s"%namespace_klass)[0] + #first check doesn't exist + if os.path.isfile(fname+'.h'): + print "::: ERROR %s.h already exists" % fname + return -1 + o_hdr = open(fname+'.h', 'w') + o_hdr.writelines(hdr%d) + o_hdr.flush() + o_hdr.close() + + if os.path.isfile(fname+'.cxx'): + print "::: ERROR %s.cxx already exists" % fname + return -1 + o_cxx = open(fname+'.cxx', 'w') + o_cxx.writelines(cxx%d) + o_cxx.flush() + o_cxx.close() + + #now add the algorithm to the _entries.cxx file in the components folder + #first check they exist + if not os.path.isfile("src/components/%s_load.cxx"%full_pkg_name): + print "::: INFO Creating 
src/components/%s_load.cxx"%full_pkg_name + loadFile = open("src/components/%s_load.cxx"%full_pkg_name,'w') + loadFile.writelines(""" +#include "GaudiKernel/LoadFactoryEntries.h" +LOAD_FACTORY_ENTRIES(%(pkg)s) +"""%d) + loadFile.flush() + loadFile.close() + + if not os.path.isfile("src/components/%s_entries.cxx"%full_pkg_name): + print "::: INFO Creating src/components/%s_entries.cxx"%full_pkg_name + loadFile = open("src/components/%s_entries.cxx"%full_pkg_name,'w') + if len(namespace_begin)>0: + d["namespace"] = args.algname.split("::")[0] + loadFile.writelines(""" +#include "GaudiKernel/DeclareFactoryEntries.h" + +#include "../%(namespace_klass)s.h" + +DECLARE_NAMESPACE_ALGORITHM_FACTORY(%(namespace)s, %(klass)s ) + +DECLARE_FACTORY_ENTRIES( %(pkg)s ) +{ + DECLARE_NAMESPACE_ALGORITHM(%(namespace)s, %(klass)s ); +} +"""%d) + else: + loadFile.writelines(""" +#include "GaudiKernel/DeclareFactoryEntries.h" + +#include "../%(namespace_klass)s.h" + +DECLARE_ALGORITHM_FACTORY( %(klass)s ) + +DECLARE_FACTORY_ENTRIES( %(pkg)s ) +{ + DECLARE_ALGORITHM( %(klass)s ); +} +"""%d) + loadFile.flush() + loadFile.close() + else: + #first check algorithm not already in _entries file + inFile=False + for line in open("src/components/%s_entries.cxx"%full_pkg_name): + if len(namespace_begin)==0 and "DECLARE_ALGORITHM" in line and d["klass"] in line: inFile=True + if len(namespace_begin)>0 and "DECLARE_NAMESPACE_ALGORITHM" in line and d["klass"] in line and d["namespace"]: inFile=True + + if not inFile: + print "::: INFO Adding %s to src/components/%s_entries.cxx"% (args.algname,full_pkg_name) + nextAdd=False + for line in fileinput.input("src/components/%s_entries.cxx"%full_pkg_name, inplace=1): + if nextAdd and not "{" in line: + nextAdd=False + if len(namespace_begin)>0: + print """ DECLARE_NAMESPACE_ALGORITHM(%(namespace)s, %(klass)s );"""%d + else: + print """ DECLARE_ALGORITHM( %(klass)s );"""%d + if line.startswith("DECLARE_FACTORY_ENTRIES"): + nextAdd=True + if len(namespace_begin)>0: + + print """ +#include "../%(namespace_klass)s.h" +DECLARE_NAMESPACE_ALGORITHM_FACTORY( %(namespace)s, %(klass)s ) +"""%d + print """ +#include "../%(namespace_klass)s.h" +DECLARE_ALGORITHM_FACTORY( %(klass)s ) +"""%d + print line, + + #to finish up, call cmt config so that the new algorithm will be captured and genconf run on it + cwd = os.getcwd() + try: + os.chdir('cmt') + _ = commands.getstatusoutput('cmt config') + finally: + os.chdir(cwd) diff --git a/Tools/PyUtils/python/scripts/cmt_newmetadataalg.py b/Tools/PyUtils/python/scripts/cmt_newmetadataalg.py new file mode 100644 index 0000000000000000000000000000000000000000..18b313d6035a1449f5f6c7716163cd53b24f0556 --- /dev/null +++ b/Tools/PyUtils/python/scripts/cmt_newmetadataalg.py @@ -0,0 +1,274 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + +# @file PyUtils.scripts.cmt_newmetadataalg +# @purpose streamline and ease the creation of new athena algs +# @author Will Buttinger +# @date September 2014 + +#Note - this code could use a serious rewrite, I just hacked it together to get something working + +from __future__ import with_statement + +__version__ = "$Revision: 621307 $" +__author__ = "Will Buttinger" +__doc__ = "streamline and ease the creation of new metadata algorithms" + +### imports ------------------------------------------------------------------- +import os +import textwrap +import commands +import PyUtils.acmdlib as acmdlib +import fileinput + +class Templates: + alg_hdr_template = """\ +#ifndef %(guard)s +#define 
%(guard)s 1
+
+#include "AthenaBaseComps/AthMetadataAlgorithm.h"
+
+%(namespace_begin)s
+
+class %(klass)s: public ::AthMetadataAlgorithm {
+ public:
+  %(klass)s( const std::string& name, ISvcLocator* pSvcLocator );
+  virtual ~%(klass)s();
+
+  virtual StatusCode initialize();
+  virtual StatusCode execute();
+  virtual StatusCode finalize();
+
+  virtual StatusCode beginInputFile();
+
+ private:
+
+};
+%(namespace_end)s
+#endif //> !%(guard)s
+"""
+
+    alg_cxx_template = """\
+// %(pkg)s includes
+#include "%(namespace_klass)s.h"
+
+%(namespace_begin)s
+
+%(klass)s::%(klass)s( const std::string& name, ISvcLocator* pSvcLocator ) : AthMetadataAlgorithm( name, pSvcLocator ){
+
+  //declareProperty( "Property", m_nProperty ); //example property declaration
+
+}
+
+
+%(klass)s::~%(klass)s() {}
+
+
+StatusCode %(klass)s::initialize() {
+  ATH_MSG_INFO ("Initializing " << name() << "...");
+
+  return StatusCode::SUCCESS;
+}
+
+StatusCode %(klass)s::finalize() {
+  ATH_MSG_INFO ("Finalizing " << name() << "...");
+
+  return StatusCode::SUCCESS;
+}
+
+StatusCode %(klass)s::execute() {
+  ATH_MSG_DEBUG ("Executing " << name() << "...");
+
+  return StatusCode::SUCCESS;
+}
+
+StatusCode %(klass)s::beginInputFile() {
+  //example of metadata retrieval:
+  //float beamEnergy(0); CHECK( retrieveMetadata("/TagInfo","beam_energy",beamEnergy) );
+  //std::vector<float> bunchPattern; CHECK( retrieveMetadata("/Digitization/Parameters","BeamIntensityPattern",bunchPattern) );
+
+  return StatusCode::SUCCESS;
+}
+
+%(namespace_end)s
+"""
+
+
+### functions -----------------------------------------------------------------
+@acmdlib.command(
+    name='cmt.new-metadataalg'
+    )
+@acmdlib.argument(
+    'algname',
+    help="name of the new alg"
+    )
+def main(args):
+    """create a new metadata algorithm inside the current package. Call from within the package directory
+
+    ex:
+     $ acmd cmt new-metadataalg MyAlg
+    """
+    sc = 0
+
+    full_alg_name = args.algname
+
+    #determine the package from the cwd
+    cwd = os.getcwd()
+    #check that cmt dir exists (i.e. 
this is a package) + if not os.path.isdir(cwd+"/cmt"): + print "ERROR you must call new-metadataalg from within the package you want to add the algorithm to" + return -1 + full_pkg_name = os.path.basename(cwd) + print textwrap.dedent("""\ + ::: create alg [%(full_alg_name)s] in pkg [%(full_pkg_name)s]""" %locals()) + + + #first we must check that requirements file has the AthenaBaseComps use statement in it + foundBaseComps=False + lastUse=0 + lineCount=0 + for line in open('cmt/requirements'): + lineCount +=1 + if not line.startswith("use "): continue + lastUse=lineCount + uu = line.split(" ") + if uu[1].startswith("AthenaBaseComps"): foundBaseComps=True + + if not foundBaseComps: + print "::: INFO Adding AthenaBaseComps to requirements file" + #must add a use statement to the requirements file + #put inside private blocks + lineCount=0 + inPrivate=False + for line in fileinput.input('cmt/requirements', inplace=1): + lineCount+=1 + if lineCount==lastUse+1: + if not inPrivate: print "private" + print "use AthenaBaseComps AthenaBaseComps-* Control" + if not inPrivate: print "end_private" + if line.startswith("private"): inPrivate=True + elif line.startswith("end_private"): inPrivate=False + print line, + + + #following code borrowed from gen_klass + hdr = getattr(Templates, 'alg_hdr_template') + cxx = getattr(Templates, 'alg_cxx_template') + + namespace_klass = full_alg_name.replace('::','__') + namespace_begin,namespace_end = "","" + namespace = "" + if full_alg_name.count("::")>0: + namespace = full_alg_name.split("::")[0] + full_alg_name = full_alg_name.split("::")[1] + namespace_begin = "namespace %s {" % namespace + namespace_end = "} //> end namespace %s" % namespace + pass + + guard = "%s_%s_H" % (full_pkg_name.upper(), namespace_klass.upper()) + + d = dict( pkg=full_pkg_name, + klass=full_alg_name, + guard=guard, + namespace_begin=namespace_begin, + namespace_end=namespace_end,namespace_klass=namespace_klass,namespace=namespace + ) + fname = os.path.splitext("src/%s"%namespace_klass)[0] + #first check doesn't exist + if os.path.isfile(fname+'.h'): + print "::: ERROR %s.h already exists" % fname + return -1 + o_hdr = open(fname+'.h', 'w') + o_hdr.writelines(hdr%d) + o_hdr.flush() + o_hdr.close() + + if os.path.isfile(fname+'.cxx'): + print "::: ERROR %s.cxx already exists" % fname + return -1 + o_cxx = open(fname+'.cxx', 'w') + o_cxx.writelines(cxx%d) + o_cxx.flush() + o_cxx.close() + + #now add the algorithm to the _entries.cxx file in the components folder + #first check they exist + if not os.path.isfile("src/components/%s_load.cxx"%full_pkg_name): + print "::: INFO Creating src/components/%s_load.cxx"%full_pkg_name + loadFile = open("src/components/%s_load.cxx"%full_pkg_name,'w') + loadFile.writelines(""" +#include "GaudiKernel/LoadFactoryEntries.h" +LOAD_FACTORY_ENTRIES(%(pkg)s) +"""%d) + loadFile.flush() + loadFile.close() + + if not os.path.isfile("src/components/%s_entries.cxx"%full_pkg_name): + print "::: INFO Creating src/components/%s_entries.cxx"%full_pkg_name + loadFile = open("src/components/%s_entries.cxx"%full_pkg_name,'w') + if len(namespace_begin)>0: + d["namespace"] = args.algname.split("::")[0] + loadFile.writelines(""" +#include "GaudiKernel/DeclareFactoryEntries.h" + +#include "../%(namespace_klass)s.h" + +DECLARE_NAMESPACE_ALGORITHM_FACTORY(%(namespace)s, %(klass)s ) + +DECLARE_FACTORY_ENTRIES( %(pkg)s ) +{ + DECLARE_NAMESPACE_ALGORITHM(%(namespace)s, %(klass)s ); +} +"""%d) + else: + loadFile.writelines(""" +#include "GaudiKernel/DeclareFactoryEntries.h" + 
+#include "../%(namespace_klass)s.h" + +DECLARE_ALGORITHM_FACTORY( %(klass)s ) + +DECLARE_FACTORY_ENTRIES( %(pkg)s ) +{ + DECLARE_ALGORITHM( %(klass)s ); +} +"""%d) + loadFile.flush() + loadFile.close() + else: + #first check algorithm not already in _entries file + inFile=False + for line in open("src/components/%s_entries.cxx"%full_pkg_name): + if len(namespace_begin)==0 and "DECLARE_ALGORITHM" in line and d["klass"] in line: inFile=True + if len(namespace_begin)>0 and "DECLARE_NAMESPACE_ALGORITHM" in line and d["klass"] in line and d["namespace"]: inFile=True + + if not inFile: + print "::: INFO Adding %s to src/components/%s_entries.cxx"% (args.algname,full_pkg_name) + nextAdd=False + for line in fileinput.input("src/components/%s_entries.cxx"%full_pkg_name, inplace=1): + if nextAdd and not "{" in line: + nextAdd=False + if len(namespace_begin)>0: + print """ DECLARE_NAMESPACE_ALGORITHM(%(namespace)s, %(klass)s );"""%d + else: + print """ DECLARE_ALGORITHM( %(klass)s );"""%d + if line.startswith("DECLARE_FACTORY_ENTRIES"): + nextAdd=True + if len(namespace_begin)>0: + + print """ +#include "../%(namespace_klass)s.h" +DECLARE_NAMESPACE_ALGORITHM_FACTORY( %(namespace)s, %(klass)s ) +"""%d + print """ +#include "../%(namespace_klass)s.h" +DECLARE_ALGORITHM_FACTORY( %(klass)s ) +"""%d + print line, + + #to finish up, call cmt config so that the new algorithm will be captured and genconf run on it + cwd = os.getcwd() + try: + os.chdir('cmt') + _ = commands.getstatusoutput('cmt config') + finally: + os.chdir(cwd) diff --git a/Tools/PyUtils/python/scripts/cmt_newpyalg.py b/Tools/PyUtils/python/scripts/cmt_newpyalg.py new file mode 100644 index 0000000000000000000000000000000000000000..0fd1af9c31037365c1373f2e9b82825eeb9c4c4d --- /dev/null +++ b/Tools/PyUtils/python/scripts/cmt_newpyalg.py @@ -0,0 +1,139 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + +# @file PyUtils.scripts.cmt_newalg +# @purpose streamline and ease the creation of new athena algs +# @author Will Buttinger +# @date September 2014 + +#Note - this code could use a serious rewrite, I just hacked it together to get something working + +from __future__ import with_statement + +__version__ = "$Revision: 623437 $" +__author__ = "Will Buttinger" +__doc__ = "streamline and ease the creation of new python algorithms" + +### imports ------------------------------------------------------------------- +import os +import textwrap +import commands +import PyUtils.acmdlib as acmdlib +import fileinput + +class Templates: + pyalg_template = """\ +# @file: %(pkg)s/python/%(fname)s +# @purpose: <put some purpose here> +# @author: <put your name here> + +__doc__ = 'some documentation here' +__version__ = '$Revision: 623437 $' +__author__ = '<put your name here>' + +import AthenaCommon.SystemOfUnits as Units +import AthenaPython.PyAthena as PyAthena +from AthenaPython.PyAthena import StatusCode + +class %(klass)s (PyAthena.Alg): + 'put some documentation here' + def __init__(self, name='%(klass)s', **kw): + ## init base class + kw['name'] = name + super(%(klass)s, self).__init__(**kw) + + ## properties and data members + #self.foo = kw.get('foo', 10) # default value + return + + def initialize(self): + self.msg.info('==> initialize...') + return StatusCode.Success + + def execute(self): + #here's an example of how to loop over something from xAOD + #for electron in self.evtStore["ElectronCollection"]: print electron.pt() + + return StatusCode.Success + + def finalize(self): + self.msg.info('==> finalize...') + 
diff --git a/Tools/PyUtils/python/scripts/gen_klass.py b/Tools/PyUtils/python/scripts/gen_klass.py
index 22500e6bb5b53a4a8926198ffe1498a7b27b84f7..8f19185847408bbae7d7a53122d60e4e23251a4e 100644
--- a/Tools/PyUtils/python/scripts/gen_klass.py
+++ b/Tools/PyUtils/python/scripts/gen_klass.py
@@ -6,7 +6,7 @@
 # @author Sebastien Binet
 # @date April 2008
 
-__version__ = "$Revision: 615061 $"
+__version__ = "$Revision: 615060 $"
 __author__ = "Sebastien Binet"
 __doc__ = """\
 helper script to generate header and cxx files of various athena
@@ -873,7 +873,7 @@ StatusCode %(klass)s::finalize()
 # @author: Sebastien Binet <binet@cern.ch>
 
 __doc__ = 'some documentation here'
-__version__ = '$Revision: 615061 $'
+__version__ = '$Revision: 615060 $'
 __author__ = 'Sebastien Binet <binet@cern.ch>'
 
 import AthenaCommon.SystemOfUnits as Units
@@ -911,7 +911,7 @@ class %(klass)s (PyAthena.Alg):
 # @author: Sebastien Binet <binet@cern.ch>
 
 __doc__ = 'some documentation here'
-__version__ = '$Revision: 615061 $'
+__version__ = '$Revision: 615060 $'
 __author__ = 'Sebastien Binet <binet@cern.ch>'
 
 import AthenaCommon.SystemOfUnits as Units
@@ -946,7 +946,7 @@ class %(klass)s (PyAthena.Svc):
 # @author: Sebastien Binet <binet@cern.ch>
 
 __doc__ = 'some 
documentation here' -__version__ = '$Revision: 615061 $' +__version__ = '$Revision: 615060 $' __author__ = 'Sebastien Binet <binet@cern.ch>' import AthenaCommon.SystemOfUnits as Units @@ -981,7 +981,7 @@ class %(klass)s (PyAthena.AlgTool): # @author: Sebastien Binet <binet@cern.ch> __doc__ = 'some documentation here' -__version__ = '$Revision: 615061 $' +__version__ = '$Revision: 615060 $' __author__ = 'Sebastien Binet <binet@cern.ch>' import AthenaCommon.SystemOfUnits as Units diff --git a/Tools/PyUtils/python/scripts/tc_find_pkg.py b/Tools/PyUtils/python/scripts/tc_find_pkg.py index 42783c403c91f04bc35d655c553ab402f72acc22..ed7a43befe810475436c6df961d65f832fcbfa87 100644 --- a/Tools/PyUtils/python/scripts/tc_find_pkg.py +++ b/Tools/PyUtils/python/scripts/tc_find_pkg.py @@ -2,15 +2,10 @@ # @file PyUtils.scripts.tc_find_pkg # @purpose find a package using TC-2 -# @author Sebastien Binet -# @date February 2010 +# @author Sebastien Binet, Graeme Stewart +# @version $Id$ -__version__ = "$Revision: 279982 $" -__doc__ = "find a package using TC-2." -__author__ = "Sebastien Binet" - - -### imports ------------------------------------------------------------------- +import sys import PyUtils.acmdlib as acmdlib @acmdlib.command(name='tc.find-pkg') @@ -21,17 +16,19 @@ def main(args): """find a package using TagCollector-2""" import PyUtils.AmiLib as amilib - client = amilib.Client() - - pkgs = args.pkg - if isinstance(pkgs, basestring): - pkgs = [pkgs] - - for pkg in pkgs: - client.msg.info('looking for [%s]...', pkg) - pkg = client.find_pkg(pkg, check_tag=False) - client.msg.info(' found: pkg= [%s]', pkg['packagePath']+pkg['packageName']) - #client.msg.info(' tag= [%s]', tag) - - return 0 + try: + client = amilib.Client() + pkgs = args.pkg + if isinstance(pkgs, basestring): + pkgs = [pkgs] + + for pkg in pkgs: + client.msg.info('looking for [%s]...', pkg) + pkg = client.find_pkg(pkg, check_tag=False) + client.msg.info(' found: pkg= [%s]', pkg['packagePath']+pkg['packageName']) + + return 0 + except amilib.PyUtilsAMIException, e: + print >>sys.stderr, e + sys.exit(1) diff --git a/Tools/PyUtils/python/scripts/tc_find_tag.py b/Tools/PyUtils/python/scripts/tc_find_tag.py index db1e5ec31e24a69cd39a92ae792c12a9d9fd850c..3ff28bd67222042db2de4da9ab5396189e407ba2 100644 --- a/Tools/PyUtils/python/scripts/tc_find_tag.py +++ b/Tools/PyUtils/python/scripts/tc_find_tag.py @@ -2,15 +2,10 @@ # @file PyUtils.scripts.tc_find_tag # @purpose Find package version taking into account project dependencies -# @author Frank Winklmeier -# @date Novemeber 2011 +# @author Frank Winklmeier, Graeme Stewart +# @version $Id$ -__version__ = "$Revision:$" -__doc__ = "Find package version taking into account project dependencies." 
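
The tc_find_tag.py rework below adopts the same error-handling convention just introduced in tc_find_pkg.py above: all AMI client work moves inside one try block, and PyUtilsAMIException is reported on stderr with a non-zero exit instead of escaping as a raw traceback. The shared shape, reduced to a sketch (the query body is a placeholder):

    import sys
    import PyUtils.AmiLib as amilib

    def main(args):
        # one try block around all AMI work; errors go to stderr,
        # the process exits non-zero
        try:
            client = amilib.Client()
            # ... AMI queries go here ...
            return 0
        except amilib.PyUtilsAMIException, e:
            print >>sys.stderr, e
            sys.exit(1)
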
-__author__ = "Frank Winklmeier" - - -### imports ------------------------------------------------------------------- +import sys import PyUtils.acmdlib as acmdlib @acmdlib.command(name='tc.find-tag') @@ -28,28 +23,36 @@ import PyUtils.acmdlib as acmdlib action='store', help='Release [default: latest]') +## @brief find package version taking into account project dependencies def main(args): - """find package version taking into account project dependencies""" import PyUtils.AmiLib as amilib - client = amilib.Client() - - pkgs = args.pkg - if isinstance(pkgs, basestring): - pkgs = [pkgs] - - if not args.release: - rel = client.get_releases(args.project) - if len(rel)==0: - raise RuntimeError('No release for project',args.project) - args.release = rel[-1] - - client.msg.info('searching package tags for [%s] in release [%s]' % (','.join(pkgs),args.release)) - pkg_list = [client.get_version_of_pkg_with_deps(pkg, args.project, args.release) for pkg in pkgs] - pkg_list = sum(pkg_list,[]) # Flatten list in case more than one version per package - client.msg.info('Found %d package(s)' % len(pkg_list)) - for p in pkg_list: - print(' '.join(p)) + + try: + client = amilib.Client() + + pkgs = args.pkg + if isinstance(pkgs, basestring): + pkgs = [pkgs] + + if not args.release: + rel = client.get_releases(args.project) + if len(rel)==0: + print >>sys.stderr, 'No release for project', args.project + sys.exit(1) + args.release = rel[-1] + + client.msg.info('searching package tags for [%s] in release [%s]' % (','.join(pkgs),args.release)) + pkg_list = [client.get_version_of_pkg_with_deps(pkg, args.project, args.release) for pkg in pkgs] + pkg_list = sum(pkg_list,[]) # Flatten list in case more than one version per package + client.msg.info('Found %d package(s)' % len(pkg_list)) + for p in pkg_list: + print(' '.join(p)) + + return 0 + + except amilib.PyUtilsAMIException, e: + print >>sys.stderr, e + sys.exit(1) - return 0 diff --git a/Tools/PyUtils/python/scripts/tc_show_clients.py b/Tools/PyUtils/python/scripts/tc_show_clients.py index 18b1ec4c5836209994cb2cab4847e4af201fada1..2b057103845a8d55c6514dae87b598b33087ab39 100644 --- a/Tools/PyUtils/python/scripts/tc_show_clients.py +++ b/Tools/PyUtils/python/scripts/tc_show_clients.py @@ -5,7 +5,7 @@ # @author Sebastien Binet # @date May 2011 -__version__ = "$Revision: 538932 $" +__version__ = "$Revision: 636803 $" __doc__ = "show the clients of a package using TC-2" __author__ = "Sebastien Binet" @@ -68,7 +68,7 @@ def main(args): for full_name in sorted(_all_clients.keys()): v = _all_clients[full_name] if len(v) > 1: - versions = client.get_version_of_pkg(full_name, args.release) + versions = client.get_pkg_info(full_name, args.release, resultKey="packageTag") if len(versions) != 1: client.msg.info('found multiple versions for package [%s]: %r', full_name, versions) diff --git a/Tools/PyUtils/python/scripts/tc_submit_tag.py b/Tools/PyUtils/python/scripts/tc_submit_tag.py index d38a4339100b0c9c4455ef4175646abbfa925f32..e77cfb8a5ce8730741e678f18876cce01c0e8a97 100644 --- a/Tools/PyUtils/python/scripts/tc_submit_tag.py +++ b/Tools/PyUtils/python/scripts/tc_submit_tag.py @@ -5,7 +5,7 @@ # @author Sebastien Binet # @date February 2010 -__version__ = "$Revision: 611656 $" +__version__ = "$Revision: 636803 $" __doc__ = "Submit one or more TAGs to TagCollector." 
__author__ = "Sebastien Binet, Frank Winklmeier" @@ -15,9 +15,10 @@ import readline import getpass import os import os.path as osp +import sys import PyUtils.acmdlib as acmdlib -import PyUtils.AmiLib as amilib +import pyAMI.exception import PyCmt.Cmt as cmt ### functions ----------------------------------------------------------------- @@ -44,35 +45,6 @@ def query_option(opt_name): if value == '': return None return value - -def _get_projects(client, release, pkg): - """retrieve the list of projects from AMI for a given release and package - """ - projects = [] - full_pkg_name = pkg['packagePath']+pkg['packageName'] # pkg['packageTag'] - try: - res = client.exec_cmd(cmd='TCGetPackageVersionHistory', - fullPackageName=full_pkg_name, - releaseName=release) - rows = res.rows() - if isinstance(rows, dict): - rows = [rows] - ## print "---" - ## print list(rows) - ## print "---" - for row in rows: - if row.get('releaseName')!=release: continue # skip irrelevant releases - v = row.get('groupName') - if not v in projects: - projects.append(v) - - if not projects: - print "::: no project found for package [%s] and release [%s]" % ( - full_pkg_name, - release) - except amilib.PyAmi.AMI_Error, err: - pass - return projects def query_project(projects, release, pkg): """query the project(s) to submit to""" @@ -115,26 +87,26 @@ def query_release(releases, project): return ','.join(releases) return choice -def submit_tag(client, args, pkg, tag): - """Submit tag""" - - cmd_args = {} - cmd_args['action'] = 'update' - cmd_args['fullPackageName'] = pkg - cmd_args['packageTag'] = tag - cmd_args['autoDetectChanges'] = 'yes' - - if args.justification: cmd_args['justification'] = args.justification - if args.bug: cmd_args['bugReport'] = args.bug - if args.bundle: cmd_args['bundleName'] = args.bundle - if args.no_mail: cmd_args['noMail'] = '' +def submit_tag(client, args, pkg, tag, dryrun=None): + """Submit tag""" + + cmd_args = {} + cmd_args['-action'] = 'update' + cmd_args['-fullPackageName'] = '"'+pkg+'"' + cmd_args['-packageTag'] = '"'+tag+'"' + cmd_args['-autoDetectChanges'] = 'yes' + + if args.justification: cmd_args['-justification'] = '"'+args.justification+'"' + if args.bug: cmd_args['-bugReport'] = '"'+args.bug+'"' + if args.bundle: cmd_args['-bundleName'] = '"'+args.bundle+'"' + if args.no_mail: cmd_args['-noMail'] = '' - for i,p in enumerate(args.project): - cmd_args['groupName'] = p - cmd_args['releaseName'] = args.release[i] - ok = client.exec_cmd(cmd='TCSubmitTagApproval', args=cmd_args) - if ok: - print "%s %s submitted to %s %s" % (pkg,tag,p,args.release[i]) + for i,p in enumerate(args.project): + cmd_args['-groupName'] = '"'+p+'"' + cmd_args['-releaseName'] = args.release[i] + ok = client.exec_cmd(cmd='TCSubmitTagApproval', args=cmd_args, dryrun=dryrun) + if ok: + print "%s %s submitted to %s %s" % (pkg,tag,p,args.release[i]) @acmdlib.command(name='tc.submit-tag') @acmdlib.argument( @@ -150,11 +122,11 @@ def submit_tag(client, args, pkg, tag): action='store', help='justification for tag request') @acmdlib.argument( - '-s', '--savannah', '--bug', + '-s', '--bug', dest='bug', action='store', metavar='BUG', - help='bug report number') + help='Jira issue') @acmdlib.argument( '-b','--bundle', action='store', @@ -165,7 +137,7 @@ def submit_tag(client, args, pkg, tag): default=False, help="do not send confirmation email") @acmdlib.argument( - '--dry-run', + '--dryrun', action='store_true', default=False, help='switch to simulate the commands but not actually send the requests' @@ -176,6 +148,7 @@ def 
submit_tag(client, args, pkg, tag): metavar='TAG', help="""\ (list of package) tags to submit or a file containing that list""") + def main(args): """submit one or more package tags to TagCollector @@ -196,119 +169,115 @@ def main(args): """ import PyUtils.AmiLib as amilib - client = amilib.Client() - def select_tag(): - value = raw_input('Please select (q to quit): ') - if value.lower() == 'q': - raise StopIteration - return int(value) + try: + client = amilib.Client() - # create a list of (pkg,tag) with full package path - pkgs = [] - - for pkg in args.pkgs: - # a file ? - if osp.exists(pkg): - fname = pkg - print "::: taking tags from file [%s]..." % (fname,) - for l in open(fname, 'r'): - l = l.strip() - if l: - print " - [%s]" % (l,) - pkgs.append(l) - else: - pkgs.append(pkg) - - pkg_list = [client.find_pkg(pkg, cbk_fct=select_tag, check_tag=False) for pkg in pkgs] - - # setup history - readline.set_history_length(10) - - # query release if project is known - if args.project and not args.release: - for p in args.project.split(','): - rel = client.get_open_releases(p) - if len(rel)==0: - continue - if not args.release: - args.release = query_release(rel, p) + # create a list of (pkg,tag) with full package path + pkgs = [] + + for pkg in args.pkgs: + # a file ? + if osp.exists(pkg): + fname = pkg + print "::: taking tags from file [%s]..." % (fname,) + for l in open(fname, 'r'): + l = l.strip() + if l: + print " - [%s]" % (l,) + pkgs.append(l) else: - args.release += (',%s' % query_release(rel, p)) - if args.release and len(args.release.split(',')) == 1: - _release = args.release.split(',')[0] - args.release = ','.join([_release]*len(pkg_list)) - # adjust the project list too - if args.project and len(args.project.split(',')) == 1: - args.project = ','.join([args.project.split(',')[0]]*len(pkg_list)) - - # query project if release is known - if args.release and not args.project: - _releases = args.release.split(',') - _projects = [] - rel = _releases[0] - for pkg in pkg_list: - proj = _get_projects(client, rel, pkg) - if len(proj)==0: - _projects.append(None) - continue - v = query_project(proj, rel, pkg) - _projects.append(v) - pass # pkgs - if not args.project: - args.project = ','.join(_projects) - else: - args.project += ','+','.join(_projects) - pass - - # Find latest tag if needed - print '-'*80 - for p in pkg_list: - if not 'packageTag' in p: - pkg = (p['packagePath']+p['packageName']).strip('/') # CMTise path - p['packageTag'] = cmt.CmtWrapper().get_latest_pkg_tag(pkg) - print 'Using latest tag %s' % (p['packageTag']) - - # query for missing options - for o in ('project', 'release', 'justification', 'bug',): - value = getattr(args, o) - if value: - print '%s : %s' % (o, value) + pkgs.append(pkg) + + pkg_list = [client.find_pkg(pkg, check_tag=False) for pkg in pkgs] + + # setup history + readline.set_history_length(10) + + # query release if project is known + if args.project and not args.release: + for p in args.project.split(','): + rel = client.get_open_releases(p) + if len(rel)==0: + continue + if not args.release: + args.release = query_release(rel, p) + else: + args.release += (',%s' % query_release(rel, p)) + if args.release and len(args.release.split(',')) == 1: + _release = args.release.split(',')[0] + args.release = ','.join([_release]*len(pkg_list)) + # adjust the project list too + if args.project and len(args.project.split(',')) == 1: + args.project = ','.join([args.project.split(',')[0]]*len(pkg_list)) + + # query project if release is known + if args.release and 
not args.project: + _releases = args.release.split(',') + _projects = [] + rel = _releases[0] + for pkg in pkg_list: + proj = client.get_pkg_info(pkg['packageName'], rel, resultKey="groupName") + if len(proj)==0: + _projects.append(None) + continue + v = query_project(proj, rel, pkg) + _projects.append(v) + pass # pkgs + if not args.project: + args.project = ','.join(_projects) + else: + args.project += ','+','.join(_projects) + pass + + # Find latest tag if needed + print '-'*80 + for p in pkg_list: + if not 'packageTag' in p: + pkg = (p['packagePath']+p['packageName']).strip('/') # CMTise path + p['packageTag'] = cmt.CmtWrapper().get_latest_pkg_tag(pkg) + print 'Using latest tag %s' % (p['packageTag']) + + # query for missing options + for o in ('project', 'release', 'justification', 'bug',): + value = getattr(args, o) + if value: + print '%s : %s' % (o, value) + else: + setattr(args, o, query_option(o)) + print '-'*80 + + args.project = args.project.split(',') + args.release = args.release.split(',') + if len(args.project) != len(args.release): + raise RuntimeError( + 'Number of projects %s and releases %s do not match' % + (args.project, args.release) + ) + + # If only one tag given, submit this tag to all releases + if len(pkg_list)==1: pkg_list = pkg_list*len(args.release) + + choice = raw_input("Submit tag? [Y/n] ") + ok = len(choice)==0 or choice.upper()=="Y" + + releases = args.release[:] + projects = args.project[:] + + exitcode = 0 + if ok: + # Submit tag request + for p,rel,proj in zip(pkg_list, releases, projects): + args.release = [rel] + args.project = [proj] + submit_tag(client, args, + p['packagePath']+p['packageName'],p['packageTag'], dryrun=args.dryrun) else: - setattr(args, o, query_option(o)) - print '-'*80 - - args.project = args.project.split(',') - args.release = args.release.split(',') - if len(args.project) != len(args.release): - raise RuntimeError( - 'Number of projects %s and releases %s do not match' % - (args.project, args.release) - ) - - # If only one tag given, submit this tag to all releases - if len(pkg_list)==1: pkg_list = pkg_list*len(args.release) - - choice = raw_input("Submit tag? 
[Y/n] ") - ok = len(choice)==0 or choice.upper()=="Y" - - if args.dry_run: - client.dry_run = args.dry_run - - releases = args.release[:] - projects = args.project[:] - - exitcode = 0 - if ok: - # Submit tag request - for p,rel,proj in zip(pkg_list, releases, projects): - args.release = [rel] - args.project = [proj] - submit_tag(client, args, - p['packagePath']+p['packageName'],p['packageTag']) - else: - print "Tag submission aborted" - exitcode = 1 - - return exitcode + print "Tag submission aborted" + exitcode = 1 + + return exitcode + except amilib.PyUtilsAMIException, e: + print >>sys.stderr, e + sys.exit(1) diff --git a/Tools/PyUtils/test/PyUtils.xml b/Tools/PyUtils/test/PyUtils.xml index 77ef09804d8e51283374110b9b644dad7a0d0fbc..0612cbbd783738d8712b34ea1645b27c767a2abb 100755 --- a/Tools/PyUtils/test/PyUtils.xml +++ b/Tools/PyUtils/test/PyUtils.xml @@ -15,10 +15,10 @@ <TEST name="pyutils.rootutils" type="script" suite="pyutils"> <package_atn>Tools/PyUtils</package_atn> - <options_atn>python -ttc 'import PyUtils.RootUtils as ru; ru._test_main()'</options_atn> - <timelimit>30</timelimit> - <author> Sebastien Binet </author> - <mailto> binet@cern.ch </mailto> + <options_atn>${ATN_PACKAGE}/test/test_RootUtils.py</options_atn> + <timelimit>5</timelimit> + <author> Graeme Stewart </author> + <mailto> graeme.andrew.stewart@cern.ch </mailto> <expectations> <successMessage>OK</successMessage> <returnValue>0</returnValue> diff --git a/Tools/PyUtils/test/test_RootUtils.py b/Tools/PyUtils/test/test_RootUtils.py new file mode 100755 index 0000000000000000000000000000000000000000..7d85e122a368a9ba729b7d65bb24f5a1df6f11fc --- /dev/null +++ b/Tools/PyUtils/test/test_RootUtils.py @@ -0,0 +1,16 @@ +#! /usr/bin/env python + +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Simple test wrapper for PyUtils.RootUtils +# +import sys +import unittest + +class rootUtilsTests(unittest.TestCase): + + def test_rootUtilsBasic(self): + import PyUtils.RootUtils as ru + self.assertEqual(ru._test_main(), True) + +if __name__ == '__main__': + unittest.main()
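
The new test_RootUtils.py relies on _test_main() now returning True (the RootUtils.py change above), so the ATN harness gets both the 'OK' success message and a meaningful return value. The same wrapper pattern extends naturally to other PyUtils modules; a sketch of a companion test (module under test and assertion are illustrative only):

    #! /usr/bin/env python
    # hypothetical companion test, following the test_RootUtils.py pattern above
    import unittest

    class helpersTests(unittest.TestCase):
        def test_shutUpExists(self):
            import PyUtils.Helpers as H
            self.assertTrue(hasattr(H, 'ShutUp'))

    if __name__ == '__main__':
        unittest.main()
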