From 6c4a1fa3932367dc099b49c9a2e54b712e6c02d7 Mon Sep 17 00:00:00 2001
From: Walter Lampl <Walter.Lampl@cern.ch>
Date: Wed, 27 Apr 2016 12:30:21 +0200
Subject: [PATCH] rewrite of get-tag-diff.py to be cmt and cmake compatible
 (PyUtils-00-14-75)

2016-04-27 Walter Lampl <walter.lampl@cern.ch>
	* rewrite of get-tag-diff.py to be cmt and cmake compatible
	* tag PyUtils-00-14-75

2016-04-12  scott snyder  <snyder@bnl.gov>

	* Tagging PyUtils-00-14-74.
	* python/RootUtils.py: Remove diagnostics.  Keep temp files open
	through the end of program execution to avoid cling caching
	issues.

2016-04-11  scott snyder  <snyder@bnl.gov>

	* Tagging PyUtils-00-14-73.
	* Still working on ATN test failure.  Seems to be related to
	ROOT-7059.

2016-04-11 Will Buttinger <will@cern.ch>
	* bin/getAMIDatasetParameters.py: improved default printout, preserve order of specified datasets, and preserve comments/blank lines from file in the inDsTxt
	* Tagging PyUtils-00-14-72
...
(Long ChangeLog diff - truncated)
---
 Tools/PyUtils/CMakeLists.txt                  |  49 ++
 Tools/PyUtils/bin/checkMetaSG.py              |  17 +-
 Tools/PyUtils/bin/checkxAOD.py                |   4 +-
 Tools/PyUtils/bin/dump-athfile.py             |  17 +
 Tools/PyUtils/bin/get-tag-diff.py             | 248 +++++-
 Tools/PyUtils/bin/getAMIDatasetParameters.py  | 399 ++++++++++
 Tools/PyUtils/bin/getTagDiff.py               | 708 ------------------
 Tools/PyUtils/bin/lstags                      | 109 ++-
 Tools/PyUtils/cmt/requirements                |   1 +
 Tools/PyUtils/doc/mainpage.h                  |   0
 Tools/PyUtils/python/AthFile/impl.py          |  47 +-
 Tools/PyUtils/python/AthFileLite.py           |   0
 Tools/PyUtils/python/Cmt.py                   |   0
 Tools/PyUtils/python/Dso.py                   |   2 +-
 Tools/PyUtils/python/FilePeekerTool.py        |  39 +-
 Tools/PyUtils/python/Helpers.py               |  14 +-
 Tools/PyUtils/python/PoolFile.py              |   0
 Tools/PyUtils/python/RootUtils.py             |  23 +-
 Tools/PyUtils/python/WorkAreaLib.py           |  56 +-
 Tools/PyUtils/python/__init__.py              |   0
 Tools/PyUtils/python/scripts/__init__.py      |   3 +
 Tools/PyUtils/python/scripts/cmt_newalg.py    |  13 +-
 .../python/scripts/cmt_newanalysisalg.py      | 178 ++++-
 .../python/scripts/cmt_newanalysisapp.py      | 197 +++++
 .../PyUtils/python/scripts/cmt_newasgtool.py  | 604 +++++++++++++++
 Tools/PyUtils/python/scripts/cmt_newjobo.py   |  37 +-
 Tools/PyUtils/python/scripts/cmt_newpkg.py    |   9 +-
 .../PyUtils/python/scripts/diff_root_files.py |   2 +-
 .../PyUtils/python/scripts/get_tagsvn_diff.py | 125 ++++
 Tools/PyUtils/python/scripts/tc_submit_tag.py |   7 +-
 Tools/PyUtils/test/PyUtils.xml                |   0
 Tools/PyUtils/test/test_RootUtils.py          |   8 +
 32 files changed, 2100 insertions(+), 816 deletions(-)
 create mode 100644 Tools/PyUtils/CMakeLists.txt
 create mode 100755 Tools/PyUtils/bin/getAMIDatasetParameters.py
 delete mode 100755 Tools/PyUtils/bin/getTagDiff.py
 mode change 100755 => 100644 Tools/PyUtils/cmt/requirements
 mode change 100755 => 100644 Tools/PyUtils/doc/mainpage.h
 mode change 100755 => 100644 Tools/PyUtils/python/AthFileLite.py
 mode change 100755 => 100644 Tools/PyUtils/python/Cmt.py
 mode change 100755 => 100644 Tools/PyUtils/python/Dso.py
 mode change 100755 => 100644 Tools/PyUtils/python/Helpers.py
 mode change 100755 => 100644 Tools/PyUtils/python/PoolFile.py
 mode change 100755 => 100644 Tools/PyUtils/python/__init__.py
 create mode 100644 Tools/PyUtils/python/scripts/cmt_newanalysisapp.py
 create mode 100644 Tools/PyUtils/python/scripts/cmt_newasgtool.py
 create mode 100644 Tools/PyUtils/python/scripts/get_tagsvn_diff.py
 mode change 100755 => 100644 Tools/PyUtils/test/PyUtils.xml

diff --git a/Tools/PyUtils/CMakeLists.txt b/Tools/PyUtils/CMakeLists.txt
new file mode 100644
index 00000000000..5c44df806fc
--- /dev/null
+++ b/Tools/PyUtils/CMakeLists.txt
@@ -0,0 +1,49 @@
+################################################################################
+# Package: PyUtils
+################################################################################
+
+# Declare the package name:
+atlas_subdir( PyUtils )
+
+# Declare the package's dependencies:
+atlas_depends_on_subdirs( PUBLIC
+                          Control/RootUtils
+                          Tools/PyCmt )
+
+# External dependencies:
+find_package( PythonLibs )
+find_package( ROOT COMPONENTS Core PyROOT Tree MathCore Hist RIO pthread )
+
+# Install files from the package:
+atlas_install_python_modules( python/*.py python/AthFile python/scripts )
+atlas_install_scripts( bin/abootstrap-wkarea.py bin/acmd.py bin/atl-gen-athena-d3pd-reader bin/avn.py bin/build_cmt_pkg_db.py bin/checkFile.py bin/checkPlugins.py bin/checkSG.py bin/checkMetaSG.py bin/checkTP.py bin/checkTag.py bin/checkxAOD.py bin/cmtClients.py bin/diff-athfile bin/diff-jobo-cfg.py bin/diffConfigs.py bin/diffPoolFiles.py bin/diffTAGTree.py bin/dlldep.py bin/dso-stats.py bin/dump-athfile.py bin/dumpAthfilelite.py bin/filter-and-merge-d3pd.py bin/getAMIDatasetParameters.py bin/gen-typereg-dso.py bin/gen_klass.py bin/get-tag-diff.py bin/getTagDiff.py bin/gprof2dot bin/issues bin/lstags bin/magnifyPoolFile.py bin/merge-poolfiles.py bin/pep8.py bin/pkgco.py bin/pool_extractFileIdentifier.py bin/pool_insertFileToCatalog.py bin/print_auditor_callgraph.py bin/pyroot.py bin/setupWorkArea.py bin/tabnanny-checker.py bin/tcSubmitTag.py bin/vmem-sz.py )
+
+# Aliases:
+atlas_add_alias( checkFile "checkFile.py" )
+atlas_add_alias( checkMetaSG "checkMetaSG.py" )
+atlas_add_alias( dso-stats "dso-stats.py" )
+atlas_add_alias( gen_klass "gen_klass.py" )
+atlas_add_alias( merge-poolfiles "merge-poolfiles.py" )
+atlas_add_alias( diffConfigs "diffConfigs.py" )
+atlas_add_alias( filter-and-merge-d3pd "filter-and-merge-d3pd.py" )
+atlas_add_alias( avn "avn.py" )
+atlas_add_alias( diffPoolFiles "diffPoolFiles.py" )
+atlas_add_alias( print_auditor_callgraph "print_auditor_callgraph.py" )
+atlas_add_alias( dump-athfile "dump-athfile.py" )
+atlas_add_alias( tabnanny-checker "tabnanny-checker.py" )
+atlas_add_alias( abootstrap-wkarea "abootstrap-wkarea.py" )
+atlas_add_alias( pyroot "pyroot.py" )
+atlas_add_alias( checkTag "checkTag.py" )
+atlas_add_alias( diffTAGTree "diffTAGTree.py" )
+atlas_add_alias( checkxAOD "checkxAOD.py" )
+atlas_add_alias( get-tag-diff "get-tag-diff.py" )
+atlas_add_alias( tcSubmitTag "tcSubmitTag.py" )
+atlas_add_alias( pkgco "pkgco.py" )
+atlas_add_alias( checkSG "checkSG.py" )
+atlas_add_alias( diff-jobo-cfg "diff-jobo-cfg.py" )
+atlas_add_alias( acmd "acmd.py" )
+atlas_add_alias( vmem-sz "vmem-sz.py" )
+atlas_add_alias( build_cmt_pkg_db "build_cmt_pkg_db.py" )
+atlas_add_alias( setupWorkArea "setupWorkArea.py" )
+atlas_add_alias( tc-submit-tag "tcSubmitTag.py" )
+
diff --git a/Tools/PyUtils/bin/checkMetaSG.py b/Tools/PyUtils/bin/checkMetaSG.py
index b50a4ec7179..044641e494f 100755
--- a/Tools/PyUtils/bin/checkMetaSG.py
+++ b/Tools/PyUtils/bin/checkMetaSG.py
@@ -13,7 +13,7 @@
 # @endcode
 #
 
-__version__ = "$Revision: 621253 $"
+__version__ = "$Revision: 724150 $"
 __author__  = "Will Buttinger <will@cern.ch>"
 
 import sys
@@ -56,8 +56,21 @@ if __name__ == "__main__":
         try:
             from PyUtils import AthFile
             print "## checking [%s]..."%fileName
-            metadata = AthFile.fopen(fileName).fileinfos['metadata']
+            fileInfo = AthFile.fopen(fileName).fileinfos
             print "="*91
+            #do the table of extra information
+            print "## Content of AthFile.fopen(%s).fileinfos"%fileName
+            print "="*91
+            exclude_list = ['stream_tags','file_name','tag_info','det_descr_tags', 'evt_number', 'metadata', 'metadata','metadata_items', 'lumi_block','eventdata_items' ]
+            print "%30s%-28s" % ("key", " | value")
+            print "%30s%s%-25s" % ("-"*30, "-+-", "-"*(28-3))
+            for a,b in fileInfo.items():
+               if a in exclude_list: continue
+               print "%30s%s%-25s" % (a, " | ", b) 
+            metadata = fileInfo['metadata']
+            print ""
+            print "="*91
+            print "## IOVMetaData (fileinfos['metadata']):"
             print "%30s%-28s%-10s%-30s" % ("folder", " | key "," | type "," | value")
             print "%30s%s%-25s%s%-7s%s%-30s" % ("-"*30, "-+-", "-"*(28-3),"-+-","-"*(10-3),"-+-","-"*(20))
             for metaFolder,metaObj in metadata.items(): #metaObj may be dict, list (occurs with multi IOV), or none... so far only support dict FIXME
diff --git a/Tools/PyUtils/bin/checkxAOD.py b/Tools/PyUtils/bin/checkxAOD.py
index 830186ae539..8f8bdfe118a 100755
--- a/Tools/PyUtils/bin/checkxAOD.py
+++ b/Tools/PyUtils/bin/checkxAOD.py
@@ -2,14 +2,14 @@
 
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 #
-# $Id: checkxAOD.py 717322 2016-01-12 11:17:21Z schaffer $
+# $Id: checkxAOD.py 717311 2016-01-12 10:51:45Z schaffer $
 #
 # This is a modified version of PyUtils/bin/checkFile.py. It has been taught
 # how to sum up the sizes of all the branches belonging to a single xAOD
 # object/container.
 #
 
-__version__ = "$Revision: 717322 $"
+__version__ = "$Revision: 717311 $"
 __author__  = "Sebastien Binet <binet@cern.ch>, " \
     "Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>, " \
     "RD Schaffer R.D.Schaffer@cern.ch"
diff --git a/Tools/PyUtils/bin/dump-athfile.py b/Tools/PyUtils/bin/dump-athfile.py
index a2823d84f59..348ee0fb47e 100755
--- a/Tools/PyUtils/bin/dump-athfile.py
+++ b/Tools/PyUtils/bin/dump-athfile.py
@@ -115,6 +115,7 @@ if __name__ == "__main__":
         print fmt % ('run type',       f.infos['run_type'])
         print fmt % ('evt number',     f.infos['evt_number'])
         print fmt % ('evt type',       f.infos['evt_type'])
+        print fmt % ('mc channel #',   f.infos['mc_channel_number'])
         print fmt % ('lumi block',     f.infos['lumi_block'])
         print fmt % ('beam energy',    f.infos['beam_energy'])
         print fmt % ('beam type',      f.infos['beam_type'])
@@ -123,6 +124,22 @@ if __name__ == "__main__":
         print fmt % ('geometry',       f.infos['geometry'])
         print fmt % ('conditions tag', f.infos['conditions_tag'])
         _metadata = f.infos['metadata']
+
+        # ATEAM-162: determine if this is 25ns or 50ns sub-campaign
+        DigitizationParameters = _metadata['/Digitization/Parameters'] if '/Digitization/Parameters' in _metadata.keys() else {}
+        if 'bunchSpacing' in DigitizationParameters.keys() and 'BeamIntensityPattern' in DigitizationParameters.keys() :
+            bunchSlotLength = DigitizationParameters['bunchSpacing']
+            pattern = DigitizationParameters['BeamIntensityPattern']
+            firstBunch = pattern.index(1.0)
+            bunchCountInTwoFirstSlots = pattern[firstBunch:firstBunch+2].count(1.0)
+            if bunchCountInTwoFirstSlots == 1:
+                campaign = '50 ns'
+            elif bunchCountInTwoFirstSlots == 2:
+                campaign = '25 ns'
+            else:
+                campaign = None
+            print fmt % ('bunch spacing',   campaign + ' i.e. ..., ' + str(pattern[firstBunch:firstBunch+4])[1:-1] + ', ...')
+
         _metadata = _metadata.keys() if isinstance(_metadata,dict) else None
         print fmt % ('meta data',      _metadata)
 
diff --git a/Tools/PyUtils/bin/get-tag-diff.py b/Tools/PyUtils/bin/get-tag-diff.py
index 1e99413ad34..c5a9d2664f0 100755
--- a/Tools/PyUtils/bin/get-tag-diff.py
+++ b/Tools/PyUtils/bin/get-tag-diff.py
@@ -4,48 +4,238 @@
 
 # @file:    get-tag-diff.py
 # @purpose: Get the list of tag differences between 2 releases (CERN centric)
-# @author:  Sebastien Binet <binet@cern.ch>
-# @date:    October 2009
+# @author:  Walter Lampl <walter.lampl@cern.ch> (based on earlier code by Sebastien)
+# @date:    April 2016
 #
 # @example:
 #
-# get-tag-diff --ref=rel_3,12.0.X --chk=12.0.3
+# getTagDiff rel_3,12.0.X 12.0.3
 #
 
-__version__ = "$Revision: 273188 $"
 
 from optparse import OptionParser
 
 import sys
-import os
+import os,stat
+import subprocess
+import tempfile
+import shutil
 
-def main():
+#standard os.walk is too slow on our directory structure 
+def walkCMTPath(root):
+    subdirs=os.listdir(root)
+    if 'cmt' in subdirs and os.access(root+"/cmt/version.cmt",os.F_OK):
+        yield root+"/cmt/version.cmt"
+    else: #Loop to find suitable sub-dirs
+        for f in subdirs:
+            if f in ("InstallArea",".svn",".git"): continue #Ignore those sub-dirs
+            nextLevel=root+"/"+f
+            if os.path.isdir(nextLevel) and not os.path.islink(nextLevel):
+                #print "Diving into ",nextLevel
+                for path in walkCMTPath(nextLevel):
+                    yield path
+                    pass
+                pass
+            pass
+        pass
+    return
+
+
+
+def parseCMTPath(dumpfilename):
+    if not os.access(dumpfilename,os.R_OK) :
+        return None
+
+    dumpfile=open(dumpfilename)
+    allProjects=dumpfile.readline().strip().split(":") 
+    dumpfile.close()
+    if len(allProjects)==0 or allProjects[0]=="": 
+        return None 
+
+    cmtPackages=dict()
+    for projPath in reversed(allProjects): #We want to scan projects in reverse order (so that packages patch overwrite others)
+        if len(projPath.split("/")) < 3:
+            print "Encountered unexpected path name in CMTPATH", projPath
+            return None
+        thisProject=projPath.split("/")[-2]
+        for cmtversfile in walkCMTPath(projPath):
+            versfile=open(cmtversfile)
+            fullPackVersion=versfile.readline().strip()
+            versfile.close()
+            if fullPackVersion.count("-")<3 or fullPackVersion.count("-")>4:
+                #print "WARNING, encountered unexpected version.cmt format", fullPackVersion,fullPackVersion.count("-")
+                continue
+            if not (fullPackVersion.split("-")[1]).isdigit(): #ignoring things like "DetCommonRelease-DetCommon-00-02-11"
+                #print "Ignoring ",fullPackVersion
+                continue
+                
+            packageName=fullPackVersion.split("-")[0]
+            packageVersion="-".join(fullPackVersion.split("-")[1:])
+            packagePath=cmtversfile[len(projPath)+1:-16]
+            cmtPackages[packageName]=(thisProject,packagePath,packageVersion)
+            pass
+        pass
+    print "Found [%i] cmt packages" % len(cmtPackages)
+    return cmtPackages
+
+
+def parseCMAKEPath(dumpfilename):
+    if not os.access(dumpfilename,os.R_OK) :
+        return None
+
+    dumpfile=open(dumpfilename)
+    allProjects=dumpfile.readline().strip().split(":") 
+    dumpfile.close()
+    if len(allProjects)==0 or allProjects[0]=="": 
+        return None 
+
+    print allProjects,allProjects
+
+    cmakePackages=dict()
+    for projPath in reversed(allProjects): #We want to scan projects in reverse order (so that packages patch overwrite others)
+        if not os.access(projPath+"/packages.txt",os.R_OK):
+            print "WARNING, no packages.txt file found in",projPath
+        else:
+            thisProject=None
+            packagesFile=open(projPath+"/packages.txt")
+            for l in packagesFile:
+                ls=l.strip()
+                if thisProject is None and ls.startswith("# Packages built in"):
+                    thisProject=ls.split(" ")[4]
+                    #print "Current project",thisProject
+
+                if not ls.startswith("#"):
+                    packagePath=ls.split(" ")[0]
+                    fullPackVersion=ls.split(" ")[1]
+                    packageName=fullPackVersion.split("-")[0]
+                    packageVersion="-".join(fullPackVersion.split("-")[1:])
+                    cmakePackages[packageName]=(thisProject,packagePath,packageVersion)
+                    pass
+                pass
+            packagesFile.close()
+        pass
+
+    print "Found [%i] cmake packages" % len(cmakePackages)    
+    return cmakePackages
     
-    parser = OptionParser( usage = "usage: %prog --ref some_rel --chk other_rel")
-    parser.add_option("--ref",
-                      dest = "ref",
-                      help = "The description string of the reference release (eg: 12.0.X,rel_3,AtlasOffline)" )
 
-    parser.add_option("--chk",
-                      dest = "chk",
-                      help = "The description string of the to-be-compared release (eg: 12.0.X,rel_3 or 12.0.3)" )
 
-    options,args = parser.parse_args()
+def comparePackageDicts(dictRef,dictChk,header):
 
-    if options.ref is None or options.chk is None:
-        parser.print_help()
-        return 1
+    fmt = "%-15s %-15s | %-15s %-15s | %-45s"
+    print fmt % (header[0], "ref-project", header[1], "chk-project", "pkg-name")
+    print "-" * 100
+    allPackages=set(dictRef.keys())
+    allPackages.update(dictChk.keys())
+    nDiffs=0
+    for pack in allPackages:
+        if pack in dictRef.keys():
+            refEntry=dictRef[pack]
+        else:
+            refEntry=("None",None,"None")
+        
+        if pack in dictChk.keys():
+            chkEntry=dictChk[pack]
+        else:
+            chkEntry=("None",None,"None")
+        if (chkEntry[2]!=refEntry[2]): #Compare version numbers
+            nDiffs+=1
+            print fmt % (refEntry[2],refEntry[0].replace("Atlas",""),chkEntry[2],chkEntry[0].replace("Atlas",""),
+                         chkEntry[1] or refEntry[1])
+            
+
+    print "-"*100
+    print "::: found [%i] tags which are different" % nDiffs
+    return nDiffs
+
+def getPackageDict(setupcmd):
+    tempdir=tempfile.mkdtemp()
+    if not os.access(tempdir,os.W_OK):
+        print "ERROR, failed to get writeable temp directory!"
+        return None
+
+    print "Setting up release %s in temporary directory %s" % (setupcmd,tempdir)
+    sys.stdout.flush()
+
+    asetupscriptname=tempdir+"/setupAndPath.sh"
+    asetupscriptfile=open(asetupscriptname,"w")
+    asetupscriptfile.write("#!/bin/bash\n");
+    asetupscriptfile.write("cd %s\n" % tempdir)
+    asetupscriptfile.write("set CMTPATH=""\n")
+    asetupscriptfile.write("set CMAKE_PREFIX_PATH=""\n")
+    asetupscriptfile.write("source /afs/cern.ch/atlas/software/dist/AtlasSetup/scripts/asetup.sh %s\n" %setupcmd)
+    #asetupscriptfile.write("echo $CMTPATH\n")
+    #asetupscriptfile.write("echo $CMAKE_PREFIX_PATH\n")
+    asetupscriptfile.write("if [ $? -ne 0 ]; then \n   exit -1\nfi\n")
+    asetupscriptfile.write("echo $CMTPATH > cmtpath.txt\n")
+    asetupscriptfile.write("echo $CMAKE_PREFIX_PATH > cmakepath.txt\n")
+    asetupscriptfile.close()
+
+    sc=subprocess.call(["/bin/bash",asetupscriptname])
     
-    ref = options.ref
-    chk = options.chk
-
-    import PyCmt.Cmt as Cmt
-    diffs = Cmt.get_tag_diff(ref=ref, chk=chk, verbose=True)
-    if len(diffs) > 0:
-        return 1
-    return 0
-
-if __name__ == "__main__":
-    import sys
-    sys.exit(main())
+    if sc!=0:
+        print "ERROR asetup %s failed!" % setupcmd
+        shutil.rmtree(tempdir)
+        return None
+
+    #Try scanning cmt-like directory structure
+    packageVersions=parseCMTPath(tempdir+"/cmtpath.txt")
+    if packageVersions is not None:
+        #print "Found cmt-like directories"
+        pass
+    else:
+        packageVersions=parseCMAKEPath(tempdir+"/cmakepath.txt")
+        if packageVersions is not None:
+            #print "Found cmake-like directories"
+            pass
+        pass
+
+    if packageVersions is None:
+        print "ERROR, Neither $CMTPATH nor $CMAKE_PREFIX_PATH found!"
+        
+    
+    shutil.rmtree(tempdir)        
+    return packageVersions
+
+if __name__=="__main__":
+
+    parser = OptionParser( usage = "usage: %prog [-R|--ref] 12.0.X,rel_3 [-r|--rel] 12.0.3")
+    parser.add_option( "-R",
+                       "--ref",
+                       dest = "ref",
+                       help = "The description string of the reference release (eg: 12.0.X,rel_3)" )
+
+    parser.add_option( "-r",
+                       "--chk",
+                       dest = "chk",
+                       help = "The description string of the to-be-compared release (eg: 12.0.X,rel_3 or 12.0.3)" )
+
+
+    (options, args) = parser.parse_args()
+
+    if len(args) > 0 and args[0][0] != "-":
+        options.ref = args[0]
+        pass
+
+    if len(args) > 1 and args[1][0] != "-":
+        options.chk = args[1]
+        pass
+
+    if options.ref == None or options.chk == None:
+        parser.print_help()
+        sys.exit(1)
+        pass
     
+
+    print "## Comparing [%s] VS [%s]" % ( options.ref, options.chk )
+
+    sys.stdout.flush()
+    refPackages=getPackageDict(options.ref)
+    if refPackages is None: sys.exit(-1)
+
+    chkPackages=getPackageDict(options.chk)
+    if chkPackages is None: sys.exit(-1)
+
+    comparePackageDicts(refPackages,chkPackages,(options.ref,options.chk))
+
+
diff --git a/Tools/PyUtils/bin/getAMIDatasetParameters.py b/Tools/PyUtils/bin/getAMIDatasetParameters.py
new file mode 100755
index 00000000000..5882ed1d22a
--- /dev/null
+++ b/Tools/PyUtils/bin/getAMIDatasetParameters.py
@@ -0,0 +1,399 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+from __future__ import print_function
+
+__author__ = "Will Buttinger"
+__doc__ = """Extract dataset parameters from AMI, and write them to a text file.\nExamples:\n\n\ngetAMIDatasetParameters.py --inDS="mc15_13TeV.361103%DAOD_TRUTH%" --fields=dataset_number,ldn,nfiles,events,crossSection,genFiltEff,generator_name"""
+
+
+
+import logging
+
+
+#pinched from pandatools!
+def readDsFromFile(txtName):
+    import re
+    dsList = []
+    try:
+        # read lines
+        txt = open(txtName)
+        for tmpLine in txt:
+            # remove \n
+            tmpLine = re.sub('\n','',tmpLine)
+            # remove white spaces
+            tmpLine = tmpLine.strip()
+            # skip comment or empty
+            if tmpLine.startswith('#') or tmpLine == '':
+                continue
+            # append
+            dsList += [tmpLine]
+        # close file    
+        txt.close()
+    except:
+        errType,errValue = sys.exc_info()[:2]
+        logging.error("cannot read datasets from %s due to %s:%s" % (txtName,errType,errValue))
+        sys.exit(-1)    
+    return dsList
+
+
+
+def isfloat(x):
+    try:
+        a = float(x)
+    except ValueError:
+        return False
+    else:
+        return True
+
+def isint(x):
+    try:
+        a = float(x)
+        b = int(a)
+    except ValueError:
+        return False
+    else:
+        return a == b
+
+
+
+
+def main():
+    logging.basicConfig(format='%(levelname)s:%(message)s')
+
+    import time,datetime
+    from pytz import timezone
+    import argparse
+
+    try:
+      import pyAMI.client
+      import pyAMI.atlas.api as AtlasAPI
+      import pyAMI.config
+    except ImportError:
+       logging.error("Unable to find pyAMI client. Please try this command first: lsetup pyAMI")
+       return -1
+
+    
+    extraFieldDefaults = {} #{"approx_crossSection":None,"approx_GenFiltEff":1.0}
+
+    fieldDefaults = {"subprocessID":0,"dataset_number":0}
+    #populate the fieldDefaults ... for all, assume 'None'
+    for field in pyAMI.config.tables['datasets'].keys():
+        if str(field) == "cross_section": continue #special exception because this field only present in 
+        if str(field) in fieldDefaults.keys(): continue
+        if str(field).startswith("@"): continue
+        fieldDefaults[str(field)] = None
+        
+
+    import commands
+    #check the voms proxy 
+    status,out = commands.getstatusoutput("voms-proxy-info -fqan -exists")
+    if status!=0:
+        logging.error("Please renew your certificate with this command: voms-proxy-init -voms atlas");
+        return -1
+
+    try:
+        client = pyAMI.client.Client('atlas')
+        AtlasAPI.init()
+    except:
+        logging.error("Could not establish pyAMI session. Are you sure you have a valid certificate? Do: voms-proxy-init -voms atlas")
+        return -1
+
+    #need to collect the ami dataset parameter defaults
+    paramExplains = [] #for the help message only
+
+    paramDefaults = {}
+    res = client.execute('ListPhysicsParameterDefs',format='dom_object')
+    for r in res.get_rows() : #r is OrderedDict
+        explainString = "%s: %s" % (r[u'PARAMNAME'],r[u'DESCRIPTION']);
+        if r[u'UNITS']!=u'NULL': explainString += " (units: %s)" % r[u'UNITS']
+        if r[u'HASDEFAULT']==u'N' : paramDefaults[str(r[u'PARAMNAME'])] = None
+        else: 
+            explainString += " (default value = %s)" % r[u'DEFAULTVALUE']
+            if r[u'PARAMTYPE']==u'number': paramDefaults[str(r[u'PARAMNAME'])] = float(r[u'DEFAULTVALUE']) #FIXME: Assumes all parameters are floats
+            elif r[u'PARAMTYPE']==u'string': paramDefaults[str(r[u'PARAMNAME'])] = str(r[u'DEFAULTVALUE'])
+        paramExplains += [explainString]
+
+
+    cern_time = timezone('UCT')
+    current_time = datetime.datetime.fromtimestamp(time.time(),cern_time).strftime('%Y-%m-%d %H:%M:%S')
+
+    from argparse import RawTextHelpFormatter
+    parser = argparse.ArgumentParser(description=__doc__,formatter_class=RawTextHelpFormatter)
+    parser.add_argument('--inDS',nargs='+',default=[""],help="List of datasets to retrieve parameters for")
+    parser.add_argument('--inDsTxt',default="",help="Alternative to --inDS, can specify the datasets from an input file")
+    parser.add_argument('--fields',nargs='+',help="List of parameters to extract. Available parameters are: \n\n  %s\n\nYou can also include any from:\n  %s\nYou can also do keyword_xxx to add a bool branch for keywords" % ("\n  ".join(paramExplains),", ".join(fieldDefaults.keys()+extraFieldDefaults.keys())),default=["dataset_number","crossSection","kFactor","genFiltEff"])
+    parser.add_argument('--timestamp',default=current_time,help="The timestamp to query parameters at, specified in Universal Central Time (UCT). If left blank, will take the current time")
+    parser.add_argument('--physicsGroups',nargs='+',default=["PMG,MCGN"],help="Physics group from which to retrieve parameters, listed in order of priority (highest first). Default value is 'PMG,MCGN' (i.e. try to use PMG values, fallback on MCGN values if unavailable). Allowed groups are:\n   PMG (this is the PMG's group name), BPHY, COSM, DAPR, EGAM, EXOT, FTAG, HIGG, HION, IDET, IDTR, JETM, LARG, MCGN (this is the AMI default group name), MDET, MUON, PHYS, REPR, SIMU, STDM, SUSY, TAUP, TCAL, TDAQ, THLT, TOPQ, TRIG, UPGR, VALI")
+    parser.add_argument('--explainFields',nargs='+',default=[],help="The fields you would like explained .. will appear as comment lines after each row in the output")
+    parser.add_argument('--explainInfo',nargs='+',default=[],help="Properties of the parameter you want to show in the explanation. Can list from: explanation, insert_time, end_time, physicsGroup, createdby")
+    parser.add_argument('--outFile',default=sys.stdout,type=argparse.FileType('w'),help="Where to print the output to. Leave blank to print to stdout")
+    parser.add_argument('--delim',default="\t",help="The delimiter character. Defaults to tab")
+    parser.add_argument('-v',action='store_true',help="Verbose output for debugging")
+
+    args = parser.parse_args()
+
+    if args.timestamp=="the dawn of time": 
+        logging.error("Unfortunately we don't know any parameters from this time period... but we're working on it!")
+        return 9999
+
+    #split elements of fields by comma to get full list 
+    args.fields = sum((y.split(',') for y in args.fields),[])
+    args.fields = [x.strip() for x in args.fields] #strips whitespace
+    #look for keyword_ fields, these are special ...
+    args.keywords=[]
+    for f in args.fields:
+        if f.startswith("keyword_"):
+            k = f[8:]
+            #and then add each keyword to the extraFieldDefaults so it is recognised thusly 
+            extraFieldDefaults["keyword_%s"%k]=bool(False)
+            args.keywords += [k]
+
+    #same for physics groups
+    args.physicsGroups = sum((y.split(',') for y in args.physicsGroups),[])
+    args.physicsGroups = [x.strip() for x in args.physicsGroups] #strips whitespace
+
+
+    #same for explainFields and explainInfo
+    args.explainFields = sum((y.split(',') for y in args.explainFields),[])
+    args.explainFields = [x.strip() for x in args.explainFields] #strips whitespace
+    args.explainInfo = sum((y.split(',') for y in args.explainInfo),[])
+    args.explainInfo = [x.strip() for x in args.explainInfo] #strips whitespace
+
+    if args.inDsTxt != '': args.inDS = readDsFromFile(args.inDsTxt)
+    
+    #and same for inDS
+    args.inDS = sum((y.split(',') for y in args.inDS),[])
+    args.inDS = [x.strip() for x in args.inDS] #strips whitespace
+
+    if args.v: logging.getLogger().setLevel(logging.DEBUG)
+    else: logging.getLogger().setLevel(logging.INFO)
+    logging.debug(args.inDS)
+    logging.debug(args.fields)
+    logging.debug(args.timestamp)
+    
+    #1. check field values are allowed, we obtain default field values at same time..
+    #2. For each entry in inDS, if contains wildcard we obtain list of DS, otherwise check DS exists. During this time we obtain the datasetid and numEvents properties, incase we need them
+    #3.  For each of these DS, get parameters from ami matching the timestamp. Organize into fields and index by subprocessID
+    #4.  Output a line to our output file
+
+
+
+
+    #1.
+    #before adding all the ami parameters, identify which of provided fields are: 1). Obtained from list_datasets command (dsFields) 2). actual parameters
+    dsFields = [ x for x in args.fields if x in fieldDefaults.keys() and x not in ["subprocessID","ldn"] ]
+    extraFields = [ x for x in args.fields if x in extraFieldDefaults.keys() ]
+    paramFields = [ x for x in args.fields if x in paramDefaults.keys() ]
+
+    if len(paramFields)>0 and args.physicsGroups==[""]:
+        logging.error("You must specify at least one physics group. See -h for allowed groups")
+        return -1;
+
+    #combine paramDefaults with fieldDefaults
+    fieldDefaults.update(paramDefaults)
+    #and with extra fields
+    fieldDefaults.update(extraFieldDefaults)
+    
+    for field in args.fields:
+        if field not in fieldDefaults:
+            logging.error("%s is not a recognised field. Allowed fields are:" % field)
+            logging.error(fieldDefaults.keys())
+            return -1;
+        
+
+    #2.
+    #replace all '*' with '%' and strip "/"
+    args.inDS = [ds.replace("*","%") for ds in args.inDS]
+    args.inDS = [ds.rstrip("/") for ds in args.inDS]
+  
+
+    logging.info("Fetching list of datasets from AMI (this may take a few minutes)...")
+
+
+    #obtain list of datasets 
+    res = AtlasAPI.list_datasets(client,patterns=args.inDS,fields=dsFields+['ldn'],ami_status="VALID") #changed status from %, to only catch valid now: wb 08/2015
+    
+    #NOTE: Should we allow retrieval of the extra information: keyword, genfiltereff, approx crossection, .. these all come from GetDatasetInfo ami command
+
+    dataset_values = dict()
+    for r in res:
+        mydict = dict()
+        dataset_values[str(r['ldn'])] = mydict
+        for field in r.items():
+            if str(field[0]) == "ldn": continue
+            if str(field[0]) not in args.fields: continue
+            mydict[str(field[0])] = str(field[1])
+        #also if we have the 'extra fields or keywords' we will need to execute AtlasAPI.get_dataset_info ..
+        if len(extraFields)>0 or len(args.keywords)>0:
+            info_res = AtlasAPI.get_dataset_info(client,str(r['ldn']))
+            #print(info_res)
+            if len(info_res)==0: logging.error("Unable to retrieve dataset info for %s" % str(r['ldn']));return -1
+            for field in extraFields:
+                #ignore the keyword_ fields 
+                if field.startswith("keyword_"): continue
+                mydict[field] = float(info_res[0][unicode(field)]) if isfloat(info_res[0][unicode(field)]) else extraFieldDefaults[field]
+            for k in args.keywords:
+                mydict["keyword_%s" % k] = int( (k in str(info_res[0][unicode('keyword')]).split(",")) )
+            
+    #sort dataset_values as well as possible
+    from collections import OrderedDict
+    sorted_values = OrderedDict()
+    for ds in args.inDS:
+        if ds in dataset_values.keys():
+            sorted_values[ds] = dataset_values[ds]
+
+    for ds in dataset_values.keys():
+        if ds not in sorted_values.keys():
+            sorted_values[ds] = dataset_values[ds]
+    dataset_values = sorted_values
+
+    logging.debug(dataset_values)
+
+    #res = client.execute(['GetDatasetInfo
+
+    for ds in args.inDS:
+        if '%' not in ds and ds not in dataset_values.keys():
+            logging.warning("Unknown dataset: %s" % ds)
+
+    #if using inDsTxt, retain any comment or blank lines in the structure of the output
+    complete_values = OrderedDict()
+    if args.inDsTxt != "":
+         # read lines
+        commentcount=0
+        import re
+        txt = open(args.inDsTxt)
+        for tmpLine in txt:
+            # remove \n
+            tmpLine = re.sub('\n','',tmpLine)
+            # remove white spaces
+            tmpLine = tmpLine.strip()
+            # skip comment or empty
+            if tmpLine.startswith('#') or tmpLine == '':
+                complete_values['comment%d'%(commentcount)] = tmpLine
+                commentcount = commentcount+1
+                continue
+            # append
+            tmpLine = tmpLine.rstrip("/")
+            if tmpLine in dataset_values.keys():
+                complete_values[tmpLine] = dataset_values[tmpLine]
+            else:
+                print("cannot find %s" % tmpLine)
+        # close file    
+        txt.close()
+        dataset_values = complete_values
+
+    logging.info("Obtaining %s for selected datasets at timestamp=%s..." % (args.fields,args.timestamp))
+
+    headerString = ""
+    doneHeader=False
+    commentCache = ""
+    commentCount = 0
+    for ds in dataset_values.keys():
+        if ds.startswith('comment'):
+            if commentCount > 0 : commentCache += "\n"
+            commentCache += dataset_values[ds]
+            commentCount=commentCount+1
+            continue
+        #obtain list of parameters for this dataset
+        if(args.timestamp==current_time):
+            res = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% ds,"--timestamp='%s'"%args.timestamp], format='dom_object')
+        else:
+             res = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% ds,"--timestamp='%s'"%args.timestamp,"--history=true"], format='dom_object')
+
+        #first we have to determine how many subprocesses this ds has 
+        dsSubprocesses = [0] #always have the 0 subprocess 
+        for r in res.get_rows():
+            sp = int(r[u'subprocessID'])
+            if sp not in dsSubprocesses: dsSubprocesses += [sp]
+
+        #now for each subprocess we have to locate each required field value (in paramFields)
+        #rank by physicsGroup
+        for sp in dsSubprocesses:
+            paramVals = dict()
+            groupsWithVals = dict() #held for helpful output
+            #need to keep explanations for requested fields
+            explainInfo = dict()
+            for i in args.explainFields: explainInfo[i] = dict()
+
+            for param in paramFields:
+                groupsWithVals[param] = []
+                bestGroupIndex = len(args.physicsGroups)
+                paramVals[param] = fieldDefaults[param]
+                for r in res.get_rows():
+                    if int(r[u'subprocessID']) != sp: continue
+                    if str(r[u'paramName']) != param: continue
+                    if str(r[u'physicsGroup']) not in args.physicsGroups: 
+                        groupsWithVals[param] += [(str(r[u'physicsGroup']),str(r[u'paramValue']))]
+                        continue
+                    if args.physicsGroups.index(str(r[u'physicsGroup'])) > bestGroupIndex : continue
+                    if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex : logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!" % (param,str(r[u'physicsGroup']),ds,sp))
+                    paramVals[param] = str(r[u'paramValue'])
+                    bestGroupIndex=args.physicsGroups.index(str(r[u'physicsGroup']))
+                    #keep the explanation info 
+                    for e in args.explainInfo: 
+                        #print(r.keys())
+                        if unicode(e) not in r:
+                            logging.error("Unrecognised explainInfo field: %s" % e)
+                            return -1
+                        explainInfo[param][e]=str(r[unicode(e)])
+            #at this stage, parameters reside in paramVals dict or dataset_values[ds] dict
+            #print them in the requested order .. if any is "None" then stop, because it has no default value and no value was found for it either
+            rowString = ""
+            for param in args.fields:
+                val = None
+                if param == "ldn": val = ds
+                elif param == "subprocessID": val = sp
+                elif param in dataset_values[ds].keys(): val = dataset_values[ds][param]
+                else: val = paramVals.get(param,None)
+                if val == None:
+                    if args.outFile != sys.stdout: logging.warning("dataset %s (subprocess %d) does not have parameter %s, which has no default." % (ds,sp,param))
+                    if len(groupsWithVals.get(param,[]))>0:
+                        logging.warning("The follow physicsGroups have defined that parameter though:")
+                        logging.warning(groupsWithVals[param])
+                    val = "#UNKNOWN#"
+                    #return -1
+                if rowString != "": rowString += args.delim
+                rowString += str(val)
+                #inspect the type of str(val) to build up the header
+                if not doneHeader:
+                    headerString += param
+                    if type(fieldDefaults[param])==bool: headerString += "/O:"
+                    elif type(fieldDefaults[param])==int: headerString += "/I:"
+                    elif type(fieldDefaults[param])==float: headerString += "/D:"
+                    elif isfloat(str(val)): headerString += "/D:"
+                    #elif isint(str(val)): headerString += "/I:" TO BE SAFE WE MAKE ALL NUMERIC FIELDS FLOATS, EXCEPT if the default value is type int
+                    else: headerString += "/C:"
+                
+            if not doneHeader:
+                doneHeader=True
+                if args.outFile != sys.stdout: print(headerString[:-1],file=args.outFile)
+            if commentCount > 0:
+                print(commentCache,file=args.outFile)
+                commentCache = ''; commentCount = 0
+            print(rowString,file=args.outFile)
+            #also print the required explanations
+            for (field,expl) in explainInfo.items():
+                outString = "#%s: { " % field
+                doneFirst=False
+                for eField in args.explainInfo:
+                    if doneFirst: outString += " , "
+                    if not eField in expl.keys(): outString += " %s: <NONE .. value is default>"%eField
+                    else: outString += "%s: %s" % (eField,expl[eField])
+                    doneFirst=True
+                outString += " }"
+                print(outString,file=args.outFile)
+
+    #print the footer, which is the command to reproduce this output
+    import os
+    if args.outFile != sys.stdout:
+        print("#lsetup  \"asetup %s,%s\" pyAMI" % (os.environ.get('AtlasProject','UNKNOWN!'),os.environ.get('AtlasVersion','UNKNOWN!')))
+        print("#getAMIDatasetParameters.py --timestamp=\"%s\" --physicsGroups=\"%s\" --fields=\"%s\" --inDS=\"%s\"" % (args.timestamp,",".join(args.physicsGroups),",".join(args.fields),",".join(dataset_values.keys())),file=args.outFile )
+
+    args.outFile.close()
+
+
+if __name__ == "__main__":
+    import sys
+    sys.exit(main())
+
diff --git a/Tools/PyUtils/bin/getTagDiff.py b/Tools/PyUtils/bin/getTagDiff.py
deleted file mode 100755
index 319b480ae03..00000000000
--- a/Tools/PyUtils/bin/getTagDiff.py
+++ /dev/null
@@ -1,708 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# @file:    getTagDiff.py
-# @purpose: Get the list of tag differences between 2 releases (CERN centric)
-# @author:  Sebastien Binet <binet@cern.ch>
-# @date:    September 2006
-#
-# @example:
-#
-# getTagDiff rel_3,12.0.X 12.0.3
-#
-
-__version__ = "$Revision: 1.4 $"
-
-from optparse import OptionParser
-
-import sys
-import os
-import commands
-
-import logging
-class GetTagDiff:
-    LoggerName = "GetTagDiff"
-    ReleaseRoot = "/afs/cern.ch/atlas/software/builds"
-    Projects = [
-#        "AtlasPoint1",
-        "AtlasProduction",
-        "AtlasOffline",
-        "AtlasAnalysis",
-        "AtlasTrigger",
-        "AtlasReconstruction",
-        "AtlasSimulation",
-        "AtlasEvent",
-        "AtlasConditions",
-        "DetCommon",
-        "AtlasCore"
-        ]
-    pass
-
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    pass
-
-class CmtStrings:
-    CMTPATH        = 'CMTPATH'
-    CMTDIR         = 'cmt'
-    CMTVERSIONFILE = 'version.cmt'
-    CMTREQFILE     = 'requirements'
-    CMTPROJFILE    = 'project.cmt'
-    pass
-
-class CmtPkg:
-
-    def __init__( self,
-                  pkgName    = 'Example',
-                  pkgVersion = 'Example-00-00-00',
-                  pkgPath    = '' ):
-        self.name    = pkgName
-        self.version = pkgVersion
-        self.path    = pkgPath
-
-        return
-    def __repr__( self ):
-       s = [ "Package: %s" % self.name,
-             "Version: %s" % self.version,
-             "Path:    %s" % self.path ]
-       return os.linesep.join(s)
-    
-    pass
-
-def listCmtDirs( path ):
-
-    log = logging.getLogger( GetTagDiff.LoggerName )
-    
-    cmtDirs = []
-    
-    # fill list of CMT directories
-    try:
-        ls = os.listdir(path)
-    except OSError, what:
-        log.error( "OSError: %s" % what )
-        return cmtDirs
-
-    subDirs = []
-    for n in ls:
-        fullName = os.path.join( path, n )
-
-        # hack to optimize searching 
-        if fullName.count( "i686-" )        <= 0 and \
-           fullName.count( "o..pacman..o" ) <= 0 and \
-           fullName.count( "InstallArea"  ) <= 0 and \
-           os.path.splitext( fullName )[1] != '.so' :
-            isDir = os.path.isdir(fullName)
-        else:
-            isDir = False
-            pass
-        
-        isCmtDir =  os.path.split(fullName)[1] == CmtStrings.CMTDIR
-
-        if isDir :
-            if isCmtDir:
-                log.debug( "\t==> found %s" % fullName )
-                cmtDirs.append(fullName)
-                isDir = True
-                pass
-            else:
-                subDirs.append( fullName )
-
-        pass
-    
-    # now take care of the subdirs:
-    for subDir in subDirs:
-        subDirName = os.path.split(subDir)[1]
-        if subDirName.count( "i686-") <= 0 and \
-           subDirName.count( "o..pacman..o" ) <= 0 and \
-           subDirName != 'CVS' and \
-           os.path.splitext( subDirName )[1] != '.so' :
-            cmtDirs.extend( listCmtDirs( subDir ) )
-        pass
-    
-    return cmtDirs
-
-def createCmtPkg( cmtDir, printWrongPkgNames = True ):
-    """
-    the cmtDir is assumed to be of the form Xyz/cmt
-    One has also to handle the case with or without version-directory
-    """
-    log = logging.getLogger(GetTagDiff.LoggerName)
-    
-    pkgName = None
-    try:
-        # the CMTVERSIONFILE should provide the name of the package
-        # so we extract it from this file
-        fileName = os.path.join( cmtDir, CmtStrings.CMTVERSIONFILE )
-        if os.path.exists(fileName):
-            versionFile = open( fileName, "r" )
-            pkgName = versionFile.readlines()[0].strip()
-            # a valid pkgName is of the form PkgName-??-??-??[-??]
-            pkgName = pkgName.split("-")[0]
-            versionFile.close()
-            del versionFile
-        else:
-            # the CMTREQFILE should (also) provide the name of the package
-            # so we extract it from this file
-            fileName = os.path.join( cmtDir, CmtStrings.CMTREQFILE )
-            reqFile = open( fileName, "r" )
-            for line in reqFile.readlines():
-                line = line.strip()
-                if len(line) > 0  and \
-                       line[0] != "#" and \
-                       line.count("package ") > 0:
-                    pkgName = line.splitlines()[0]\
-                              .split("package ")[1]\
-                              .replace("\r","")\
-                              .split("#")[0]\
-                              .strip()
-                    break
-                pass
-            reqFile.close()
-            del reqFile
-    except IOError:
-        ## No CMTREQFILE in this directory
-        ## ==> not a CMT package then ?
-        ## check if there is any CMT project file instead
-        if not os.path.exists( os.path.join(cmtDir, CmtStrings.CMTPROJFILE) ):
-            log.warning( "[%s] does NOT contain any '%s' nor '%s' file !!" % \
-                         ( cmtDir,
-                           CmtStrings.CMTREQFILE,
-                           CmtStrings.CMTPROJFILE ) )
-        return None
-
-    if pkgName == None:
-        log.warning( "No 'package Foo' line in %s of %s" % \
-                     ( CmtStrings.CMTREQFILE, cmtDir ) )
-        return None
-    
-    log.debug( "\t\t==> Analysing [%s]" % cmtDir )
-    
-    # first we try the no-version-directory case as it is the ATLAS
-    # default now.
-    if CmtStrings.CMTVERSIONFILE in os.listdir(cmtDir):
-        version = open( os.path.join( cmtDir, CmtStrings.CMTVERSIONFILE ),
-                        'r' )\
-                        .readline()
-        version = version.splitlines()[0].strip()
-        pkgDir = os.path.split(cmtDir)[0].strip()
-        pkgPath = os.path.split(pkgDir)[0].strip()
-        pass
-
-    # Now we are in the case where:
-    # /somePath/MyPkg/MyPkg-00-00-00/cmt
-    # or
-    # /somePath/MyPkg/v1r2p3/cmt
-    else:
-        baseDir = os.path.split(cmtDir)[0].strip()
-        pkgDir, version = os.path.split(baseDir)
-        pkgPath = os.path.split(pkgDir)[0].strip()
-
-        pass
-
-    log.debug( "\t\t\t- name    = %s" % pkgName )
-    log.debug( "\t\t\t- version = %s" % version )
-    log.debug( "\t\t\t- path    = %s" % pkgPath )
-
-    if pkgName.count(os.sep) > 0 and printWrongPkgNames :
-       log.warning( "About to create a funny CMT package !" )
-       log.warning( "'PkgName' contains '%s'. Please fix it!" % os.sep )
-       log.warning( "\t- name    = %s" % pkgName )
-       log.warning( "\t- version = %s" % version )
-       log.warning( "\t- path    = %s" % pkgPath )
-       # Ok, so, I fix it - but user is warned...
-       pkgName = os.path.basename(pkgName)
-       pass
-    
-    #version = '*'
-    return CmtPkg( pkgName, version, pkgPath )
-
-def scan( scanDir = os.curdir,
-          printWrongPkgNames = True,
-          suppressList = ["WorkArea"] ):
-    """Search for CMT packages in the given directory and walk down the
-    directory tree.
-    Return the list of found CMT packages.
-    """
-    log = logging.getLogger( GetTagDiff.LoggerName )
-    log.debug( "Scanning [%s]" % scanDir )
-    
-    # return value
-    cmtPackages = []
-    
-    # retrieve all cmt-ised directories in the scan directory
-    scanDir = os.path.abspath( scanDir )
-
-    cmtDirs = []
-    try:
-        cmtDirs = listCmtDirs(scanDir)
-    except KeyboardInterrupt:
-        log.warning( "Scanning has been STOPPED ! (by you)" )
-        pass
-    
-    for cmtDir in cmtDirs:
-        cmtPkg = createCmtPkg(cmtDir, printWrongPkgNames)
-        if cmtPkg != None and \
-           cmtPkg.name not in suppressList:
-            cmtPackages.append( cmtPkg )
-        pass
-    
-    return cmtPackages
-
-
-def getRelease( releaseId = "12.0.3", releaseRoot = GetTagDiff.ReleaseRoot ):
-    # fetch logger object
-    log = logging.getLogger( GetTagDiff.LoggerName)
-
-    id = releaseId.split(",")
-    #print "## id [%s] length: %i" % (str(id), len(id))
-    import re
-    lcgmatch = re.search('lcg\d?', releaseId.lower())
-    migmatch = re.search('mig\d\d?', releaseId.lower())
-    if releaseId.count("rel_") > 0 :
-        if releaseId.lower().count("bugfix")  > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "bugfix" )
-        elif releaseId.lower().count("cos")   > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "cos" )
-        elif releaseId.lower().count("devmig0")>0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "devmig0" )
-        elif releaseId.lower().count("devval")> 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "devval" )
-        elif releaseId.lower().count("dev")   > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "dev" )
-        elif lcgmatch:
-            releaseRoot = os.path.join( releaseRoot, "nightlies", lcgmatch.group(0) )
-        elif migmatch:
-            releaseRoot = os.path.join( releaseRoot, "nightlies", migmatch.group(0) )
-        elif releaseId.lower().count("lst")   > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "lst" )
-        elif releaseId.lower().count("trials")> 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "trials" )
-        elif releaseId.lower().count("val")   > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "val" )
-        elif releaseId.lower().count("pcache")   > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "pcache" )
-        elif releaseId.count(".0.X")  > 0 :
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "bugfix" )
-        elif releaseId.count(".X.0") > 0:
-            releaseRoot = os.path.join( releaseRoot, "nightlies", "dev" )
-        else:
-            log.warning( "Unknown release id [%s]" % releaseId )
-        for i in id:
-            if i.count("rel_") > 0:
-                releaseId = i
-                break
-    else:
-        if len(id) == 1:
-            releaseId = id[0]
-        else:
-            log.warning( "Wrong release id [%s]" % releaseId )
-        pass
-    log.debug( "## releaseRoot: %s" % releaseRoot )
-    log.debug( "## releaseId  : %s" % releaseId   )
-    
-    return releaseRoot,releaseId
-
-class Release(object):
-
-    def __init__(self, id, projs = GetTagDiff.Projects ):
-        object.__init__(self)
-        self.rawId  = self.__getRawId(id)
-        self.topDir, self.id = getRelease(id)
-        self.projs  = projs
-
-        self.printWrongPkgNames = False
-        
-        # fetch logger object
-        self.log = logging.getLogger(GetTagDiff.LoggerName)
-
-        self.cmtPkgs = None
-        self.cachedCmtPkgs = False
-        return
-
-    def __getRawId(self, id):
-        # 'normalize' the rawId : always 12.0.X-rel_? (not rel_?-12.0.X)
-        ids = id.split(",")
-        ids.sort()
-        id = ",".join( ids )
-        return id
-    
-    def setProjects(self, projs):
-        self.projs = projs
-        return
-
-    def getProjectDirs(self):
-        def getProjectVersion(projName, projVersion):
-            version = projVersion
-            if projVersion.count("rel_") <= 0:
-                if projName in GetTagDiff.Projects[2:]:
-                    version = projVersion[1:]
-                    pass
-                pass
-            return version
-        
-        return [
-            os.path.join( self.topDir,
-                          p,
-                          getProjectVersion(p, self.id)) for p in self.projs
-            ]
-
-    def getCmtDirs(self):
-        cmtDirs = []
-        for d in self.getProjectDirs():
-            self.log.info( "## scanning [%s]..." % \
-                           os.path.basename( os.path.dirname(d) ) )
-            cmtDirs += listCmtDirs(d)
-            pass
-        return cmtDirs
-
-    def buildCmtPkgsList(self):
-        self.log.info( "Building list of CMT pkgs [%s]..." % \
-                       self.rawId )
-        self.cachedCmtPkgs = False
-        self.cmtPkgs = {}
-        cmtPkgs = []
-        cmtDirs = self.getProjectDirs()
-        for cmtDir in cmtDirs:
-            self.log.info( "...Scanning [%s]..." % cmtDir )
-            cmtPkgs += scan(cmtDir, self.printWrongPkgNames)
-            pass
-        for cmtPkg in cmtPkgs:
-            # don't bother with container pkg
-            if self.isContainerPkg(cmtPkg):
-                continue
-            
-            if self.cmtPkgs.has_key(cmtPkg.name):
-                old = self.cmtPkgs[cmtPkg.name]
-                self.log.warning( "About to replace this pkg:" )
-                self.log.warning( " - name:    %s" % old.name )
-                self.log.warning( " - version: %s" % old.version )
-                self.log.warning( " - path:    %s" % old.path )
-                new = cmtPkg
-                self.log.warning( "with this pkg:" )
-                self.log.warning( " - name:    %s" % new.name )
-                self.log.warning( " - version: %s" % new.version )
-                self.log.warning( " - path:    %s" % new.path )
-                pass
-            self.cmtPkgs[cmtPkg.name] = cmtPkg
-        self.log.info( "Building list of CMT pkgs [%s]... [OK]" % \
-                       self.rawId )
-        self.saveCmtPkgs()
-        return
-
-    def getCmtPkgs(self):
-        if not self.cachedCmtPkgs:
-            self.buildCmtPkgsList()
-            pass
-        return self.cmtPkgs
-
-    def getPkgFullName(self, cmtPkg):
-        projId  = self.id
-        if projId.count("rel_") <= 0 :
-            projId = projId[1:]
-            pass
-        pkgName = cmtPkg.name
-        if cmtPkg.path.endswith(projId):
-            return pkgName
-        pkgFullName = cmtPkg.path.split(projId+os.sep)[1]
-        pkgFullName = os.path.join( pkgFullName, pkgName )
-        return pkgFullName
-    
-    def isContainerPkg(self, cmtPkg):
-        # this method assumes that a tag-name for a container pkg is of
-        # the form:
-        # ContainerName-AtlasProject-00-00-00[-00]
-        isContainer = False
-        version = cmtPkg.version
-        for p in self.projs:
-            if version.count( "-%s-" % p ) > 0:
-                isContainer = True
-                self.log.debug("Flagged Container pkg [%s] (%s)" %
-                               ( self.getPkgFullName(cmtPkg),
-                                 cmtPkg.path ) )
-                break
-        return isContainer
-    
-    def saveCmtPkgs(self, fileName = None):
-##         if fileName == None:
-##             fileName
-        fileName = os.path.join( os.path.expandvars( os.environ['HOME'] ),
-                                 ".pane",
-                                 "cmtTags-%s.db" % self.rawId.replace(",",
-                                                                      "-") )
-        if not os.path.exists( os.path.dirname(fileName) ):
-            os.makedirs( os.path.dirname(fileName) )
-            pass
-
-        self.log.debug( "Saving CMT pkgs list to [%s]..." % fileName )
-        self.cachedCmtPkgs = True
-        f = open( fileName, "wb" )
-        pickle.dump(self.getCmtPkgs(), f)
-        f.close()
-        self.log.info( "Saving CMT pkgs list to [%s]... [OK]" % fileName )
-        
-        return
-    
-    def loadCmtPkgs(self, fileName = None):
-        if fileName == None:
-            fileName = os.path.join( os.path.expandvars( os.environ['HOME'] ),
-                                     ".pane",
-                                     "cmtTags-%s.db" % \
-                                     self.rawId.replace(",", "-")
-                                     )
-        if not os.path.exists( fileName ):
-            self.log.error( "No such file [%s]" % fileName )
-            return -1
-        
-        # we don't want to read a tag-db which is deprecated
-        # hence we read the last modified time of SomeProject/cmt/project.cmt
-        projFile = os.path.join( self.getProjectDirs()[0],
-                                 "cmt",
-                                 "project.cmt" )
-        if os.path.getmtime( fileName ) <= \
-           os.path.getmtime( projFile ):
-            self.log.warning( "[%s] is older than [%s]" %
-                              ( fileName, projFile ) )
-            return -1
-        
-        self.log.info( "Loading CMT pkgs list from [%s]..." % fileName )
-
-        f = open( fileName, "r" )
-        self.cmtPkgs = pickle.load(f)
-        f.close()
-        
-        return len(self.cmtPkgs.keys())
-
-    pass # Release
-
-class ReleaseCmp(object):
-
-    def __init__(self, ref, rel, rescan = False):
-        self.ref    = ref
-        self.rel    = rel
-        self.rescan = rescan
-        self.log    = logging.getLogger(GetTagDiff.LoggerName)
-        self.fmt    = "%-45s : %-25s -> %-25s"
-        return
-
-    def cmp(self, ref = None, rel = None):
-        if ref == None:
-            ref = self.ref
-            pass
-        if rel == None:
-            rel = self.rel
-            pass
-
-        self.log.info( "Comparing [%s VS %s]" % (ref.rawId, rel.rawId) )
-
-        if self.rescan:
-            self.log.info( "Rescan has been requested: doing it..." )
-            ref.buildCmtPkgsList()
-            rel.buildCmtPkgsList()
-            pass
-        
-        if ref.loadCmtPkgs() <= 0:
-            self.log.warning( "==> will rebuild cache file..." )
-            ref.buildCmtPkgsList()
-        if rel.loadCmtPkgs() <= 0:
-            self.log.warning( "==> will rebuild cache file..." )
-            rel.buildCmtPkgsList()
-
-        diff = {}
-
-        # first compare the list of packages registered in the reference
-        for pkgName in ref.cmtPkgs.keys():
-
-            # check if it is a container package
-            if ref.isContainerPkg(ref.cmtPkgs[pkgName]):
-                continue
-
-            pkgFullName = ref.getPkgFullName(ref.cmtPkgs[pkgName])
-            
-            if not rel.cmtPkgs.has_key(pkgName):
-                diff[pkgFullName] = { 'ref' : ref.cmtPkgs[pkgName],
-                                      'rel' : CmtPkg( "None",
-                                                      "None-00-00-00",
-                                                      "-" ) }
-                pass
-            else:
-                refVersion = ref.cmtPkgs[pkgName].version
-                version    = rel.cmtPkgs[pkgName].version
-                if refVersion != version:
-                    diff[pkgFullName] = { 'ref' : ref.cmtPkgs[pkgName],
-                                          'rel' : rel.cmtPkgs[pkgName] }
-                pass
-            pass
-
-        # then compare the list of packages registered in the release
-        for pkgName in rel.cmtPkgs.keys():
-
-            # check if it is a container package
-            if rel.isContainerPkg(rel.cmtPkgs[pkgName]):
-                continue
-
-            pkgFullName = rel.getPkgFullName(rel.cmtPkgs[pkgName])
-
-            if not ref.cmtPkgs.has_key(pkgName):
-                diff[pkgFullName] = { 'ref' : CmtPkg( "None",
-                                                      "None-00-00-00",
-                                                      "-" ),
-                                      'rel' : rel.cmtPkgs[pkgName] }
-                pass
-            else:
-                refVersion = ref.cmtPkgs[pkgName].version
-                version    = rel.cmtPkgs[pkgName].version
-                if refVersion != version:
-                    diff[pkgFullName] = { 'ref' : ref.cmtPkgs[pkgName],
-                                          'rel' : rel.cmtPkgs[pkgName] }
-                pass
-            pass
-
-        self.diff = diff
-        self.saveReport()
-        return diff
-
-    def printReport(self):
-        self.log.info( "#" * 80 )
-        self.log.info( "Found [%i] tags which are different !" %
-                       len(self.diff.keys()) )
-        self.log.info( self.fmt % ( "          PKGNAME",
-                                    " PKG-REF",
-                                    " PKG-REL" ) )
-        self.log.info( "-" * 80 )
-        for k in self.diff.keys():
-            pkgFullName = k
-            pkgName = os.path.basename(pkgFullName)
-            self.log.info( self.fmt %
-                           ( pkgFullName,
-                             self.diff[k]['ref'].version.replace(pkgName,""),
-                             self.diff[k]['rel'].version.replace(pkgName,"") )
-                           )
-            pass
-        self.log.info( "-" * 80 )
-        return
-
-    def saveReport(self, fileName=None):
-        refId = self.ref.rawId.replace(",","-")
-        relId = self.rel.rawId.replace(",","-")
-        if fileName == None:
-            fileName = os.path.join( os.getcwd(),
-                                     "tag-diff-%s-vs-%s.out" % \
-                                     ( refId, relId ) )
-            pass
-        out = open( fileName, "w" )
-        lines = []
-        lines += [
-            "-" * 80,
-            "## Found [%i] tags which are different !" % len(self.diff.keys()),
-            self.fmt % ( "##       PKGNAME",
-                         " PKG-REF (%s)" % refId,
-                         " PKG-REL (%s)" % relId ),
-            "-" * 80
-            ]
-        for k in self.diff.keys():
-            pkgName = k
-            lines += [
-                self.fmt %
-                ( pkgName,
-                  self.diff[k]['ref'].version.replace(pkgName,""),
-                  self.diff[k]['rel'].version.replace(pkgName,"") )
-                ]
-            pass
-        lines += [ "-" * 80 ]
-        for line in lines:
-            out.writelines( line + os.linesep )
-            pass
-        out.close()
-        return
-        
-def _installLogger( lvl        = "INFO",
-                    loggerName = GetTagDiff.LoggerName ):
-    # define a Handler which writes DEBUG messages or higher to the sys.stderr
-    logger = logging.StreamHandler()
-    logger.setLevel(logging.DEBUG)
-    # set a format which is simpler for console use
-    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
-    # tell the handler to use this format
-    logger.setFormatter(formatter)
-    # add the handler to the root logger
-    logging.getLogger('').addHandler(logger)
-
-    log = logging.getLogger(loggerName)
-    exec ( """
-try:
- log.setLevel( logging.%s )
-except AttributeError,err:
- log.warning( 'Invalid logging.Level [%s]' )
- log.warning( 'setting to [INFO]' )
- log.setLevel( logging.INFO )
-""" % (lvl, lvl) )
-    return
-
-if __name__ == "__main__":
-
-    parser = OptionParser( usage = "usage: %prog [-R|--ref] 12.0.X,rel_3 [-r|--rel] 12.0.3")
-    parser.add_option( "-R",
-                       "--ref",
-                       dest = "ref",
-                       help = "The description string of the reference release (eg: 12.0.X,rel_3)" )
-
-    parser.add_option( "-r",
-                       "--rel",
-                       dest = "rel",
-                       help = "The description string of the to-be-compared release (eg: 12.0.X,rel_3 or 12.0.3)" )
-
-    
-    parser.add_option( "-f",
-                       "--force-rescan",
-                       action  = "store_true",
-                       dest    = "rescan",
-                       default = False,
-                       help    = "Switch to rescan the whole release or look for tags from a previous db-file" )
-
-    
-    parser.add_option( "-l",
-                       "--loglevel",
-                       type    = "string",
-                       dest    = "logLevel",
-                       default = "INFO",
-#                       choices = [ "DEBUG", "INFO", "WARNING", "ERROR" ],
-                       help    = "Logging message level [DEBUG, INFO, WARNING, ERROR]"
-                       )
-
-    (options, args) = parser.parse_args()
-
-    if len(args) > 0 and args[0][0] != "-":
-        options.ref = args[0]
-        pass
-
-    if len(args) > 1 and args[1][0] != "-":
-        options.rel = args[1]
-        pass
-
-    if options.ref == None or options.rel == None:
-        parser.print_help()
-        sys.exit(1)
-        pass
-    
-    ## install the user loglevel
-    _installLogger( loggerName = GetTagDiff.LoggerName,
-                    lvl        = options.logLevel )
-    log = logging.getLogger( GetTagDiff.LoggerName )
-
-    log.info( "#" * 80 )
-    log.info( "## Welcome to getTagDiff" )
-    log.info( "#" * 80 )
-    log.info( "##" )
-
-    relRef = Release(options.ref)
-    rel    = Release(options.rel)
-    log.info( "## Comparing [%s] VS [%s]" % ( relRef.rawId, rel.rawId ) )
-
-    relCmp = ReleaseCmp( relRef, rel, options.rescan )
-    relCmp.cmp()
-    relCmp.printReport()
-    
-    log.info( "...Bye..." )
diff --git a/Tools/PyUtils/bin/lstags b/Tools/PyUtils/bin/lstags
index f34bcd54f4d..30831098654 100755
--- a/Tools/PyUtils/bin/lstags
+++ b/Tools/PyUtils/bin/lstags
@@ -6,14 +6,75 @@
 #           Inspired by BaBar's 'statusrel' command.
 # @author:  Frank Winklmeier
 #
-# $Id: $ 
 
-__version__ = "$Revision$"
-__author__  = "Frank Winklmeier"
-
-import sys
-import os
+import sys, os
 from PyCmt import Cmt
+from PyUtils.WorkAreaLib import get_latest_pkg_tag
+from PyUtils.Decorators import memoize
+
+class Package:
+   def __init__(self, name, version = None, path = ''):
+      self.name = name
+      self.version = version
+      self.path = path
+
+
+@memoize
+def read_pkg_file(filename):
+   """Read a packages.txt file and cache the content if already read"""
+   try:
+      lines = open(filename).readlines()
+   except:
+      lines = []
+   pkgs = {}
+   for l in lines:
+      if l.startswith('#'): continue
+      name,tag = l.split()
+      pkgs[name] = tag
+   return pkgs
+   
+
+def cmake_get_pkg_version(fullPkgName):
+   """Get version of package in the release"""
+
+   cpath = os.environ.get('CMAKE_PREFIX_PATH','')
+   for cdir in cpath.split(os.pathsep):
+      pkgs = read_pkg_file(os.path.join(cdir,'packages.txt'))
+      if fullPkgName in pkgs:
+         return pkgs[fullPkgName]
+
+   return None
+   
+def get_pkg_version(fullPkgName, cmt=None):
+   if cmt==None:
+      v = cmake_get_pkg_version(fullPkgName)
+   else:
+      v = cmt.get_pkg_version(fullPkgName)
+   return v if v!=None else "NOT_FOUND"
+
+def cmt_get_packages(testArea, pkgSupList):
+   """Get locally checked out packages"""
+
+   from PyUtils.WorkAreaLib import scan
+   cmtPackages = scan( testArea, pkgSupList )
+   pkgs = [Package(p.name,p.version,p.path) for p in cmtPackages]
+   return pkgs
+
+def cmake_get_packages(testArea, pkgSupList):
+   """Get locally checked out packages"""
+
+   from PyUtils.WorkAreaLib import listCMakeDirs
+   pkgs = []
+   for p in listCMakeDirs(testArea):
+      name = os.path.basename(p)
+      path = os.path.dirname(p)
+      if name not in pkgSupList:
+         try:
+            version = open(os.path.join(p,'version.cmake')).read().strip()
+         except:
+            version = 'UNKNOWN'
+         pkgs.append(Package(name,version,path))
+   return pkgs
 
 def main():
 
@@ -29,18 +90,18 @@ def main():
    parser.add_option("-r", "--recent", action="store_true", dest="recent",
                      help="show most recent tag in SVN")
    parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
-                     help="only print package name (e.g. as input to pkgco)")
+                     help="only print package name (e.g. as input to pkgco/svnco)")
    parser.add_option("-f", "--fullName", action="store_true", dest="fullName",
                      help="print full package name including all containers")
-   parser.add_option("-c", "--cmtCmd", action="store_true", dest="cmtCmd",
-                     help="display as 'cmt co' command")
+   parser.add_option("-c", "--coCmd", action="store_true", dest="coCmd",
+                     help="display checkout command")
    parser.add_option("-n", "--nosupression", action="store_true", dest="nosup",
                      help="do not use default package supression list")
    
    (opt, args) = parser.parse_args()
 
    # Some consistency checks
-   if (opt.cmtCmd):
+   if (opt.coCmd):
       opt.fullName = True
       opt.quiet = True
       opt.recent = False
@@ -48,24 +109,27 @@ def main():
    if (opt.nosup): pkgSupList = []
    else: pkgSupList = ["WorkArea"]
 
+   if 'CMAKE_PREFIX_PATH' in os.environ:
+      cmt = None
+   else:
+      cmt = Cmt.CmtWrapper()
+
    testArea = os.environ.get("TestArea")
    if testArea==None:
       log.fatal("TestArea environment variable not set. Setup your test release first.")
       return 1
    
-   from PyUtils.WorkAreaLib import scan
-   cmtPackages = []
-   cmtPackages.extend( scan( testArea, pkgSupList ) )
-   
-   cmt = Cmt.CmtWrapper()
+   if cmt==None:
+      packages = cmake_get_packages(testArea, pkgSupList)
+   else:
+      packages = cmt_get_packages(testArea, pkgSupList)
    
-   for pkg in cmtPackages:
+   for pkg in packages:
       pkgContainer = pkg.path.replace(testArea,"").lstrip("/")
       fullPkgName = pkgContainer + "/" + pkg.name
 
       if (opt.differ or not opt.quiet):    # Need to know the release version of pkg
-         testversion = cmt.get_pkg_version(fullPkgName)
-         if testversion==None: testversion = "NOT_FOUND"
+         testversion = get_pkg_version(fullPkgName,cmt)
       
       if (not opt.differ or testversion!=pkg.version):
          prefix = ""
@@ -78,12 +142,15 @@ def main():
             msg += "  (%s uses %s)" % (release,testversion)
             
          if (opt.recent):
-            headversion = cmt.get_latest_pkg_tag(fullPkgName)
+            headversion = get_latest_pkg_tag(fullPkgName)   # this is just using SVN, not CMT
             if headversion==None: headversion="NONE"
             msg += "  (most recent %s)" % (headversion)
 
-         if (opt.cmtCmd):
-            msg = "cmt co -r %s %s" % (pkg.version,fullPkgName)
+         if (opt.coCmd):
+            if cmt==None:
+               msg = "svnco.py %s" % pkg.version
+            else:
+               msg = "cmt co -r %s %s" % (pkg.version,fullPkgName)
             
          print msg
 
diff --git a/Tools/PyUtils/cmt/requirements b/Tools/PyUtils/cmt/requirements
old mode 100755
new mode 100644
index 349bcb12ace..a85fa662372
--- a/Tools/PyUtils/cmt/requirements
+++ b/Tools/PyUtils/cmt/requirements
@@ -68,6 +68,7 @@ apply_pattern declare_scripts files="\
  dump-athfile.py \
  dumpAthfilelite.py \
   filter-and-merge-d3pd.py \
+ getAMIDatasetParameters.py \
  gen-typereg-dso.py \
  gen_klass.py \
  get-tag-diff.py \
diff --git a/Tools/PyUtils/doc/mainpage.h b/Tools/PyUtils/doc/mainpage.h
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/python/AthFile/impl.py b/Tools/PyUtils/python/AthFile/impl.py
index da650f577c4..115139ed00e 100644
--- a/Tools/PyUtils/python/AthFile/impl.py
+++ b/Tools/PyUtils/python/AthFile/impl.py
@@ -7,7 +7,7 @@
 
 from __future__ import with_statement
 
-__version__ = "$Revision: 662725 $"
+__version__ = "$Revision: 723532 $"
 __author__  = "Sebastien Binet"
 __doc__ = "implementation of AthFile-server behind a set of proxies to isolate environments"
 
@@ -29,6 +29,9 @@ except:
     pass
 
 ### globals -------------------------------------------------------------------
+DEFAULT_AF_RUN = os.environ.get('DEFAULT_AF_RUN', False)
+'''Revert to old file peeking via Athena sub-process if True.'''
+
 DEFAULT_AF_CACHE_FNAME = os.environ.get('DEFAULT_AF_CACHE_FNAME',
                                         'athfile-cache.ascii.gz')
 
@@ -113,6 +116,7 @@ def _create_file_infos():
         'beam_energy': [],
         'beam_type':   [],
         'stream_tags': [],
+        'mc_channel_number': [],
         'metadata_items': None,
         'eventdata_items': None,
         'stream_names': None,
@@ -270,6 +274,13 @@ class AthFile (object):
     # backward compatibility
     run_numbers = run_number
     
+    @property
+    def mc_channel_number (self):
+        """return the list of unique mc_channel-numbers the @c AthFile contains"""
+        return list(set(self.infos['mc_channel_number']))
+    # ATEAM-168: requested for derivations
+    mc_channel_numbers = mc_channel_number
+    
     @property
     def evt_number (self):
         """return the list of unique evt-numbers the @c AthFile contains"""
@@ -395,7 +406,7 @@ class AthFileServer(object):
         # speed-up by tampering LD_LIBRARY_PATH to not load reflex-dicts
         import re, os
         restrictedProjects = ['AtlasCore']
-        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase
+        if "AthAnalysisBase" in os.environ.get("CMTEXTRATAGS","") or "AthSimulationBase" in os.environ.get("CMTEXTRATAGS",""): restrictedProjects=[] #special cases
         with H.restricted_ldenviron(projects=restrictedProjects):
             with H.ShutUp(filters=[
                 re.compile(
@@ -1027,7 +1038,7 @@ class FilePeeker(object):
         import PyUtils.Helpers as H
         restrictedProjects = ['AtlasCore']
         import os
-        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase
+        if "AthAnalysisBase" in os.environ.get("CMTEXTRATAGS","") or "AthSimulationBase" in os.environ.get("CMTEXTRATAGS",""): restrictedProjects=[] #special cases
         with H.restricted_ldenviron(projects=restrictedProjects):
             root = self.pyroot
             import re
@@ -1068,7 +1079,7 @@ class FilePeeker(object):
         import PyUtils.Helpers as H
         restrictedProjects = ['AtlasCore']
         import os
-        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase
+        if "AthAnalysisBase" in os.environ.get("CMTEXTRATAGS","") or "AthSimulationBase" in os.environ.get("CMTEXTRATAGS",""): restrictedProjects=[] #special cases
         with H.restricted_ldenviron(projects=restrictedProjects):
             root = self.pyroot
             do_close = True
@@ -1135,7 +1146,7 @@ class FilePeeker(object):
         import PyUtils.Helpers as H
         restrictedProjects = ['AtlasCore']
         import os
-        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): restrictedProjects=[] #special case for athanalysisbase
+        if "AthAnalysisBase" in os.environ.get("CMTEXTRATAGS","") or "AthSimulationBase" in os.environ.get("CMTEXTRATAGS",""): restrictedProjects=[] #special cases
         with H.restricted_ldenviron(projects=restrictedProjects):
             root = self.pyroot
             do_close = True
@@ -1158,7 +1169,7 @@ class FilePeeker(object):
      
     def _process_call(self, fname, evtmax, projects=['AtlasCore']):
         import os
-        if(os.environ.get("AtlasProject",None)=="AthAnalysisBase"): projects=[] #special case for athanalysisbase
+        if "AthAnalysisBase" in os.environ.get("CMTEXTRATAGS","") or "AthSimulationBase" in os.environ.get("CMTEXTRATAGS",""): projects=[] #special cases
         msg = self.msg()
         import PyUtils.Helpers as H
         f = _create_file_infos()
@@ -1185,9 +1196,11 @@ class FilePeeker(object):
                 # FIXME: best would be to do that in athfile_peeker.py but
                 #        athena.py closes sys.stdin when in batch, which confuses
                 #        PyCmt.Cmt:subprocess.getstatusoutput
-                cmd = ['pool_insertFileToCatalog.py',
-                       file_name,]
-                subprocess.call(cmd, env=self._sub_env)
+                #
+                # ATEAM-192: avoid the PoolFileCatalog.xml conflict
+                #cmd = ['pool_insertFileToCatalog.py',
+                #       file_name,]
+                #subprocess.call(cmd, env=self._sub_env)
                 #
                 #with H.restricted_ldenviron(projects=None):
                 # MN: disabled clean environ to let ROOT6 find headers
@@ -1208,8 +1221,8 @@ class FilePeeker(object):
                         os.close(fd_pkl)
                         if os.path.exists(out_pkl_fname):
                             os.remove(out_pkl_fname)
-                        print "\n  ---------   runnign Athena peeker"
-                        print  os.environ['CMTPATH']
+                        print "\n  ---------   running Athena peeker"
+                        print  os.environ.get('CMTPATH','')
 
                         import AthenaCommon.ChapPy as api
                         app = api.AthenaApp(cmdlineargs=["--nprocs=0"])
@@ -1254,7 +1267,17 @@ class FilePeeker(object):
                         print >> stdout,self._sub_env
                         print >> stdout,"="*80
                         stdout.flush()
-                        sc = app.run(stdout=stdout, env=self._sub_env)
+                        if DEFAULT_AF_RUN:
+                            sc = app.run(stdout=stdout, env=self._sub_env)
+                        else:
+                            import PyUtils.FilePeekerTool as fpt
+                            fp = fpt.FilePeekerTool(f_root)
+                            sc, fp_pkl_fname = fp.run()
+                            # revert to athena sub-process in case of file with old schema
+                            if sc == 0:
+                                out_pkl_fname = fp_pkl_fname
+                            else:
+                                sc = app.run(stdout=stdout, env=self._sub_env)
                         stdout.flush()
                         stdout.close()
                         import AthenaCommon.ExitCodes as ath_codes
diff --git a/Tools/PyUtils/python/AthFileLite.py b/Tools/PyUtils/python/AthFileLite.py
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/python/Cmt.py b/Tools/PyUtils/python/Cmt.py
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/python/Dso.py b/Tools/PyUtils/python/Dso.py
old mode 100755
new mode 100644
index b2df258243a..6b424e322fd
--- a/Tools/PyUtils/python/Dso.py
+++ b/Tools/PyUtils/python/Dso.py
@@ -275,7 +275,7 @@ def load_typeregistry_dso(iname=None):
         iname = _p.path(_dflt_typereg_fname)
         if not iname.exists():
             import os
-            projects = os.environ['CMTPATH'].split(os.pathsep)[:2]
+            projects = os.environ.get('CMTPATH','').split(os.pathsep)[:2]
             for project_root in projects:
                 n = _p.path(project_root)/"InstallArea"/"share"/iname
                 if n.exists():
diff --git a/Tools/PyUtils/python/FilePeekerTool.py b/Tools/PyUtils/python/FilePeekerTool.py
index 728e84b7370..ae8f1339b48 100644
--- a/Tools/PyUtils/python/FilePeekerTool.py
+++ b/Tools/PyUtils/python/FilePeekerTool.py
@@ -5,7 +5,7 @@
 # @author Alexandre Vaniachine <vaniachine@anl.gov>
 # @date May 2015
 
-__version__= "$Revision: 667927 $"
+__version__= "$Revision: 734431 $"
 __author__ = "Alexandre Vaniachine <vaniachine@anl.gov>"
 __doc__ = "peek into APR files to read in-file metadata"
 
@@ -60,28 +60,40 @@ class FilePeekerTool():
         from AthenaPython.FilePeekerLib import toiter
 
         from PyCool import coral
-        try:
-            getattr(coral.Attribute, 'data<std::basic_string<char> >')
-            #MN: use coral.Attribute.data<std::basic_string<char> >() if defined  (ROOT6)
-            def attr_str_data(attr):
-                return getattr(attr, 'data<std::basic_string<char> >') ()
-            # if not defined, use the old one (ROOT5) 
-        except AttributeError:
-            def attr_str_data(attr):
-                return getattr(attr, 'data<std::string>') ()
+
+        attribute_methods = dir(coral.Attribute)
+        methnames = ['data<std::__cxx11::basic_string<char> >',
+                     'data<std::basic_string<char> >',
+                     'data<std::string>']
+        for m in methnames:
+            if m in attribute_methods:
+                attribute_getdata = m
+                break
+        else:
+            raise Exception("Can't find data method in Attribute")
+        def attr_str_data(attr):
+            return getattr(attr, attribute_getdata) ()
 
         nb = meta.GetEntry( 0 )
 
         esiName= 'Stream'
+        esiTypeName = 'EventStreamInfo'
         for l in meta.GetListOfLeaves():
-            if l.GetTypeName() == 'EventStreamInfo_p3':
+            if l.GetTypeName().startswith(esiTypeName):
+                esiTypeName = l.GetTypeName()
                 esiName = l.GetName()
                 break
 
+        if esiTypeName != 'EventStreamInfo_p3':
+            print >> stdout, "old schema is not supported:", esiTypeName
+            return {}
+
         import cppyy
 
         esic = cppyy.gbl.EventStreamInfoPTCnv_p3()
         esi = getattr (meta, esiName)
+        if esiName.startswith(esiTypeName):
+            esiName = esiName[len(esiTypeName)+1:]
 
         peeked_data = {}
 
@@ -135,7 +147,7 @@ class FilePeekerTool():
             #ddt = _get_detdescr_tags(et)
             #peeked_data['det_descr_tags'] = ddt
 
-            print >> stdout,  'mc_channel_number', et.m_mc_channel_number
+            peeked_data['mc_channel_number'] = [et.m_mc_channel_number]
             peeked_data['evt_number'] = [et.m_mc_event_number]
             #print >> stdout,  'mc_event_number', et.m_mc_event_number
             print >> stdout,  'mc_event_weights.size:', et.m_mc_event_weights.size()
@@ -420,7 +432,8 @@ class FilePeekerTool():
         db['fileinfos'] = self.peeked_data
         db.close()
 
-        if os.path.exists(oname):
+        if os.path.exists(oname) and len(self.peeked_data) > 0:
             return 0, out_pkl_fname
         else:
+            os.remove(oname)
             return 1, out_pkl_fname
diff --git a/Tools/PyUtils/python/Helpers.py b/Tools/PyUtils/python/Helpers.py
old mode 100755
new mode 100644
index af5fa6d716f..89f108cae6b
--- a/Tools/PyUtils/python/Helpers.py
+++ b/Tools/PyUtils/python/Helpers.py
@@ -72,15 +72,16 @@ def ROOT6Setup():
              source, line, f, t = traceback.extract_stack( sys._getframe(1) )[-1]
              log.warning( 'PyCintex imported (replace with import cppyy) from: %s:%d'%(source,line) )
           m = oldimporthook(name, globals, locals, fromlist, level)
-          if m and m.__name__== 'ROOT':
+          if m and (m.__name__== 'ROOT' or name[0:4]=='ROOT'):
              log.debug('Python import module=%s  fromlist=%s'%(name, str(fromlist)))
              if fromlist:
-                vars = [ '.'.join([name, fl, autoload_var_name]) for fl in fromlist]
+                #MN: in this case 'm' is the final nested module already, don't walk the full 'name'
+                vars = [ '.'.join(['', fl, autoload_var_name]) for fl in fromlist]
              else:
                 vars = [ '.'.join([name, autoload_var_name]) ]
              for v in vars:
-                mm = m
                 try:
+                   mm = m
                    #MN: walk the module chain and try to touch 'autoload_var_name' to trigger ROOT autoloading of namespaces
                    for comp in v.split('.')[1:]:
                       mm = getattr(mm, comp)
@@ -198,6 +199,7 @@ def restricted_ldenviron(projects=None, msg=None):
     a context helper to limit ROOT automatic loading of dictionaries
     to a given set of cmt-projects (LCGCMT, AtlasCore, ...)
     """
+
     if projects is None:
         # nothing to do.
         # execute user stuff
@@ -205,6 +207,12 @@ def restricted_ldenviron(projects=None, msg=None):
         # end of story
         return
 
+    # Bypass the rest of the function in case CMake is used and not CMT
+    import os
+    if os.environ.get( 'CMTPATH', '' ) == '':
+       yield
+       return
+
     if isinstance(projects, str):
         projects = [p.strip() for p in projects.split() if p.strip() != '']
     if not isinstance(projects, (list, tuple)):
diff --git a/Tools/PyUtils/python/PoolFile.py b/Tools/PyUtils/python/PoolFile.py
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/python/RootUtils.py b/Tools/PyUtils/python/RootUtils.py
index cf8f5a5a54d..a4c27788737 100644
--- a/Tools/PyUtils/python/RootUtils.py
+++ b/Tools/PyUtils/python/RootUtils.py
@@ -8,7 +8,7 @@
 from __future__ import with_statement
 
 __doc__ = "a few utils to ease the day-to-day work with ROOT"
-__version__ = "$Revision: 678771 $"
+__version__ = "$Revision: 739816 $"
 __author__ = "Sebastien Binet"
 
 __all__ = [
@@ -39,8 +39,14 @@ def import_root(batch=True):
     if batch:
         ROOT.PyConfig.IgnoreCommandLineOptions = True
     import cppyy
+    if os.environ.get('GLIBCXX_USE_CXX11_ABI') == '0':
+        cmd = ROOT.gSystem.GetMakeSharedLib()
+        if cmd.find('GLIBCXX_USE_CXX11_ABI') < 0:
+            cmd = cmd.replace ('$SourceFiles', '$SourceFiles -D_GLIBCXX_USE_CXX11_ABI=0 ')
+            ROOT.gSystem.SetMakeSharedLib(cmd)
     return ROOT
 
+_tempfiles = []
 def root_compile(src=None, fname=None, batch=True):
     """a helper method to compile a set of C++ statements (via ``src``) or
     a C++ file (via ``fname``) via ACLiC
@@ -71,6 +77,17 @@ def root_compile(src=None, fname=None, batch=True):
         src_file.flush()
         src_file.seek(0)
         fname = src_file.name
+
+        # Apparently, cling caches files by inode.
+        # If you ask it to read a file that has the same inode as one
+        # that it has already read, then it will just use the cached
+        # contents rather than rereading.  This, however, doesn't play
+        # very well if we're reading temp files, where inodes may be reused,
+        # giving rise to hard-to-reproduce failures.
+        #
+        # Try to avoid this by keeping the temp files open until the
+        # program exits.
+        _tempfiles.append (src_file)
         pass
 
     elif fname:
@@ -318,8 +335,8 @@ def _test_main():
     no_raise("problem pythonizing TFile", fct=_pythonize_tfile)
     no_raise("problem compiling dummy one-liner",
              root_compile, "void foo1() { return ; }")
-#    no_raise("problem compiling dummy one-liner w/ kwds",
-#             fct=root_compile, src="void foo1() { return ; }")
+    no_raise("problem compiling dummy one-liner w/ kwds",
+             fct=root_compile, src="void foo1a() { return ; }")
     import tempfile
     # PvG workaround for ROOT-7059
     dummy = tempfile.NamedTemporaryFile(prefix="foo_",suffix=".cxx")
diff --git a/Tools/PyUtils/python/WorkAreaLib.py b/Tools/PyUtils/python/WorkAreaLib.py
index 42928a734a9..12d281abf68 100644
--- a/Tools/PyUtils/python/WorkAreaLib.py
+++ b/Tools/PyUtils/python/WorkAreaLib.py
@@ -5,6 +5,8 @@
 # @author Sebastien Binet
 
 import os, sys
+import subprocess
+from string import rstrip
 from PyCmt.Logging import logging
 from PyCmt.Cmt import CmtPkg, CmtStrings
 
@@ -43,19 +45,63 @@ def listCmtDirs( path ):
     cmtDirs = []
     
     # fill list of CMT directories
-    import os
-    import os.path as osp
     for root, dirs, files in os.walk(path):
         for d in dirs[:]:
             if _is_in_ignore_dir_list(d):
                 dirs.remove(d)
         for d in dirs:
             if d == CmtStrings.CMTDIR:
-                full_name = osp.join(root, d)
+                full_name = os.path.join(root, d)
                 msg.debug("\t==> found %s" % full_name)
                 cmtDirs.append(full_name)
     return cmtDirs
 
+def listCMakeDirs( path ):
+    """Return the list of paths pointing at 'CMake' directories, accessible
+    from the `path` path.
+    """
+
+    msg = logging.getLogger( "WorkAreaMgr" )    
+    cmakeDirs = []
+    
+    # fill list of CMake directories
+    for root, dirs, files in os.walk(path):
+        if _is_in_ignore_dir_list(root): continue
+        if 'version.cmake' in files:
+            msg.debug("\t==> found %s" % root)
+            cmakeDirs.append(root)
+    return cmakeDirs
+
+
+def get_latest_pkg_tag(fullPkgName):
+    """Return the most recent SVN tag of the package.
+
+    Return: Tag or None on error
+    """
+
+    msg = logging.getLogger( "WorkAreaMgr" )
+    svnroot = os.environ.get("SVNROOT")
+    if svnroot==None:
+        msg.error("SVNROOT is not set.")
+        return None
+
+    _cmd = "svn ls %s" % os.path.join(svnroot, fullPkgName, "tags")
+    if fullPkgName.startswith('Gaudi'):
+        _cmd = "svn ls %s" % os.path.join(svnroot, 'tags', fullPkgName)
+    msg.debug('running [%s]...', _cmd)        
+    p = subprocess.Popen(_cmd, shell = True,
+                         stdout = subprocess.PIPE, stderr = subprocess.PIPE)
+    tags = p.communicate()[0].splitlines()
+    if len(tags)==0 or p.returncode!=0: return None
+
+    pkg_name = os.path.basename(fullPkgName)
+
+    # enforce atlas convention of tags (pkgname-xx-yy-zz-aa)
+    tags = [t for t in tags if t.startswith(pkg_name)]
+    latest_tag = rstrip(tags[-1],"/\n ")
+    return latest_tag
+
+
 def scan( scanDir = os.curdir, suppressList = ["WorkArea"] ):
     """Search for CMT packages in the given directory and walk down the
     directory tree.
@@ -251,7 +297,9 @@ def _translate_runtimepkg_name(n):
         'hlt': 'AtlasHLT',
         'manacore': 'ManaCore',
         'detcommon': 'DetCommon',
-        'AthAnalysisBase': 'AthAnalysisBase'
+        'AthAnalysisBase': 'AthAnalysisBase',
+        'AthAnalysisSUSY': 'AthAnalysisSUSY',
+        'AthSimulationBase': 'AthSimulationBase'
         }
     if n in db:
         return db[n]
diff --git a/Tools/PyUtils/python/__init__.py b/Tools/PyUtils/python/__init__.py
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/python/scripts/__init__.py b/Tools/PyUtils/python/scripts/__init__.py
index d1729d5cf2a..b71a3ab330a 100644
--- a/Tools/PyUtils/python/scripts/__init__.py
+++ b/Tools/PyUtils/python/scripts/__init__.py
@@ -18,6 +18,7 @@ acmdlib.register('tc.find-tag', 'PyUtils.scripts.tc_find_tag:main')
 acmdlib.register('tc.submit-tag', 'PyUtils.scripts.tc_submit_tag:main')
 
 acmdlib.register('get-tag-diff', 'PyUtils.scripts.get_tag_diff:main')
+acmdlib.register('get-tagsvn-diff', 'PyUtils.scripts.get_tagsvn_diff:main')
 
 acmdlib.register('merge-files', 'PyUtils.scripts.merge_files:main')
 acmdlib.register('filter-files', 'PyUtils.scripts.filter_files:main')
@@ -25,7 +26,9 @@ acmdlib.register('filter-files', 'PyUtils.scripts.filter_files:main')
 acmdlib.register('cmt.new-pkg', 'PyUtils.scripts.cmt_newpkg:main')
 acmdlib.register('cmt.new-alg', 'PyUtils.scripts.cmt_newalg:main')
 acmdlib.register('cmt.new-analysisalg', 'PyUtils.scripts.cmt_newanalysisalg:main')
+acmdlib.register('cmt.new-asgtool', 'PyUtils.scripts.cmt_newasgtool:main')
 acmdlib.register('cmt.new-pyalg', 'PyUtils.scripts.cmt_newpyalg:main')
 acmdlib.register('cmt.new-jobo', 'PyUtils.scripts.cmt_newjobo:main')
+acmdlib.register('cmt.new-analysisapp', 'PyUtils.scripts.cmt_newanalysisapp:main')
 ##
 
diff --git a/Tools/PyUtils/python/scripts/cmt_newalg.py b/Tools/PyUtils/python/scripts/cmt_newalg.py
index 32d682f7883..687472442af 100644
--- a/Tools/PyUtils/python/scripts/cmt_newalg.py
+++ b/Tools/PyUtils/python/scripts/cmt_newalg.py
@@ -115,10 +115,12 @@ def main(args):
     
     #first we must check that requirements file has the AthenaBaseComps use statement in it
     foundBaseComps=False
+    hasLibraryLine=False
     lastUse=0 
     lineCount=0
     for line in open('cmt/requirements'):
         lineCount +=1 
+        if line.startswith("library") or line.startswith("apply_pattern dual_use_library"): hasLibraryLine=True
         if not line.startswith("use "): continue
         lastUse=lineCount
         uu = line.split(" ")
@@ -139,7 +141,11 @@ def main(args):
             if line.startswith("private"): inPrivate=True
             elif line.startswith("end_private"): inPrivate=False
             print line,
-    
+        #append library line if necessary
+    if not hasLibraryLine:
+      with open("cmt/requirements", "a") as myfile:
+         myfile.write("library %s *.cxx components/*.cxx\n" % (full_pkg_name))
+         myfile.write("apply_pattern component_library\n")
     
     #following code borrowed from gen_klass
     hdr = getattr(Templates, 'alg_hdr_template')
@@ -168,6 +174,7 @@ def main(args):
     if os.path.isfile(fname+'.h'):
        print ":::  ERROR %s.h already exists" % fname
        return -1
+    print ":::  INFO Creating %s.h" % fname
     o_hdr = open(fname+'.h', 'w')
     o_hdr.writelines(hdr%d)
     o_hdr.flush()
@@ -176,6 +183,7 @@ def main(args):
     if os.path.isfile(fname+'.cxx'):
        print ":::  ERROR %s.cxx already exists" % fname
        return -1
+    print ":::  INFO Creating %s.cxx" % fname
     o_cxx = open(fname+'.cxx', 'w')
     o_cxx.writelines(cxx%d)
     o_cxx.flush()
@@ -250,7 +258,8 @@ DECLARE_FACTORY_ENTRIES( %(pkg)s )
 #include "../%(namespace_klass)s.h"
 DECLARE_NAMESPACE_ALGORITHM_FACTORY( %(namespace)s, %(klass)s )
 """%d
-               print """
+               else:
+                  print """
 #include "../%(namespace_klass)s.h"
 DECLARE_ALGORITHM_FACTORY( %(klass)s )
 """%d
diff --git a/Tools/PyUtils/python/scripts/cmt_newanalysisalg.py b/Tools/PyUtils/python/scripts/cmt_newanalysisalg.py
index 753ed716ca1..125605e58b5 100644
--- a/Tools/PyUtils/python/scripts/cmt_newanalysisalg.py
+++ b/Tools/PyUtils/python/scripts/cmt_newanalysisalg.py
@@ -9,7 +9,7 @@
 
 from __future__ import with_statement
 
-__version__ = "$Revision: 655341 $"
+__version__ = "$Revision: 734631 $"
 __author__ = "Will Buttinger"
 __doc__ = "streamline and ease the creation of new AthAnalysisAlgorithm"
 
@@ -51,6 +51,13 @@ class %(klass)s: public ::AthAnalysisAlgorithm {
 // %(pkg)s includes
 #include "%(namespace_klass)s.h"
 
+//#include "xAODEventInfo/EventInfo.h"
+
+//uncomment the line below to use the HistSvc for outputting trees and histograms
+//#include "GaudiKernel/ITHistSvc.h"
+//#include "TTree.h"
+//#include "TH1D.h"
+
 %(namespace_begin)s
 
 %(klass)s::%(klass)s( const std::string& name, ISvcLocator* pSvcLocator ) : AthAnalysisAlgorithm( name, pSvcLocator ){
@@ -65,32 +72,93 @@ class %(klass)s: public ::AthAnalysisAlgorithm {
 
 StatusCode %(klass)s::initialize() {
   ATH_MSG_INFO ("Initializing " << name() << "...");
+  //
+  //This is called once, before the start of the event loop
+  //Retrieves of tools you have configured in the joboptions go here
+  //
+
+  //HERE IS AN EXAMPLE
+  //We will create a histogram and a ttree and register them to the histsvc
+  //Remember to uncomment the configuration of the histsvc stream in the joboptions
+  //
+  //ServiceHandle<ITHistSvc> histSvc("THistSvc",name());
+  //TH1D* myHist = new TH1D("myHist","myHist",10,0,10);
+  //CHECK( histSvc->regHist("/MYSTREAM/myHist", myHist) ); //registers histogram to output stream (like SetDirectory in EventLoop)
+  //TTree* myTree = new TTree("myTree","myTree");
+  //CHECK( histSvc->regTree("/MYSTREAM/SubDirectory/myTree", myTree) ); //registers tree to output stream (like SetDirectory in EventLoop) inside a sub-directory
+
 
   return StatusCode::SUCCESS;
 }
 
 StatusCode %(klass)s::finalize() {
   ATH_MSG_INFO ("Finalizing " << name() << "...");
+  //
+  //Things that happen once at the end of the event loop go here
+  //
+
 
   return StatusCode::SUCCESS;
 }
 
 StatusCode %(klass)s::execute() {  
   ATH_MSG_DEBUG ("Executing " << name() << "...");
+  setFilterPassed(false); //optional: start with algorithm not passed
+
+
+
+  //
+  //Your main analysis code goes here
+  //If you will use this algorithm to perform event skimming, you
+  //should ensure the setFilterPassed method is called
+  //If never called, the algorithm is assumed to have 'passed' by default
+  //
+
 
+  //HERE IS AN EXAMPLE
+  //const xAOD::EventInfo* evtInfo = 0;
+  //CHECK( evtStore()->retrieve( evtInfo, "EventInfo" ) );
+  //ATH_MSG_INFO("eventNumber=" << evtInfo->eventNumber() );
+
+
+  setFilterPassed(true); //if got here, assume that means algorithm passed
   return StatusCode::SUCCESS;
 }
 
-StatusCode %(klass)s::beginInputFile() {  
-  //example of metadata retrieval:
+StatusCode %(klass)s::beginInputFile() { 
+  //
+  //This method is called at the start of each input file, even if
+  //the input file contains no events. Accumulate metadata information here
+  //
+
+  //example of retrieval of CutBookkeepers: (remember you will need to include the necessary header files and use statements in requirements file)
+  // const xAOD::CutBookkeeperContainer* bks = 0;
+  // CHECK( inputMetaStore()->retrieve(bks, "CutBookkeepers") );
+
+  //example of IOVMetaData retrieval (see https://twiki.cern.ch/twiki/bin/viewauth/AtlasProtected/AthAnalysisBase#How_to_access_file_metadata_in_C)
   //float beamEnergy(0); CHECK( retrieveMetadata("/TagInfo","beam_energy",beamEnergy) );
   //std::vector<float> bunchPattern; CHECK( retrieveMetadata("/Digitiation/Parameters","BeamIntensityPattern",bunchPattern) );
 
+
+
   return StatusCode::SUCCESS;
 }
 
 %(namespace_end)s
 """
+    testxml_template = """\
+   <TEST name="%(namespace_klass)s" type="athena" suite="ASGTests">
+      <options_atn>%(pkg)s/%(namespace_klass)sJobOptions.py</options_atn>
+      <timelimit>5</timelimit>
+      <author> PLEASE ENTER A NAME </author>
+      <mailto> PLEASEENTER@cern.ch </mailto>
+      <expectations>
+         <errorMessage> Athena exited abnormally </errorMessage>
+         <errorMessage>FAILURE (ERROR)</errorMessage>
+         <returnValue>0</returnValue>
+      </expectations>
+   </TEST>
+"""
 
 
 ### functions -----------------------------------------------------------------
@@ -101,6 +169,18 @@ StatusCode %(klass)s::beginInputFile() {
     'algname',
     help="name of the new alg"
     )
+@acmdlib.argument(
+    '--newJobo',
+    action='store_true',
+    default=False,
+    help='Create a skeleton joboption for execution of the new algorithm'
+    )
+@acmdlib.argument(
+    '--atnTest',
+    action='store_true',
+    default=False,
+    help='Register the skeleton joboption as an ATN test'
+    )
 def main(args):
     """create a new AthAnalysisAlgorithm inside the current package. Call from within the package directory
 
@@ -117,6 +197,12 @@ def main(args):
     if not os.path.isdir(cwd+"/cmt"):
         print "ERROR you must call new-analysisalg from within the package you want to add the algorithm to"
         return -1
+   
+    if args.atnTest and not full_alg_name.startswith("Test::"):
+       print "::: INFO  Requested --atnTest option, so adding 'Test::' namespace to the alg"
+       args.algname = "Test::%s"%args.algname
+       full_alg_name = args.algname
+   
     full_pkg_name = os.path.basename(cwd)
     print textwrap.dedent("""\
     ::: create alg [%(full_alg_name)s] in pkg [%(full_pkg_name)s]""" %locals())
@@ -124,14 +210,23 @@ def main(args):
     
     #first we must check that requirements file has the AthenaBaseComps use statement in it
     foundBaseComps=False
+    hasxAODEventInfo=False
+    hasAtlasROOT=False
+    hasAsgTools=False
     lastUse=0 
     lineCount=0
+    hasLibraryLine=False
     for line in open('cmt/requirements'):
         lineCount +=1 
+        if line.startswith("library") or line.startswith("apply_pattern dual_use_library"): hasLibraryLine=True
         if not line.startswith("use "): continue
         lastUse=lineCount
         uu = line.split(" ")
         if uu[1].startswith("AthAnalysisBaseComps"): foundBaseComps=True
+        if uu[1].startswith("xAODEventInfo"): hasxAODEventInfo=True
+        if uu[1].startswith("AsgTools"): hasAsgTools=True
+        if uu[1].startswith("AtlasROOT"): hasAtlasROOT=True;
+        
         
     if not foundBaseComps:
         print ":::  INFO Adding AthAnalysisBaseComps to requirements file"
@@ -143,12 +238,29 @@ def main(args):
             lineCount+=1
             if lineCount==lastUse+1:
                 if not inPrivate: print "private"
+                print ""
                 print "use AthAnalysisBaseComps AthAnalysisBaseComps-* Control"
+                print ""
+                if not hasAtlasROOT:
+                  print "#uncomment the next line to use ROOT libraries in your package"
+                  print "#use AtlasROOT AtlasROOT-* External"
+                  print ""
+                if not hasxAODEventInfo:
+                  print "#use xAODEventInfo xAODEventInfo-* Event/xAOD"
+                  print ""
+                if not hasAsgTools:
+                  print "use AsgTools AsgTools-* Control/AthToolSupport"
+                  print ""
                 if not inPrivate: print "end_private"
             if line.startswith("private"): inPrivate=True
             elif line.startswith("end_private"): inPrivate=False
             print line,
-    
+    #append library line if necessary
+    if not hasLibraryLine:
+      with open("cmt/requirements", "a") as myfile:
+         myfile.write("library %s *.cxx components/*.cxx\n" % (full_pkg_name))
+         myfile.write("apply_pattern component_library\n")
+
     
     #following code borrowed from gen_klass
     hdr = getattr(Templates, 'alg_hdr_template')
@@ -177,6 +289,7 @@ def main(args):
     if os.path.isfile(fname+'.h'):
        print ":::  ERROR %s.h already exists" % fname
        return -1
+    print ":::  INFO Creating %s.h" % fname
     o_hdr = open(fname+'.h', 'w')
     o_hdr.writelines(hdr%d)
     o_hdr.flush()
@@ -185,6 +298,7 @@ def main(args):
     if os.path.isfile(fname+'.cxx'):
        print ":::  ERROR %s.cxx already exists" % fname
        return -1
+    print ":::  INFO Creating %s.cxx" % fname
     o_cxx = open(fname+'.cxx', 'w')
     o_cxx.writelines(cxx%d)
     o_cxx.flush()
@@ -259,12 +373,64 @@ DECLARE_FACTORY_ENTRIES( %(pkg)s )
 #include "../%(namespace_klass)s.h"
 DECLARE_NAMESPACE_ALGORITHM_FACTORY( %(namespace)s, %(klass)s )
 """%d
-               print """
+               else:
+                  print """
 #include "../%(namespace_klass)s.h"
 DECLARE_ALGORITHM_FACTORY( %(klass)s )
 """%d
             print line,
-          
+   
+   
+    if args.atnTest or args.newJobo:
+      #make the joboptions file too
+      full_jobo_name = namespace_klass + "JobOptions"
+      full_alg_name = namespace_klass
+   
+      print textwrap.dedent("""\
+      ::: create jobo [%(full_jobo_name)s] for alg [%(full_alg_name)s]""" %locals())
+   
+      #following code borrowed from gen_klass
+      from cmt_newjobo import Templates as joboTemplates
+      jobo = getattr(joboTemplates, 'jobo_template')
+   
+      e = dict( klass=full_alg_name,
+               inFile=os.environ['ASG_TEST_FILE_MC'],
+               )
+      fname = 'share/%s.py' % full_jobo_name
+      #first check doesn't exist 
+      if os.path.isfile(fname):
+         print ":::  WARNING %s already exists .. will not overwrite" % fname
+      else:
+         o_hdr = open(fname, 'w')
+         o_hdr.writelines(jobo%e)
+         o_hdr.flush()
+         o_hdr.close()
+
+    if args.atnTest:
+      testxml = getattr(Templates, 'testxml_template')
+      #add the test joboptions to the atn test
+      #check we have a test directory with appropriate xml file in it
+      if not os.path.isdir("test"):
+         os.mkdir("test")
+      if not os.path.isfile("test/%s.xml"%full_pkg_name):
+         print ":::  INFO Creating test/%s.xml"%full_pkg_name
+         loadFile = open("test/%s.xml"%full_pkg_name,'w')
+         loadFile.writelines("<?xml version=\"1.0\"?>\n<atn>\n")
+         loadFile.writelines(testxml%d)
+         loadFile.writelines("</atn>\n")
+         loadFile.flush()
+         loadFile.close()
+      else:
+         print ":::  INFO Adding %s to test/%s.xml"%(namespace_klass + "JobOptions.py",full_pkg_name)
+         nextAdd=False
+         for line in fileinput.input("test/%s.xml"%full_pkg_name, inplace=1):
+            if nextAdd:
+               print(testxml%d)
+               nextAdd=False
+            if "<atn>" in line: nextAdd=True
+            print line,
+
+   
     #to finish up, call cmt config so that the new algorithm will be captured and genconf run on it
     cwd = os.getcwd()
     try:
diff --git a/Tools/PyUtils/python/scripts/cmt_newanalysisapp.py b/Tools/PyUtils/python/scripts/cmt_newanalysisapp.py
new file mode 100644
index 00000000000..f3d640bf89f
--- /dev/null
+++ b/Tools/PyUtils/python/scripts/cmt_newanalysisapp.py
@@ -0,0 +1,197 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @file PyUtils.scripts.cmt_newanalysisapp
+# @purpose streamline and ease the creation of new standalone applications
+# @author Will Buttinger
+# @date September 2014
+
+#Note - this code could use a serious rewrite, I just hacked it together to get something working
+
+from __future__ import with_statement
+
+__version__ = "$Revision: 734631 $"
+__author__ = "Will Buttinger"
+__doc__ = "streamline and ease the creation of new standalone applications"
+
+### imports -------------------------------------------------------------------
+import os
+import textwrap
+import commands
+import PyUtils.acmdlib as acmdlib
+import fileinput
+
+class Templates:
+    app_template = """\
+    
+    
+#ifndef %(guard)s
+#define %(guard)s 1
+
+
+#include "AthAnalysisBaseComps/AthAnalysisHelper.h" //tool creation and configuration
+#include "POOLRootAccess/TEvent.h" //event looping
+#include "GaudiKernel/ToolHandle.h" //for better working with tools
+
+#include "AsgTools/MessageCheck.h" //messaging
+using namespace asg::msgUserCode;  //messaging
+
+//ROOT includes
+#include "TString.h"
+#include "TSystem.h"
+
+int main( int argc, char* argv[] ) {
+
+   IAppMgrUI* app = POOL::Init(); //important to do this first!
+
+   // Open the input file:
+   TString fileName = "$ASG_TEST_FILE_MC";
+   if( argc < 2 ) {
+      ANA_MSG_WARNING( "No file name received, using $ASG_TEST_FILE_MC" );
+   } else {
+      fileName = argv[1]; //use the user provided file
+   }
+   ANA_MSG_INFO("Opening file: " << gSystem->ExpandPathName(fileName.Data()) );
+
+   //Here's an example of how you would create a tool of type ToolType, and set a property on it
+   //The preferred way to create and configure the tool is with a ToolHandle:
+   //ToolHandle<IToolInterface> myTool("ToolType/myTool");
+   //AthAnalysisHelper::setProperty( myTool, "MyProperty", value );
+   //myTool.retrieve(); //this will cause the tool to be created and initialized
+
+   //loop over input file with POOL 
+   POOL::TEvent evt;
+   evt.readFrom( fileName );
+
+   for(int i=0;i < evt.getEntries(); i++) {
+      if( evt.getEntry(i) < 0) { ANA_MSG_ERROR("Failed to read event " << i); continue; }
+
+      //put your code here
+      //e.g.
+      //const xAOD::EventInfo* evtInfo = 0;
+      //evt.retrieve( evtInfo );
+
+   }
+
+   app->finalize(); //trigger finalization of all services and tools created by the Gaudi Application
+   return 0;
+}
+
+#endif //> !%(guard)s
+"""
+
+### functions -----------------------------------------------------------------
+@acmdlib.command(
+    name='cmt.new-analysisapp'
+    )
+@acmdlib.argument(
+    'appname',
+    help="name of the new app"
+    )
+def main(args):
+    """create a new standalone application inside the current package. Call from within the package directory
+
+    ex:
+     $ acmd cmt new-analysisapp MyApp
+    """
+    sc = 0
+    
+    full_app_name = args.appname
+
+    #determine the package from the cwd 
+    cwd = os.getcwd()
+    #check that cmt dir exists (i.e. this is a package)
+    if not os.path.isdir(cwd+"/cmt"):
+        print "ERROR you must call new-analysisapp from within the package you want to add the app to"
+        return -1
+    full_pkg_name = os.path.basename(cwd)
+    print textwrap.dedent("""\
+    ::: create analysisapp [%(full_app_name)s] in pkg [%(full_pkg_name)s]""" %locals())
+
+    
+    #first we must check that requirements file has the AthAnalysisBaseComps use statement in it
+    #also need AsgTools, POOLRootAccess and AtlasROOT
+    foundBaseComps=False    
+    foundAsgTools=False
+    foundPOOLRootAccess=False
+    foundROOT=False
+    lastUse=0 
+    lineCount=0
+    for line in open('cmt/requirements'):
+        lineCount +=1 
+        if not line.startswith("use "): continue
+        lastUse=lineCount
+        uu = line.split(" ")
+        if uu[1].startswith("AthAnalysisBaseComps"): foundBaseComps=True
+        if uu[1].startswith("AsgTools"): foundAsgTools=True
+        if uu[1].startswith("POOLRootAccess"): foundPOOLRootAccess=True
+        if uu[1].startswith("AtlasROOT"): foundROOT=True
+        
+    if not foundBaseComps or not foundAsgTools or not foundPOOLRootAccess or not foundROOT:
+        print ":::  INFO Adding dependencies to requirements file"
+        #must add a use statement to the requirements file 
+        #put inside private blocks
+        lineCount=0
+        inPrivate=False
+        for line in fileinput.input('cmt/requirements', inplace=1):
+            lineCount+=1
+            if lineCount==lastUse+1:
+                if not inPrivate: print "private"
+                print ""
+                if not foundBaseComps:
+                  print "use AthAnalysisBaseComps AthAnalysisBaseComps-* Control"
+                if not foundAsgTools:
+                  print "use AsgTools AsgTools-* Control/AthToolSupport"
+                if not foundPOOLRootAccess:
+                  print "use POOLRootAccess POOLRootAccess-* PhysicsAnalysis"
+                if not foundROOT:
+                  print "use AtlasROOT AtlasROOT-* External"
+                print ""
+                if not inPrivate: print "end_private"
+            if line.startswith("private"): inPrivate=True
+            elif line.startswith("end_private"): inPrivate=False
+            print line,
+     
+     #append application line
+    with open("cmt/requirements", "a") as myfile:
+         myfile.write("application %s %s.cxx\n" % (full_app_name,full_app_name))
+    
+    #following code borrowed from gen_klass
+    cxx = getattr(Templates, 'app_template')
+    
+    namespace_klass = full_app_name.replace('::','__')
+    namespace_begin,namespace_end = "",""
+    namespace = ""
+    if full_app_name.count("::")>0:
+        namespace    = full_app_name.split("::")[0]
+        full_alg_name = full_app_name.split("::")[1]
+        namespace_begin = "namespace %s {" % namespace
+        namespace_end   = "} //> end namespace %s" % namespace
+        pass
+
+    guard = "%s_%s_H" % (full_pkg_name.upper(), namespace_klass.upper())
+
+    d = dict( pkg=full_pkg_name,
+              klass=full_app_name,
+              guard=guard,
+              namespace_begin=namespace_begin,
+              namespace_end=namespace_end,namespace_klass=namespace_klass,namespace=namespace
+              )
+    fname = os.path.splitext("src/%s"%namespace_klass)[0]
+
+
+    if os.path.isfile(fname+'.cxx'):
+       print ":::  ERROR %s.cxx already exists" % fname
+       return -1
+    print ":::  INFO Creating %s.cxx" % fname
+    o_cxx = open(fname+'.cxx', 'w')
+    o_cxx.writelines(cxx%d)
+    o_cxx.flush()
+    o_cxx.close()
+
+    #to finish up, call cmt config so that the new algorithm will be captured and genconf run on it
+    cwd = os.getcwd()
+    try:
+        os.chdir('cmt')
+        _ = commands.getstatusoutput('cmt config')
+    finally:
+        os.chdir(cwd)
diff --git a/Tools/PyUtils/python/scripts/cmt_newasgtool.py b/Tools/PyUtils/python/scripts/cmt_newasgtool.py
new file mode 100644
index 00000000000..895d9270f5b
--- /dev/null
+++ b/Tools/PyUtils/python/scripts/cmt_newasgtool.py
@@ -0,0 +1,604 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @file PyUtils.scripts.cmt_newasgtool
+# @purpose streamline and ease the creation of new asg tools
+# @author Will Buttinger
+# @date September 2014
+
+#Note - this code could use a serious rewrite, I just hacked it together to get something working
+
+from __future__ import with_statement
+
+__version__ = "$Revision: 734631 $"
+__author__ = "Will Buttinger"
+__doc__ = "streamline and ease the creation of new AsgTool"
+
+### imports -------------------------------------------------------------------
+import os
+import textwrap
+import commands
+import PyUtils.acmdlib as acmdlib
+import fileinput
+
+class Templates:
+    tool_hdr_template = """\
+#ifndef %(guard)s
+#define %(guard)s 1
+
+///
+///Example usage code:
+///Athena only:
+///To configure the tool, add the following to your joboptions:
+/// myTool = CfgMgr.%(namespace_klass)s("ToolInstanceName",Property=4.0)
+/// import ROOT
+/// myTool.ENumProperty = ROOT.%(namespace_klass)s.Val2
+///To use the tool in your algorithm (can use regular ToolHandle or asg::AnaToolHandle):
+/// #include "%(pkg)s/I%(klass)s.h"
+/// ToolHandle<I%(klass)s> myTool("%(namespace2)s%(klass)s/ToolInstanceName")
+/// CHECK( myTool.retrieve() );
+/// myTool->isSelected(...); //or whatever methods are implemented in I%(klass)s
+///
+///Dual use: Alternative for c++ configuration and usage (with or without athena):
+/// #include "AsgTools/AnaToolHandle.h"
+/// #include "%(pkg)s/I%(klass)s.h"
+/// asg::AnaToolHandle<I%(klass)s> myTool("%(namespace2)s%(klass)s/ToolInstanceName");
+/// myTool.setProperty( "Property", 4.0 );
+/// myTool.setProperty( "ENumProperty", I%(klass)s::Val2 );
+/// myTool.initialize();
+/// myTool->isSelected(....); //or whatever methods are implemented in I%(klass)s
+
+
+
+#include "AsgTools/AsgTool.h"
+
+#include "%(pkg)s/I%(klass)s.h"
+
+%(namespace_begin)s
+
+class %(klass)s: public asg::AsgTool, public virtual I%(klass)s { 
+ public: 
+   //constructor for athena can be created using special macro
+   //Note: if you add a second interface to your tool, you must use: ASG_TOOL_CLASS2( ToolName, Interface1, Interface2) 
+   ASG_TOOL_CLASS( %(klass)s , I%(klass)s )
+   //add another constructor for non-athena use cases
+   %(klass)s( const std::string& name );
+
+   /// Initialize is required by AsgTool base class
+   virtual StatusCode  initialize() override;
+
+   // Example code: (part of skeleton code, delete as necessary)
+   // Following method is required by I%(klass)s base class (part of skeleton code, delete as necessary)
+   // DO NOT put default argument values here ... only put them in the interface class
+   virtual bool isSelected(const xAOD::IParticle& particle, bool someAdditionalArgument) const override;
+
+ private: 
+   double m_nProperty;
+   unsigned int m_enumProperty; //do not make the actual property an enum - not supported in athena
+}; 
+%(namespace_end)s
+#endif //> !%(guard)s
+"""
+
+    tool_cxx_template = """\
+// %(pkg)s includes
+#include "%(pkg)s/%(namespace_klass)s.h"
+
+%(namespace_begin)s
+
+%(klass)s::%(klass)s( const std::string& name ) : asg::AsgTool( name ){
+
+  declareProperty( "Property", m_nProperty = 3.0 , "Please describe the property here"); //example property declaration with it's default value
+  declareProperty( "ENumProperty", m_enumProperty = Val1 , "Please define enums inside your classes, not just in namespaces" );
+}
+
+
+StatusCode %(klass)s::initialize() {
+  ATH_MSG_INFO ("Initializing " << name() << "...");
+  //
+  //Make use of the property values to configure the tool
+  //Tools should be designed so that no method other than setProperty is called before initialize
+  //
+
+  return StatusCode::SUCCESS;
+}
+
+//
+//the lines below are an example interface method implementation
+//This example is for a tool that selects particles with |eta| < "Property" value .. using 'someAdditionalArgument' to flip the cut
+//
+bool %(klass)s::isSelected( const xAOD::IParticle& particle , bool someAdditionalArgument ) const {
+   if(someAdditionalArgument) return ( fabs(particle.eta()) >= m_nProperty );
+   return ( fabs(particle.eta()) < m_nProperty );
+}
+
+
+%(namespace_end)s
+"""
+
+    tool_interface_template = """\
+#ifndef %(guard)s
+#define %(guard)s 1
+
+#include "AsgTools/IAsgTool.h"
+
+#include "xAODBase/IParticle.h"
+
+class I%(klass)s : public virtual asg::IAsgTool {
+   public:
+      ASG_TOOL_INTERFACE( I%(klass)s ) //declares the interface to athena
+
+      //below is an example method, delete as necessary
+      //this is where you specify default values for additional arguments
+      virtual bool isSelected(const xAOD::IParticle& particle, bool someAdditionalArgument = false) const = 0;
+
+      //declare enums for properties here too, so that they are accessible through just the interface header
+      //example of an enum you might end up using for your properties .. please put enums in a class! (not just a namespace)
+      enum MyEnumProperty { Val1=1 , Val2=2 };
+
+};
+
+#endif //> !%(guard)s
+"""
+    tool_test_template = """ \
+<?xml version="1.0"?>
+<atn>
+   <TEST name="%(pkg)s_makeCheck" type="makecheck">
+      <package>%(fullpkgpath)s</package>
+      <timelimit>5</timelimit>
+      <author> PLEASE ENTER A NAME </author>
+      <mailto> PLEASEENTER@cern.ch </mailto>
+      <expectations>
+         <errorMessage> Athena exited abnormally </errorMessage>
+         <errorMessage>differ</errorMessage>
+         <returnValue>0</returnValue>
+      </expectations>
+   </TEST>
+</atn>
+"""
+
+    tool_testcpp_template = """ \
+   
+#include "AsgTools/AnaToolHandle.h"
+#include "%(pkg)s/I%(klass)s.h"
+
+using namespace asg::msgUserCode;
+
+int main() {
+  ANA_CHECK_SET_TYPE (int); //makes ANA_CHECK return ints if exiting function
+#ifdef ROOTCORE
+  StatusCode::enableFailure();
+  ANA_CHECK (xAOD::Init ());
+#endif
+
+   asg::AnaToolHandle<I%(klass)s> myTool("%(namespace2)s%(klass)s/ToolInstanceName");
+   ANA_CHECK( myTool.setProperty( "Property", 4.0 ) );
+   ANA_CHECK( myTool.setProperty( "ENumProperty", I%(klass)s::Val2 ) );
+   ANA_CHECK( myTool.initialize() );
+
+   //myTool->isSelected(....); put test code here
+
+   return 0; //zero = success
+}
+"""
+
+    tool_testcppg_template = """ \
+    
+#include "AsgTools/AnaToolHandle.h"
+#include "%(pkg)s/I%(klass)s.h"
+
+#include "AsgTools/UnitTest.h"
+#include <gtest/gtest.h>
+
+using namespace asg::msgUserCode;
+
+//first arg: name of fixture (if applicable), second arg: name of the test (special prefixes have special meanings)
+//see https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RootCore#Unit_Tests for more details
+TEST (%(klass)sTest, basicTest) {
+   asg::AnaToolHandle<I%(klass)s> myTool("%(namespace2)s%(klass)s/ToolInstanceName");
+   ASSERT_SUCCESS (myTool.setProperty( "Property", 4.0 ));
+   ASSERT_SUCCESS (myTool.setProperty( "ENumProperty", I%(klass)s::Val2 ));
+   ASSERT_SUCCESS (myTool.initialize());
+
+//   ASSERT_TRUE( myTool->isSelected( ...goodObject.. ) );
+
+}
+
+int main (int argc, char **argv)
+{
+  ANA_CHECK_SET_TYPE (int);
+#ifdef ROOTCORE
+  StatusCode::enableFailure();
+  ANA_CHECK (xAOD::Init ());
+#endif
+  ::testing::InitGoogleTest (&argc, argv);
+  return RUN_ALL_TESTS();
+}
+"""
+
+### functions -----------------------------------------------------------------
+@acmdlib.command(
+    name='cmt.new-asgtool'
+    )
+@acmdlib.argument(
+    'toolname',
+    help="name of the new tool"
+    )
+@acmdlib.argument(
+    '--googleTest',
+    action='store_true',
+    default=False,
+    help='Create a google test instead of a unit test'
+    )
+def main(args):
+    """create a new AsgTool inside the current package with a corresponding interface class. Call from within the package directory
+
+    ex:
+     $ acmd cmt new-asgtool MyTool
+    """
+    sc = 0
+    
+    full_tool_name = args.toolname
+
+    #determine the package from the cwd 
+    cwd = os.getcwd()
+    #check that cmt dir exists (i.e. this is a package)
+    if not os.path.isdir(cwd+"/cmt"):
+        print "ERROR you must call new-asgtool from within the package you want to add the tool to"
+        return -1
+    full_pkg_name = os.path.basename(cwd)
+    print textwrap.dedent("""\
+    ::: create asg tool [%(full_tool_name)s] in pkg [%(full_pkg_name)s]""" %locals())
+
+    full_pk_path = os.getcwd().replace(os.environ['TestArea']+'/',"")
+
+    
+    #first we must check that requirements file has the AsgTools use statement in it
+    #it must also have '../Root/*.cxx' in the library declaration
+    #so look for patterns of: library xxxx or apply_pattern dual_use_library ...
+    foundBaseComps=False
+    hasxAODBase=False
+    hasAtlasROOT=False
+    haslcgdict=False
+    lastUse=0 
+    lineCount=0
+    libraryLines = []
+    hasLibraryLine=False
+    for line in open('cmt/requirements'):
+        lineCount +=1 
+        if line.startswith("apply_pattern lcgdict"): haslcgdict=True
+        if line.startswith("library") or line.startswith("apply_pattern dual_use_library"): hasLibraryLine=True
+        if line.startswith("library "):
+           if not ("../Root/*.cxx" in line):
+              print ":::  INFO Adding Root directory to library compilation in requirements file (%s -> %s)" % (line.rstrip(), line.rstrip() + " ../Root/*.cxx")
+              libraryLines += [lineCount]
+        
+        if not line.startswith("use "): continue
+        lastUse=lineCount
+        uu = line.split(" ")
+        if uu[1].startswith("AsgTools"): foundBaseComps=True
+        if uu[1].startswith("xAODBase"): hasxAODBase=True
+        if uu[1].startswith("AtlasROOT"): hasAtlasROOT=True;
+    if not hasLibraryLine:
+      with open("cmt/requirements", "a") as myfile:
+         myfile.write("library %s *.cxx components/*.cxx\n" % (full_pkg_name))
+         myfile.write("apply_pattern component_library\n")
+    
+    if not foundBaseComps or len(libraryLines)>0:
+        
+        #must add a use statement to the requirements file 
+        #put inside public blocks
+        lineCount=0
+        inPrivate=False
+        if not foundBaseComps: print ":::  INFO Adding AsgTools to requirements file"
+        for line in fileinput.input('cmt/requirements', inplace=1):
+            lineCount+=1
+            if not foundBaseComps and lineCount==lastUse+1:
+                if inPrivate: print "end_private"
+                print ""
+                print "use AsgTools AsgTools-* Control/AthToolSupport"
+                print ""
+                if not hasAtlasROOT:
+                  print "#uncomment the next line to use ROOT libraries in your package"
+                  print "#use AtlasROOT AtlasROOT-* External"
+                  print ""
+                if not hasxAODBase:
+                  print "#next line only needed for example code in asg tool...please remove if not using"
+                  print "use xAODBase xAODBase-* Event/xAOD"
+                  print ""
+                if inPrivate: print "private"
+            if line.startswith("private"): inPrivate=True
+            elif line.startswith("end_private"): inPrivate=False
+            if lineCount in libraryLines: print line.rstrip() + " ../Root/*.cxx\n",
+            else: print line,
+    
+    if not haslcgdict:
+       #append lcgdict pattern to requirements
+       print ":::  INFO Adding Reflex Dictionary pattern (lcgdict) to requirements file"
+       with open("cmt/requirements", "a") as myfile:
+         myfile.write("")
+         myfile.write("#Reflex Dictionary Generation:\n")
+         myfile.write("private\n")
+         myfile.write("use AtlasReflex AtlasReflex-* External\n")
+         myfile.write("apply_pattern lcgdict dict=%s selectionfile=selection.xml headerfiles=\"../%s/%sDict.h\"\n" % (full_pkg_name,full_pkg_name,full_pkg_name))
+         myfile.write("end_private\n")
+         myfile.flush()
+         myfile.close()
+
+
+
+    
+    #following code borrowed from gen_klass
+    hdr = getattr(Templates, 'tool_hdr_template')
+    cxx = getattr(Templates, 'tool_cxx_template')
+    iface = getattr(Templates, 'tool_interface_template')
+    testxml = getattr(Templates, 'tool_test_template')
+    testcpp = getattr(Templates,'tool_testcpp_template')
+    testcppg = getattr(Templates, 'tool_testcppg_template')
+    
+    namespace_klass = full_tool_name.replace('::','__')
+    namespace_begin,namespace_end = "",""
+    namespace = ""
+    if full_tool_name.count("::")>0:
+        namespace    = full_tool_name.split("::")[0]
+        full_tool_name = full_tool_name.split("::")[1]
+        namespace_begin = "namespace %s {" % namespace
+        namespace_end   = "} //> end namespace %s" % namespace
+        pass
+
+    guard = "%s_%s_H" % (full_pkg_name.upper(), namespace_klass.upper())
+    dictguard = "%s_%sDICT_H" % (full_pkg_name.upper(),full_pkg_name.upper())
+
+    d = dict( pkg=full_pkg_name, fullpkgpath=full_pk_path, 
+              klass=full_tool_name,
+              guard=guard,dictguard=dictguard,
+              namespace_begin=namespace_begin,
+              namespace_end=namespace_end,namespace_klass=namespace_klass,namespace=namespace,namespace2=namespace
+              )
+    if not namespace == "": d["namespace2"]+="::"
+
+    fname = os.path.splitext("%s/%s"%(full_pkg_name,namespace_klass))[0]
+    
+    #create package header folder if not existing already
+    if not os.path.isdir(full_pkg_name):
+       print ":::   INFO Creating %s folder" % full_pkg_name
+       os.mkdir(full_pkg_name)
+
+    #first check doesn't exist 
+    if os.path.isfile(fname+'.h'):
+       print ":::  WARNING %s.h already exists .. not overwriting" % fname
+    else:
+      print ":::  INFO Creating %s.h" % fname
+      o_hdr = open(fname+'.h', 'w')
+      o_hdr.writelines(hdr%d)
+      o_hdr.flush()
+      o_hdr.close()
+
+    d["guard"] = "%s_I%s_H" % (full_pkg_name.upper(), full_tool_name.upper())
+
+    fname = os.path.splitext("%s/I%s"%(full_pkg_name,full_tool_name))[0]
+    if os.path.isfile(fname+'.h'):
+       print ":::  WARNING %s.h already exists .. not overwriting" % fname
+    else:
+      print ":::  INFO Creating %s.h" % fname
+      o_iface = open(fname+'.h', 'w')
+      o_iface.writelines(iface%d)
+      o_iface.flush()
+      o_iface.close()
+
+    #create Root folder if not existing already
+    if not os.path.isdir("Root"):
+       os.mkdir("Root")
+
+    fname = os.path.splitext("Root/%s"%(namespace_klass))[0]
+    if os.path.isfile(fname+'.cxx'):
+       print ":::  WARNING %s.cxx already exists .. not overwriting" % fname
+    else:
+      print ":::  INFO Creating %s.cxx" % fname
+      o_cxx = open(fname+'.cxx', 'w')
+      o_cxx.writelines(cxx%d)
+      o_cxx.flush()
+      o_cxx.close()
+
+    #if lcgdict generating, create/add selection.xml and create/add MyPackageDict.h
+    if True:
+       if not os.path.isfile("%s/%sDict.h"%(full_pkg_name,full_pkg_name)):
+         print ":::  INFO Creating %s/%sDict.h" % (full_pkg_name,full_pkg_name)
+         loadFile = open("%s/%sDict.h" % (full_pkg_name,full_pkg_name),'w')
+         loadFile.writelines("""
+#ifndef %(dictguard)s
+#define %(dictguard)s
+
+#include "%(pkg)s/%(namespace_klass)s.h"
+
+#endif
+"""%d)
+         loadFile.flush()
+         loadFile.close()
+       else:
+         print ":::  INFO Adding %s to %s/%sDict.h" % (namespace_klass,full_pkg_name,full_pkg_name)
+         defineLine=False
+         doneLine=False
+         for line in fileinput.input("%s/%sDict.h"%(full_pkg_name,full_pkg_name), inplace=1):
+            if defineLine and not doneLine:
+               print """
+#include "%(pkg)s/%(namespace_klass)s.h"
+"""%d
+               doneLine=True
+            if line.startswith("#define"): defineLine=True
+            print line,
+         #FIXME: should check actually inserted it!
+       
+       if not os.path.isfile("%s/selection.xml"%(full_pkg_name)):
+         print ":::  INFO Creating %s/selection.xml" % (full_pkg_name)
+         loadFile = open("%s/selection.xml" % (full_pkg_name),'w')
+         if not namespace == "":
+            loadFile.writelines("""
+<lcgdict>
+   <namespace name="%(namespace)s" />
+      <class name="%(namespace)s::%(klass)s" />
+      <class name="I%(klass)s" />
+</lcgdict>
+"""%d)
+         else:
+            loadFile.writelines("""
+<lcgdict>
+   <class name="%(klass)s" />
+   <class name="I%(klass)s" />
+</lcgdict>
+"""%d)
+            loadFile.flush()
+            loadFile.close()
+       else:
+          print ":::  INFO Adding %s and I%s to %s/selection.xml" % (namespace_klass,full_tool_name,full_pkg_name)
+          doNamespace=False
+          if not namespace == "":
+             #check if namespace already defined in file
+             if ('<namespace name="%s"' % namespace) not in open(("%s/selection.xml" % (full_pkg_name))).read():
+                doNamespace=True
+          dictLine=False
+          done=False
+          for line in fileinput.input("%s/selection.xml"%(full_pkg_name), inplace=1):
+               if dictLine and not done:
+                  if doNamespace: print """
+   <namespace name="%(namespace)s" />"""%d
+                  if not namespace == "":
+                     print """
+      <class name="%(namespace)s::%(klass)s" />
+      <class name="I%(klass)s" />"""%d
+                  else:
+                      print """
+      <class name="%(klass)s" />
+      <class name="I%(klass)s" />"""%d
+                  done = True
+               if ((namespace=="" or doNamespace) and "<lcgdict>" in line) or (not namespace =="" and ('<namespace name="%s"' % namespace) in line):
+                  dictLine=True
+               print line,
+
+
+    #check we have a test directory with appropriate xml file in it
+    if not os.path.isdir("test"):
+       os.mkdir("test")
+    if not os.path.isfile("test/%s.xml"%full_pkg_name):
+       print ":::  INFO Creating test/%s.xml"%full_pkg_name
+       loadFile = open("test/%s.xml"%full_pkg_name,'w')
+       loadFile.writelines(testxml%d)
+       loadFile.flush()
+       loadFile.close()
+
+
+    print ":::  INFO Adding 'make check' test to requirements file"
+    with open("cmt/requirements", "a") as myfile:
+      myfile.write("")
+      myfile.write("#test for %s ... compile and execute it with 'make check' \n"%namespace_klass)
+      myfile.write("private\n")
+      myfile.write("use TestTools      TestTools-*         AtlasTest\n")
+      if args.googleTest: 
+         myfile.write("use AtlasGoogleTest AtlasGoogleTest-* External\n")
+         myfile.write("apply_pattern UnitTest_run unit_test=gt_%s\n"%namespace_klass)
+         myfile.write("macro_append gt_%s_test_dependencies \" %s %sMergeComponentsList \"\n" % (namespace_klass, full_pkg_name, full_pkg_name))
+      else:
+         myfile.write("apply_pattern UnitTest_run unit_test=ut_%s\n"%namespace_klass)
+         myfile.write("macro_append ut_%s_test_dependencies \" %s %sMergeComponentsList \"\n" % (namespace_klass, full_pkg_name, full_pkg_name))
+      myfile.write("end_private\n")
+      myfile.flush()
+      myfile.close()
+
+    if args.googleTest:
+       if not os.path.isfile("test/gt_%s_test.cxx" % namespace_klass):
+         print ":::  INFO Creating test/gt_%s_test.cxx" % namespace_klass
+         loadFile = open("test/gt_%s_test.cxx" % namespace_klass,'w')
+         loadFile.writelines(testcppg%d)
+         loadFile.flush()
+         loadFile.close()
+    else:
+      if not os.path.isfile("test/ut_%s_test.cxx" % namespace_klass):
+         print ":::  INFO Creating test/ut_%s_test.cxx" % namespace_klass
+         loadFile = open("test/ut_%s_test.cxx" % namespace_klass,'w')
+         loadFile.writelines(testcpp%d)
+         loadFile.flush()
+         loadFile.close()
+
+
+    #now add the tool to the _entries.cxx file in the components folder 
+    #first check they exist 
+    if not os.path.isfile("src/components/%s_load.cxx"%full_pkg_name):
+       print ":::  INFO Creating src/components/%s_load.cxx"%full_pkg_name
+       loadFile = open("src/components/%s_load.cxx"%full_pkg_name,'w')
+       loadFile.writelines(""" 
+#include "GaudiKernel/LoadFactoryEntries.h"
+LOAD_FACTORY_ENTRIES(%(pkg)s)
+"""%d)
+       loadFile.flush()
+       loadFile.close()
+      
+    if not os.path.isfile("src/components/%s_entries.cxx"%full_pkg_name):
+       print ":::  INFO Creating src/components/%s_entries.cxx"%full_pkg_name
+       loadFile = open("src/components/%s_entries.cxx"%full_pkg_name,'w')
+       if len(namespace_begin)>0:
+          d["namespace"] = args.toolname.split("::")[0]
+          loadFile.writelines("""
+#include "GaudiKernel/DeclareFactoryEntries.h"
+
+#include "%(pkg)s/%(namespace_klass)s.h"
+
+DECLARE_NAMESPACE_TOOL_FACTORY(%(namespace)s, %(klass)s )
+
+DECLARE_FACTORY_ENTRIES( %(pkg)s ) 
+{
+  DECLARE_NAMESPACE_TOOL(%(namespace)s, %(klass)s );
+}
+"""%d)
+       else:
+          loadFile.writelines("""
+#include "GaudiKernel/DeclareFactoryEntries.h"
+
+#include "%(pkg)s/%(namespace_klass)s.h"
+
+DECLARE_TOOL_FACTORY( %(klass)s )
+
+DECLARE_FACTORY_ENTRIES( %(pkg)s ) 
+{
+  DECLARE_TOOL( %(klass)s );
+}
+"""%d)
+       loadFile.flush()
+       loadFile.close()
+    else:
+       #first check tool not already in _entries file 
+       inFile=False
+       for line in open("src/components/%s_entries.cxx"%full_pkg_name):
+          if len(namespace_begin)==0 and "DECLARE_TOOL" in line and d["klass"] in line: inFile=True
+          if len(namespace_begin)>0 and "DECLARE_NAMESPACE_TOOL" in line and d["klass"] in line and d["namespace"]: inFile=True
+          
+       if not inFile:
+         print ":::  INFO Adding %s to src/components/%s_entries.cxx"% (args.toolname,full_pkg_name)
+         nextAdd=False
+         for line in fileinput.input("src/components/%s_entries.cxx"%full_pkg_name, inplace=1):
+            if nextAdd and not "{" in line:
+               nextAdd=False
+               if len(namespace_begin)>0:
+                  print """  DECLARE_NAMESPACE_TOOL(%(namespace)s, %(klass)s );"""%d
+               else:
+                  print """  DECLARE_TOOL( %(klass)s );"""%d
+            if line.startswith("DECLARE_FACTORY_ENTRIES"):
+               nextAdd=True
+               if len(namespace_begin)>0:
+                  
+                  print """
+#include "%(pkg)s/%(namespace_klass)s.h"
+DECLARE_NAMESPACE_TOOL_FACTORY( %(namespace)s, %(klass)s )
+"""%d
+               else:
+                  print """
+#include "%(pkg)s/%(namespace_klass)s.h"
+DECLARE_TOOL_FACTORY( %(klass)s )
+"""%d
+            print line,
+          
+    #to finish up, call cmt config so that the new tool will be captured and genconf run on it
+    cwd = os.getcwd()
+    try:
+        os.chdir('cmt')
+        _ = commands.getstatusoutput('cmt config')
+    finally:
+        os.chdir(cwd)
diff --git a/Tools/PyUtils/python/scripts/cmt_newjobo.py b/Tools/PyUtils/python/scripts/cmt_newjobo.py
index b09e3ac2fd6..795a9fbfd67 100644
--- a/Tools/PyUtils/python/scripts/cmt_newjobo.py
+++ b/Tools/PyUtils/python/scripts/cmt_newjobo.py
@@ -9,7 +9,7 @@
 
 from __future__ import with_statement
 
-__version__ = "$Revision: 655341 $"
+__version__ = "$Revision: 710998 $"
 __author__ = "Will Buttinger"
 __doc__ = "streamline and ease the creation of new skeleton joboption for analysis"
 
@@ -34,8 +34,41 @@ algseq += CfgMgr.%(klass)s()                                 #adds an instance o
 
 
 
+##--------------------------------------------------------------------
+## This section shows how to set up a HistSvc output stream for outputting histograms and trees
+## See https://twiki.cern.ch/twiki/bin/viewauth/AtlasProtected/AthAnalysisBase#How_to_output_trees_and_histogra for more details and examples
+
+#if not hasattr(svcMgr, 'THistSvc'): svcMgr += CfgMgr.THistSvc() #only add the histogram service if not already present (will be the case in this jobo)
+#svcMgr.THistSvc.Output += ["MYSTREAM DATAFILE='myfile.root' OPT='RECREATE'"] #add an output root file stream
+
+##--------------------------------------------------------------------
+
+
+##--------------------------------------------------------------------
+## The lines below are an example of how to create an output xAOD
+## See https://twiki.cern.ch/twiki/bin/viewauth/AtlasProtected/AthAnalysisBase#How_to_create_an_output_xAOD for more details and examples
+
+#from OutputStreamAthenaPool.MultipleStreamManager import MSMgr
+#xaodStream = MSMgr.NewPoolRootStream( "StreamXAOD", "xAOD.out.root" )
+
+##EXAMPLE OF BASIC ADDITION OF EVENT AND METADATA ITEMS
+##AddItem and AddMetaDataItem methods accept either string or list of strings
+#xaodStream.AddItem( ["xAOD::JetContainer#*","xAOD::JetAuxContainer#*"] ) #Keeps all JetContainers (and their aux stores)
+#xaodStream.AddMetaDataItem( ["xAOD::TriggerMenuContainer#*","xAOD::TriggerMenuAuxContainer#*"] )
+#ToolSvc += CfgMgr.xAODMaker__TriggerMenuMetaDataTool("TriggerMenuMetaDataTool") #MetaDataItems needs their corresponding MetaDataTool
+#svcMgr.MetaDataSvc.MetaDataTools += [ ToolSvc.TriggerMenuMetaDataTool ] #Add the tool to the MetaDataSvc to ensure it is loaded
+
+##EXAMPLE OF SLIMMING (keeping parts of the aux store)
+#xaodStream.AddItem( ["xAOD::ElectronContainer#Electrons","xAOD::ElectronAuxContainer#ElectronsAux.pt.eta.phi"] ) #example of slimming: only keep pt,eta,phi auxdata of electrons
+
+##EXAMPLE OF SKIMMING (keeping specific events)
+#xaodStream.AddAcceptAlgs( "%(klass)s" ) #will only keep events where 'setFilterPassed(false)' has NOT been called in the given algorithm
+
+##--------------------------------------------------------------------
+
+
+include("AthAnalysisBaseComps/SuppressLogging.py")              #Optional include to suppress as much athena output as possible. Keep at bottom of joboptions so that it doesn't suppress the logging of the things you have configured above
 
-include("AthAnalysisBaseComps/SuppressLogging.py")       #Optional include to suppress as much athena output as possible
 """
 
 
diff --git a/Tools/PyUtils/python/scripts/cmt_newpkg.py b/Tools/PyUtils/python/scripts/cmt_newpkg.py
index a525797c84c..c4aa37ddb13 100644
--- a/Tools/PyUtils/python/scripts/cmt_newpkg.py
+++ b/Tools/PyUtils/python/scripts/cmt_newpkg.py
@@ -7,7 +7,7 @@
 
 from __future__ import with_statement
 
-__version__ = "$Revision: 655341 $"
+__version__ = "$Revision: 734631 $"
 __author__ = "Sebastien Binet"
 __doc__ = "streamline and ease the creation of new cmt packages"
 
@@ -54,8 +54,8 @@ def main(args):
     author = os.path.expanduser(os.path.expandvars(args.author))
 
     if os.path.exists(full_pkg_name):
-        import shutil
-        shutil.rmtree(full_pkg_name)
+        print "ERROR: %s package already exists" % full_pkg_name
+        return 1
         
     print textwrap.dedent("""\
     ::: creating package [%(full_pkg_name)s]...
@@ -90,8 +90,7 @@ def main(args):
         branches src src/components doc python share
 
         ## default is to make component library. See: https://twiki.cern.ch/twiki/bin/view/Main/LearningAthena#Libraries_in_CMT for alternatives
-        library %(pkg_name)s *.cxx components/*.cxx
-        apply_pattern component_library
+        
         
         apply_pattern declare_joboptions files="*.py"
         apply_pattern declare_python_modules files="*.py"
diff --git a/Tools/PyUtils/python/scripts/diff_root_files.py b/Tools/PyUtils/python/scripts/diff_root_files.py
index bfdc7a86971..be2f3d01c0a 100644
--- a/Tools/PyUtils/python/scripts/diff_root_files.py
+++ b/Tools/PyUtils/python/scripts/diff_root_files.py
@@ -5,7 +5,7 @@
 # @author Sebastien Binet
 # @date February 2010
 
-__version__ = "$Revision: 692760 $"
+__version__ = "$Revision: 681245 $"
 __doc__ = "check that 2 ROOT files have same content (containers and sizes)."
 __author__ = "Sebastien Binet"
 
diff --git a/Tools/PyUtils/python/scripts/get_tagsvn_diff.py b/Tools/PyUtils/python/scripts/get_tagsvn_diff.py
new file mode 100644
index 00000000000..67e9665e309
--- /dev/null
+++ b/Tools/PyUtils/python/scripts/get_tagsvn_diff.py
@@ -0,0 +1,125 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @file PyUtils.scripts.get_tagsvn_diff
+# @purpose Get the list of the SVN differences between packages in 2 releases (CERN centric)
+# @author Edward Moyse
+# @date February 2010
+
+__version__ = "$Revision: 276362 $"
+__doc__ = "Get the list of the SVN differences between packages in 2 releases (CERN centric)."
+__author__ = "Edward Moyse"
+
+
+### imports -------------------------------------------------------------------
+import PyUtils.acmdlib as acmdlib
+from subprocess import check_call,CalledProcessError
+
+@acmdlib.command(
+    name='get-tagsvn-diff'
+    )
+@acmdlib.argument(
+    'old',
+    help="The description string of the reference release (eg: 12.0.X,rel_3,AtlasOffline)"
+    )
+@acmdlib.argument(
+    'new',
+    help="The description string of the to-be-compared release (eg: 12.0.X,rel_3 or 12.0.3)"
+    )
+    
+@acmdlib.argument(
+    '-d', '--domain',
+    help="Restrict to a given domain (e.g. MuonSpectrometer)"
+    )
+
+@acmdlib.argument(
+    '-p', '--project',
+    help="Restrict to a given project (e.g. Event)"
+    )
+    
+@acmdlib.argument(
+    '-k', '--package',
+    help="Restrict to a given package (e.g. MuonRecExample). This is incompatible with restricting to a given domain or project."
+    )
+
+@acmdlib.argument(
+    '-f', '--files', action='store_true', default=False,
+    help="Call 'svn diff' with '--depth files', which will typically just show ChangeLog differences."
+    )
+        
+def main(args):
+    """Get the list of tag differences between 2 releases (CERN centric)
+    """
+
+    print "===> Generating tag difference by calling CMT \n"
+    
+    import PyCmt.Cmt as Cmt
+    diffs = Cmt.get_tag_diff(ref=args.old,
+                             chk=args.new,
+                             verbose=False)
+
+    if (args.package):
+        print "===> Will now dump the SVN diff for ",args.package
+        args.domain=None
+        args.files=None
+    else:
+        print "===> Will now dump the SVN diff for the packages"
+    
+    if (args.domain):
+        print "Restricting to packages in the domain",args.domain
+    if (args.project):
+        print "Restricting to packages in the project",args.project
+    if (args.files):
+        print "... and will limit to file-level (i.e. ChangeLog) differences."
+        
+    for line in diffs:
+        oldTag = line['ref']
+        oldProject = line['ref_proj']
+        newTag = line['chk']
+        newProject = line['chk_proj']
+        if (args.project and args.project!=newProject):
+            continue
+                    
+        path = line['full_name']
+        domain = path.split('/')[0]
+        # print "domain=",domain
+        
+        if (args.domain and args.domain!=domain):
+            continue
+        package = path.split('/')[-1]
+        
+        if (args.package and package!=args.package):
+            continue
+        
+        if ('N/A' in newProject):
+            print "Package",package,"removed from release. Skipping. \n"
+            continue
+        
+        print
+        print '===> Getting SVN diff for package '+package+' at '+path+'\n'
+
+        path="svn+ssh://svn.cern.ch/reps/atlasoff/"+path
+        #path="$SVNOFF/"+path
+
+        if ('N/A' in oldProject):
+            print "New package, so showing diff from first version. \n"
+            oldTag = package+'-00-00-00' #TODO What if this isn't the first tag?
+        else:
+            oldTag=package+'-'+oldTag
+        newTag=package+'-'+newTag
+        # print path+'/tags/'+oldTag, path+'/tags/'+newTag
+        svnargs = ["svn", "diff", path+'/tags/'+oldTag, path+'/tags/'+newTag]
+        if (args.files):
+            svnargs.extend( ["--depth", "files"] )
+            
+        try:
+            svndiff = check_call(svnargs)
+            if (svndiff):
+                for svnline in svndiff:
+                   print svnline
+
+        except CalledProcessError:
+          print 'Some problem running SVN diff!'
+
+    if len(diffs) > 0:
+        return 1
+    return 0
diff --git a/Tools/PyUtils/python/scripts/tc_submit_tag.py b/Tools/PyUtils/python/scripts/tc_submit_tag.py
index df64c246a5e..0fcb4fe11f7 100644
--- a/Tools/PyUtils/python/scripts/tc_submit_tag.py
+++ b/Tools/PyUtils/python/scripts/tc_submit_tag.py
@@ -5,7 +5,7 @@
 # @author Sebastien Binet
 # @date February 2010
 
-__version__ = "$Revision: 636895 $"
+__version__ = "$Revision: 717788 $"
 __doc__ = "Submit one or more TAGs to TagCollector."
 __author__ = "Sebastien Binet, Frank Winklmeier"
 
@@ -18,7 +18,10 @@ import os.path as osp
 import sys
 
 import PyUtils.acmdlib as acmdlib
-import pyAMI.exception
+try:
+   import pyAMI.exception
+except ImportError:
+   pass #do nothing
 import PyCmt.Cmt as cmt
 
 ### functions -----------------------------------------------------------------
diff --git a/Tools/PyUtils/test/PyUtils.xml b/Tools/PyUtils/test/PyUtils.xml
old mode 100755
new mode 100644
diff --git a/Tools/PyUtils/test/test_RootUtils.py b/Tools/PyUtils/test/test_RootUtils.py
index 7d85e122a36..eb86e428bb4 100755
--- a/Tools/PyUtils/test/test_RootUtils.py
+++ b/Tools/PyUtils/test/test_RootUtils.py
@@ -3,6 +3,14 @@
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 # Simple test wrapper for PyUtils.RootUtils
 #
+
+# Make errors visible.
+import os
+os.environ['PYUTILS_SHUTUP_DEBUG']='1'
+import ctypes
+dso=ctypes.cdll.LoadLibrary('libCxxUtils.so')
+dso.CxxUtils_installFatalHandler()
+
 import sys
 import unittest
 
-- 
GitLab