From 0cad40cd2c7578125092752c7a41a337691f794c Mon Sep 17 00:00:00 2001
From: James Beacham <j.beacham@cern.ch>
Date: Wed, 7 Feb 2018 19:40:20 +0000
Subject: [PATCH] Update MG version to 2.6.1

Former-commit-id: 80d522b981f64c31485a788357dd9c01e300524a
---
 Generators/MadGraphControl/CMakeLists.txt     |    2 +-
 .../MadGraphControl/python/MadGraphUtils.py   | 1243 ++++++++++++++---
 .../MadGraphControl/share/fastjet-config      |  357 +++++
 .../MadGraphControl/share/lhapdf-config       |   85 +-
 .../share/makeSLHAFilesForSM.py               |   14 +-
 Generators/Pythia8_i/CMakeLists.txt           |    4 +
 Generators/Pythia8_i/Pythia8_i/Pythia8_i.h    |   23 +-
 .../Pythia8_i/Pythia8_i/UserHooksFactory.h    |   10 +
 Generators/Pythia8_i/src/Pythia8_i.cxx        |   60 +-
 .../Pythia8_i/src/UserHooks/PowhegBB4L.cxx    |  311 +++++
 .../UserHooks/SettableColourReconnection.cxx  |   94 ++
 .../src/UserHooks/SingleTopWideEta.cxx        |   66 +
 .../Pythia8_i/src/UserHooks/UserSetting.h     |   24 +-
 Generators/Pythia8_i/src/UserHooks/main31.cxx |  503 +------
 Generators/Pythia8_i/src/UserHooksFactory.cxx |   41 +-
 Projects/Athena/externals/MadGraph5Amc.cmake  |    2 +-
 Projects/Athena/externals/Pythia8.cmake       |    1 +
 17 files changed, 2014 insertions(+), 826 deletions(-)
 create mode 100755 Generators/MadGraphControl/share/fastjet-config
 create mode 100644 Generators/Pythia8_i/src/UserHooks/PowhegBB4L.cxx
 create mode 100644 Generators/Pythia8_i/src/UserHooks/SettableColourReconnection.cxx
 create mode 100644 Generators/Pythia8_i/src/UserHooks/SingleTopWideEta.cxx

diff --git a/Generators/MadGraphControl/CMakeLists.txt b/Generators/MadGraphControl/CMakeLists.txt
index 09377ece95f..249e6eea3cb 100644
--- a/Generators/MadGraphControl/CMakeLists.txt
+++ b/Generators/MadGraphControl/CMakeLists.txt
@@ -11,7 +11,7 @@ find_package( MadGraph )
 # Install files from the package:
 atlas_install_python_modules( python/*.py )
 atlas_install_joboptions( share/*.py )
-atlas_install_generic( share/*.dat share/lhapdf-config
+atlas_install_generic( share/*.dat share/lhapdf-config share/fastjet-config
                        DESTINATION share
                        EXECUTABLE )
 
diff --git a/Generators/MadGraphControl/python/MadGraphUtils.py b/Generators/MadGraphControl/python/MadGraphUtils.py
index 95240cbe0e1..01ef89a54a9 100755
--- a/Generators/MadGraphControl/python/MadGraphUtils.py
+++ b/Generators/MadGraphControl/python/MadGraphUtils.py
@@ -7,11 +7,26 @@
 #    updates for aMC@NLO by Josh McFayden <mcfayden@cern.ch>
 #  Attempts to remove path-dependence of MadGraph
 
-import os,sys,time,subprocess,shutil,glob,re,difflib
+import os,sys,time,subprocess,shutil,glob,re,difflib,stat
 from AthenaCommon import Logging
 mglog = Logging.logging.getLogger('MadGraphUtils')
 
 
+def setup_path_protection():
+    # Make sure the central (cvmfs) MadGraph models directory is on PYTHONPATH
+    if 'PYTHONPATH' in os.environ:
+        if not 'Generators/madgraph/models' in os.environ['PYTHONPATH']:
+            os.environ['PYTHONPATH'] += ':/cvmfs/atlas.cern.ch/repo/sw/Generators/madgraph/models/latest'
+    # Make sure that gfortran doesn't write to somewhere it shouldn't
+    if 'GFORTRAN_TMPDIR' in os.environ:
+        return
+    if 'TMPDIR' in os.environ:
+        os.environ['GFORTRAN_TMPDIR']=os.environ['TMPDIR']
+        return
+    if 'TMP' in os.environ:
+        os.environ['GFORTRAN_TMPDIR']=os.environ['TMP']
+        return
+
 def new_process(card_loc='proc_card_mg5.dat',grid_pack=None):
     """ Generate a new process in madgraph.  Note that
     you can pass *either* a process card location or a
@@ -40,6 +55,8 @@ def new_process(card_loc='proc_card_mg5.dat',grid_pack=None):
         return new_process()
 
     madpath=os.environ['MADPATH']
+    # Just in case
+    setup_path_protection()
 
     mgexec='/bin/mg5_aMC'
 
@@ -79,7 +96,7 @@ def new_process(card_loc='proc_card_mg5.dat',grid_pack=None):
     mglog.info('Started process generation at '+str(time.asctime()))
 
     generate = subprocess.Popen([madpath+mgexec,card_loc],stdin=subprocess.PIPE)
-    generate.wait()
+    generate.communicate()
 
     mglog.info('Finished process generation at '+str(time.asctime()))
 
@@ -93,28 +110,6 @@ def new_process(card_loc='proc_card_mg5.dat',grid_pack=None):
     return thedir
 
 
-#def get_default_runcard(isNLO=False):
-#    # Get the run card from the installation
-#    if isNLO:
-#        mglog.info('Fetching default NLO run_card.dat')
-#        if os.access(os.environ['MADPATH']+'/Template/NLO/Cards/run_card.dat',os.R_OK):
-#            shutil.copy(os.environ['MADPATH']+'/Template/NLO/Cards/run_card.dat','run_card.SM.dat')
-#            return 'run_card.SM.dat'
-#        else:
-#            raise RuntimeError('Cannot find default NLO run_card.dat!')
-#    else:
-#        mglog.info('Fetching default LO run_card.dat')
-#        if os.access(os.environ['MADPATH']+'/Template/LO/Cards/run_card.dat',os.R_OK):
-#            shutil.copy(os.environ['MADPATH']+'/Template/LO/Cards/run_card.dat','run_card.SM.dat')
-#            return 'run_card.SM.dat'
-#        elif os.access(os.environ['MADPATH']+'/Template/Cards/run_card.dat',os.R_OK):
-#            shutil.copy(os.environ['MADPATH']+'/Template/Cards/run_card.dat','run_card.SM.dat')
-#            return 'run_card.SM.dat'
-#        else:
-#            raise RuntimeError('Cannot find default LO run_card.dat!')
-
-
-
 def get_default_runcard(proc_dir='PROC_mssm_0'):
     try:
         from __main__ import opts
@@ -134,21 +129,21 @@ def get_default_runcard(proc_dir='PROC_mssm_0'):
 
     # Get the run card from the installation
     run_card_loc=proc_dir+'/Cards/run_card.dat'
-    mglog.info('Fetching default run_card.dat from '+str(run_card_loc))
     if os.access(run_card_loc,os.R_OK):
-        shutil.copy(run_card_loc,'run_card.SM.dat')
-        return 'run_card.SM.dat'
+        mglog.info('Copying default run_card.dat from '+str(run_card_loc))
+        shutil.copy(run_card_loc,'run_card.tmp.dat')
+        return 'run_card.tmp.dat'
     else:
         run_card_loc=proc_dir+'/Cards/run_card_default.dat'
         mglog.info('Fetching default run_card.dat from '+str(run_card_loc))
         if os.access(run_card_loc,os.R_OK):
-            shutil.copy(run_card_loc,'run_card.SM.dat')
-            return 'run_card.SM.dat'
+            shutil.copy(run_card_loc,'run_card.tmp.dat')
+            return 'run_card.tmp.dat'
         else:
             raise RuntimeError('Cannot find default run_card.dat or run_card_default.dat! I was looking here: %s'%run_card_loc)
     
     
-def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,njobs=1,run_name='Test',proc_dir='PROC_mssm_0',grid_pack=False,gridpack_compile=False,cluster_type=None,cluster_queue=None,cluster_temp_path=None,extlhapath=None,madspin_card_loc=None,required_accuracy=0.01,gridpack_dir=None,nevents=None,random_seed=None):
+def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,njobs=1,run_name='Test',proc_dir='PROC_mssm_0',grid_pack=False,gridpack_compile=False,cluster_type=None,cluster_queue=None,cluster_temp_path=None,extlhapath=None,madspin_card_loc=None,required_accuracy=0.01,gridpack_dir=None,nevents=None,random_seed=None,reweight_card_loc=None,bias_module=None):
     try:
         from __main__ import opts
         if opts.config_only:
@@ -157,18 +152,20 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
     except:
         pass
 
+    # Just in case
+    setup_path_protection()
 
     if is_gen_from_gridpack(grid_pack):
         if gridpack_dir and nevents and random_seed:
             mglog.info('Running event generation from gridpack (using smarter mode from generate() function)')
-            generate_from_gridpack(run_name=run_name,gridpack_dir=gridpack_dir,nevents=nevents,random_seed=random_seed,card_check=proc_dir,param_card=param_card_loc,madspin_card=madspin_card_loc,proc_dir=proc_dir,extlhapath=extlhapath) 
+            generate_from_gridpack(run_name=run_name,gridpack_dir=gridpack_dir,nevents=nevents,random_seed=random_seed,card_check=proc_dir,param_card=param_card_loc,madspin_card=madspin_card_loc,extlhapath=extlhapath,gridpack_compile=gridpack_compile,reweight_card=reweight_card_loc) 
             return
         else:
             mglog.info('Detected gridpack mode for generating events but assuming old configuration (using separate generate_from_gridpack() call)')
             return
 
 
-    version = getMadGraphVersion() #DR: avoiding code duplication
+    version = getMadGraphVersion() # avoiding code duplication
 
     # If we need to get the cards...
     if run_card_loc is not None and not os.access(run_card_loc,os.R_OK):
@@ -193,10 +190,13 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         if not os.access(param_card_loc,os.R_OK):
             mglog.error('Could not find param card '+str(param_card_loc))
             return 1
+    if reweight_card_loc is not None and not os.access(reweight_card_loc,os.R_OK):
+        mglog.error('Could not find reweight card '+str(reweight_card_loc))
+        return 1
 
     # Check if process is NLO or LO
     isNLO=is_NLO_run(proc_dir=proc_dir)
-           
+
     if grid_pack:
         #Running in gridpack mode
         mglog.info('Started generating gridpack at '+str(time.asctime()))
@@ -224,8 +224,8 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
             req_acc=required_accuracy
             for line in oldcard:
                 if ' = nevents ' in line:
-                    newcard.write(' 0 = nevents    ! Number of unweighted events requested \n')
-                    mglog.info('Setting nevents = 0.')
+                    newcard.write(' 1 = nevents    ! Number of unweighted events requested \n')
+                    mglog.info('Setting nevents = 1.')
                 elif ' = req_acc ' in line:
                     newcard.write(' %f = req_acc    ! Required accuracy (-1=auto determined from nevents)\n'%(req_acc))
                     mglog.info('Setting req_acc = %f'%(req_acc))
@@ -235,7 +235,7 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
             newcard.close()
             shutil.move(run_card_loc+'.tmp',run_card_loc)
 
-        print "run_card.dat:",run_card_loc
+        mglog.info( "run_card.dat: "+run_card_loc )
         runCard = subprocess.Popen(['cat',run_card_loc])
         runCard.wait()
 
@@ -259,6 +259,13 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         mglog.info('Decaying with MadSpin. Moving card (%s) into place.'%(madspin_card_loc))
         shutil.copyfile(madspin_card_loc,proc_dir+'/Cards/madspin_card.dat')
 
+    if reweight_card_loc:
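+        # Reweighting support was added in MG5_aMC 2.4, hence the version check below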
+        if int(version.split('.')[0]) < 3 and not ( int(version.split('.')[0]) >= 2 and int(version.split('.')[1]) >= 4 ):
+            mglog.error('Attempting to run reweighting with old MadGraph version - please upgrade to MadGraph5_aMC@NLO v2.4+.')
+            return 1
+        mglog.info('Running reweighting module. Moving card (%s) into place.'%(reweight_card_loc))
+        shutil.copyfile(reweight_card_loc,proc_dir+'/Cards/reweight_card.dat')
+     
 
     # Ensure that things are set up normally
     if not os.access(run_card_loc,os.R_OK):
@@ -267,6 +274,10 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
     if param_card_loc is not None and not os.access(param_card_loc,os.R_OK):
         mglog.error('No param card found at '+param_card_loc)
         return 1
+    if reweight_card_loc is not None and not os.access(reweight_card_loc,os.R_OK):
+        mglog.error('No reweight card found at '+reweight_card_loc)
+        return 1
+
     if not os.access(proc_dir,os.R_OK):
         mglog.error('No process directory found at '+proc_dir)
         return 1
@@ -287,13 +298,23 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         mglog.error('Working from '+str(os.getcwd()))
         return 1
 
-    (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH) = setupLHAPDF(isNLO, version=version, proc_dir=proc_dir, extlhapath=extlhapath) 
-    
+    allow_links = True
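+    # Some batch systems (the check below looks for Condor) cannot use symbolic links, so fall back to copying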
+    if cluster_type is not None and cluster_queue is not None:
+        if 'condor' in cluster_type.lower():
+            mglog.warning('Condor clusters do not allow links. Files will be copied rather than linked.')
+            allow_links = False
+
+    (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH) = setupLHAPDF(isNLO, version=version, proc_dir=proc_dir, extlhapath=extlhapath, allow_links=allow_links) 
+
             
     mglog.info('For your information, the libraries available are (should include LHAPDF):')
     mglog.info( sorted( os.listdir( proc_dir+'/lib/' ) ) )
 
+    setupFastjet(isNLO, proc_dir=proc_dir)
+    if bias_module is not None:
+        setup_bias_module(bias_module,run_card_loc,proc_dir,grid_pack)
 
+        
     mglog.info('Now I will hack the make files a bit.  Apologies, but there seems to be no good way around this.')
     shutil.copyfile(proc_dir+'/Source/make_opts',proc_dir+'/Source/make_opts_old')
     old_opts = open(proc_dir+'/Source/make_opts_old','r')
@@ -311,6 +332,8 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
     new_opts.close()
     mglog.info('Make file hacking complete.')
 
+    print_cards(run_card=run_card_loc,param_card=(param_card_loc if param_card_loc is not None else proc_dir+'/Cards/param_card.dat'))
+
     currdir=os.getcwd()
     os.chdir(proc_dir)
 
@@ -323,7 +346,7 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         
     if njobs>1 and mode==0: mglog.warning('Inconsistent configuration between mode '+str(mode)+' and number of jobs '+str(njobs))
     if njobs>1 and not isNLO:
-    
+
         if not grid_pack and not athenaMP:
             mglog.warning('Running parallel generation.  This should not happen for a grid job, to be a good citizen.')
         elif not athenaMP:
@@ -333,7 +356,8 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         if cluster_type is not None and cluster_queue is not None:
             mglog.info('Setting up parallel running system settings')
             
-            
+            setNCores(process_dir=os.getcwd(), Ncores=njobs)
+
             config_card='Cards/me5_configuration.txt'
             oldcard = open(config_card,'r')
             newcard = open(config_card+'.tmp','w')                                                                                             
@@ -360,15 +384,18 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
             if cluster_type=='pbs': 
                 mglog.info('Modifying bin/internal/cluster.py for PBS cluster running')
                 os.system("sed -i \"s:text += prog:text += './'+prog:g\" bin/internal/cluster.py")                 
-    
+
+        run_card_consistency_check(isNLO=isNLO)
         generate = subprocess.Popen(['bin/generate_events',str(mode),str(njobs),str(run_name)],stdin=subprocess.PIPE)
-        generate.wait()
+        generate.communicate()
 
     elif not isNLO:   
-        
+
+        setNCores(process_dir=os.getcwd(), Ncores=njobs)
+        run_card_consistency_check(isNLO=isNLO)
         mglog.info('Running serial generation.  This will take a bit more time than parallel generation.')
         generate = subprocess.Popen(['bin/generate_events','0',str(run_name)],stdin=subprocess.PIPE)
-        generate.wait()
+        generate.communicate()
 
     elif isNLO:
         
@@ -378,18 +405,12 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         newcard = open(config_card_loc+'.tmp','w')
         
         # Make sure params only set once
-        lhapdf_set=False
-        fastjet_set=False
         run_mode_set=False
         auto_html_set=False
         cltype_set=False
         clqueue_set=False
         nbcore_set=False
         tmppath_set=False
-        thelhapath = LHAPATH.split('share/')[0]+os.environ['CMTCONFIG']
-        thefastjetpath = '/afs/cern.ch/sw/lcg/external/fastjet/3.0.3/x86_64-slc6-gcc47-opt'
-        if not os.path.exists(thefastjetpath):
-            thefastjetpath='/cvmfs/sft.cern.ch/lcg/external/fastjet/3.0.3/x86_64-slc6-gcc47-opt'
 
         for line in oldcard:
             if 'run_mode =' in line:
@@ -402,17 +423,6 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
                     mglog.info('Setting automatic_html_opening = %s'%('False'))
                     newcard.write('automatic_html_opening = %s \n'%('False'))
                     auto_html_set=True
-# Shouldn't be needed as this is now set above...
-#            elif 'lhapdf = ' in line:
-#                if not lhapdf_set:
-#                    mglog.info('Setting lhapdf path = %s'%(thelhapath+'/bin/lhapdf-config'))
-#                    newcard.write('lhapdf = %s \n'%(thelhapath+'/bin/lhapdf-config'))
-#                    lhapdf_set=True
-            elif 'fastjet = ' in line:
-                if not fastjet_set:
-                    mglog.info('Setting fastjet path = %s'%(thefastjetpath+'/bin/fastjet-config'))
-                    newcard.write('fastjet = %s \n'%(thefastjetpath+'/bin/fastjet-config'))
-                    fastjet_set=True
             elif 'cluster_type = ' in line and mode == 1:
                 if not cltype_set:
                     mglog.info('Setting cluster type = %s in %s'%(cluster_type,config_card_loc))   
@@ -439,23 +449,22 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         newcard.close()
         shutil.move(config_card_loc+'.tmp',config_card_loc)
         
-        print "amcatnlo_configuration.txt:",config_card_loc
+        mglog.info( "amcatnlo_configuration.txt: "+config_card_loc )
         configCard = subprocess.Popen(['cat',config_card_loc])
         configCard.wait()
     
     
         #generate = subprocess.Popen(['bin/aMCatNLO','launch','-p','-f'],stdin=subprocess.PIPE)
-        #generate.wait()
+        #generate.communicate()
         
         mglog.info('Removing Cards/shower_card.dat to ensure we get parton level events only')
         remove_shower = subprocess.Popen(['rm','Cards/shower_card.dat'])
         remove_shower.wait()
 
             
-        
+        run_card_consistency_check(isNLO=isNLO)        
         mygenerate = subprocess.Popen(['bin/generate_events','--name='+str(run_name)],stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
-        mygenerate.wait()
-
+        mygenerate.communicate()
 
 
 
@@ -468,7 +477,10 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
             mglog.info('Tidying up gridpack (%s)...'%gridpack_name)
             
             os.chdir(currdir)
-            shutil.copy((proc_dir+'/'+run_name+'_gridpack.tar.gz'),gridpack_name)
+            if madspin_card_loc:
+                shutil.copy((proc_dir+'/'+run_name+'_decayed_1_gridpack.tar.gz'),gridpack_name)
+            else:
+                shutil.copy((proc_dir+'/'+run_name+'_gridpack.tar.gz'),gridpack_name)
             
             if gridpack_compile:
                 mkdir = subprocess.Popen(['mkdir','tmp%i/'%os.getpid()])
@@ -488,8 +500,9 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
                 remove_old = subprocess.Popen(['rm',('../'+gridpack_name)])
                 remove_old.wait()
                 mglog.info('Package up new tarball')
-                tar = subprocess.Popen(['tar','cvzfh',('../'+gridpack_name),'.'])
-                tar.wait()
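+                # lib/PDFsets is excluded here to keep the tarball small; setupLHAPDF() puts the PDF sets back in place when the gridpack is used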
+                tar = subprocess.Popen(['tar','cvzf','../'+gridpack_name,'--exclude=lib/PDFsets','.']) 
+                tar.wait() 
+
                 os.chdir('../')
                 mglog.info('Remove temporary directory')
                 remove_tmp = subprocess.Popen(['rm','-fr','tmp%i/'%os.getpid()])
@@ -497,17 +510,34 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
                 mglog.info('Tidying up complete!')
 
         else:
+
+            run_card_consistency_check(isNLO=isNLO)
+
             ### NLO RUN ###
+
+            # In case a MadSpin card was specified setup MadSpin
+            if madspin_card_loc:
+                # modify run card for nevents 250
+                modify_run_card(settings={'nevents': 250})
+                # Generate some events -> trigger MadSpin code generation, compilation and setup (overweight estimation)
+                run = subprocess.Popen(['./bin/generate_events','--parton','--nocompile','--only_generation','-f','--name='+str(run_name)], stdin=subprocess.PIPE)
+                run.communicate()
+                # The folders with the generated events and decayed events are removed when running from the gridpack
+
+
             gridpack_name=(run_name+'_gridpack.tar.gz')
             mglog.info('Creating gridpack (%s)...'%gridpack_name)
             
             os.chdir('../')
             mglog.info('Package up proc_dir')
             os.rename(proc_dir,gridpack_dir) 
-            tar = subprocess.Popen(['tar','cvzfh',gridpack_name,gridpack_dir,'--exclude=lib/PDFsets']) 
+            tar = subprocess.Popen(['tar','czf',gridpack_name,gridpack_dir,'--exclude=lib/PDFsets']) 
             tar.wait() 
             os.rename(gridpack_dir,proc_dir) 
 
+
+
+
         raise RuntimeError('Gridpack successfully created, exiting the transform. IGNORE ERRORS if running gridpack generation!')
 
 
@@ -523,6 +553,20 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
         else:
             mglog.error('MadSpin was run but can\'t find output folder %s.'%('Events/'+run_name+'_decayed_1/'))
             raise RuntimeError('MadSpin was run but can\'t find output folder %s.'%('Events/'+run_name+'_decayed_1/'))
+
+    elif isNLO:
+
+        mglog.info('Moving generated events to be in correct format for arrange_output().')
+        mglog.info('Unzipping generated events.')
+        unzip = subprocess.Popen(['gunzip','-f','Events/'+run_name+'/events.lhe.gz'])
+        unzip.wait()
+        
+        mglog.info('Moving file over to '+'Events/'+run_name+'/unweighted_events.lhe')
+        shutil.move('Events/'+run_name+'/events.lhe','Events/'+run_name+'/unweighted_events.lhe')
+        
+        mglog.info('Re-zipping into dataset name '+'Events/'+run_name+'/unweighted_events.lhe.gz')
+        rezip = subprocess.Popen(['gzip','Events/'+run_name+'/unweighted_events.lhe'])
+        rezip.wait()
         
     os.chdir(currdir)
 
@@ -532,20 +576,35 @@ def generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,
     return 0
 
 
-def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,random_seed=-1,card_check=None,param_card=None,madspin_card=None,proc_dir=None,extlhapath=None):
+def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,random_seed=-1,card_check=None,param_card=None,madspin_card=None,reweight_card=None,extlhapath=None, gridpack_compile=None):
+
+    # Just in case
+    setup_path_protection()
 
     isNLO=is_NLO_run(proc_dir=gridpack_dir)
     LHAPATH=os.environ['LHAPATH'].split(':')[0]
 
     version = getMadGraphVersion()
-    (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH) = setupLHAPDF(isNLO, version=version, proc_dir=proc_dir, extlhapath=extlhapath) 
+    (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH) = setupLHAPDF(isNLO, version=version, proc_dir=gridpack_dir, extlhapath=extlhapath) 
+
+    setupFastjet(isNLO, proc_dir=gridpack_dir)
 
     if param_card is not None:
-        #DR: only copy param_card if name of destination directory differs
-        if param_card != gridpack_dir + '/Cards/' + param_card.split('/')[-1]: #DR
-            shutil.copy( param_card , gridpack_dir+'/Cards' ) #DR
+        # only copy param_card if name of destination directory differs
+        if param_card != gridpack_dir + '/Cards/' + param_card.split('/')[-1]:
+            shutil.copy( param_card , gridpack_dir+'/Cards' )
         mglog.info( 'Moved param card into place: '+str(param_card) )
 
+    if reweight_card is not None:
+        if os.path.exists(reweight_card):
+            shutil.copy( reweight_card , gridpack_dir+'/Cards' )
+            mglog.info( 'Moved reweight card into place: '+str(reweight_card) )
+        else:
+            mglog.info( 'Did not find reweight card '+str(reweight_card)+', using the one provided by gridpack' )
+
+
+    print_cards(run_card=gridpack_dir+'/Cards/run_card.dat',param_card=gridpack_dir+'/Cards/param_card.dat')
+
     # Work in progress...
     #if card_check:
     #    mglog.info('Checking that cards in %s and %s are consistent.'%(gridpack_dir,card_check))
@@ -565,29 +624,67 @@ def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,r
 
 
     mglog.info('>>>> FOUND GRIDPACK <<<<  <- This will be used for generation')
-    mglog.info('Generation of '+str(nevents)+' events will be performed using the supplied gridpack with random seed '+str(random_seed))
+    mglog.info('Generation of '+str(int(nevents))+' events will be performed using the supplied gridpack with random seed '+str(random_seed))
     mglog.info('Started generating events at '+str(time.asctime()))
 
 
 
-
+    #Remove addmasses if it's there
+    if os.access(gridpack_dir+'/bin/internal/addmasses.py',os.R_OK):
+        os.remove(gridpack_dir+'/bin/internal/addmasses.py')
+        
     currdir=os.getcwd()
     
     if not isNLO:
         ### LO RUN ###
-        if not os.access(gridpack_dir+'bin/run.sh',os.R_OK):
-            mglog.error('bin/run.sh not found at '+gridpack_dir)
+        if not os.access(gridpack_dir+'/bin/run.sh',os.R_OK):
+            mglog.error('/bin/run.sh not found at '+gridpack_dir)
             return 1
         else:
             mglog.info('Found '+gridpack_dir+' bin/run.sh, starting generation.')
+        # add reweighting to run.sh
+        if reweight_card:
+            runscript=gridpack_dir+'/bin/run.sh'
+            oldscript = open(runscript,'r')
+            newscript = open(runscript+'.tmp','w')
+            added_rwgt=False
+            gridrun_line='./bin/gridrun $num_events $seed'
+            reweight_line='./bin/madevent reweight GridRun_${seed} -f\n'
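+            # Insert the reweighting command right after gridrun; if MadSpin is configured, disable it during reweighting and run the decays afterwards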
+            for line in oldscript:
+                if not added_rwgt and gridrun_line in line:
+                    if madspin_card:
+                        # renaming madspin card deactivates madspin -- madspin should be run after reweighting
+                        newscript.write('mv Cards/madspin_card.dat Cards/madspin_card_backup.dat\n')
+                    newscript.write(line)
+                    # reweight
+                    newscript.write(reweight_line)
+                    if madspin_card:
+                        # move madspin card back in place and run madspin
+                        newscript.write('mv Cards/madspin_card_backup.dat Cards/madspin_card.dat\n')
+                        newscript.write('./bin/madevent decay_events GridRun_${seed} -f\n')
+                    added_rwgt=True
+                else:
+                    newscript.write(line)
+            oldscript.close()
+            newscript.close()
+            mglog.info('created '+runscript+'.tmp')
+
+            if not added_rwgt:
+                raise RuntimeError('Could not add reweighting to gridpack script '+runscript+'; perhaps the line that generates events has changed. Expected: "'+gridrun_line+'"')
+            shutil.move(runscript+'.tmp',runscript)
+            st = os.stat(runscript)
+            os.chmod(runscript, st.st_mode | stat.S_IEXEC)
+
+        setNCores(process_dir=gridpack_dir)
 
         mglog.info('For your information, ls of '+currdir+':')
         mglog.info( sorted( os.listdir( currdir ) ) )
         mglog.info('For your information, ls of '+gridpack_dir+':')
         mglog.info( sorted( os.listdir( gridpack_dir ) ) )
-        
-        generate = subprocess.Popen([gridpack_dir+'/bin/run.sh',str(nevents),str(random_seed)],stdin=subprocess.PIPE)
-        generate.wait()
+
+        run_card_consistency_check(isNLO=isNLO,path=gridpack_dir)
+        generate = subprocess.Popen([gridpack_dir+'/bin/run.sh',str(int(nevents)),str(int(random_seed))],stdin=subprocess.PIPE)
+        generate.communicate()
         
     else:
         ### NLO RUN ###
@@ -617,63 +714,20 @@ def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,r
         newruncard.close()
         shutil.move(run_card_loc+'.tmp',run_card_loc)
         
-        print "run_card.dat:",run_card_loc
+        mglog.info( "run_card.dat: "+run_card_loc )
         runCard = subprocess.Popen(['cat',run_card_loc])
         runCard.wait()
     
-        ### Editing config card
         ### Editing config card
         config_card_loc=gridpack_dir+'/Cards/amcatnlo_configuration.txt'
-        oldcard = open(config_card_loc,'r')
-        newcard = open(config_card_loc+'.tmp','w')
-        
+ 
         # Make sure params only set once
-        lhapdf_set=False
-        fastjet_set=False
-        run_mode_set=False
-        auto_html_set=False
         cltype_set=False
         clqueue_set=False
-        nbcore_set=False
-        thelhapath = LHAPATH.split('share/')[0]+os.environ['CMTCONFIG']
 
-        thefastjetpath = '/afs/cern.ch/sw/lcg/external/fastjet/3.0.3/x86_64-slc6-gcc47-opt'
-        if not os.path.exists(thefastjetpath):
-            thefastjetpath='/cvmfs/sft.cern.ch/lcg/external/fastjet/3.0.3/x86_64-slc6-gcc47-opt'
+        setNCores(process_dir=gridpack_dir)
 
-        for line in oldcard:
-            if 'run_mode =' in line:
-                if not run_mode_set:
-                    mglog.info('Setting run_mode = %i'%(0))
-                    newcard.write('run_mode = %i \n'%(0))
-                    run_mode_set=True
-            elif 'automatic_html_opening =' in line:
-                if not auto_html_set:
-                    mglog.info('Setting automatic_html_opening = %s'%('False'))
-                    newcard.write('automatic_html_opening = %s \n'%('False'))
-                    auto_html_set=True
-#            elif 'lhapdf = ' in line:
-#                if not lhapdf_set:
-#                    mglog.info('Setting lhapdf path = %s'%(thelhapath+'/bin/lhapdf-config'))
-#                    newcard.write('lhapdf = %s \n'%(thelhapath+'/bin/lhapdf-config'))
-#                    lhapdf_set=True
-            elif 'fastjet = ' in line:
-                if not fastjet_set:
-                    mglog.info('Setting fastjet path = %s'%(thefastjetpath+'/bin/fastjet-config'))
-                    newcard.write('fastjet = %s \n'%(thefastjetpath+'/bin/fastjet-config'))
-                    fastjet_set=True
-            elif 'nb_core = ' in line:                                               
-                if not nbcore_set:
-                    mglog.info('Setting number of cores = %i in %s'%(1,config_card_loc)) 
-                    newcard.write('nb_core = %i \n'%(1))                     
-                    nbcore_set=True
-            else:
-                newcard.write(line)
-        oldcard.close()
-        newcard.close()
-        shutil.move(config_card_loc+'.tmp',config_card_loc)
-        
-        print "amcatnlo_configuration.txt:",config_card_loc
+        mglog.info( "amcatnlo_configuration.txt: "+config_card_loc )
         configCard = subprocess.Popen(['cat',config_card_loc])
         configCard.wait()
 
@@ -700,29 +754,60 @@ def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,r
             mglog.info('Removing %s/Events/%s directory from gridpack generation.'%(gridpack_dir,run_name))
             shutil.rmtree(gridpack_dir+'/Events/'+run_name)
     
+        # Delete events generated when setting up MadSpin during gridpack generation
+        if os.access(gridpack_dir+'/Events/'+run_name+'_decayed_1', os.F_OK):
+            mglog.info('Removing %s/Events/%s_decayed_1 directory from gridpack generation.'%(gridpack_dir,run_name))
+            shutil.rmtree(gridpack_dir+'/Events/'+run_name+'_decayed_1')
+
         mglog.info('For your information, ls of '+gridpack_dir+'/Events/:')
         mglog.info( sorted( os.listdir( gridpack_dir+'/Events/' ) ) )
     
-        mglog.info('Copying make_opts from Template')   
-        shutil.copy(os.environ['MADPATH']+'/Template/LO/Source/make_opts',gridpack_dir+'/Source/')   
        
-        generate = subprocess.Popen([gridpack_dir+'/bin/generate_events','--parton','--nocompile','--only_generation','-f','--name=%s'%run_name],stdin=subprocess.PIPE)
-        generate.wait()
+        if not gridpack_compile:
+            mglog.info('Copying make_opts from Template')   
+            shutil.copy(os.environ['MADPATH']+'/Template/LO/Source/make_opts',gridpack_dir+'/Source/')   
+
+            run_card_consistency_check(isNLO=isNLO,path=gridpack_dir) 
+            generate = subprocess.Popen([gridpack_dir+'/bin/generate_events','--parton','--nocompile','--only_generation','-f','--name=%s'%run_name],stdin=subprocess.PIPE)
+            generate.communicate()
+        else:
+            mglog.info('Allowing recompilation of gridpack')
+            if os.path.islink(gridpack_dir+'/lib/libLHAPDF.a'):
+                mglog.info('Unlinking '+gridpack_dir+'/lib/libLHAPDF.a')
+                os.unlink(gridpack_dir+'/lib/libLHAPDF.a')
+
+            run_card_consistency_check(isNLO=isNLO,path=gridpack_dir) 
+            generate = subprocess.Popen([gridpack_dir+'/bin/generate_events','--parton','--only_generation','-f','--name=%s'%run_name],stdin=subprocess.PIPE)
+            generate.communicate()
+
 
     mglog.info('Copying generated events to %s.'%currdir)
 
     if madspin_card:
-        if os.path.exists(gridpack_dir+'Events/'+run_name+'_decayed_1'): 
-            if not isNLO:
-                shutil.copy(gridpack_dir+'/Events/'+run_name+'_decayed_1'+'/unweighted_events.lhe.gz','events.lhe.gz') 
-            else:
-                shutil.copy(gridpack_dir+'/Events/'+run_name+'_decayed_1'+'/events.lhe.gz','events.lhe.gz') 
-        else: 
-            mglog.error('MadSpin was run but can\'t find output folder %s.'%('Events/'+run_name+'_decayed_1/')) 
-            raise RuntimeError('MadSpin was run but can\'t find output folder %s.'%('Events/'+run_name+'_decayed_1/')) 
-    else: #DR
-        shutil.copy(gridpack_dir+'/Events/'+run_name+'/events.lhe.gz','events.lhe.gz') #DR
+        #if os.path.exists(gridpack_dir+'Events/'+run_name+'_decayed_1'): 
+        if not isNLO:
+            LOdir='Events/GridRun_%i'%random_seed+'_decayed_1'
+            if os.path.exists(gridpack_dir+'/'+LOdir):
+                shutil.copy(gridpack_dir+'/'+LOdir+'/unweighted_events.lhe.gz','events.lhe.gz') 
+            else: 
+                mglog.error('MadSpin was run but can\'t find output folder %s.'%LOdir)
+                raise RuntimeError('MadSpin was run but can\'t find output folder %s.'%LOdir)
+        else:
+            NLOdir='Events/'+run_name+'_decayed_1'
+            if os.path.exists(gridpack_dir+'/'+NLOdir):
+                shutil.copy(gridpack_dir+'/'+NLOdir+'/events.lhe.gz','events.lhe.gz') 
+            else: 
+                mglog.error('MadSpin was run but can\'t find output folder %s.'%NLOdir)
+                raise RuntimeError('MadSpin was run but can\'t find output folder %s.'%NLOdir)
+
+
+
+    else: 
 
+        if not os.path.exists(gridpack_dir+'Events/GridRun_%i/'%random_seed):
+            shutil.copy(gridpack_dir+'/Events/'+run_name+'/events.lhe.gz','events.lhe.gz') 
+
+ 
         
 
     mglog.info('For your information, ls of '+currdir+':')
@@ -730,7 +815,7 @@ def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,r
  
     mglog.info('Moving generated events to be in correct format for arrange_output().')
     mglog.info('Unzipping generated events.')
-    unzip = subprocess.Popen(['gunzip','events.lhe.gz'])
+    unzip = subprocess.Popen(['gunzip','-f','events.lhe.gz'])
     unzip.wait()
 
     mglog.info('Moving file over to '+gridpack_dir+'/Events/'+run_name+'/unweighted_events.lhe')
@@ -753,7 +838,7 @@ def generate_from_gridpack(run_name='Test',gridpack_dir='madevent/',nevents=-1,r
 
 def getMadGraphVersion():
 
-    #DR: also need to find out the version (copied from generate)
+    # also need to find out the version (copied from generate)
     madpath=os.environ['MADPATH']
     version=None    
     version_file = open(os.environ['MADPATH']+'/VERSION','r')
@@ -773,8 +858,47 @@ def getMadGraphVersion():
     return(version)
 
 
-#DR: put LHAPDF setup in function
-def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
+def setupFastjet(isNLO, proc_dir=None):
+
+    mglog.info('Path to fastjet install dir: %s'%os.environ['FASTJETPATH'])
+
+
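+    # get_files copies the fastjet-config wrapper shipped in MadGraphControl/share into the run directory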
+    getfjconfig = subprocess.Popen(['get_files','-data','fastjet-config'])
+    getfjconfig.wait() 
+    #Get custom fastjet-config 
+    if not os.access(os.getcwd()+'/fastjet-config',os.X_OK):
+        mglog.error('Failed to get fastjet-config from MadGraphControl')
+        return 1
+    fastjetconfig = os.getcwd()+'/fastjet-config'
+    
+    mglog.info('fastjet-config --version:      %s'%str(subprocess.Popen([fastjetconfig, '--version'],stdout = subprocess.PIPE).stdout.read().strip()))
+    mglog.info('fastjet-config --prefix:       %s'%str(subprocess.Popen([fastjetconfig, '--prefix'],stdout = subprocess.PIPE).stdout.read().strip()))
+        
+    if not isNLO:
+        config_card=proc_dir+'/Cards/me5_configuration.txt'
+    else:
+        config_card=proc_dir+'/Cards/amcatnlo_configuration.txt'
+        
+    oldcard = open(config_card,'r')
+    newcard = open(config_card+'.tmp','w')
+     
+    for line in oldcard:
+        if 'fastjet = ' in line:                                                
+            newcard.write('fastjet = %s \n'%(fastjetconfig))
+            mglog.info('Setting fastjet = %s in %s'%(fastjetconfig,config_card))
+        else:
+            newcard.write(line)
+    oldcard.close()
+    newcard.close()
+    shutil.move(config_card+'.tmp',config_card)
+    #mglog.info('New %s card:'%config_card)
+    #configCard = subprocess.Popen(['cat',config_card])             
+    #configCard.wait()
+
+    return
+
+
+def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None, allow_links=True):
 
     origLHAPATH=os.environ['LHAPATH']
     origLHAPDF_DATA_PATH=os.environ['LHAPDF_DATA_PATH']
@@ -786,11 +910,101 @@ def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
     else:
         LHADATAPATH=os.environ['LHAPATH'].split(':')[0]
 
+
+
+    #pdfname='NNPDF30_nlo_as_0118'
+    #pdfid='260000'
+
+    pdfname=''
+    pdfid=-999
+
+    ### Reading LHAPDF ID from run card
+    mydict={}
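+    # Parse run_card lines of the form '<value> = <setting>  ! comment' into mydict[setting] = value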
+    run_card_loc=proc_dir+'/Cards/run_card.dat'
+    runcard = open(run_card_loc,'r')
+    for line in runcard:
+        if not line.strip().startswith('#'): # skip commented-out lines
+            command = line.split('!', 1)[0]
+            comment = line.split('!', 1)[1] if '!' in line else ''
+            if '=' in command:
+                setting = command.split('=')[-1].strip()
+                value = command.split('=')[0].strip()
+                mydict[setting]=value
+
+    runcard.close()
+
+    if mydict["pdlabel"].replace("'","") == 'lhapdf':
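+        # The run card requests LHAPDF: build a local MGC_LHAPDF/ directory containing just the requested PDF sets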
+        #Make local LHAPDF dir
+        mglog.info('creating local LHAPDF dir: MGC_LHAPDF/')
+        if os.path.islink('MGC_LHAPDF/'):
+            os.unlink('MGC_LHAPDF/')
+        elif os.path.isdir('MGC_LHAPDF/'):
+            shutil.rmtree('MGC_LHAPDF/')
+
+        newMGCLHA='MGC_LHAPDF/'
+
+        mkdir = subprocess.Popen(['mkdir','-p',newMGCLHA])
+        mkdir.wait()
+
+
+        # Error checking
+        for pdfid in [ int(x) for x in mydict['lhaid'].replace(',',' ').split() ]:
+            pdfname='' # reset so that a missing ID is caught for each requested set
+            pdflist = open(LHADATAPATH+'/pdfsets.index','r')
+            for line in pdflist:
+                splitline=line.split()
+                if int(splitline[0]) == pdfid:
+                    pdfname=splitline[1]
+                    break
+            pdflist.close()
+    
+            if pdfname=='':
+                err='Couldn\'t find PDF name associated to ID %i in %s.'%(pdfid,LHADATAPATH+'/pdfsets.index')
+                mglog.error(err)
+                raise RuntimeError(err)
+    
+            mglog.info("Found LHAPDF ID=%i, name=%s!"%(pdfid,pdfname))
+
+            if allow_links:
+                mglog.info('linking '+LHADATAPATH+'/'+pdfname+' --> '+newMGCLHA+pdfname)
+                os.symlink(LHADATAPATH+'/'+pdfname,newMGCLHA+pdfname)
+            else:
+                mglog.info('copying '+LHADATAPATH+'/'+pdfname+' --> '+newMGCLHA+pdfname)
+                shutil.copytree(LHADATAPATH+'/'+pdfname,newMGCLHA+pdfname)
+
+        if allow_links:
+            mglog.info('linking '+LHADATAPATH+'/pdfsets.index --> '+newMGCLHA+'pdfsets.index')
+            os.symlink(LHADATAPATH+'/pdfsets.index',newMGCLHA+'pdfsets.index')
+            
+            atlasLHADATAPATH=LHADATAPATH.replace('sft.cern.ch/lcg/external/lhapdfsets/current','atlas.cern.ch/repo/sw/Generators/lhapdfsets/current')
+            mglog.info('linking '+atlasLHADATAPATH+'/lhapdf.conf --> '+newMGCLHA+'lhapdf.conf')
+            os.symlink(atlasLHADATAPATH+'/lhapdf.conf',newMGCLHA+'lhapdf.conf')
+        else:
+            mglog.info('copying '+LHADATAPATH+'/pdfsets.index --> '+newMGCLHA+'pdfsets.index')
+            shutil.copy2(LHADATAPATH+'/pdfsets.index',newMGCLHA+'pdfsets.index')
+ 
+            atlasLHADATAPATH=LHADATAPATH.replace('sft.cern.ch/lcg/external/lhapdfsets/current','atlas.cern.ch/repo/sw/Generators/lhapdfsets/current')
+            mglog.info('copying '+atlasLHADATAPATH+'/lhapdf.conf --> '+newMGCLHA+'lhapdf.conf')
+            shutil.copy2(atlasLHADATAPATH+'/lhapdf.conf',newMGCLHA+'lhapdf.conf')
+
+        
+        LHADATAPATH=os.getcwd()+'/MGC_LHAPDF'
+
+    else:
+        mglog.info('Not using LHAPDF')
+        return (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH)
+        
+    
     if isNLO:
         os.environ['LHAPDF_DATA_PATH']=LHADATAPATH
 
     mglog.info('Path to LHAPDF install dir:%s'%LHAPATH)
     mglog.info('Path to LHAPDF data dir: %s'%LHADATAPATH)
+    if not os.path.isdir(LHADATAPATH):
+        mglog.error('LHAPDF data dir: %s is not accessible'%LHADATAPATH)
+        return 1
+    if not os.path.isdir(LHAPATH):
+        mglog.error('LHAPDF path dir: %s is not accessible'%LHAPATH)
+        return 1
 
     # Dealing with LHAPDF (Only need to edit configuration file for 2.1.1 onwards)
     if int(version.split('.')[0]) >= 2 and ( int(version.split('.')[1]) > 1 or ( int(version.split('.')[1]) == 1 and int(version.split('.')[2]) > 0) ):
@@ -813,6 +1027,7 @@ def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
             lhapdfconfig = os.getcwd()+'/lhapdf-config'
 
         mglog.info('lhapdf-config --version:      %s'%str(subprocess.Popen([lhapdfconfig, '--version'],stdout = subprocess.PIPE).stdout.read().strip()))
+        mglog.info('lhapdf-config --prefix:       %s'%str(subprocess.Popen([lhapdfconfig, '--prefix'],stdout = subprocess.PIPE).stdout.read().strip()))
         mglog.info('lhapdf-config --libdir:       %s'%str(subprocess.Popen([lhapdfconfig, '--libdir'],stdout = subprocess.PIPE).stdout.read().strip()))
         mglog.info('lhapdf-config --datadir:      %s'%str(subprocess.Popen([lhapdfconfig, '--datadir'],stdout = subprocess.PIPE).stdout.read().strip()))
         mglog.info('lhapdf-config --pdfsets-path: %s'%str(subprocess.Popen([lhapdfconfig, '--pdfsets-path'],stdout = subprocess.PIPE).stdout.read().strip()))
@@ -835,16 +1050,21 @@ def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
         oldcard.close()
         newcard.close()
         shutil.move(config_card+'.tmp',config_card)
-        mglog.info('New me5_configuration.txt card:')
-        configCard = subprocess.Popen(['cat',config_card])             
-        configCard.wait()
+        #mglog.info('New me5_configuration.txt card:')
+        #configCard = subprocess.Popen(['cat',config_card])             
+        #configCard.wait()
 
         mglog.info('Creating links for LHAPDF')
-        if os.path.isdir(proc_dir+'/lib/PDFsets'):
+        if os.path.islink(proc_dir+'/lib/PDFsets'):
+            os.unlink(proc_dir+'/lib/PDFsets')
+        elif os.path.isdir(proc_dir+'/lib/PDFsets'):
             shutil.rmtree(proc_dir+'/lib/PDFsets')
-        os.symlink(LHADATAPATH,proc_dir+'/lib/PDFsets')
+        if allow_links:
+            os.symlink(LHADATAPATH,proc_dir+'/lib/PDFsets')
+        else:
+            shutil.copytree(LHADATAPATH,proc_dir+'/lib/PDFsets')
         mglog.info('Available PDFs are:')
-        mglog.info( sorted( os.listdir( proc_dir+'/lib/PDFsets/' ) ) )
+        mglog.info( sorted( [ x for x in os.listdir(proc_dir+'/lib/PDFsets') if not ".tar.gz" in x ] ) )
         
     else:
         # Nasty fixes for MG5v1:
@@ -866,7 +1086,10 @@ def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
             mglog.info('  '+LHAPATH)
                         
         mglog.info('Creating links for LHAPDF')
-        os.symlink(LHAPATH,proc_dir+'/lib/PDFsets')
+        if allow_links:
+            os.symlink(LHAPATH,proc_dir+'/lib/PDFsets')
+        else:
+            shutil.copytree(LHAPATH,proc_dir+'/lib/PDFsets')
         mglog.info('Available PDFs are:')
         mglog.info( sorted( os.listdir( proc_dir+'/lib/PDFsets/' ) ) )
     
@@ -890,12 +1113,53 @@ def setupLHAPDF(isNLO, version=None, proc_dir=None, extlhapath=None):
                 mglog.info('  '+releaselhapath)
                 lhalibpath = releaselhapath.split(
                     'share/')[0]+os.environ['CMTCONFIG']+'/lib/'
-                
-        os.symlink( lhalibpath+'libLHAPDF.a',proc_dir+'/lib/libLHAPDF.a')
-        os.symlink( lhalibpath+'libLHAPDF.so',proc_dir+'/lib/libLHAPDF.so')
+
+        if allow_links:
+            os.symlink( lhalibpath+'libLHAPDF.a',proc_dir+'/lib/libLHAPDF.a')
+            os.symlink( lhalibpath+'libLHAPDF.so',proc_dir+'/lib/libLHAPDF.so')
+        else:
+            shutil.copy2( lhalibpath+'libLHAPDF.a',proc_dir+'/lib/libLHAPDF.a')
+            shutil.copy2( lhalibpath+'libLHAPDF.so',proc_dir+'/lib/libLHAPDF.so')
 
     return (LHAPATH,origLHAPATH,origLHAPDF_DATA_PATH)
 
+# Function to set the number of cores and the running mode in the run card
+def setNCores(process_dir=None, Ncores=None):
+    if process_dir is None:
+        mglog.warning('Cannot setNCores because no process dir was provided')
+        return
+    my_Ncores = Ncores
+    if Ncores is None and 'ATHENA_PROC_NUMBER' in os.environ:
+        my_Ncores = int(os.environ['ATHENA_PROC_NUMBER'])
+    if my_Ncores is None:
+        mglog.info('Setting up for serial run')
+        my_Ncores = 1
+
+    if not is_NLO_run(proc_dir=process_dir):
+        config_card=process_dir+'/Cards/me5_configuration.txt'
+    else:
+        config_card=process_dir+'/Cards/amcatnlo_configuration.txt'
+
+    # Replace the core configuration in the config card with the values we need
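+    # MG5 configuration: run_mode 0 = single machine, 1 = cluster, 2 = multicore; nb_core sets the number of cores used in multicore mode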
+    oldcard = open(config_card,'r')
+    newcard = open(config_card+'.tmp','w')
+    for line in oldcard.readlines():
+        if 'nb_core = ' in line:
+            mglog.info('Setting number of cores = %i in %s'%(my_Ncores,config_card))
+            newcard.write('nb_core = %i \n'%(my_Ncores))
+        elif 'run_mode = ' in line:
+            mglog.info('Setting run mode = %i in %s'%(0 if my_Ncores==1 else 2,config_card))
+            newcard.write('run_mode = %i \n'%(0 if my_Ncores==1 else 2))
+        elif 'automatic_html_opening =' in line:
+            mglog.info('Setting automatic_html_opening = %s'%('False'))
+            newcard.write('automatic_html_opening = %s \n'%('False'))
+        else:
+            newcard.write(line)
+    oldcard.close()
+    newcard.close()
+    shutil.move(config_card+'.tmp',config_card)
+
 
 def resetLHAPDF(origLHAPATH='',origLHAPDF_DATA_PATH=''):
     mglog.info('Restoring original LHAPDF env variables:')
@@ -939,14 +1203,21 @@ def add_lifetimes(process_dir=None,threshold=None):
     mglog.info('Started adding time of flight info '+str(time.asctime()))
 
     generate = subprocess.Popen([me_exec,'time_of_flight_exec_card'],stdin=subprocess.PIPE)
-    generate.wait()
+    generate.communicate()
 
     mglog.info('Finished adding time of flight information at '+str(time.asctime()))
 
-    return True
+    # Put the file back where we found it
+    lhe = glob.glob(process_dir+'/Events/*/*lhe')[0]
+    rezip = subprocess.Popen(['gzip',lhe])
+    rezip.wait()
 
+    return True
 
-def arrange_output(run_name='Test',proc_dir='PROC_mssm_0',outputDS='madgraph_OTF._00001.events.tar.gz',lhe_version=None,saveProcDir=False):
+def run_madspin(madspin_card=None,runArgs=None,saveProcDir=False,run_name='Test'):
+    """ Run madspin on the generated LHE file.  Should be
+    run when you have inputGeneratorFile set.
+    """
     try:
         from __main__ import opts
         if opts.config_only:
@@ -954,28 +1225,105 @@ def arrange_output(run_name='Test',proc_dir='PROC_mssm_0',outputDS='madgraph_OTF
             return
     except:
         pass
+    madpath=os.environ['MADPATH']
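+    # Set up a dummy gluino-pair process just to obtain a madevent directory; MadSpin then decays the externally supplied LHE file (runArgs.inputGeneratorFile)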
+    process_dir=new_process(SUSY_model()+"""
+generate p p > go go
+output -f"""
+        )
 
+    tarball = runArgs.inputGeneratorFile
+    from glob import glob
+    if not os.path.exists(os.getcwd()+'/'+tarball):
+        mglog.error('Could not find input tarball %s in %s'%(tarball,os.getcwd()))
+    else:
+        mglog.info('Unzipping generated events.')
+        unzip = subprocess.Popen(['gunzip','-f',os.getcwd()+'/'+tarball])
+        unzip.wait()
+        mglog.info('Moving file over to '+process_dir+'/Events/'+run_name+'/unweighted_events.lhe')
+        mkdir = subprocess.Popen(['mkdir','-p',(process_dir+'/Events/')])
+        mkdir.wait()
+        mkdir = subprocess.Popen(['mkdir','-p',(process_dir+'/Events/'+run_name)])
+        mkdir.wait()
+        shutil.move(os.getcwd()+'/'+tarball.replace('tar.gz','events'),process_dir+'/Events/'+run_name+'/unweighted_events.lhe')
+    if len(glob(process_dir+'/Events/*'))<1:
+        mglog.error('Process dir %s does not contain events?'%process_dir)
+    run = glob(process_dir+'/Events/*')[0].split('/')[-1]
+    from glob import glob
+    me_exec=process_dir+'/bin/madevent'
+    shutil.copyfile(madspin_card,process_dir+'/Cards/madspin_card.dat')
 
-    isNLO=is_NLO_run(proc_dir=proc_dir)
+    ms_c = open('madspin_decay_cmd','w')
+    ms_c.write('decay_events '+run_name)
+    ms_c.close()
+
+    mglog.info('Started madspin at '+str(time.asctime()))
+
+    generate = subprocess.Popen([me_exec,'madspin_decay_cmd'],stdin=subprocess.PIPE)
+    generate.communicate()
+
+    mglog.info('Finished running madspin at '+str(time.asctime()))
+    shutil.move(process_dir+'/Events/'+run_name+'_decayed_1/unweighted_events.lhe.gz',process_dir+'/Events/'+run_name+'/events.lhe.gz')
+    mglog.info('Moving MadSpin events from %s to %s.'%(process_dir+'/Events/'+run_name+'_decayed_1/unweighted_events.lhe.gz',process_dir+'/Events/'+run_name+'/events.lhe.gz'))
+
+    
+    # Move output files into the appropriate place, with the appropriate name
+    the_spot = arrange_output(run_name=run_name,proc_dir=process_dir,outputDS='madgraph_OTF._00001.events.tar.gz',saveProcDir=saveProcDir)
+    if the_spot == '':
+        mglog.error('Error arranging output dataset!')
+        return -1
+
+    mglog.info('All done generating events!!')
+
+    return 
+
+
+def arrange_output(run_name='Test',proc_dir='PROC_mssm_0',outputDS='madgraph_OTF._00001.events.tar.gz',lhe_version=None,saveProcDir=False,runArgs=None):
+    try:
+        from __main__ import opts
+        if opts.config_only:
+            mglog.info('Athena running on config only mode: not executing MadGraph')
+            return
+    except:
+        pass
+
+    # NLO is not *really* the question here, we need to know if we should look for weighted or
+    #  unweighted events in the output directory.  MadSpin (above) only seems to give weighted
+    #  results for now?
+    #isNLO=is_NLO_run(proc_dir=proc_dir)
+    hasUnweighted = os.access(proc_dir+'/Events/'+run_name+'/unweighted_events.lhe.gz',os.R_OK)
 
     # Clean up in case a link or file was already there
     if os.path.exists(os.getcwd()+'/events.lhe'): os.remove(os.getcwd()+'/events.lhe')
 
     mglog.info('Unzipping generated events.')
-    unzip = subprocess.Popen(['gunzip',proc_dir+'/Events/'+run_name+'/unweighted_events.lhe.gz'])
-    unzip.wait()
- 
+    if hasUnweighted:
+        unzip = subprocess.Popen(['gunzip','-f',proc_dir+'/Events/'+run_name+'/unweighted_events.lhe.gz'])
+        unzip.wait()
+    else:
+        unzip = subprocess.Popen(['gunzip','-f',proc_dir+'/Events/'+run_name+'/events.lhe.gz'])
+        unzip.wait()
+
     mglog.info('Putting a copy in place for the transform.')
-    orig_input = open(proc_dir+'/Events/'+run_name+'/unweighted_events.lhe','r')
-    mod_output = open(os.getcwd()+'/events.lhe','w')
+    if hasUnweighted:
+        orig_input = proc_dir+'/Events/'+run_name+'/unweighted_events.lhe'
+        mod_output = open(os.getcwd()+'/events.lhe','w')
+    else:
+        orig_input = proc_dir+'/Events/'+run_name+'/events.lhe'
+        mod_output = open(os.getcwd()+'/events.lhe','w')
+
 
+    #Removing empty lines in LHE
     nEmpty=0
-    for line in orig_input.readlines():
-        if line.strip():
-            mod_output.write(line)
-        else:
-            nEmpty=nEmpty+1
-    orig_input.close()
+    # Try to do this in a way that uses less memory...
+    # Note that I could do this instead with unix commands:
+    #grep -cvP '\S' orig_input
+    #sed -i '/^$/d' orig_input
+    with open(orig_input,'r') as fileobject:
+        for line in fileobject:
+            if line.strip():
+                mod_output.write(line)
+            else:
+                nEmpty=nEmpty+1
     mod_output.close()
 
     mglog.info('Removed %i empty lines from LHEF.'%nEmpty)
@@ -992,16 +1340,22 @@ def arrange_output(run_name='Test',proc_dir='PROC_mssm_0',outputDS='madgraph_OTF
             shutil.copy(os.getcwd()+'/events.lhe.copy',os.getcwd()+'/events.lhe')
         mod_output2.close()
 
-    # Actually move over the dataset - this first part is horrible...
-    variables = {}
-    if os.access('runargs.generate.py',os.R_OK): 
-        execfile('runargs.generate.py',variables) 
-    else: 
-        execfile('runargs.Generate.py',variables) 
 
+    # Actually move over the dataset - this first part is horrible...
     outputTXTFile = None
-    if 'runArgs' in variables and hasattr(variables['runArgs'],'outputTXTFile'):
-        outputTXTFile = variables['runArgs'].outputTXTFile
+    if runArgs is None:
+        # If they didn't pass in runArgs, then we have to do this by hand
+        variables = {}
+        if os.access('runargs.generate.py',os.R_OK): 
+            execfile('runargs.generate.py',variables) 
+        elif os.access('runargs.Generate.py',os.R_OK): 
+            execfile('runargs.Generate.py',variables) 
+    
+        if 'runArgs' in variables and hasattr(variables['runArgs'],'outputTXTFile'):
+            outputTXTFile = variables['runArgs'].outputTXTFile
+    else:
+        if hasattr(runArgs,'outputTXTFile'):
+            outputTXTFile = runArgs.outputTXTFile
 
     if outputTXTFile is None: outputDS = 'tmp_'+outputDS
     else:                     outputDS = outputTXTFile
@@ -1018,9 +1372,78 @@ def arrange_output(run_name='Test',proc_dir='PROC_mssm_0',outputDS='madgraph_OTF
         mglog.info('Blasting away the process directory')
         shutil.rmtree(proc_dir,ignore_errors=True)
 
+        if os.path.isdir('MGC_LHAPDF/'):
+            shutil.rmtree('MGC_LHAPDF/',ignore_errors=True)
+
+    # shortening the outputDS in the case of an output TXT file
+    if outputTXTFile is not None:
+        outputDS = outputDS.split('.TXT')[0]
+    # Do some fixing up for them
+    if runArgs is not None:
+        mglog.debug('Setting inputGenerator file to '+outputDS)
+        runArgs.inputGeneratorFile=outputDS
+
     mglog.info('All done with output arranging!')
     return outputDS
 
+def setup_bias_module(bias_module,run_card_loc,proc_dir,grid_pack):
+    version = getMadGraphVersion()
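+    # Bias modules are supported from MG5_aMC 2.5.0, and only work with gridpacks from 2.6.0, hence the two version checks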
+    if int(version.split('.')[0])<2 or int(version.split('.')[0])==2 and int(version.split('.')[1])<5:
+        mglog.error('Sorry, the bias module is available only from MG5_aMC v2.5.0')
+        return 1
+    if grid_pack and (int(version.split('.')[0])<2 or int(version.split('.')[0])==2 and int(version.split('.')[1])<6):
+        mglog.error('Sorry, the bias module works with gridpacks only from MG5_aMC v2.6.0')
+        return 1
+    if isinstance(bias_module,tuple):
+        mglog.info('Using bias module '+bias_module[0])
+        the_run_card = open(run_card_loc,'r')
+        for line in the_run_card:
+            if 'bias_module' in line and not bias_module[0] in line:
+                mglog.error('You need to add the bias module '+bias_module[0]+' to the run card to actually run it')
+                return 1
+        the_run_card.close()
+        if len(bias_module)!=3:
+            mglog.error('Please give a 3-tuple of strings containing the bias module name, the bias module (Fortran) source, and the Makefile contents. Alternatively, give the path to a bias module tarball.')
+            return 1
+        bias_module_newpath=proc_dir+'/Source/BIAS/'+bias_module[0]
+        os.makedirs(bias_module_newpath)
+        bias_module_file=open(bias_module_newpath+'/'+bias_module[0]+'.f','w')
+        bias_module_file.write(bias_module[1])
+        bias_module_file.close()
+        bias_module_make_file=open(bias_module_newpath+'/Makefile','w')
+        bias_module_make_file.write(bias_module[2])
+        bias_module_make_file.close()
+    else:
+        mglog.info('Using bias module '+bias_module)
+        bias_module_name=bias_module.split('/')[-1].replace('.gz','')
+        bias_module_name=bias_module_name.replace('.tar','')
+        the_run_card = open(run_card_loc,'r')
+        for line in the_run_card:
+            if 'bias_module' in line and not bias_module_name in line:
+                mglog.error('You need to add the bias module '+bias_module_name+' to the run card to actually run it')
+                return 1
+        the_run_card.close()
+
+        if os.path.exists(bias_module+'.tar.gz'):
+            bias_module_path=bias_module+'.tar.gz'
+        elif os.path.exists(bias_module+'.gz'):
+            bias_module_path=bias_module+'.gz'
+        elif os.path.exists(bias_module):
+            bias_module_path=bias_module
+        else:
+            mglog.error('Did not find bias module '+bias_module+'. This path should point to a folder or tarball. Alternatively, give a 3-tuple of strings containing the module name, module source, and Makefile contents')
+            return 1
+        bias_module_newpath=proc_dir+'/Source/BIAS/'+bias_module_path.split('/')[-1]
+        mglog.info('Copying bias module into place: '+bias_module_newpath)
+        shutil.copy(bias_module_path,bias_module_newpath)
+        mglog.info('Unpacking bias module')
+        if bias_module_newpath.endswith('.tar.gz'):
+            untar = subprocess.Popen(['tar','xvzf',bias_module_newpath,'--directory='+proc_dir+'/Source/BIAS/'])
+            untar.wait()
+        elif bias_module_path.endswith('.gz'):
+            gunzip = subprocess.Popen(['gunzip',bias_module_newpath])
+            gunzip.wait()
+
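+# Illustrative sketch only (names below are hypothetical): setup_bias_module
+# accepts either a path to a bias module folder/tarball or a 3-tuple of strings.
+#
+#   # (a) path form: 'my_ptj_bias' points to a folder or to my_ptj_bias.tar.gz
+#   setup_bias_module('my_ptj_bias','run_card.dat',proc_dir,grid_pack=False)
+#   # (b) tuple form: (module name, Fortran source, Makefile contents)
+#   setup_bias_module(('my_ptj_bias',fortran_source,makefile_text),'run_card.dat',proc_dir,grid_pack=False)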
 
 def helpful_definitions():
     return """
@@ -1049,8 +1472,7 @@ define susyv~ = sve~ svm~ svt~
 """
 
 def strong_process_dict( njets = 1 , gentype = 'GG' ):
-    header = """
-import model mssm"""
+    header = SUSY_model()
     header += helpful_definitions()
     header += """
 # Specify process(es) to run
@@ -1161,6 +1583,10 @@ def get_variations( gentype , masses , syst_mod , xqcut = None ):
             if masses['1000024']<xqcut*4.: xqcut = masses['1000024']*0.25
         elif 'Stau'==gentype:
             if masses['1000015']<xqcut*4.: xqcut = masses['1000015']*0.25
+        elif 'SlepSlep'==gentype:
+            if masses['1000011']<xqcut*4.: xqcut = masses['1000011']*0.25
+        elif 'T2' in gentype:
+            if masses['2000006']<xqcut*4.: xqcut = masses['2000006']*0.25
         else:
             if 'G' in gentype or 'ALL' in gentype:
                 if masses['1000021']<xqcut*4.: xqcut = masses['1000021']*0.25
@@ -1172,6 +1598,8 @@ def get_variations( gentype , masses , syst_mod , xqcut = None ):
                 if masses['1000005']<xqcut*4.: xqcut = masses['1000005']*0.25
             if 'C' in gentype:
                 if masses['1000024']<xqcut*4.: xqcut = masses['1000024']*0.25
+            if 'D' in gentype:
+                if masses['2000001']<xqcut*4.: xqcut = masses['2000001']*0.25
         if syst_mod is not None and 'qup' in syst_mod.lower(): xqcut = xqcut*2.
         elif syst_mod is not None and 'qdown' in syst_mod.lower(): xqcut = xqcut*0.5
     mglog.info('For matching, will use xqcut of '+str(xqcut))
@@ -1217,7 +1645,7 @@ def SUSY_StrongSM_Generation(runArgs = None, gentype='SS',decaytype='direct',mas
 
     xqcut , alpsfact , scalefact = get_variations( gentype , masses , syst_mod )
 
-    if not SLHAonly:
+    if not SLHAonly and (writeGridpack or gridpackDirName is None):
         # Generate the new process!
         thedir = new_process(card_loc=process)
         if 1==thedir:
@@ -1247,28 +1675,18 @@ def SUSY_StrongSM_Generation(runArgs = None, gentype='SS',decaytype='direct',mas
         else:
             build_param_card(param_card_old='param_card.SM.%s.%s.dat'%(gentype,decaytype),param_card_new='param_card.dat',masses=masses)
 
+    # Ensure that the param card is compatible with the model that's being used
+    shutil.move('param_card.dat','original_param_card.dat')
+    update_param_card_blocks( process_dir=thedir , from_param_card='original_param_card.dat' , to_param_card='param_card.dat' )
+
     if SLHAonly:
         mglog.info('Not running generation - only setting up SLHA file')
         return [xqcut,'dummy']
 
-    # Grab the run card and move it into place
-    if 'phot' in gentype:
-        build_run_card(run_card_old='run_card.SMphot.dat',run_card_new='run_card.dat',
-                   xqcut=xqcut,nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact)
-
-    else:
-       if getnewruncard==True:
-           extras = { 'ktdurham':xqcut , 'lhe_version':'2.0' , 'cut_decays':'F' , 'pdlabel':pdlabel , 'lhaid':lhaid , 'drjj':0.0 }
-           build_run_card(run_card_old=get_default_runcard(),run_card_new='run_card.dat',xqcut=0,
-                                  nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact,extras=extras)
-       else:
-           build_run_card(run_card_old='run_card.SM.dat',run_card_new='run_card.dat',
-                       xqcut=xqcut,nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact)
-
     # Generate events!
     if gridpackDirName is not None:
         if writeGridpack==False:
-            mglog.info('Generateing events from gridpack')
+            mglog.info('Generating events from gridpack')
             if generate_from_gridpack(run_name='Test',gridpack_dir=gridpackDirName,nevents=int(nevts),random_seed=rand_seed,param_card='param_card.dat'):
                 mglog.error('Error generating events!')
                 return -1
@@ -1277,12 +1695,27 @@ def SUSY_StrongSM_Generation(runArgs = None, gentype='SS',decaytype='direct',mas
             mglog.error('Wrong combination of arguments! writeGridpack='+str(writeGridpack)+' gridpackDirName='+str(gridpackDirName))
             return -1
     else:
+
+        # Grab the run card and move it into place
+        if 'phot' in gentype:
+            build_run_card(run_card_old='run_card.SMphot.dat',run_card_new='run_card.dat',
+                       xqcut=xqcut,nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact)
+        else:
+           if getnewruncard==True:
+               extras = { 'ktdurham':xqcut , 'lhe_version':'2.0' , 'cut_decays':'F' , 'pdlabel':pdlabel , 'lhaid':lhaid , 'drjj':0.0 }
+               build_run_card(run_card_old=get_default_runcard(thedir),run_card_new='run_card.dat',xqcut=0,
+                                      nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact,extras=extras)
+           else:
+               build_run_card(run_card_old='run_card.SM.dat',run_card_new='run_card.dat',
+                           xqcut=xqcut,nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact)
+
+        # Now do the actual event generation
         if generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,njobs=1,run_name='Test',proc_dir=thedir,grid_pack=writeGridpack,madspin_card_loc=madspin_card):
             mglog.error('Error generating events!')
             return -1
 
     # Move output files into the appropriate place, with the appropriate name
-    the_spot = arrange_output(run_name='Test',proc_dir=thedir,outputDS='madgraph_OTF._00001.events.tar.gz',saveProcDir=keepOutput)
+    the_spot = arrange_output(run_name='Test',proc_dir=thedir,outputDS='madgraph_OTF._00001.events.tar.gz',saveProcDir=keepOutput,runArgs=runArgs)
     if the_spot == '':
         mglog.error('Error arranging output dataset!')
         return -1
@@ -1294,7 +1727,7 @@ def SUSY_StrongSM_Generation(runArgs = None, gentype='SS',decaytype='direct',mas
 def SUSY_SM_Generation(runArgs = None, process='', gentype='SS',decaytype='direct',masses=None,\
                        nevts=None, syst_mod=None,xqcut=None, SLHAonly=False, keepOutput=False, SLHAexactCopy=False,\
                        writeGridpack=False,gridpackDirName=None,MSSMCalc=False,pdlabel="'cteq6l1'",lhaid=10042,\
-                       madspin_card=None,decays={}):
+                       madspin_card=None,decays={},extras=None):
     # Set beam energy
     beamEnergy = 6500.
     if hasattr(runArgs,'ecmEnergy'): beamEnergy = runArgs.ecmEnergy * 0.5
@@ -1316,7 +1749,7 @@ def SUSY_SM_Generation(runArgs = None, process='', gentype='SS',decaytype='direc
 
     xqcut , alpsfact , scalefact = get_variations( gentype , masses , syst_mod , xqcut=xqcut )
 
-    if not SLHAonly:
+    if not SLHAonly and (writeGridpack or gridpackDirName is None):
         # Generate the new process!
         if 'import model' in process:
             mglog.info('Assuming that you have specified the model in your process string already')
@@ -1334,9 +1767,7 @@ def SUSY_SM_Generation(runArgs = None, process='', gentype='SS',decaytype='direc
 output -f
 """
         else:
-            full_proc = """
-import model mssm
-"""+helpful_definitions()+"""
+            full_proc = SUSY_model()+helpful_definitions()+"""
 # Specify process(es) to run
 
 """+process+"""
@@ -1370,20 +1801,35 @@ output -f
                 mglog.info('Could not get param card '+str_param_card)
         else:
             build_param_card(param_card_old='param_card.SM.%s.%s.dat'%(gentype,decaytype),param_card_new='param_card.dat',masses=masses,decays=decays)
+    # Ensure that the param card is compatible with the model that's being used
+    shutil.move('param_card.dat','original_param_card.dat')
+    update_param_card_blocks( process_dir=thedir , from_param_card='original_param_card.dat' , to_param_card='param_card.dat' )
 
     if SLHAonly:
         mglog.info('Not running generation - only setting up SLHA file')
         return [xqcut,'dummy']
 
-    # Grab the run card and move it into place
-    extras = { 'ktdurham':xqcut , 'lhe_version':'2.0' , 'cut_decays':'F' , 'pdlabel':pdlabel , 'lhaid':lhaid , 'drjj':0.0 }
-    build_run_card(run_card_old=get_default_runcard(),run_card_new='run_card.dat',xqcut=0,
-                   nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact,extras=extras)
+    # Set up the extras dictionary
+    if extras is None:
+        extras = { 'ktdurham':xqcut , 'lhe_version':'2.0' , 'cut_decays':'F' , 'pdlabel':pdlabel , 'lhaid':lhaid , 'drjj':0.0 }
+    else:
+        if 'drjj' not in extras: extras['drjj']=0.0
+        if 'lhe_version' not in extras: extras['lhe_version']='2.0'
+        if 'cut_decays' not in extras: extras['cut_decays']='F'
+        if ('pdlabel' in extras and pdlabel is not None) or\
+           ('lhaid' in extras and lhaid is not None) or\
+           ('ktdurham' in extras and xqcut is not None):
+            mglog.error('Tried to define variables in two places.  Please pass pdlabel, lhaid, and ktdurham ONLY through either the extras dictionary OR the function parameters')
+            return -1
+
+        if 'pdlabel' not in extras: extras['pdlabel']=pdlabel
+        if 'lhaid' not in extras: extras['lhaid']=lhaid
+        if 'ktdurham' not in extras: extras['ktdurham']=xqcut
 
     # Generate events!
     if gridpackDirName is not None:
         if writeGridpack==False:
-            mglog.info('Generateing events from gridpack')
+            mglog.info('Generating events from gridpack')
             if generate_from_gridpack(run_name='Test',gridpack_dir=gridpackDirName,nevents=int(nevts),random_seed=rand_seed,param_card='param_card.dat'):
                 mglog.error('Error generating events!')
                 return -1
@@ -1392,12 +1838,15 @@ output -f
             mglog.error('Wrong combination of arguments! writeGridpack='+str(writeGridpack)+' gridpackDirName='+str(gridpackDirName))
             return -1
     else:
+        # Grab the run card and move it into place
+        build_run_card(run_card_old=get_default_runcard(thedir),run_card_new='run_card.dat',xqcut=0,
+                       nevts=nevts,rand_seed=rand_seed,beamEnergy=beamEnergy, scalefact=scalefact, alpsfact=alpsfact,extras=extras)
         if generate(run_card_loc='run_card.dat',param_card_loc='param_card.dat',mode=0,njobs=1,run_name='Test',proc_dir=thedir,grid_pack=writeGridpack,madspin_card_loc=madspin_card):
             mglog.error('Error generating events!')
             return -1
 
     # Move output files into the appropriate place, with the appropriate name
-    the_spot = arrange_output(run_name='Test',proc_dir=thedir,outputDS='madgraph_OTF._00001.events.tar.gz',saveProcDir=keepOutput)
+    the_spot = arrange_output(run_name='Test',proc_dir=thedir,outputDS='madgraph_OTF._00001.events.tar.gz',saveProcDir=keepOutput,runArgs=runArgs)
     if the_spot == '':
         mglog.error('Error arranging output dataset!')
         return -1
@@ -1406,17 +1855,126 @@ output -f
     return [xqcut,the_spot]
 
 
+def update_lhe_file(lhe_file_old,param_card_old=None,lhe_file_new=None,masses={},delete_old_lhe=True):
+    """Build a new LHE file from an old one and an updated param card.
+    The masses of some particles can be changed via the masses dictionary.  No particles that appear in the events
+    may have their masses changed.
+    If the param card is provided, the decay block in the LHE file will be replaced with the one in the param card.
+    By default, the old LHE file is removed.
+    If None is provided as a new LHE file name, the new file will replace the old one."""
+    # If we want to just use a temp file, then put in a little temp holder
+    lhe_file_new_tmp = lhe_file_new if lhe_file_new is not None else lhe_file_old+'.tmp'
+    # Make sure the LHE file is there
+    if not os.access(lhe_file_old,os.R_OK):
+        mglog.error('Could not access old LHE file at '+str(lhe_file_old)+'. Please check the file location.')
+        return -1
+    # Grab the old param card
+    paramcard = subprocess.Popen(['get_files','-data',param_card_old])
+    paramcard.wait()
+    if not os.access(param_card_old,os.R_OK):
+        mglog.info('Could not get param card '+param_card_old)
+    # Don't overwrite an existing output LHE file
+    if os.access(lhe_file_new_tmp,os.R_OK):
+        mglog.error('Old file at '+str(lhe_file_new_tmp)+' already exists. Do not want to clobber it. Please move it first.')
+        return -1
+
+    oldparam = open(param_card_old,'r')
+    newlhe = open(lhe_file_new_tmp,'w')
+    blockName = None
+    decayEdit = False
+    eventRead = False
+    particles_in_events = []
+    # Decay block ends with </slha>
+
+    with open(lhe_file_old,'r') as fileobject:
+        for line in fileobject:
+            if decayEdit and not '</slha>' in line: continue
+            if decayEdit and '</slha>' in line:
+                decayEdit = False
+            if (line.strip().upper().startswith('BLOCK') or line.strip().upper().startswith('DECAY'))\
+                        and len(line.strip().split()) > 1:
+                pos = 0 if line.strip().upper().startswith('DECAY') else 1
+                blockName = line.strip().upper().split()[pos]
+    
+            akey = None
+            if blockName != 'DECAY' and len(line.strip().split()) > 0:
+                akey = line.strip().split()[0]
+            elif blockName == 'DECAY' and len(line.strip().split()) > 1:
+                akey = line.strip().split()[1]
+
+            # Replace the masses with those in the dictionary
+            if akey is not None and blockName == 'MASS' and akey in masses:
+                newlhe.write('   %s    %s  # \n'%(akey,str(masses[akey])))
+                mglog.info('   %s    %s  #'%(akey,str(masses[akey])))
+                decayEdit = False
+                continue
+
+            # Replace the entire decay section of the LHE file with the one from the param card
+            if blockName == 'DECAY' and param_card_old is not None:
+                # We are now reading the decay blocks!  Take them from the param card
+                oldparam = open(param_card_old,'r')
+                newDecays = False
+                for old_line in oldparam.readlines():
+                    newBlockName = None
+                    if old_line.strip().upper().startswith('DECAY') and len(old_line.strip().split()) > 1:
+                        newBlockName = old_line.strip().upper().split()[0]
+                    if newDecays:
+                        newlhe.write(old_line)
+                    elif newBlockName == 'DECAY':
+                        newDecays = True
+                        newlhe.write(old_line)
+                oldparam.close()
+                # Done adding the decays
+                decayEdit = True
+                blockName = None
+                continue
+ 
+            # Keep a record of the particles that are in the events
+            if not eventRead and '<event>' in line: eventRead = True 
+            if eventRead:
+                if len(line.split())==11:
+                    aparticle = line.split()[0]
+                    if not aparticle in particles_in_events: particles_in_events += [aparticle]
+
+            # Otherwise write the line again
+            newlhe.write(line)
+
+    # Check that none of the particles that we were setting the masses of appear in the LHE events
+    for akey in masses:
+        if akey in particles_in_events:
+            mglog.error('Attempted to change mass of a particle that was in an LHE event!  This is not allowed!')
+            return -1
+
+    # Close up and return
+    newlhe.close()
+
+    # Move the new file to the old file location
+    if lhe_file_new is None:
+        os.remove(lhe_file_old)
+        shutil.move(lhe_file_new_tmp,lhe_file_old)
+        lhe_file_new_tmp = lhe_file_old
+    # Delete the old file if requested
+    elif delete_old_lhe:
+        os.remove(lhe_file_old)
+
+    return lhe_file_new_tmp
+
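+# Illustrative sketch only (file names and the mass value are hypothetical):
+# replace the decay blocks of an existing LHE file with those of a param card
+# and shift one spectator mass, overwriting the LHE file in place.
+#
+#   update_lhe_file(lhe_file_old='events.lhe',
+#                   param_card_old='param_card.dat',
+#                   lhe_file_new=None,               # None -> overwrite events.lhe
+#                   masses={'1000022':'100.0'})      # PDG ID (string) -> new mass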
+
+
 def build_param_card(param_card_old=None,param_card_new='param_card.dat',masses={},decays={},extras={},params={}):
     """Build a new param_card.dat from an existing one.
     Params should be a dictionary of dictionaries. The first key is the block name, and the second in the param name.
     Eventually params will replace the other arguments, but we need to keep them for backward compatibility for now."""
     # Grab the old param card and move it into place
-    paramcard = subprocess.Popen(['get_files','-data',param_card_old])
-    paramcard.wait()
+    if os.access(param_card_old,os.R_OK):
+        mglog.info('Found old param card at '+param_card_old)
+    else:
+        paramcard = subprocess.Popen(['get_files','-data',param_card_old])
+        paramcard.wait()
     if not os.access(param_card_old,os.R_OK):
         mglog.info('Could not get param card '+param_card_old)
     if os.access(param_card_new,os.R_OK):
-        mglog.error('Old param card at'+str(param_card_new)+' in the current directory. Dont want to clobber it. Please move it first.')
+        mglog.error('Old param card at '+str(param_card_new)+' in the current directory. Dont want to clobber it. Please move it first.')
         return -1
 
     #ensure all blocknames and paramnames are upper case
@@ -1540,21 +2098,54 @@ def build_param_card(param_card_old=None,param_card_new='param_card.dat',masses=
 
 def build_run_card(run_card_old='run_card.SM.dat',run_card_new='run_card.dat',
                    xqcut=0.,nevts=60000,rand_seed=1234,beamEnergy=4000.,
-                   scalefact=1.,alpsfact=1.,extras={}):
+                   scalefact=-1.,alpsfact=-1.,extras={}):
     """Build a new run_card.dat from an existing one.
     Extras is a dictionary of keys (no spaces needed) and
     values to replace as well.
     """
+    # Handle scalefact setting -- old setup makes this a little clunky
+    if 'scalefact' in extras:
+        if scalefact>0 and scalefact!=extras['scalefact']:
+            mglog.error('Scalefact set in both extras (%1.2f) and arguments to build_run_card (%1.2f)'%(extras['scalefact'],scalefact))
+            mglog.error('and not equal! Do not know what to do, so giving up')
+            raise RuntimeError('Inconsistent setting of scalefact')
+        elif scalefact<0:
+            scalefact=extras['scalefact']
+            del extras['scalefact']
+    # Otherwise set the default value
+    elif scalefact<=0:
+        scalefact=1.
+
+    # Handle alpsfact setting -- old setup makes this a little clunky
+    if 'alpsfact' in extras:
+        if alpsfact>0 and alpsfact!=extras['alpsfact']:
+            mglog.error('Alpsfact set in both extras (%1.2f) and arguments to build_run_card (%1.2f)'%(extras['alpsfact'],alpsfact))
+            mglog.error('and not equal! Do not know what to do, so giving up')
+            raise RuntimeError('Inconsistent setting of alpsfact')
+        elif alpsfact<0:
+            alpsfact=extras['alpsfact']
+            del extras['alpsfact']
+    # Otherwise set the default value
+    elif alpsfact<=0: 
+        alpsfact=1.
+
     # Grab the old run card and move it into place
-    runcard = subprocess.Popen(['get_files','-data',run_card_old])
-    runcard.wait()
+    # Get the run card from the installation
+    if os.access(run_card_old,os.R_OK):
+        mglog.info('Found old card at '+run_card_old)
+    else:
+        runcard = subprocess.Popen(['get_files','-data',run_card_old])
+        runcard.wait()
+
     if not os.access(run_card_old,os.R_OK):
         mglog.info('Could not get run card '+run_card_old)
     if os.access(run_card_new,os.R_OK):
         mglog.error('Old run card in the current directory. Dont want to clobber it. Please move it first.')
         return -1
+
     oldcard = open(run_card_old,'r')
     newcard = open(run_card_new,'w')
+    used_options = []
     for line in oldcard:
         if '= xqcut ' in line:
             newcard.write('%f   = xqcut   ! minimum kt jet measure between partons \n'%(xqcut))
@@ -1572,30 +2163,175 @@ def build_run_card(run_card_old='run_card.SM.dat',run_card_new='run_card.dat',
             newcard.write(' %3.2f     = alpsfact         ! scale factor for QCD emission vx \n'%(alpsfact))
         else:
             for ak in extras:
+                excludeList=['xqcut','nevents','iseed','ebeam1','ebeam2','scalefact','alpsfact']
+                if ak in excludeList:
+                    mglog.error('You are trying to set "%s" via the extras parameter of build_run_card; it must be set through the build_run_card arguments instead.'%ak)
+                    raise RuntimeError('You are trying to set "%s" via the extras parameter of build_run_card; it must be set through the build_run_card arguments instead.'%ak)
+
                 #if '='+ak.strip() in line.replace(' ',''):
                 if '='+ak.strip() in line.replace(' ','') and (len(line.strip().split(ak.strip())[1])==0 or line.split(ak.strip())[1][0]==" "):
                     newcard.write( ' '+str(extras[ak])+'    ='+'='.join(line.split('=')[1:]) )
+                    used_options += [ ak ]
                     break
             else: # Fell through the loop
                 newcard.write(line)
+    # Clean up options that weren't used
+    for ak in extras:
+        if ak in used_options: continue
+        mglog.warning('Option '+ak+' was not in the default run_card. Adding it by hand with value '+str(extras[ak]) )
+        newcard.write( ' '+str(extras[ak])+'   = '+ak+'\n')
+    # Close up
     oldcard.close()
     newcard.close()
     return run_card_new
 
-def print_cards():
-    if os.access('proc_card_mg5.dat',os.R_OK):
-        mglog.info("proc_card_mg5.dat:")
-        procCard = subprocess.Popen(['cat','proc_card_mg5.dat'])
+# New helper function - this is a bit of duplication with build_run_card but modify_run_card
+# is a bit more lightweight. build_run_card could be rewritten to call modify_run_card
+# with the explicit arguments included in the dict.
+def modify_run_card(run_card='Cards/run_card.dat',
+                     run_card_backup='Cards/run_card_backup.dat',
+                     settings={}, delete_backup = False):
+    mglog.info('Modifying run card located at '+run_card+'.')
+    if delete_backup:
+        mglog.info('Deleting original run card.')
+    else:
+        mglog.info('Keeping backup of original run card at '+run_card_backup+'.')
+    mglog.info('Settings to change: '+str(settings))
+    if os.path.isfile(run_card_backup): os.unlink(run_card_backup) # delete old backup
+    os.rename(run_card, run_card_backup) # change name of original card
+
+    oldCard = open(run_card_backup, 'r')
+    newCard = open(run_card, 'w')
+    used_settings = []
+    for line in iter(oldCard):
+        if not line.strip().startswith('#'): # ignore lines that are commented out
+            command = line.split('!', 1)[0]
+            comment = line.split('!', 1)[1] if '!' in line else ''
+            if '=' in command:
+                setting = command.split('=')[-1] #.strip()
+                stripped_setting = setting.strip()
+                oldValue = command.split('=')[0] #.strip()
+                if stripped_setting in settings:
+                    line = oldValue.replace(oldValue.strip(), str(settings[stripped_setting]))+'='+setting
+                    if comment != '': line += '  !' + comment
+                    mglog.info('Setting '+stripped_setting+' = '+str(settings[stripped_setting])+'.')
+                    used_settings += [ stripped_setting ]
+        newCard.write(line)
+
+    # Clean up unused options
+    for asetting in settings:
+        if asetting in used_settings: continue
+        mglog.warning('Option '+asetting+' was not in the default run_card. Adding it by hand with value '+str(settings[asetting]) )
+        newCard.write( ' '+str(settings[asetting])+'   = '+str(asetting)+'\n')
+    # close files
+    oldCard.close()
+    newCard.close()
+    mglog.info('Finished modification of run card.')
+    if delete_backup: os.unlink(run_card_backup)
+
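+# Illustrative sketch only (path and values are hypothetical): change the event
+# count and random seed of an existing run card in place, keeping a backup.
+#
+#   modify_run_card(run_card='PROC_mssm_0/Cards/run_card.dat',
+#                   run_card_backup='PROC_mssm_0/Cards/run_card_backup.dat',
+#                   settings={'nevents':10000,'iseed':1234})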
+
+# Update a param card default template with blocks from a different param card
+def update_param_card_blocks( process_dir , from_param_card , to_param_card ):
+    # Make sure the files exist where we expect them to
+    if not os.access(process_dir+'/Cards/param_card.dat',os.R_OK):
+        mglog.error('update_param_card_blocks: Could not find param card in '+process_dir)
+        raise RuntimeError('update_param_card_blocks: No param card')
+    if not os.access(from_param_card,os.R_OK):
+        mglog.error('update_param_card_blocks: Could not find param card '+from_param_card)
+        raise RuntimeError('update_param_card_blocks: No param card')
+    # Open up the old param cards and read in their lines
+    proc_param_card_f = open(process_dir+'/Cards/param_card.dat','r')
+    proc_param_card_l = proc_param_card_f.readlines()
+    from_param_card_f = open(from_param_card,'r')
+    from_param_card_l = from_param_card_f.readlines()
+    # What blocks are in the old card?
+    from_blocks = [x.split()[1] for x in from_param_card_l if 'block ' in x.lower()]
+    # What decays are in the old card?
+    from_decays = [x.split()[1] for x in from_param_card_l if 'decay ' in x.lower()]
+    # Build a dictionary of blocks and decays
+    from_block_dict = {}
+    from_decay_dict = {}
+    line_number=0
+    sys.stdout.flush()
+
+    while line_number<len(from_param_card_l):
+        if from_param_card_l[line_number].lower().startswith('block '):
+            my_block = from_param_card_l[line_number].split()[1].lower()
+            holder=from_param_card_l[line_number]
+            line_number += 1
+            while line_number<len(from_param_card_l) and not from_param_card_l[line_number].lower().startswith('block ') and not from_param_card_l[line_number].lower().startswith('decay '):
+                holder += from_param_card_l[line_number]
+                line_number += 1
+            from_block_dict[my_block] = holder
+        elif from_param_card_l[line_number].lower().startswith('decay '):
+            my_decay = from_param_card_l[line_number].split()[1]
+            holder=from_param_card_l[line_number]
+            line_number += 1
+            while line_number<len(from_param_card_l) and not from_param_card_l[line_number].lower().startswith('block ') and not from_param_card_l[line_number].lower().startswith('decay '):
+                holder += from_param_card_l[line_number]
+                line_number += 1
+            from_decay_dict[my_decay] = holder
+        else:
+            line_number += 1
+    # Now write our new param card
+    to_param_card_f = open(to_param_card,'w')
+    line_number=0
+    sys.stdout.flush()
+
+    while line_number<len(proc_param_card_l):
+        if proc_param_card_l[line_number].lower().startswith('block ') and proc_param_card_l[line_number].split()[1].lower() in from_block_dict:
+            my_block = proc_param_card_l[line_number].split()[1].lower()
+            # Use the above block.  Watch for lines that we didn't have that should now be there!
+            to_param_card_f.write(from_block_dict[my_block])
+            # Get the list of parameters in this block that are being set
+            block_pars = [l.split()[0] for l in from_block_dict[my_block].split('\n') if len(l.split('#')[0].strip())>0]
+            # Advance to the next line of the default card
+            line_number += 1
+            # Advance to the next point in the file that we care about
+            while line_number<len(proc_param_card_l) and not proc_param_card_l[line_number].lower().startswith('block ') and not proc_param_card_l[line_number].lower().startswith('decay '):
+                if len(proc_param_card_l[line_number].lower().split('#')[0].strip())>0 and my_block not in ['alpha']:
+                    my_par = proc_param_card_l[line_number].lower().split('#')[0].strip().split()[0]
+                    if not my_par in block_pars:
+                        to_param_card_f.write(proc_param_card_l[line_number])
+                line_number += 1
+        elif proc_param_card_l[line_number].lower().startswith('decay ') and proc_param_card_l[line_number].split()[1] in from_decay_dict:
+            # Use the above decay - safe to take the whole thing
+            to_param_card_f.write(from_decay_dict[proc_param_card_l[line_number].split()[1]])
+            line_number += 1
+            # Advance to the next point in the file that we care about
+            while line_number<len(proc_param_card_l) and not proc_param_card_l[line_number].lower().startswith('block ') and not proc_param_card_l[line_number].lower().startswith('decay '):
+                line_number += 1
+        else:
+            to_param_card_f.write(proc_param_card_l[line_number])
+            line_number += 1
+    # Close out
+    to_param_card_f.close()
+    from_param_card_f.close()
+    proc_param_card_f.close()
+
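+# The SUSY generation helpers above use this function in the following pattern:
+# the user-provided card is moved aside and its blocks are merged into the
+# model's default card inside the process directory (thedir).
+#
+#   shutil.move('param_card.dat','original_param_card.dat')
+#   update_param_card_blocks( process_dir=thedir ,
+#                             from_param_card='original_param_card.dat' ,
+#                             to_param_card='param_card.dat' )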
+
+def print_cards(proc_card='proc_card_mg5.dat',run_card='run_card.dat',param_card='param_card.dat'):
+    if os.access(proc_card,os.R_OK):
+        mglog.info("proc_card:")
+        procCard = subprocess.Popen(['cat',proc_card])
         procCard.wait()
     else:
-        mglog.warning('No proc_card_mg5.dat found')
+        mglog.warning('No proc_card: '+proc_card+' found')
 
-    if os.access('run_card.dat',os.R_OK):
-        mglog.info("run_card.dat:")
-        runCard = subprocess.Popen(['cat','run_card.dat'])
+    if os.access(run_card,os.R_OK):
+        mglog.info("run_card:")
+        runCard = subprocess.Popen(['cat',run_card])
         runCard.wait()
     else:
-        mglog.warning('No run_card.dat found')
+        mglog.warning('No run_card: '+run_card+' found')
+
+    if os.access(param_card,os.R_OK):
+        mglog.info("param_card:")
+        paramCard = subprocess.Popen(['cat',param_card])
+        paramCard.wait()
+    else:
+        mglog.warning('No param_card: '+param_card+' found')
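+
+# Illustrative only (paths are hypothetical): the three cards can now be dumped
+# explicitly from wherever they live, e.g.
+#
+#   print_cards(proc_card='proc_card_mg5.dat',
+#               run_card='PROC_mssm_0/Cards/run_card.dat',
+#               param_card='param_card.dat')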
 
 
 def is_gen_from_gridpack(grid_pack=None):
@@ -1638,3 +2374,60 @@ def is_NLO_run(proc_dir='PROC_mssm_0'):
         RuntimeError('escaping')
 
     return isNLO
+
+
+
+def run_card_consistency_check(isNLO=False,path='.'):
+    cardpath=path+'/Cards/run_card.dat'
+    card = open(cardpath, 'r')
+    mydict={}
+    for line in iter(card):
+        if not line.strip().startswith('#'): # ignore lines that are commented out
+            command = line.split('!', 1)[0]
+            comment = line.split('!', 1)[1] if '!' in line else ''
+            if '=' in command:
+                setting = command.split('=')[-1].strip()
+                value = command.split('=')[0].strip()
+                mydict[setting]=value
+
+
+
+    for k,v in mydict.iteritems():
+        mglog.info( '"%s" = %s'%(k,v) )
+
+    if not isNLO: 
+        #Check CKKW-L setting
+        if float(mydict['ktdurham']) > 0 and int(mydict['ickkw']) != 0:
+            log='Bad combination of settings for CKKW-L merging! ktdurham=%s and ickkw=%s.'%(mydict['ktdurham'],mydict['ickkw'])
+            mglog.error(log)
+            raise RuntimeError(log)
+
+        version = getMadGraphVersion() # avoiding code duplication
+
+        # Only needed for 2.5.0 and later
+        if int(version.split('.')[0])>2 or (int(version.split('.')[0])==2 and int(version.split('.')[1])>=5):
+            #event_norm must be "sum" for use_syst to work
+            if mydict['use_syst'].replace('.','').lower() in ['t','true']:
+                if 'event_norm' not in mydict or mydict['event_norm']!="sum":
+                    modify_run_card(cardpath,cardpath.replace('.dat','_backup.dat'),{'event_norm':'sum'})
+
+    # close files
+    card.close()
+
+    mglog.info('Finished checking run card - All OK!')
+    return
+
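+# Illustrative sketch only (directory name is hypothetical): the check runs on
+# the Cards/run_card.dat inside a process directory, e.g.
+#
+#   run_card_consistency_check(isNLO=is_NLO_run(proc_dir='PROC_mssm_0'),
+#                              path='PROC_mssm_0')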
+
+def SUSY_model():
+    if not 'MADPATH' in os.environ:
+        mglog.warning('No MADPATH in your environment - not sure what the right SUSY model is')
+        return 'import model mssm\n'
+    if os.access(os.environ['MADPATH']+'/models/mssm',os.R_OK):
+        mglog.info('Using mssm model')
+        return 'import model mssm\n'
+    if os.access(os.environ['MADPATH']+'/models/MSSM_SLHA2',os.R_OK):
+        mglog.info('Using MSSM_SLHA2 model')
+        return 'import model MSSM_SLHA2\n'
+    mglog.warning('No idea what to do - models not found!')
+    return 'import model mssm\n'
+
diff --git a/Generators/MadGraphControl/share/fastjet-config b/Generators/MadGraphControl/share/fastjet-config
new file mode 100755
index 00000000000..36abfefc454
--- /dev/null
+++ b/Generators/MadGraphControl/share/fastjet-config
@@ -0,0 +1,357 @@
+#!/bin/bash
+#
+# fastjet-config.  Generated from fastjet-config.in by configure.
+#
+# This is the base script for retrieving all information
+# regarding compiling and linking programs using FastJet.
+# Run ./fastjet-config without arguments for usage details
+#########################################################
+
+# the list of plugins is dynamic so we need the following
+# line to deal with the static lib link
+
+#installationdir=/afs/.cern.ch/sw/lcg/experimental/fastjet/3.0.3/x86_64-slc6-gcc48-opt
+#prefix=/afs/.cern.ch/sw/lcg/experimental/fastjet/3.0.3/x86_64-slc6-gcc48-opt
+installationdir=$FASTJETPATH
+prefix=$FASTJETPATH
+exec_prefix=${prefix}
+
+# print a usage message and exit
+# exit code passed as argument:
+#   0 if it is a normal call
+#   1 if it is due to a misusage.
+usage()
+{
+if [ "xyes" == "xyes" ] ; then
+  cat 1>&2 <<EOF
+
+This is FastJet 3.0.3 configuration tool.
+Usage:
+  fastjet-config [--help] [--version] [--prefix] [--cxxflags] [--libs]
+      [--shared[=yes|no]] [--plugins[=yes|no]] [--rpath[=yes|no]] [--runpath]
+      [--list-plugins] [--config]
+
+The arguments can be either queries (one must be present):
+
+  --help       prints this message and exits
+  --version    prints FastJet version and exits
+  --prefix     gets the FastJet installation directory
+  --cxxflags   returns the compilation flags to be used with C++ programs
+  --libs       returns the flags to pass to the linker
+
+or flags (optional):
+
+  --shared     controls whether you want to use the static or shared lib
+               (default=yes)
+
+  --plugins    controls whether you also want to link the FastJet plugins
+               (default=no)
+
+  --rpath      adds a -rpath argument at link-time that points to the
+               directory where FastJet libraries are installed. This
+               avoids having to set LD_LIBRARY_PATH at runtime when
+               using shared libs in a non standard location (but may
+               cause the program to inadvertently pick up other shared
+               libraries that happen to be in the FastJet installation
+               directory). 
+               (default=yes)
+
+  --runpath    at link-time, adds info about the directory where FastJet
+               libraries are installed to the runpath (ELF systems
+               only). This avoids having to set LD_LIBRARY_PATH at
+               runtime when using shared libs in a non standard
+               location but gives priority to an existing LD_LIBRARY_PATH.
+
+  --list-plugins  list all the available plugins
+
+  --config     shows a summary of how FastJet was configured
+
+EOF
+else
+  cat 1>&2 <<EOF
+
+This is FastJet 3.0.3 configuration tool.
+Usage:
+  fastjet-config [--help] [--version] [--prefix] [--cxxflags] [--libs]
+      [--plugins[=yes|no]] [--list-plugins] [--config]
+
+The arguments can be either queries (one must be present):
+
+  --help       prints this message and exits
+  --version    prints FastJet version and exits
+  --prefix     get the FastJet installation directory
+  --cxxflags   returns the compilation flags to be used with C++ programs
+  --libs       returns the flags to pass to the linker
+
+or flags (optional):
+
+  --plugins    controls whether you also want to link the FastJet plugins
+               (default=no)
+
+  --list-plugins  list all the available plugins
+
+  --config     shows a summary of how FastJet was configured
+
+EOF
+fi
+  exit $1
+}
+
+
+# first deal with the case where no argument is passed
+[ $# -gt 0 ] || usage 1
+
+
+# tools to parse options
+########################
+
+# option_name _string
+# Returns NAME if _string is of the form: --NAME[=...]
+option_name()
+{
+    echo "$1" | sed 's/^--//;s/=.*//' | tr '-' '_'
+}
+
+# option_value _string
+# Returns FOO if _string is of the form: --option=FOO
+option_value()
+{
+    echo "$1" | sed 's/^[^=]*=//'
+}
+
+# is_in_list _arg _arg_list
+# return true if the argument _arg is in the list _arg_list
+# and false otherwise
+is_in_list()
+{
+    arg_match="$1"
+    shift
+    for arg_match_i do
+        [ "x$arg_match_i" != "x$arg_match" ] || return 0
+    done
+    false
+}
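+
+# Illustrative examples (not executed) of the helpers above:
+#   option_name  "--plugins=yes"   ->  plugins
+#   option_value "--plugins=yes"   ->  yes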
+
+
+# useful utilities
+##################
+
+# write error messages and exit
+write_error()
+{
+    echo "Error: $1"
+    echo "Use fastjet-config --help for more information"
+    exit 1
+}
+
+
+# browse the argument list
+# This is done the following way:
+#  - at first pass, we check if the --help argument is set. If yes, 
+#    print usage and exit.
+#    We also make sure that there is no interference between the
+#    arguments (e.g. --cflags --libs is wrong)
+#  - we then check for extra arguments and return the requested info
+#####################################################################
+# useful lists of arguments
+arg_query_list="version prefix list_plugins config help" # cxxflags libs
+arg_yesno_list="shared plugins rpath"
+
+# default behaviour for parameters
+plugins_included="no"
+use_shared="yes"
+add_rpath="yes"
+add_runpath="no"
+
+# no query found initially
+found_query="no"
+found_flags="no"
+found_libs="no"
+
+# browse arguments
+for arg do
+    case "$arg" in
+	--help|-h)
+	    usage 0
+	    ;;
+	--*=*)
+	    arg_name=`option_name $arg`
+	    arg_value=`option_value $arg`
+	    # check the validity of the parameter value
+	    if ! is_in_list $arg_value yes no ; then
+		write_error "$arg: parameter value must be yes or no"
+	    fi
+            # set the parameter value
+	    case $arg_name in
+		plugins)
+		    plugins_included="$arg_value"
+		    ;;
+		shared)
+		    use_shared="$arg_value"
+		    if [ "x$arg_value" == "xno" ] ; then
+			add_rpath="no"
+		    fi
+		    ;;
+		rpath)
+		    if test "xyes" = "xyes" ; then
+			add_rpath="$arg_value"
+		    else
+			write_error "--rpath is only available together with shared libraries"
+		    fi
+		    ;;
+		*)
+		    write_error "$arg: unrecognised argument"
+		    ;;
+	    esac
+	    ;;
+	--cxxflags)
+	    # we've found a query, make sure we don't already have one
+	    # except if it is --libs
+	    if [[ "x$found_query" != "xno" && "x$found_query" != "xlibs" ]]; then
+		write_error "--cxxflags cannot be used with --$found_query"
+	    fi
+
+	    # update found_query 
+	    # note: for the "big case" later, don't overwrite it if libs are already asked for
+	    found_flags="yes"
+	    if [ "x$found_query" != "xlibs" ]; then
+		found_query="cxxflags"
+	    fi	    
+	    ;;
+	--libs)
+	    # we've found a query, make sure we don't already have one
+	    # except if it is --cxxflags
+	    if [[ "x$found_query" != "xno" && "x$found_query" != "xcxxflags" ]]; then
+		write_error "--libs cannot be used with --$found_query"
+	    fi
+
+	    # update found_query 
+	    found_libs="yes"
+	    found_query="libs"
+	    ;;
+	--*)
+	    arg_name=`option_name $arg`
+	    if is_in_list $arg_name $arg_query_list ; then
+		# we've found a query, make sure we don't already have one
+		if [ "x$found_flags" != "xno" ] ; then
+		    write_error "--$arg_name cannot be used with --cxxflags"
+		fi
+		if [ "x$found_libs" != "xno" ] ; then
+		    write_error "--$arg_name cannot be used with --libs"
+		fi
+		if [ "x$found_query" != "xno" ] ; then
+		    write_error "You can only make one query at a time"
+		fi
+		found_query="$arg_name"
+	    else
+		if is_in_list $arg_name $arg_yesno_list ; then
+		    # we've found a parameter, set it to "yes"
+		    case $arg_name in
+			plugins)
+			    plugins_included="yes"
+			    ;;
+			shared)
+			    use_shared="yes"
+			    ;;
+			rpath)
+			    if test "xyes" = "xyes" ; then
+				add_rpath="yes"
+			    else
+				write_error "--rpath is only available together with shared libraries"
+			    fi
+			    ;;
+			*)
+			    write_error "$arg: unrecognised argument"
+			    ;;
+		    esac
+		else
+		    case $arg_name in
+			runpath)
+			    if test "xyes" = "xyes" ; then
+				if test "xyes" = "xyes" ; then
+				    add_runpath="yes"
+				    add_rpath="no"
+				else
+				    write_error "--runpath is not available on this platform"
+				fi
+			    else
+				write_error "--runpath is only available together with shared libraries"
+			    fi
+			    ;;
+			*)
+			    write_error "$arg: unrecognised argument"
+			    ;;
+		    esac
+		fi
+	    fi
+	    ;;
+	*)
+	    write_error "$arg is not a valid argument"
+	    ;;
+    esac
+done
+
+
+# now deal with the output
+case $found_query in
+    no)
+	write_error "you must at least specify one query in '$arg_query_list'"
+	;;
+    version)
+	echo 3.0.3
+	;;
+    prefix)
+	#echo /afs/.cern.ch/sw/lcg/experimental/fastjet/3.0.3/x86_64-slc6-gcc48-opt
+	echo $FASTJETPATH
+	;;
+    cxxflags)
+	echo -I${prefix}/include 
+	;;
+    libs)
+	libs_string="  -lm "
+#       since we use the system default (use shared lib if available, static 
+#       otherwise), we only need to worry if shared available and static
+#       explicitly asked
+	if test "x$use_shared" = "xno" && test "xyes" = "xyes" ; then
+	    libs_string=$libs_string" ${exec_prefix}/lib/libfastjettools.a ${exec_prefix}/lib/libfastjet.a"
+	else
+	    libs_string=$libs_string" -L${exec_prefix}/lib -lfastjettools -lfastjet"
+	fi
+	if test "x$add_rpath" = "xyes" ; then
+	    libs_string="-Wl,-rpath,${exec_prefix}/lib "$libs_string
+        else
+            # GPS 2009-05-29: remove any left over -Wl, e.g. related to CGAL.
+            libs_string=`echo $libs_string | sed 's/-Wl,-rpath[^ ]*//'`
+	fi
+	if test "x$add_runpath" = "xyes" ; then
+	    libs_string="-Wl,--enable-new-dtags -Wl,-rpath,${exec_prefix}/lib "$libs_string
+	fi
+	if test "x$plugins_included" = "xyes" ; then
+	    if test "x$use_shared" = "xno" && test "xyes" = "xyes" ; then
+		libs_string=$libs_string"  ${installationdir}/lib/libfastjetplugins.a  ${installationdir}/lib/libsiscone.a ${installationdir}/lib/libsiscone_spherical.a   -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0 -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0/../../../../lib64 -L/lib/../lib64 -L/usr/lib/../lib64 -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0/../../.. -lgfortran -lm -lquadmath"
+	    else
+		libs_string=$libs_string"  -lfastjetplugins  -lsiscone_spherical -lsiscone   -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0 -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0/../../../../lib64 -L/lib/../lib64 -L/usr/lib/../lib64 -L/afs/cern.ch/sw/lcg/contrib/gcc/4.8.0/x86_64-slc6-gcc48-opt/bin/../lib/gcc/x86_64-unknown-linux-gnu/4.8.0/../../.. -lgfortran -lm -lquadmath"
+	    fi
+
+	fi
+	if [ "x$found_flags" = "xyes" ] ; then
+	    echo -I${prefix}/include  $libs_string
+	else
+	    echo $libs_string
+	fi
+	;;
+    list_plugins)
+	echo "Available plugins: "
+	echo -n "  "
+	echo  SISCone CDFCones PxCone D0RunIICone NestedDefs TrackJet ATLASCone CMSIterativeCone EECambridge Jade D0RunICone GridJet | sed -e "s/ /\\`printf '\n\r  '`/g"
+	;;
+    config)
+	echo "This is FastJet version 3.0.3"
+	echo ""
+	echo "Configuration invocation was"
+	echo ""
+	echo "  /build/hegner/afs_build/lcgcmake-build/externals/fastjet/src/fastjet/3.0.3/configure  '--prefix=/afs/.cern.ch/sw/lcg/experimental/fastjet/3.0.3/x86_64-slc6-gcc48-opt' '--enable-shared' '--enable-allplugins'"
+	echo ""
+	printf "Configuration summary:\n----------------------\n  Installation directory     /afs/.cern.ch/sw/lcg/experimental/fastjet/3.0.3/x86_64-slc6-gcc48-opt\n  Shared libraries           yes\n  Static libraries           yes\n  Debug flag                 yes\n  CGAL support               no\n  Plugins: EECambridge       yes\n           Jade              yes\n           NestedDefs        yes\n           SISCone           yes\n           CDFCones          yes\n           D0RunICone        yes\n           D0RunIICone       yes\n           ATLASCone         yes\n           CMSIterativeCone  yes\n           PxCone            yes\n           TrackJet          yes\n           GridJet           yes\n  Monolithic plugins lib     yes\n"
+	;;
+esac
diff --git a/Generators/MadGraphControl/share/lhapdf-config b/Generators/MadGraphControl/share/lhapdf-config
index 47e5052f886..e0cf06375f1 100755
--- a/Generators/MadGraphControl/share/lhapdf-config
+++ b/Generators/MadGraphControl/share/lhapdf-config
@@ -1,91 +1,60 @@
 #! /usr/bin/env bash
+## -*- sh -*-
+## lhapdf-config.  Generated from lhapdf-config.in by configure.
+
+## These variables need to exist
+## Note no use of $DESTDIR... we ignore it so that destdir can be used
+## for temp installs later copied to /
+#prefix=/afs/.cern.ch/sw/lcg/external/MCGenerators_lcgcmt67c/lhapdf/6.1.5/x86_64-slc6-gcc47-opt
+prefix=$(echo $LHAPATH | awk '{split($1,a,"/share");print a[1];}')
+exec_prefix=${prefix}
+datarootdir=${prefix}/share
 
-# These variables need to exist
-
-#prefix=/afs/cern.ch/sw/lcg/external/MCGenerators/lhapdf/5.8.9/i686-slc5-gcc43-opt
-lhabase=$(echo $LHAPATH | awk '{split($1,a,"/share");print a[1];}')
-prefix=$lhabase
-
-athversion=$(echo $AtlasVersion | awk '{split($1,a,"."); print a[1]}')
-if [[ "$athversion" != "17" && "$athversion" != "19" ]]; then
-    #Hack for nightlies
-    athversion=$(echo $(basename $(echo $AtlasBaseDir)) | awk '{split($1,a,"."); print a[1]}')
-fi
-
-
-if [[ "$athversion" == "17" ]]; then
-    prefix=$prefix/$CMTCONFIG
-
-    version=$($prefix/bin/lhapdf-config --version)
-    
-    datadir=$lhabase/share/lhapdf
-    if [ ! -d $datadir ]; then
-	datadir=$lhabase/share/LHAPDF
-    fi
-
-else
-    
-    version=$(echo "$prefix" | awk '{split($1,a,"lhapdf/"); split(a[2],b,"/");print b[1];}')
-    datadir=$(echo $LHAPATH | awk '{split($1,a,":");print a[2];}')
-    #datadir=$(echo $LHAPDF_DATA_PATH)
-fi
-
-versionIs6=FALSE
-test=$(echo $version | grep 6. | wc -w)
-if [ $test -eq 1 ]; then
-    versionIs6=TRUE
-fi
+version=$(echo "$prefix" | awk '{split($1,a,"lhapdf/"); split(a[2],b,"/");print b[1];}')
 
+boost=$(find $(ls -d /cvmfs/atlas.cern.ch/repo/sw/software/x86_64-slc6-gcc47-opt/19.2.4/sw/lcg/external/Boost/$BoostVers*/) -name "boost-*")
 
-exec_prefix=${prefix}
-datarootdir=${prefix}/share
+datapath=$(echo $LHAPDF_DATA_PATH | awk '{n=split($1,a,":");if(n>1){print a[2];}else{print a[1];}}')
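+# Illustrative only (the path below is hypothetical): with
+#   LHAPATH=/some/install/lhapdf/6.1.5/x86_64-slc6/share/LHAPDF
+# the awk expressions above give
+#   prefix=/some/install/lhapdf/6.1.5/x86_64-slc6   and   version=6.1.5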
 
 if [[ $# -eq 0 || -n $( echo $* | egrep -- "--help|-h" ) ]]; then
     echo "lhapdf-config: configuration tool for the LHAPDF"
     echo "               parton density function evolution library"
     echo "               http://projects.hepforge.org/lhapdf/"
     echo
-    echo "Usage: lhapdf-config [[--help|-h] | [--prefix] | [--pdfsets-path]]"
+    echo "Usage: lhapdf-config [options]"
     echo "Options:"
-    echo "  --help | -h    : show this help message"
-    echo "  --prefix       : show the installation prefix (cf. autoconf)"
-    echo "  --incdir       : show the path to the LHAPDF header directory (for C++ interface)"
-    echo "  --libdir       : show the path to the LHAPDF library directory"
-    echo "  --datadir      : show the path to the LHAPDF installed data directory"
-    echo "  --pdfsets-path : show the path to the directory containing the PDF set data files"
+    echo "  --help | -h   : show this help message"
+    echo "  --prefix      : show the installation prefix (cf. autoconf)"
+    echo "  --incdir      : show the path to the LHAPDF C++ header directory"
+    echo "  --libdir      : show the path to the LHAPDF library directory"
+    echo "  --datadir     : show the path to the LHAPDF data directory"
     echo
-    echo "  --cppflags     : get compiler flags for use with the C preprocessor stage of C++ compilation"
-    echo "  --ldflags      : get compiler flags for use with the linker stage of any compilation"
+    echo "  --cflags      : get compiler flags (aka --cppflags|--cxxflags)"
+    echo "  --libs        : get linker flags (aka --ldflags)"
     echo
-    echo "  --version      : returns Rivet release version number"
+    echo "  --version     : return LHAPDF release version number"
 fi
 
 OUT=""
 
-
-
 tmp=$( echo "$*" | egrep -- '--\<prefix\>')
-test -n "$tmp" && OUT="$OUT ${prefix}"
+test -n "$tmp" && OUT="$OUT $prefix"
 
 tmp=$( echo "$*" | egrep -- '--\<incdir\>')
 test -n "$tmp" && OUT="$OUT ${prefix}/include"
 
-tmp=$( echo "$*" | egrep -- '--\<cppflags\>')
-test -n "$tmp" && OUT="$OUT -I${prefix}/include"
+tmp=$( echo "$*" | egrep -- '--\<cflags|cppflags|cxxflags\>')
+test -n "$tmp" && OUT="$OUT -I${prefix}/include -I$boost"
 
 tmp=$( echo "$*" | egrep -- '--\<libdir\>')
 test -n "$tmp" && OUT="$OUT ${exec_prefix}/lib"
 
-tmp=$( echo "$*" | egrep -- '--\<ldflags\>')
+tmp=$( echo "$*" | egrep -- '--\<libs|ldflags\>')
 test -n "$tmp" && OUT="$OUT -L${exec_prefix}/lib -lLHAPDF"
 
 tmp=$( echo "$*" | egrep -- '--\<datadir\>|--\<datarootdir\>')
-test -n "$tmp" && OUT="$OUT $datadir"
-
-tmp=$( echo "$*" | egrep -- '--\<pdfsets-path\>')
-test -n "$tmp" && OUT="$OUT $LHAPATH"
+test -n "$tmp" && OUT="$OUT $datapath"
 
-## Version
 tmp=$( echo "$*" | egrep -- '--\<version\>')
 test -n "$tmp" && OUT="$OUT $version"
 
diff --git a/Generators/MadGraphControl/share/makeSLHAFilesForSM.py b/Generators/MadGraphControl/share/makeSLHAFilesForSM.py
index adcbaa6da76..c334b17a8d8 100644
--- a/Generators/MadGraphControl/share/makeSLHAFilesForSM.py
+++ b/Generators/MadGraphControl/share/makeSLHAFilesForSM.py
@@ -22,14 +22,14 @@ evgenLog = Logging.logging.getLogger('SLHAGetter')
 # only really need this once
 from AthenaCommon.Include import IncludeError, include
 include.setShowIncludes(False)
-include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC12JobOptions/latest/susycontrol/MadGraphControl_SimplifiedModelPreInclude.py')
-include.block('MC12JobOptions/MadGraphControl_SimplifiedModelPreInclude.py')
-include.block('MC12JobOptions/MadGraphControl_SimplifiedModelPostInclude.py')
+include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC15JobOptions/latest/susycontrol/MadGraphControl_SimplifiedModelPreInclude.py')
+include.block('MC15JobOptions/MadGraphControl_SimplifiedModelPreInclude.py')
+include.block('MC15JobOptions/MadGraphControl_SimplifiedModelPostInclude.py')
 
 for run in listOfRuns:
-    loc_l = glob.glob('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC12JobOptions/latest/share/DSID'+run[0:3]+'xxx/MC12.'+run+'.*')
+    loc_l = glob.glob('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC15JobOptions/latest/share/DSID'+run[0:3]+'xxx/MC15.'+run+'.*')
     if 0==len(loc_l):
-        print 'Run not found:',run,'in','/cvmfs/atlas.cern.ch/repo/sw/Generators/MC12JobOptions/latest/share/DSID'+run[0:3]+'xxx/MC12.'+run+'.*'
+        print 'Run not found:',run,'in','/cvmfs/atlas.cern.ch/repo/sw/Generators/MC15JobOptions/latest/share/DSID'+run[0:3]+'xxx/MC15.'+run+'.*'
         continue
     if len(loc_l)>1:
         print 'Multiple runs found:',loc_l,'for run',run,'- Using first.'
@@ -49,9 +49,9 @@ for run in listOfRuns:
     try:
         runArgs.runNumber=int(run)
         runNumber=int(run) # Protection against old code
-        include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC12JobOptions/latest/susycontrol/'+(main_jobO.split('/')[-1]))
+        include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC15JobOptions/latest/susycontrol/'+(main_jobO.split('/')[-1]))
         SLHAonly=True
-        #include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC12JobOptions/latest/susycontrol/MadGraphControl_SimplifiedModelPostInclude.py')
+        #include('/cvmfs/atlas.cern.ch/repo/sw/Generators/MC15JobOptions/latest/susycontrol/MadGraphControl_SimplifiedModelPostInclude.py')
         include('./MadGraphControl_SimplifiedModelPostInclude.py')
     except: pass
 
diff --git a/Generators/Pythia8_i/CMakeLists.txt b/Generators/Pythia8_i/CMakeLists.txt
index 7ef80a3625b..4b8090be9ea 100644
--- a/Generators/Pythia8_i/CMakeLists.txt
+++ b/Generators/Pythia8_i/CMakeLists.txt
@@ -45,7 +45,11 @@ atlas_add_library( Pythia8_iLib
                    src/UserHooks/PTRelVetoedShower.cxx
                    src/UserHooks/WprimeFlat.cxx
                    src/UserHooks/WprimeWZFlat.cxx
+                   src/UserHooks/mHatReweight.cxx
                    src/UserHooks/main31.cxx
+                   src/UserHooks/SingleTopWideEta.cxx
+                   src/UserHooks/SettableColourReconnection.cxx
+                   src/UserHooks/PowhegBB4L.cxx
                    src/UserResonances/ResonanceExcitedCI.cxx
                    src/UserResonances/ResonanceLQ.cxx
                    PUBLIC_HEADERS Pythia8_i
diff --git a/Generators/Pythia8_i/Pythia8_i/Pythia8_i.h b/Generators/Pythia8_i/Pythia8_i/Pythia8_i.h
index bfef6dbf847..820e763424f 100644
--- a/Generators/Pythia8_i/Pythia8_i/Pythia8_i.h
+++ b/Generators/Pythia8_i/Pythia8_i/Pythia8_i.h
@@ -17,13 +17,15 @@
 
 #include <stdexcept>
 
+using std::string;
 
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
 /**
  *  Author: James Monk (jmonk@cern.ch)
 */
 
-using std::string;
-
 class IAtRndmGenSvc;
 
 namespace Pythia8{
@@ -58,14 +60,14 @@ private:
 class Pythia8_i: public GenModule{
 
 public:
-  Pythia8_i(const string &name, ISvcLocator *pSvcLocator);
+  Pythia8_i(const std::string &name, ISvcLocator *pSvcLocator);
   
   ~Pythia8_i();
 
   class CommandException : public std::runtime_error{
   public:
     
-  CommandException(const string &cmd): std::runtime_error("Cannot interpret command: " + cmd){
+  CommandException(const std::string &cmd): std::runtime_error("Cannot interpret command: " + cmd){
     }
   };
     
@@ -97,10 +99,8 @@ private:
   double m_version;
   
   std::vector<std::string> m_commands;
-  std::vector<std::string> m_userParams;
-  std::vector<std::string> m_userModes;
   
-  enum PDGID {PROTON=2212, ANTIPROTON=-2212, ELECTRON=11, POSITRON=-11, INVALID=0};
+  enum PDGID {PROTON=2212, ANTIPROTON=-2212, NEUTRON=2112, ANTINEUTRON=-2112, MUON=13, ANTIMUON=-13, ELECTRON=11, POSITRON=-11, INVALID=0};
   
   double m_collisionEnergy;
   bool m_useRndmGenSvc;
@@ -111,7 +111,8 @@ private:
   std::string m_beam2;
 
   std::string m_lheFile;
-  
+
+  bool m_storeLHE;
   bool m_doCKKWLAcceptance;
   double m_nAccepted;
   double m_nMerged;
@@ -130,7 +131,7 @@ private:
   
   std::string m_userHook;
   
-  Pythia8::UserHooks *m_userHookPtr;
+  std::vector<Pythia8::UserHooks*> m_userHooksPtrs;
   
   std::string m_userResonances;
   
@@ -141,8 +142,10 @@ private:
   std::string m_particleDataFile;
   std::string m_outputParticleDataFile;
   
-  std::vector<string> m_weightIDs;
+  std::vector<std::string> m_weightIDs;
   bool m_doLHE3Weights;
+  std::vector<std::string> m_weightCommands;
+  std::vector<std::string> m_showerWeightNames;
   
   static int s_allowedTunes(double version);
 
diff --git a/Generators/Pythia8_i/Pythia8_i/UserHooksFactory.h b/Generators/Pythia8_i/Pythia8_i/UserHooksFactory.h
index 18e1007d117..40cb8a326dd 100644
--- a/Generators/Pythia8_i/Pythia8_i/UserHooksFactory.h
+++ b/Generators/Pythia8_i/Pythia8_i/UserHooksFactory.h
@@ -8,6 +8,7 @@
 #include "Pythia8/UserHooks.h"
 
 #include <string>
+#include <map>
 
 namespace Pythia8_UserHooks{
  
@@ -58,11 +59,20 @@ namespace Pythia8_UserHooks{
       string m_name;
     };
     
+    template<typename T>
+    static std::map<std::string, T> &userSettings();
+    
+    static std::map<std::string, double> &userParams();
+    static std::map<std::string, int> &userModes();
+    static std::map<std::string, std::string> &userWords();
+    
   private:
     
     static std::map<string, const ICreator*> &s_creators();
     
   };
+  
+  
 }
 
 
diff --git a/Generators/Pythia8_i/src/Pythia8_i.cxx b/Generators/Pythia8_i/src/Pythia8_i.cxx
index 5286dba14db..ac4236b5d9b 100644
--- a/Generators/Pythia8_i/src/Pythia8_i.cxx
+++ b/Generators/Pythia8_i/src/Pythia8_i.cxx
@@ -19,11 +19,15 @@
 #include "CLHEP/Random/RandFlat.h"
 #include "AthenaKernel/IAtRndmGenSvc.h"
 
+#include <sstream>
+
 // Name of AtRndmGenSvc stream
 std::string     Pythia8_i::pythia_stream   = "PYTHIA8_INIT";
 
 using boost::assign::operator+=;
-
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
 /**
  * author: James Monk (jmonk@cern.ch)
  */
@@ -38,6 +42,7 @@ m_nMerged(0.),
 m_failureCount(0),
 m_procPtr(0),
 m_userHookPtr(0),
+  //m_userHooksPtrs(std::vector<Pythia8::UserHooks*>()),
 m_doLHE3Weights(false)
 {
   declareProperty("Commands", m_commands);
@@ -48,6 +53,7 @@ m_doLHE3Weights(false)
   declareProperty("Beam1", m_beam1 = "PROTON");
   declareProperty("Beam2", m_beam2 = "PROTON");
   declareProperty("LHEFile", m_lheFile = "");
+  declareProperty("StoreLHE", m_storeLHE=false);
   declareProperty("CKKWLAcceptance", m_doCKKWLAcceptance = false);
   declareProperty("MaxFailures", m_maxFailures = 10);//the max number of consecutive failures
   declareProperty("UserProcess", m_userProcess="");
@@ -56,12 +62,19 @@ m_doLHE3Weights(false)
   declareProperty("UseLHAPDF", m_useLHAPDF=true);
   declareProperty("ParticleData", m_particleDataFile="");
   declareProperty("OutputParticleData",m_outputParticleDataFile="ParticleData.local.xml");
+  declareProperty("ShowerWeightNames",m_showerWeightNames);
   
-  m_particleIDs["PROTON"]     = PROTON;
-  m_particleIDs["ANTIPROTON"] = ANTIPROTON;
-  m_particleIDs["ELECTRON"]   = ELECTRON;
-  m_particleIDs["POSITRON"]   = POSITRON;
+  m_particleIDs["PROTON"]      = PROTON;
+  m_particleIDs["ANTIPROTON"]  = ANTIPROTON;
+  m_particleIDs["ELECTRON"]    = ELECTRON;
+  m_particleIDs["POSITRON"]    = POSITRON;
+  m_particleIDs["NEUTRON"]     = NEUTRON;
+  m_particleIDs["ANTINEUTRON"] = ANTINEUTRON;
+  m_particleIDs["MUON"]        = MUON;
+  m_particleIDs["ANTIMUON"]    = ANTIMUON;
 
+  ATH_MSG_INFO("XML Path is " + xmlpath());
+  
 }
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -70,8 +83,10 @@ Pythia8_i::~Pythia8_i() {
   delete m_atlasRndmEngine;
   
   if(m_procPtr != 0)     delete m_procPtr;
-  if(m_userHookPtr != 0) delete m_userHookPtr;
   
+  for(Pythia8::UserHooks *ptr: m_userHooksPtrs){
+    delete ptr;
+  }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -93,6 +108,9 @@ StatusCode Pythia8_i::genInitialize() {
   
   Pythia8_i::pythia_stream =       "PYTHIA8_INIT";
   
+  // By default add "nominal" to the list of shower weight names
+  m_showerWeightNames.insert(m_showerWeightNames.begin(), "nominal");
+  
   // We do explicitly set tune 4C, since it is the starting point for many other tunes
   // Tune 4C for pp collisions
   m_pythia->readString("Tune:pp = 5");
@@ -224,9 +242,9 @@ StatusCode Pythia8_i::genInitialize() {
   if(m_userProcess != ""){
     m_procPtr = Pythia8_UserProcess::UserProcessFactory::create(m_userProcess);
   }
-
-  bool canInit = true;
   
+  bool canInit = true;
+
   m_userHookPtr = 0;
 
   if(m_userHook != ""){
@@ -418,8 +436,6 @@ StatusCode Pythia8_i::fillEvt(HepMC::GenEvent *evt){
   double eventWeight = phaseSpaceWeight*mergingWeight;
   
   ATH_MSG_DEBUG("Event weights: phase space weight, merging weight, total weight = "<<phaseSpaceWeight<<", "<<mergingWeight<<", "<<eventWeight);
-  
-  // set the event weight
   evt->weights().clear();
   
   std::vector<string>::const_iterator id = m_weightIDs.begin();
@@ -447,8 +463,20 @@ StatusCode Pythia8_i::fillEvt(HepMC::GenEvent *evt){
       }
       
     }
-  }else{
-    evt->weights().push_back(eventWeight);
+  }
+
+  size_t firstWeight = (m_doLHE3Weights)? 1: 0;
+  
+  for(int iw = firstWeight; iw != m_pythia->info.nWeights(); ++iw){
+    
+    std::string wtName = ((int)m_showerWeightNames.size() == m_pythia->info.nWeights())? m_showerWeightNames[iw]: "ShowerWt_" + std::to_string(iw);
+    
+    if(m_pythia->info.nWeights() != 1){
+      if(m_internal_event_number == 1) m_weightIDs.push_back(wtName);
+      evt->weights()[wtName] = mergingWeight*m_pythia->info.weight(iw);
+    }else{
+      evt->weights().push_back(eventWeight);
+    }
   }
 
   // Units correction
@@ -493,9 +521,9 @@ StatusCode Pythia8_i::genFinalize(){
   std::cout << "MetaData: cross-section (nb)= " << xs <<std::endl;
   std::cout << "MetaData: generator= Pythia 8." << PY8VERSION <<std::endl;
 
-  if(m_doLHE3Weights){
-    
+  if(m_doLHE3Weights || m_weightIDs.size()>1 ){
     std::cout<<"MetaData: weights = ";
+
     foreach(const string &id, m_weightIDs){
       
       std::map<string, Pythia8::LHAweight>::const_iterator weight = m_pythia->info.init_weights->find(id);
@@ -506,7 +534,7 @@ StatusCode Pythia8_i::genFinalize(){
         std::cout<<"Unknown | ";
       }
     }
-    std::cout<<std::endl;
+  std::cout<<std::endl;
   }
   
   return StatusCode::SUCCESS;
@@ -560,7 +588,7 @@ string Pythia8_i::xmlpath(){
      // using PathResolver:
      foundpath = PathResolverFindCalibDirectory( "Pythia8/xmldoc" );
   }
-
+  
   return foundpath;
 }
 
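
The weight loop added above labels the extra Pythia shower weights either with the names supplied through the new ShowerWeightNames property or, when the property does not provide exactly one name per weight, with a generic "ShowerWt_<i>" label. A minimal standalone sketch of that fallback rule (the helper name is hypothetical and not part of the patch):

    // Hypothetical helper mirroring the weight-naming fallback used in fillEvt above.
    #include <iostream>
    #include <string>
    #include <vector>

    std::string showerWeightName(int iw, int nWeights,
                                 const std::vector<std::string> &configuredNames) {
      // Use the configured name only when exactly one name per weight was supplied;
      // otherwise fall back to a generic label so the weights stay distinguishable.
      if (static_cast<int>(configuredNames.size()) == nWeights) return configuredNames[iw];
      return "ShowerWt_" + std::to_string(iw);
    }

    int main() {
      const std::vector<std::string> names = {"nominal", "isrUp", "isrDown"};
      for (int iw = 0; iw < 3; ++iw)
        std::cout << showerWeightName(iw, 3, names) << "\n";  // configured names are used
      for (int iw = 0; iw < 4; ++iw)
        std::cout << showerWeightName(iw, 4, names) << "\n";  // falls back to ShowerWt_<i>
      return 0;
    }
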
diff --git a/Generators/Pythia8_i/src/UserHooks/PowhegBB4L.cxx b/Generators/Pythia8_i/src/UserHooks/PowhegBB4L.cxx
new file mode 100644
index 00000000000..0691844b636
--- /dev/null
+++ b/Generators/Pythia8_i/src/UserHooks/PowhegBB4L.cxx
@@ -0,0 +1,311 @@
+// PowhegHooksBB4L.h 
+// Copyright (C) 2017 Tomas Jezo, Markus Seidel, Ben Nachman 
+
+// Author: Tomas Jezo, Markus Seidel and Ben Nachman based on 
+// PowhegHooks.h by Richard Corke
+
+#include "Pythia8_i/UserHooksFactory.h"
+#include "Pythia8Plugins/PowhegHooks.h"
+#include "UserSetting.h"
+
+namespace Pythia8 {
+
+// Create the user hook
+  class PowhegBB4L;
+  Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::PowhegBB4L> powhegBB4LCreator("PowhegBB4L");
+
+//==========================================================================
+
+// Use userhooks to veto PYTHIA emissions above the POWHEG scale.
+
+class PowhegBB4L : public UserHooks {
+//class PowhegHooksBB4L : public PowhegHooks {
+
+public:
+
+	// Constructor and destructor.
+	PowhegBB4L(): m_topresscale(-1.), m_atopresscale(-1.),
+                      m_onlyDistance1("Powheg:bb4l:onlyDistance1", 0),
+                      m_useScaleResonanceInstead("Powheg:bb4l:useScaleResonanceInstead", 0){}
+	~PowhegBB4L() {}
+//--------------------------------------------------------------------------
+
+//--------------------------------------------------------------------------
+
+	// Calculates the scale of the hardest emission from within the resonance system
+	// translated by Markus Seidel modified by Tomas Jezo
+	inline double findresscale( const int iRes, const Event& event) {
+		double scale = 0.;
+    
+		int nDau = event[iRes].daughterList().size();
+    
+		if (nDau == 0) {
+			// No resonance found, set scale to high value
+			// Pythia will shower any MC generated resonance unrestricted
+			scale = 1e30;
+		}
+		else if (nDau < 3) {
+			// No radiating resonance found
+			scale = 0.8;
+		}
+		else if (abs(event[iRes].id()) == 6) {
+			// Find top daughters
+			int idw = -1, idb = -1, idg = -1;
+			
+			for (int i = 0; i < nDau; i++) {
+				int iDau = event[iRes].daughterList()[i];
+				if (abs(event[iDau].id()) == 24) idw = iDau;
+				if (abs(event[iDau].id()) ==  5) idb = iDau;
+				if (abs(event[iDau].id()) == 21) idg = iDau;
+			}
+			
+			// Get daughter 4-vectors in resonance frame
+			Vec4 pw(event[idw].p());
+			pw.bstback(event[iRes].p());
+			
+			Vec4 pb(event[idb].p());
+			pb.bstback(event[iRes].p());
+			
+			Vec4 pg(event[idg].p());
+			pg.bstback(event[iRes].p());
+			
+			// Calculate scale
+			scale = sqrt(2*pg*pb*pg.e()/pb.e());
+		}
+		else {
+			scale = 1e30;
+		}
+
+		return scale;
+	}
+
+//--------------------------------------------------------------------------
+
+	// The following routine matches the daughters of particle `e[iparticle]` against an expected
+	// pattern given by the list of PDG IDs `ids`, where 0 acts as a wildcard. If a match is obtained,
+	// the positions and momenta of these particles are returned in the vectors `positions` and `momenta`, respectively.
+	// If exitOnExtraLegs==true, the routine exits if the decay has more particles than expected, but not fewer.
+	inline bool match_decay(int iparticle, const Event &e, const vector<int> &ids, vector<int> &positions, vector<Vec4> &momenta, bool exitOnExtraLegs = true){
+		// compare sizes
+		if (e[iparticle].daughterList().size() != ids.size()) {
+			if (exitOnExtraLegs && e[iparticle].daughterList().size() > ids.size()) exit(-1);
+			return false; 
+		}
+		// compare content
+		for (size_t i = 0; i < e[iparticle].daughterList().size(); i++) {
+			int di = e[iparticle].daughterList()[i];
+			if (ids[i] != 0 && e[di].id() != ids[i]) 
+				return false;
+		}
+		// reset the positions and momenta vectors (because they may be reused)
+		positions.clear();
+		momenta.clear();
+		// construct the array of momenta
+		for (size_t i = 0; i < e[iparticle].daughterList().size(); i++) {
+			int di = e[iparticle].daughterList()[i];
+			positions.push_back(di);
+			momenta.push_back(e[di].p());
+		}
+		return true;
+	}
+
+	inline double qSplittingScale(Vec4 pt, Vec4 p1, Vec4 p2){
+		p1.bstback(pt);
+		p2.bstback(pt);
+		return sqrt( 2*p1*p2*p2.e()/p1.e() );
+	}
+
+	inline double gSplittingScale(Vec4 pt, Vec4 p1, Vec4 p2){
+		p1.bstback(pt);
+		p2.bstback(pt);		
+		return sqrt( 2*p1*p2*p1.e()*p2.e()/(pow(p1.e(),2)+pow(p2.e(),2)) );
+	}
+
+
+	inline double getdechardness(int topcharge, const Event &e){
+		int tid = 6*topcharge, wid = 24*topcharge, bid = 5*topcharge, gid = 21, wildcard = 0;
+		// find last top in the record
+		int i_top = -1;
+		Vec4 p_top, p_b, p_g, p_g1, p_g2;
+		for (int i = 0; i < e.size(); i++) 
+			if (e[i].id() == tid) {				
+				i_top = i;
+				p_top = e[i].p();
+			}
+		if (i_top == -1) return -1.0;
+				
+		// summary of cases
+		// 1.) t > W b
+		//   a.) b > 3     ... error
+		//   b.) b > b g   ... h = sqrt(2*p_g*p_b*p_g.e()/p_b.e())
+		//   c.) b > other ... h = -1
+		//   return h
+		// 2.) t > W b g
+		//   a.)   b > 3     ... error
+		//   b.)   b > b g   ... h1 = sqrt(2*p_g*p_b*p_g.e()/p_b.e())
+		//   c.)   b > other ... h1 = -1
+		//   i.)   g > 3     ... error
+		//   ii.)  g > 2     ... h2 = sqrt(2*p_g1*p_g2*p_g1.e()*p_g2.e()/(pow(p_g1.e(),2)+pow(p_g2.e(),2))) );
+		//   iii.) g > other ... h2 = -1
+		//   return max(h1,h2)
+		// 3.) else ... error
+
+		vector<Vec4> momenta;
+		vector<int> positions;
+
+		// 1.) t > b W
+		if ( match_decay(i_top, e, vector<int> {wid, bid}, positions, momenta, false) ) {
+			double h;
+			int i_b = positions[1];
+			// a.+b.) b > 3 or b > b g 
+			if ( match_decay(i_b, e, vector<int> {bid, gid}, positions, momenta) )
+				h = qSplittingScale(e[i_top].p(), momenta[0], momenta[1]);
+			// c.) b > other
+			else 
+				h = -1;
+			return h;
+		} 
+		// 2.) t > b W g
+		else if ( match_decay(i_top, e, vector<int> {wid, bid, gid}, positions, momenta, false) ) {
+			double h1, h2;
+			int i_b = positions[1], i_g = positions[2];
+			// a.+b.) b > 3 or b > b g
+			if ( match_decay(i_b, e, vector<int> {bid, gid}, positions, momenta) )
+				h1 = qSplittingScale(e[i_top].p(), momenta[0], momenta[1]);
+			// c.) b > other
+			else 
+				h1 = -1;
+			// i.+ii.) g > 3 or g > 2
+			if ( match_decay(i_g, e, vector<int> {wildcard, wildcard}, positions, momenta) )
+				h2 = gSplittingScale(e[i_top].p(), momenta[0], momenta[1]);
+			// iii.) g > other
+			else 
+				h2 = -1;
+			return max(h1, h2);
+		}
+		// 3.) else
+		else { 
+			exit(-1);
+		}
+	}	
+
+//--------------------------------------------------------------------------
+
+	// called after shower -- cannot be used for veto, because the event will get discarded
+	inline bool canVetoPartonLevel() { return true; }
+	inline bool doVetoPartonLevel(const Event &e) {
+		double topdechardness = getdechardness(1, e),  atopdechardness = getdechardness(-1, e);
+		if ((topdechardness > m_topresscale) or (atopdechardness > m_atopresscale)) {
+
+			infoPtr->errorMsg("Warning in PowhegHooksBB4L::doVetoPartonLevel: passed doVetoFSREmission veto, but wouldn't have passed veto based on the full event listing");
+		}
+//		cout << " veto scales: " << fixed << setprecision(17) << setw(30) << topresscale << setw(30) << atopresscale << endl;
+		m_topresscale = -1;
+		m_atopresscale = -1;
+
+		return false;
+	}
+
+	// called before each resonance decay shower
+	inline bool canSetResonanceScale() { return true; }
+	inline double scaleResonance(int iRes, const Event &e) {		
+	  double scale = 0.;
+	  
+	  if (e[iRes].id() == 6){ 
+	    scale = m_topresscale = findresscale(iRes, e);
+	  }else if (e[iRes].id() == -6){ 
+	    scale = m_atopresscale = findresscale(iRes, e);
+	  }
+	  
+	  if (m_useScaleResonanceInstead(settingsPtr)) return scale;
+	  
+	  return 1e30;
+	}
+
+//--------------------------------------------------------------------------
+
+	// FSR veto
+	inline bool canVetoFSREmission() { 
+		if (m_useScaleResonanceInstead(settingsPtr)) 
+			return false;
+		else 
+			return true; 
+	}
+  
+  
+  
+	inline bool doVetoFSREmission(int, const Event &e, int, bool inResonance) {
+
+		if (inResonance) {
+
+			// get the participants of the splitting: the radiator and the emitted
+			int iEmt = e.size() - 2;
+			int iRadAft = e.size() - 3;
+			int iRadBef = e[iEmt].mother1();
+
+			// find the top resonance the radiator originates from
+			int iTop = e[iRadBef].mother1();
+			int distance = 1;
+			while (abs(e[iTop].id()) != 6 && iTop > 0) {
+				iTop = e[iTop].mother1();
+				distance ++;
+			}
+			if (iTop == 0) {
+				infoPtr->errorMsg("Warning in PowhegHooksBB4L::doVetoFSREmission: emission in resonance not from top quark, not vetoing");
+				return false;
+			}
+			int iTopCharge = (e[iTop].id()>0)?1:-1;
+
+
+			// calculate the scale of the emission
+			Vec4 pr(e[iRadAft].p()), pe(e[iEmt].p()), pt(e[iTop].p());
+			double scale;
+			// gluon splitting into two partons
+			if (e[iRadBef].id() == 21)
+				scale = gSplittingScale(pt, pr, pe);
+			// quark emitting a gluon
+			else if (abs(e[iRadBef].id()) <= 5)
+				scale = qSplittingScale(pt, pr, pe);
+			// other stuff (which we should not veto)
+			else {
+				scale = 0;
+			}
+
+			if (iTopCharge > 0) {
+				if (m_onlyDistance1(settingsPtr))
+					return (distance == 1) && scale > m_topresscale;
+				else
+					return scale > m_topresscale;
+			}
+			else {
+				if (m_onlyDistance1(settingsPtr))
+					return (distance == 1) && scale > m_atopresscale;
+				else
+					return scale > m_atopresscale;
+			}
+		} else {
+			return false;
+		}
+    
+    return false;
+	}
+
+//--------------------------------------------------------------------------
+
+  // Functions to return information
+
+//  inline int    getNFSRveto() { return nFSRveto; }
+
+//--------------------------------------------------------------------------
+
+private:
+  
+  double m_topresscale;
+  double m_atopresscale;
+  Pythia8_UserHooks::UserSetting<int> m_onlyDistance1;
+  Pythia8_UserHooks::UserSetting<int> m_useScaleResonanceInstead;
+};
+
+//==========================================================================
+
+} // end namespace Pythia8
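
The hook registers itself with the UserHooksFactory under the name "PowhegBB4L" and reads its two switches through UserSetting<int>. A minimal standalone sketch of driving it outside Athena follows; the addMode registrations and the LHEF file name are assumptions (inside Athena the settings registration and hook attachment are expected to happen in Pythia8_i), so treat this purely as an illustration.

    // Standalone sketch, not part of the patch: drive the PowhegBB4L hook from a bare Pythia instance.
    #include "Pythia8/Pythia.h"
    #include "Pythia8_i/UserHooksFactory.h"

    int main() {
      Pythia8::Pythia pythia;

      // Register the two switches that PowhegBB4L reads via UserSetting<int> (assumed here).
      pythia.settings.addMode("Powheg:bb4l:onlyDistance1",            0, true, true, 0, 1);
      pythia.settings.addMode("Powheg:bb4l:useScaleResonanceInstead", 0, true, true, 0, 1);

      // Create the hook by its factory name and hand it to Pythia.
      Pythia8::UserHooks *hook = Pythia8_UserHooks::UserHooksFactory::create("PowhegBB4L");
      pythia.setUserHooksPtr(hook);

      pythia.readString("Powheg:bb4l:onlyDistance1 = 1");
      pythia.readString("Beams:frameType = 4");
      pythia.readString("Beams:LHEF = powheg_bb4l.lhe");  // placeholder input file

      if (!pythia.init()) return 1;
      for (int iEvent = 0; iEvent < 10; ++iEvent) pythia.next();
      pythia.stat();
      delete hook;
      return 0;
    }
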
diff --git a/Generators/Pythia8_i/src/UserHooks/SettableColourReconnection.cxx b/Generators/Pythia8_i/src/UserHooks/SettableColourReconnection.cxx
new file mode 100644
index 00000000000..2d183c61b9a
--- /dev/null
+++ b/Generators/Pythia8_i/src/UserHooks/SettableColourReconnection.cxx
@@ -0,0 +1,94 @@
+#include "Pythia8_i/UserHooksFactory.h"
+#include "Pythia8Plugins/ColourReconnectionHooks.h"
+#include "UserSetting.h"
+
+namespace Pythia8{
+
+  class SettableColourReconnectionBase : public UserHooks{
+    
+    public:
+
+    SettableColourReconnectionBase() : m_crHook(0){}
+    
+    virtual ~SettableColourReconnectionBase(){
+      if(m_crHook != 0) delete m_crHook;
+    }
+    
+    // Allow colour reconnection after resonance decays (early or late)...
+    bool canReconnectResonanceSystems() {return true;}
+    
+    bool doReconnectResonanceSystems(int oldSizeEvent, Event& event) {
+      
+      if(m_crHook == 0){
+        _init();
+        m_crHook->initPtr(infoPtr, settingsPtr, particleDataPtr, rndmPtr, beamAPtr, beamBPtr, beamPomAPtr, beamPomBPtr, coupSMPtr, partonSystemsPtr, sigmaTotPtr);
+        
+      }
+      return m_crHook->doReconnectResonanceSystems(oldSizeEvent, event);
+    }
+    
+    protected:
+    
+    virtual void _init() = 0;
+    
+    UserHooks *m_crHook;
+    
+    
+  };
+  //////////////////////////////////////////////////////////////////////
+  // Minimum bias colour reconnection hook
+  class SettableMBColourReconnection : public SettableColourReconnectionBase{
+    
+    public:
+    SettableMBColourReconnection() :  SettableColourReconnectionBase(),
+                                      m_modeIn("MBReconnection:Mode", 0),
+                                      m_flipIn("MBReconnection:Flip", 0),
+                                      m_dLamCutIn("MBReconnection:DeltaLambda", 0.),
+                                      m_fracGluonIn("MBReconnection:GluonFraction", 1.){}
+    
+    protected:
+    
+    void _init(){
+      if(m_crHook != 0) return;
+      m_crHook = new MBReconUserHooks(m_modeIn(settingsPtr), m_flipIn(settingsPtr), m_dLamCutIn(settingsPtr), m_fracGluonIn(settingsPtr));
+      return;
+    }
+    
+    private:
+    
+    Pythia8_UserHooks::UserSetting<int> m_modeIn;
+    Pythia8_UserHooks::UserSetting<int> m_flipIn;
+    Pythia8_UserHooks::UserSetting<double> m_dLamCutIn;
+    Pythia8_UserHooks::UserSetting<double> m_fracGluonIn;
+    
+  };
+  
+  //////////////////////////////////////////////////////////////////////
+  // Top colour reconnection hook
+  class SettableTopColourReconnection : public SettableColourReconnectionBase{
+    
+  public:
+    SettableTopColourReconnection() :  SettableColourReconnectionBase(),
+                                      m_modeIn("TopReconnection:Mode", 0),
+                                      m_strengthIn("TopReconnection:Strength", 1){}
+    
+  protected:
+    
+    void _init(){
+      if(m_crHook != 0) return;
+      m_crHook = new TopReconUserHooks(m_modeIn(settingsPtr), m_strengthIn(settingsPtr));
+      return;
+    }
+    
+  private:
+    
+    Pythia8_UserHooks::UserSetting<int> m_modeIn;
+    Pythia8_UserHooks::UserSetting<int> m_strengthIn;
+    
+  };
+  
+  
+}
+
+Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::SettableMBColourReconnection> settableMBColourReconnectionCreator("MBReconUserHooks_Unvalidated");
+Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::SettableTopColourReconnection> settableTopColourReconnectionCreator("TopReconUserHooks_Unvalidated");
diff --git a/Generators/Pythia8_i/src/UserHooks/SingleTopWideEta.cxx b/Generators/Pythia8_i/src/UserHooks/SingleTopWideEta.cxx
new file mode 100644
index 00000000000..3439f84fc3a
--- /dev/null
+++ b/Generators/Pythia8_i/src/UserHooks/SingleTopWideEta.cxx
@@ -0,0 +1,66 @@
+#include "Pythia8_i/UserHooksFactory.h"
+#include "Pythia8/PhaseSpace.h"
+
+namespace Pythia8{
+  class SingleTopWideEta;
+}
+
+
+Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::SingleTopWideEta> singleTopWideEta("SingleTopWideEta");
+
+namespace Pythia8 {
+
+  class SingleTopWideEta : public UserHooks {
+    
+  public:
+    
+    // Constructor.
+    SingleTopWideEta(){}
+    
+    // Destructor.
+    ~SingleTopWideEta(){}
+    
+    // Allow process cross section to be modified...
+    virtual bool canModifySigma() {return true;}
+  
+    // ...which gives access to the event at the trial level, before selection.
+    virtual double multiplySigmaBy(const SigmaProcess* sigmaProcessPtr, 
+				   const PhaseSpace* phaseSpacePtr, 
+				   bool /* inEvent */) {
+      // Throw an exception on events that are not t-channel single top
+      if(sigmaProcessPtr->code() != 603){
+        throw std::runtime_error("SingleTopWideEta: Can only be run on qq -> tq (t-channel W) events, code 603. Event had code " + std::to_string(sigmaProcessPtr->code()) + ".");
+      }
+      
+      double sHat = phaseSpacePtr->sHat();
+      double flatEta= 1.;
+      double rH = sqrt(sHat);
+
+      if(rH <= 160.){
+        double c = -5.55578e+01;
+        double slope=  2.88096e-01;
+        flatEta = exp(c+slope*rH);
+      }
+
+
+      if( 160.  < rH  && rH <  180. ){
+        double g1 = 8.07441e-1 ;
+        double g2 = 1.7313e2 ;
+        double g3 = 2.4357;
+        flatEta = g1*exp(-0.5*pow((g2-rH)/g3,2));
+      }
+
+
+      if(rH >= 180.){
+        double c1 = 8.84562;
+        double slope1 = -9.22426e-2;
+        flatEta = exp(c1+slope1*rH);
+      }
+
+      return flatEta;
+
+    }
+    
+  };  
+
+} // end namespace Pythia8
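
The weight returned by multiplySigmaBy above is a pure function of rH = sqrt(sHat). The same piecewise form, restated as a standalone function with identical constants, is shown below purely for illustration (rH is assumed to be in GeV):

    // Illustration only: the same piecewise weight as SingleTopWideEta::multiplySigmaBy.
    #include <cmath>
    #include <iostream>

    double wideEtaWeight(double rH) {
      if (rH <= 160.)
        return std::exp(-5.55578e+01 + 2.88096e-01 * rH);                           // rising exponential
      if (rH < 180.)
        return 8.07441e-1 * std::exp(-0.5 * std::pow((1.7313e2 - rH) / 2.4357, 2));  // Gaussian bump
      return std::exp(8.84562 - 9.22426e-2 * rH);                                    // falling exponential
    }

    int main() {
      for (double rH : {150., 170., 173., 200.})
        std::cout << "rH = " << rH << " GeV -> weight " << wideEtaWeight(rH) << "\n";
      return 0;
    }
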
diff --git a/Generators/Pythia8_i/src/UserHooks/UserSetting.h b/Generators/Pythia8_i/src/UserHooks/UserSetting.h
index 2673e89c1c3..70f1e283748 100644
--- a/Generators/Pythia8_i/src/UserHooks/UserSetting.h
+++ b/Generators/Pythia8_i/src/UserHooks/UserSetting.h
@@ -6,8 +6,11 @@
 #define PYTHIA8_USERHOOKS_USERSETTING_H
 
 #include "Pythia8/Settings.h"
+#include "Pythia8_i/UserHooksFactory.h"
 #include <string>
 #include <iostream>
+#include <stdexcept>
+
 
 namespace Pythia8_UserHooks{
  
@@ -18,19 +21,28 @@ namespace Pythia8_UserHooks{
     
     public:
     
-    UserSetting(string name, T defaultValue):
+  UserSetting(string name, T defaultValue):
     m_paramName(name),
     m_param(defaultValue),
     m_settingsPtr(0),
-    m_retrieved(false){  }
+    m_retrieved(false){
+    
+      
+      typename std::map<std::string, T>::const_iterator test = UserHooksFactory::userSettings<T>().find(m_paramName);
+      if(test != UserHooksFactory::userSettings<T>().end()) throw std::runtime_error("Duplicate user-defined setting already exists: " + m_paramName);
+      UserHooksFactory::userSettings<T>()[m_paramName] = defaultValue;
+      
+      
+    }
     
     T operator()(Pythia8::Settings *settingsPtr){
     
+      if(m_settingsPtr == 0 && settingsPtr == 0) throw std::runtime_error("settingsPtr is not yet initialised!");
+      
       if(m_retrieved && m_settingsPtr == settingsPtr) return m_param;
       
       m_settingsPtr = settingsPtr;
       m_param = uncachedRetrieve();
-      std::cout<<m_paramName<<" = "<<m_param<<std::endl;
       m_retrieved = true;
       return m_param;
     };
@@ -53,6 +65,8 @@ namespace Pythia8_UserHooks{
     if(m_settingsPtr->isParm(m_paramName)){
       return m_settingsPtr->parm(m_paramName);
     }
+    
+    throw std::runtime_error("UserSetting " + m_paramName + " does not exist!");
     return m_param;
   }
   
@@ -61,6 +75,8 @@ namespace Pythia8_UserHooks{
     if(m_settingsPtr->isMode(m_paramName)){
       return m_settingsPtr->mode(m_paramName);
     }
+    
+    throw std::runtime_error("UserSetting " + m_paramName + " does not exist!");
     return m_param;
   }
   
@@ -69,6 +85,8 @@ namespace Pythia8_UserHooks{
     if(m_settingsPtr->isWord(m_paramName)){
       return m_settingsPtr->word(m_paramName);
     }
+    
+    throw std::runtime_error("UserSetting " + m_paramName + " does not exist!");
     return m_param;
   }
   
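
With the constructor now registering every setting in the factory maps and the retrieval functions throwing for unknown names, a hook is expected to declare its knobs as UserSetting members and read them lazily through settingsPtr. A hedged sketch with an invented hook and setting name follows; note that the setting must also exist in the Pythia Settings database (e.g. registered via Settings::addParm), otherwise the new runtime_error is thrown on first retrieval.

    // Illustrative hook; "ExampleScaleHook" and "Example:ScaleCut" are made-up names,
    // and the file is assumed to sit next to the other hooks in src/UserHooks.
    #include "Pythia8_i/UserHooksFactory.h"
    #include "UserSetting.h"

    namespace Pythia8 {

      class ExampleScaleHook : public UserHooks {
      public:
        // The constructor registers "Example:ScaleCut" (default 10.) in
        // UserHooksFactory::userSettings<double>().
        ExampleScaleHook() : m_scaleCut("Example:ScaleCut", 10.) {}

        bool canVetoISREmission() { return true; }

        bool doVetoISREmission(int, const Event &e, int) {
          // First call caches the value from the Settings database; later calls reuse it.
          const double cut = m_scaleCut(settingsPtr);
          return e[e.size() - 1].pT() > cut;
        }

      private:
        Pythia8_UserHooks::UserSetting<double> m_scaleCut;
      };

      // Register with the factory so the hook can be requested by name.
      Pythia8_UserHooks::UserHooksFactory::Creator<ExampleScaleHook>
          exampleScaleHookCreator("ExampleScaleHook");
    }
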
diff --git a/Generators/Pythia8_i/src/UserHooks/main31.cxx b/Generators/Pythia8_i/src/UserHooks/main31.cxx
index 3abd230437f..07774edf21b 100644
--- a/Generators/Pythia8_i/src/UserHooks/main31.cxx
+++ b/Generators/Pythia8_i/src/UserHooks/main31.cxx
@@ -5,509 +5,10 @@
 #include "UserHooksUtils.h"
 #include "UserSetting.h"
 #include "Pythia8_i/UserHooksFactory.h"
-#include "boost/lexical_cast.hpp"
-#include <stdexcept>
-#include <iostream>
+#include "Pythia8Plugins/PowhegHooks.h"
 
 namespace Pythia8{
-  class main31;
+  Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::PowhegHooks> PowhegHooksCreator("PowhegMain31");
 }
 
-Pythia8_UserHooks::UserHooksFactory::Creator<Pythia8::main31> main31Creator("Main31");
 
-namespace Pythia8{
-  
-  // Use userhooks to veto PYTHIA emissions above the POWHEG scale.
-  
-  class main31 : public UserHooks {
-    
-  public:  
-    
-    // Constructor and destructor.
-    main31() : m_nFinal("Main31:NFinal", 2),
-    m_pTHardMode("Main31:pTHard", 2),
-    m_pTDefMode("Main31:pTdef", 1),
-    m_vetoMode(1), m_vetoCount(3),
-    m_pTemtMode(0),
-    m_emittedMode(0),
-    m_MPIvetoMode(0),
-    m_pThard(0), m_pTMPI(0),
-    m_accepted(0),
-    m_nAcceptSeq(0),
-    m_nISRveto(0), m_nFSRveto(0) {};
-    ~main31() {}
-    
-    //--------------------------------------------------------------------------
-    
-    // Routines to calculate the pT (according to pTdefMode) in a splitting:
-    //   ISR: i (radiator after)  -> j (emitted after) k (radiator before)
-    //   FSR: i (radiator before) -> j (emitted after) k (radiator after)
-    // For the Pythia pT definition, a recoiler (after) must be specified.
-    
-    // Compute the Pythia pT separation. Based on pTLund function in History.cc
-    double pTpythia(const Event &e, int RadAfterBranch, int EmtAfterBranch,
-                    int RecAfterBranch, bool FSR) {
-      
-      // Convenient shorthands for later
-      Vec4 radVec = e[RadAfterBranch].p();
-      Vec4 emtVec = e[EmtAfterBranch].p();
-      Vec4 recVec = e[RecAfterBranch].p();
-      int  radID  = e[RadAfterBranch].id();
-      
-      // Calculate virtuality of splitting
-      double sign = (FSR) ? 1. : -1.;
-      Vec4 Q(radVec + sign * emtVec); 
-      double Qsq = sign * Q.m2Calc();
-      
-      // Mass term of radiator
-      double m2Rad = (abs(radID) >= 4 && abs(radID) < 7) ?
-      pow2(particleDataPtr->m0(radID)) : 0.;
-      
-      // z values for FSR and ISR
-      double z, pTnow;
-      if (FSR) {
-        // Construct 2 -> 3 variables
-        Vec4 sum = radVec + recVec + emtVec;
-        double m2Dip = sum.m2Calc();
-        double x1 = 2. * (sum * radVec) / m2Dip;
-        double x3 = 2. * (sum * emtVec) / m2Dip;
-        z     = x1 / (x1 + x3);
-        pTnow = z * (1. - z);
-        
-      } else {
-        // Construct dipoles before/after splitting
-        Vec4 qBR(radVec - emtVec + recVec);
-        Vec4 qAR(radVec + recVec);
-        z     = qBR.m2Calc() / qAR.m2Calc();
-        pTnow = (1. - z);
-      }
-      
-      // Virtuality with correct sign
-      pTnow *= (Qsq - sign * m2Rad);
-      
-      // Can get negative pT for massive splittings
-      if (pTnow < 0.) {
-        cout << "Warning: pTpythia was negative" << endl;
-        return -1.;
-      }
-      
-#ifdef DBGOUTPUT
-      cout << "pTpythia: rad = " << RadAfterBranch << ", emt = "
-      << EmtAfterBranch << ", rec = " << RecAfterBranch
-      << ", pTnow = " << sqrt(pTnow) << endl;
-#endif
-      
-      // Return pT
-      return sqrt(pTnow);
-    }
-    
-    // Compute the POWHEG pT separation between i and j
-    double pTpowheg(const Event &e, int i, int j, bool FSR) {
-      
-      // pT value for FSR and ISR
-      double pTnow = 0.;
-      if (FSR) {
-        // POWHEG d_ij (in CM frame). Note that the incoming beams have not
-        // been updated in the parton systems pointer yet (i.e. prior to any
-        // potential recoil).
-        int iInA = partonSystemsPtr->getInA(0);
-        int iInB = partonSystemsPtr->getInB(0);
-        double betaZ = - ( e[iInA].pz() + e[iInB].pz() ) /
-        ( e[iInA].e()  + e[iInB].e()  );
-                
-        Vec4 iVecBst(e[i].p()), jVecBst(e[j].p());
-        iVecBst.bst(0., 0., betaZ);
-        jVecBst.bst(0., 0., betaZ);
-        pTnow = sqrt( (iVecBst + jVecBst).m2Calc() *
-                     iVecBst.e() * jVecBst.e() /
-                     pow2(iVecBst.e() + jVecBst.e()) );
-        
-      } else {
-        // POWHEG pT_ISR is just kinematic pT
-        pTnow = e[j].pT();
-      }
-      
-      // Check result
-      if (pTnow < 0.) {
-        cout << "Warning: pTpowheg was negative" << endl;
-        return -1.;
-      }
-      
-#ifdef DBGOUTPUT
-      cout << "pTpowheg: i = " << i << ", j = " << j
-      << ", pTnow = " << pTnow << endl;
-#endif
-      
-      return pTnow;
-    }
-    
-    // Calculate pT for a splitting based on pTdefMode.
-    // If j is -1, all final-state partons are tried.
-    // If i, k, r and xSR are -1, then all incoming and outgoing 
-    // partons are tried.
-    // xSR set to 0 means ISR, while xSR set to 1 means FSR
-    double pTcalc(const Event &e, int i, int j, int k, int r, int xSRin) {
-      
-      // Loop over ISR and FSR if necessary
-      double pTemt = -1., pTnow;
-      int xSR1 = (xSRin == -1) ? 0 : xSRin;
-      int xSR2 = (xSRin == -1) ? 2 : xSRin + 1;
-      for (int xSR = xSR1; xSR < xSR2; xSR++) {
-        // FSR flag
-        bool FSR = (xSR == 0) ? false : true;
-        
-        // If all necessary arguments have been given, then directly calculate.
-        // POWHEG ISR and FSR, need i and j.
-        if ((m_pTDefMode(settingsPtr) == 0 || m_pTDefMode(settingsPtr) == 1) && i > 0 && j > 0) {
-          pTemt = pTpowheg(e, i, j, (m_pTDefMode(settingsPtr) == 0) ? false : FSR);
-          
-          // Pythia ISR, need i, j and r.
-        } else if (!FSR && m_pTDefMode(settingsPtr) == 2 && i > 0 && j > 0 && r > 0) {
-          pTemt = pTpythia(e, i, j, r, FSR);
-          
-          // Pythia FSR, need k, j and r.
-        } else if (FSR && m_pTDefMode(settingsPtr) == 2 && j > 0 && k > 0 && r > 0) {
-          pTemt = pTpythia(e, k, j, r, FSR);
-          
-          // Otherwise need to try all possible combintations.
-        } else {
-          // Start by finding incoming legs to the hard system after
-          // branching (radiator after branching, i for ISR).
-          // Use partonSystemsPtr to find incoming just prior to the
-          // branching and track mothers.
-          int iInA = partonSystemsPtr->getInA(0);
-          int iInB = partonSystemsPtr->getInB(0);
-          while (e[iInA].mother1() != 1) { iInA = e[iInA].mother1(); }
-          while (e[iInB].mother1() != 2) { iInB = e[iInB].mother1(); }
-          
-          // If we do not have j, then try all final-state partons
-          int jNow = (j > 0) ? j : 0;
-          int jMax = (j > 0) ? j + 1 : e.size();
-          for (; jNow < jMax; jNow++) {
-            
-            // Final-state and coloured jNow only
-            if (!e[jNow].isFinal() || e[jNow].colType() == 0) continue;
-            
-            // POWHEG
-            if (m_pTDefMode(settingsPtr) == 0 || m_pTDefMode(settingsPtr) == 1) {
-              
-              // ISR - only done once as just kinematical pT
-              if (!FSR) {
-                pTnow = pTpowheg(e, iInA, jNow, (m_pTDefMode(settingsPtr) == 0) ? false : FSR);
-                if (pTnow > 0.) pTemt = (pTemt < 0) ? pTnow : min(pTemt, pTnow);
-                
-                // FSR - try all outgoing partons from system before branching 
-                // as i. Note that for the hard system, there is no 
-                // "before branching" information.
-              } else {
-                
-                int outSize = partonSystemsPtr->sizeOut(0);
-                for (int iMem = 0; iMem < outSize; iMem++) {
-                  int iNow = partonSystemsPtr->getOut(0, iMem);
-                  
-                  // Coloured only, i != jNow and no carbon copies
-                  if (iNow == jNow || e[iNow].colType() == 0) continue;
-                  if (jNow == e[iNow].daughter1() 
-                      && jNow == e[iNow].daughter2()) continue;
-                  
-                  pTnow = pTpowheg(e, iNow, jNow, (m_pTDefMode(settingsPtr) == 0) 
-                                   ? false : FSR);
-                  if (pTnow > 0.) pTemt = (pTemt < 0) 
-                    ? pTnow : min(pTemt, pTnow);
-                } // for (iMem)
-                
-              } // if (!FSR)
-              
-              // Pythia
-            } else if (m_pTDefMode(settingsPtr) == 2) {
-              
-              // ISR - other incoming as recoiler
-              if (!FSR) {
-                pTnow = pTpythia(e, iInA, jNow, iInB, FSR);
-                if (pTnow > 0.) pTemt = (pTemt < 0) ? pTnow : min(pTemt, pTnow);
-                pTnow = pTpythia(e, iInB, jNow, iInA, FSR);
-                if (pTnow > 0.) pTemt = (pTemt < 0) ? pTnow : min(pTemt, pTnow);
-                
-                // FSR - try all final-state coloured partons as radiator
-                //       after emission (k).
-              } else {
-                for (int kNow = 0; kNow < e.size(); kNow++) {
-                  if (kNow == jNow || !e[kNow].isFinal() ||
-                      e[kNow].colType() == 0) continue;
-                  
-                  // For this kNow, need to have a recoiler.
-                  // Try two incoming.
-                  pTnow = pTpythia(e, kNow, jNow, iInA, FSR);
-                  if (pTnow > 0.) pTemt = (pTemt < 0) 
-                    ? pTnow : min(pTemt, pTnow);
-                  pTnow = pTpythia(e, kNow, jNow, iInB, FSR);
-                  if (pTnow > 0.) pTemt = (pTemt < 0) 
-                    ? pTnow : min(pTemt, pTnow);
-                  
-                  // Try all other outgoing.
-                  for (int rNow = 0; rNow < e.size(); rNow++) {
-                    if (rNow == kNow || rNow == jNow ||
-                        !e[rNow].isFinal() || e[rNow].colType() == 0) continue;
-                    pTnow = pTpythia(e, kNow, jNow, rNow, FSR);
-                    if (pTnow > 0.) pTemt = (pTemt < 0) 
-                      ? pTnow : min(pTemt, pTnow);
-                  } // for (rNow)
-                  
-                } // for (kNow)
-              } // if (!FSR)
-            } // if (pTdefMode)
-          } // for (j)
-        }
-      } // for (xSR)
-      
-#ifdef DBGOUTPUT
-      cout << "pTcalc: i = " << i << ", j = " << j << ", k = " << k
-      << ", r = " << r << ", xSR = " << xSRin
-      << ", pTemt = " << pTemt << endl;
-#endif
-      
-      return pTemt;
-    }
-    
-    //--------------------------------------------------------------------------
-    
-    // Extraction of m_pThard based on the incoming event.
-    // Assume that all the final-state particles are in a continuous block
-    // at the end of the event and the final entry is the POWHEG emission.
-    // If there is no POWHEG emission, then m_pThard is set to Qfac.
-    
-    bool canVetoMPIStep()    { return true; }
-    int  numberVetoMPIStep() { return 1; }
-    bool doVetoMPIStep(int nMPI, const Event &e) {
-      // Extra check on nMPI
-      if (nMPI > 1) return false;
-      
-      // Find if there is a POWHEG emission. Go backwards through the
-      // event record until there is a non-final particle. Also sum pT and
-      // find pT_1 for possible MPI vetoing
-      int    count = 0;
-      double pT1 = 0., pTsum = 0.;
-      for (int i = e.size() - 1; i > 0; i--) {
-        if (e[i].isFinal()) {
-          count++;
-          pT1    = e[i].pT();
-          pTsum += e[i].pT();
-        } else break;
-      }
-      // Extra check that we have the correct final state
-      if (count != m_nFinal(settingsPtr) && count != m_nFinal(settingsPtr) + 1) {
-        cout << "Error: wrong number of final state particles in event" << endl;
-        exit(1);
-      }
-      // Flag if POWHEG radiation present and index
-      bool isEmt = (count == m_nFinal(settingsPtr)) ? false : true;
-      int  iEmt  = (isEmt) ? e.size() - 1 : -1;
-      
-      // If there is no radiation or if pThardMode is 0 then set m_pThard to QRen.
-      if (!isEmt || m_pTHardMode(settingsPtr) == 0) {
-        m_pThard = infoPtr->QRen();
-        
-        // If pThardMode is 1 then the pT of the POWHEG emission is checked against
-        // all other incoming and outgoing partons, with the minimal value taken
-      } else if (m_pTHardMode(settingsPtr) == 1) {
-        m_pThard = pTcalc(e, -1, iEmt, -1, -1, -1);
-        
-        // If pThardMode is 2, then the pT of all final-state partons is checked
-        // against all other incoming and outgoing partons, with the minimal value
-        // taken
-      } else if (m_pTHardMode(settingsPtr) == 2) {
-        m_pThard = pTcalc(e, -1, -1, -1, -1, -1);
-        
-      }
-      
-      // Find MPI veto pT if necessary
-      if (m_MPIvetoMode == 1) {
-        m_pTMPI = (isEmt) ? pTsum / 2. : pT1;
-      }
-      
-#ifdef DBGOUTPUT
-      cout << "doVetoMPIStep: QRen = " << infoPtr->QRen()
-      << ", m_pThard = " << m_pThard << endl << endl;
-#endif
-      
-//      std::cout<<"vetoScale = "<<m_pThard<<std::endl;
-      
-      // Initialise other variables
-      m_accepted   = false;
-      m_nAcceptSeq = m_nISRveto = m_nFSRveto = 0;
-      
-      // Do not veto the event
-      return false;
-    }
-    
-    //--------------------------------------------------------------------------
-    
-    // ISR veto
-    
-    bool canVetoISREmission() { return (m_vetoMode == 0) ? false : true; }
-    bool doVetoISREmission(int, const Event &e, int iSys) {
-      // Must be radiation from the hard system
-      if (iSys != 0) return false;
-      
-      // If we already have m_accepted 'm_vetoCount' emissions in a row, do nothing
-      if (m_vetoMode == 1 && m_nAcceptSeq >= m_vetoCount) return false;
-      
-      // Pythia radiator after, emitted and recoiler after.
-      int iRadAft = -1, iEmt = -1, iRecAft = -1;
-      for (int i = e.size() - 1; i > 0; i--) {
-        if      (iRadAft == -1 && e[i].status() == -41) iRadAft = i;
-        else if (iEmt    == -1 && e[i].status() ==  43) iEmt    = i;
-        else if (iRecAft == -1 && e[i].status() == -42) iRecAft = i;
-        if (iRadAft != -1 && iEmt != -1 && iRecAft != -1) break;
-      }
-      if (iRadAft == -1 || iEmt == -1 || iRecAft == -1) {
-        e.list();
-        cout << "Error: couldn't find Pythia ISR emission" << endl;
-        exit(1);
-      }
-      
-      // m_pTemtMode == 0: pT of emitted w.r.t. radiator
-      // m_pTemtMode == 1: min(pT of emitted w.r.t. all incoming/outgoing)
-      // m_pTemtMode == 2: min(pT of all outgoing w.r.t. all incoming/outgoing)
-      int xSR      = (m_pTemtMode == 0) ? 0       : -1;
-      int i        = (m_pTemtMode == 0) ? iRadAft : -1;
-      int j        = (m_pTemtMode != 2) ? iEmt    : -1;
-      int k        = -1;
-      int r        = (m_pTemtMode == 0) ? iRecAft : -1;
-      double pTemt = pTcalc(e, i, j, k, r, xSR);
-      
-#ifdef DBGOUTPUT
-      cout << "doVetoISREmission: pTemt = " << pTemt << endl << endl;
-#endif
-      
-      // Veto if pTemt > m_pThard
-      if (pTemt > m_pThard) {
-        m_nAcceptSeq = 0;
-        m_nISRveto++;
-        return true;
-      }
-      
-      // Else mark that an emission has been m_accepted and continue
-      m_nAcceptSeq++;
-      m_accepted = true;
-      return false;
-    }
-    
-    //--------------------------------------------------------------------------
-    
-    // FSR veto
-    
-    bool canVetoFSREmission() { return (m_vetoMode == 0) ? false : true; }
-    bool doVetoFSREmission(int, const Event &e, int iSys, bool) {
-      // Must be radiation from the hard system
-      if (iSys != 0) return false;
-      
-      // If we already have m_accepted 'm_vetoCount' emissions in a row, do nothing
-      if (m_vetoMode == 1 && m_nAcceptSeq >= m_vetoCount) return false;
-      
-      // Pythia radiator (before and after), emitted and recoiler (after)
-      int iRecAft = e.size() - 1;
-      int iEmt    = e.size() - 2;
-      int iRadAft = e.size() - 3;
-      int iRadBef = e[iEmt].mother1();
-      if ( (e[iRecAft].status() != 52 && e[iRecAft].status() != -53) ||
-          e[iEmt].status() != 51 || e[iRadAft].status() != 51) {
-        e.list();
-        cout << "Error: couldn't find Pythia FSR emission" << endl;
-        exit(1);
-      }
-      
-      // Behaviour based on m_pTemtMode:
-      //  0 - pT of emitted w.r.t. radiator before
-      //  1 - min(pT of emitted w.r.t. all incoming/outgoing)
-      //  2 - min(pT of all outgoing w.r.t. all incoming/outgoing)
-      int xSR = (m_pTemtMode == 0) ? 1       : -1;
-      int i   = (m_pTemtMode == 0) ? iRadBef : -1;
-      int k   = (m_pTemtMode == 0) ? iRadAft : -1;
-      int r   = (m_pTemtMode == 0) ? iRecAft : -1;
-      
-      // When m_pTemtMode is 0 or 1, iEmt has been selected
-      double pTemt = 0.;
-      if (m_pTemtMode == 0 || m_pTemtMode == 1) {
-        // Which parton is emitted, based on m_emittedMode:
-        //  0 - Pythia definition of emitted
-        //  1 - Pythia definition of radiated after emission
-        //  2 - Random selection of emitted or radiated after emission
-        //  3 - Try both emitted and radiated after emission
-        int j = iRadAft;
-        if (m_emittedMode == 0 || (m_emittedMode == 2 && rndmPtr->flat() < 0.5)) j++;
-        
-        for (int jLoop = 0; jLoop < 2; jLoop++) {
-          if      (jLoop == 0) pTemt = pTcalc(e, i, j, k, r, xSR);
-          else if (jLoop == 1) pTemt = min(pTemt, pTcalc(e, i, j, k, r, xSR));
-          
-          // For m_emittedMode == 3, have tried iRadAft, now try iEmt
-          if (m_emittedMode != 3) break;
-          if (k != -1) swap(j, k); else j = iEmt;
-        }
-        
-        // If m_pTemtMode is 2, then try all final-state partons as emitted
-      } else if (m_pTemtMode == 2) {
-        pTemt = pTcalc(e, i, -1, k, r, xSR);
-        
-      }
-      
-#ifdef DBGOUTPUT
-      cout << "doVetoFSREmission: pTemt = " << pTemt << endl << endl;
-#endif
-      
-      // Veto if pTemt > m_pThard
-      if (pTemt > m_pThard) {
-        m_nAcceptSeq = 0;
-        m_nFSRveto++;
-        return true;
-      }
-      
-      // Else mark that an emission has been m_accepted and continue
-      m_nAcceptSeq++;
-      m_accepted = true;
-      return false;
-    }
-    
-    //--------------------------------------------------------------------------
-    
-    // MPI veto
-    
-    bool canVetoMPIEmission() { return (m_MPIvetoMode == 0) ? false : true; }
-    bool doVetoMPIEmission(int, const Event &e) {
-      if (m_MPIvetoMode == 1) {
-        if (e[e.size() - 1].pT() > m_pTMPI) {
-#ifdef DBGOUTPUT
-          cout << "doVetoMPIEmission: pTnow = " << e[e.size() - 1].pT()
-          << ", m_pTMPI = " << m_pTMPI << endl << endl;
-#endif
-          return true;
-        }
-      }
-      return false;
-    }
-    
-    //--------------------------------------------------------------------------
-    
-    // Functions to return information
-    
-    int    getNISRveto() { return m_nISRveto; }
-    int    getNFSRveto() { return m_nFSRveto; }
-    
-  private:
-    
-    Pythia8_UserHooks::UserSetting<int> m_nFinal;
-    Pythia8_UserHooks::UserSetting<int> m_pTHardMode;
-    Pythia8_UserHooks::UserSetting<int> m_pTDefMode;
-    
-    int  m_vetoMode, m_vetoCount, m_pTemtMode,
-    m_emittedMode, m_MPIvetoMode;
-    double m_pThard, m_pTMPI;
-    bool   m_accepted;
-    // The number of m_accepted emissions (in a row)
-    int m_nAcceptSeq;
-    // Statistics on vetos
-    unsigned long int m_nISRveto, m_nFSRveto;
-    
-  };
-}
diff --git a/Generators/Pythia8_i/src/UserHooksFactory.cxx b/Generators/Pythia8_i/src/UserHooksFactory.cxx
index 6fef4bac8d7..83d65314dbf 100644
--- a/Generators/Pythia8_i/src/UserHooksFactory.cxx
+++ b/Generators/Pythia8_i/src/UserHooksFactory.cxx
@@ -1,11 +1,10 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
 #include "Pythia8_i/UserHooksFactory.h"
 
 #include <stdexcept>
 
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
 namespace Pythia8_UserHooks{
  
   UserHooks *UserHooksFactory::create(const string &name){
@@ -23,5 +22,39 @@ namespace Pythia8_UserHooks{
     return creators;
   }
   
+  template<>
+  std::map<std::string, double> &UserHooksFactory::userSettings(){
+    static std::map<std::string, double> settings;
+    return settings;
+  }
+
+  template<>
+  std::map<std::string, int> &UserHooksFactory::userSettings(){
+    static std::map<std::string, int> settings;
+    return settings;
+  }
+  
+  template<>
+  std::map<std::string, std::string> &UserHooksFactory::userSettings(){
+    static std::map<std::string, std::string> settings;
+    return settings;
+  }
+  
+  
+  std::map<std::string, double> &UserHooksFactory::userParams(){
+    static std::map<std::string, double> params;
+    return params;
+  }
+
+  std::map<std::string, int> &UserHooksFactory::userModes(){
+    static std::map<std::string, int> modes;
+    return modes;
+  }
+  
+  std::map<std::string, std::string> &UserHooksFactory::userWords(){
+    static std::map<std::string, std::string> words;
+    return words;
+  }
+  
 }
 
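
The explicit specialisations above give one process-wide map per value type; UserSetting constructors fill them and duplicate names are rejected. A short sketch of how the collected defaults could be inspected (illustration only; it assumes the hooks have been constructed so their constructors have already populated the maps):

    // Illustrative dump of the user-setting defaults collected by the factory.
    #include "Pythia8_i/UserHooksFactory.h"
    #include <iostream>

    void dumpUserSettingDefaults() {
      for (const auto &p : Pythia8_UserHooks::UserHooksFactory::userSettings<double>())
        std::cout << "parm " << p.first << " = " << p.second << "\n";
      for (const auto &p : Pythia8_UserHooks::UserHooksFactory::userSettings<int>())
        std::cout << "mode " << p.first << " = " << p.second << "\n";
      for (const auto &p : Pythia8_UserHooks::UserHooksFactory::userSettings<std::string>())
        std::cout << "word " << p.first << " = " << p.second << "\n";
    }
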
diff --git a/Projects/Athena/externals/MadGraph5Amc.cmake b/Projects/Athena/externals/MadGraph5Amc.cmake
index a37c8dadcdc..a410e52e444 100644
--- a/Projects/Athena/externals/MadGraph5Amc.cmake
+++ b/Projects/Athena/externals/MadGraph5Amc.cmake
@@ -2,6 +2,6 @@
 # File specifying the location of MadGraph to use.
 #
 
-set( MADGRAPH5AMC_VERSION 2.4.3.atlas )
+set( MADGRAPH5AMC_VERSION 2.6.1.atlas )
 set( MADGRAPH5AMC_ROOT
    ${LCG_RELEASE_DIR}/MCGenerators/madgraph5amc/${MADGRAPH5AMC_VERSION}/${LCG_PLATFORM} )
diff --git a/Projects/Athena/externals/Pythia8.cmake b/Projects/Athena/externals/Pythia8.cmake
index 7dd4c8a1f4c..f40af1563d1 100644
--- a/Projects/Athena/externals/Pythia8.cmake
+++ b/Projects/Athena/externals/Pythia8.cmake
@@ -3,5 +3,6 @@
 #
 
 set( PYTHIA8_VERSION 219 )
+
 set( PYTHIA8_ROOT
    ${LCG_RELEASE_DIR}/MCGenerators/pythia8/${PYTHIA8_VERSION}/${LCG_PLATFORM} )
-- 
GitLab