Commit d7dde865 authored by Marco Clemencic

release tag v8r2p11

parent 8d498cda
Showing changed files with 445 additions and 89 deletions
@@ -2,7 +2,7 @@
"""
Create a pool xml catalog slice for the specified LFNs
"""
__RCSID__ = "$Id: dirac-bookkeeping-genXMLCatalog.py 84480 2015-07-23 12:47:43Z phicharp $"
__RCSID__ = "$Id: dirac-bookkeeping-genXMLCatalog.py 86030 2015-10-19 17:10:41Z phicharp $"
import sys, os, time
@@ -156,7 +156,7 @@ def execute():
if not result['OK']:
gLogger.fatal( "Error getting ancestor files..." )
DIRAC.exit( 1 )
lfnList = [x[0]['FileName'] for x in result['Value']['Successful'].values()] + result['Value']['Successful'].keys()
lfnList = [anc['FileName'] for ancestors in result['Value']['Successful'].values() for anc in ancestors] + result['Value']['Successful'].keys()
from DIRAC.Interfaces.API.Dirac import Dirac
if not verbose:
......
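As an aside on the dirac-bookkeeping-genXMLCatalog change above: each entry of result['Value']['Successful'] is a list of ancestor records, so the old expression kept only the first ancestor of every LFN while the new double comprehension keeps them all. A minimal sketch with invented ancestor data (not real Bookkeeping output):

# Toy data, for illustration only
successful = {
    '/lhcb/data/child1.dst': [{'FileName': '/lhcb/data/anc1.raw'},
                              {'FileName': '/lhcb/data/anc2.raw'}],
    '/lhcb/data/child2.dst': [{'FileName': '/lhcb/data/anc3.raw'}],
}
# Old expression: only the first ancestor of each LFN survives
oldList = [x[0]['FileName'] for x in successful.values()] + successful.keys()
# New expression: all ancestors are flattened into the list
newList = [anc['FileName'] for ancestors in successful.values() for anc in ancestors] + successful.keys()
print sorted( oldList )
print sorted( newList )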
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-ancestors.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-ancestors.py $
# File : dirac-bookkeeping-get-file-ancestors
# Author : Zoltan Mathe
########################################################################
......
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-descendants.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-descendants.py $
# File : dirac-bookkeeping-get-file-descendants
# Author : Zoltan Mathe
########################################################################
......
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-sisters.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/BookkeepingSystem/scripts/dirac-bookkeeping-get-file-sisters.py $
# File : dirac-bookkeeping-get-file-sisters
# Author : Zoltan Mathe
########################################################################
......
@@ -38,10 +38,18 @@ def scaleLumi( lumi ):
def execute():
triggerRate = False
listRuns = False
listFills = False
for switch in Script.getUnprocessedSwitches():
if switch[0] == 'TriggerRate':
triggerRate = True
elif switch[0] == 'ListRuns':
listRuns = True
elif switch[0] == 'ListFills':
listFills = True
if listRuns:
listFills = False
lfns = dmScript.getOption( 'LFNs' )
bkQuery = dmScript.getBKQuery()
if not bkQuery and not lfns:
@@ -163,9 +171,10 @@ def execute():
fileSize += metadata['FileSize']
lumi += metadata['Luminosity']
run = metadata['RunNumber']
runList.setdefault( run, [ 0., 0. ] )
runList.setdefault( run, [ 0., 0., 0. ] )
runList[run][0] += metadata['Luminosity']
runList[run][1] += metadata['EventStat']
runList[run][2] += metadata['FileSize']
nbFiles += 1
except Exception as e:
gLogger.exception( 'Exception for %s' % str( metadata.keys() ), e )
@@ -179,9 +188,10 @@ def execute():
fileSize += metadata['FileSize']
lumi += metadata['Luminosity']
run = metadata['RunNumber']
runList.setdefault( run, [ 0, 0 ] )
runList.setdefault( run, [ 0, 0, 0 ] )
runList[run][0] += metadata['Luminosity']
runList[run][1] += metadata['EventStat']
runList[run][2] += metadata['FileSize']
nbFiles += 1
except Exception as e:
gLogger.exception( 'Exception for %s' % lfn, str( metadata.keys() ), e )
@@ -234,30 +244,146 @@ def execute():
print "%s: %.1f" % ( ( 'Files per %s' % lumiUnit ).ljust( tab ), filesPerLumi )
if triggerRate:
from datetime import timedelta
# Get information from the runs
fullDuration = 0.
totalLumi = 0.
for run in sorted( runList ):
res = bk.getRunInformations( run )
if not res['OK']:
gLogger.error( 'Error from BK getting run information', res['Message'] )
else:
info = res['Value']
fullDuration += ( info['RunEnd'] - info['RunStart'] ).total_seconds()
lumiDict = dict( zip( info['Stream'], info['luminosity'] ) )
statDict = dict( zip( info['Stream'], info['Number of events'] ) )
lumi = info['TotalLuminosity']
if abs( lumi - runList[run][0] / nDatasets ) > 1:
print 'Run and files luminosity mismatch (ignored): run', run, 'runLumi', lumi, 'filesLumi', runList[run][0] / nDatasets
# Get information from the runs, but first get those that are Finished
res = bk.getRunStatus( list( runList ) )
if not res['OK']:
gLogger.error( 'Error getting run status', res['Message'] )
runs = []
else:
success = res['Value']['Successful']
runs = [run for run in success if success[run].get( 'Finished' ) == 'Y']
notFinished = len( runList ) - len( runs )
if notFinished:
gLogger.notice( '%d runs not Finished (ignored), %s runs Finished (used for trigger rate)' % ( notFinished, str( len( runs ) if len( runs ) else 'no' ) ) )
if runs:
nevts = 0
size = 0
fullDuration = 0.
totalLumi = 0.
fills = {}
fillDuration = {}
for run in sorted( runs ):
res = bk.getRunInformations( run )
if not res['OK']:
gLogger.error( 'Error from BK getting run information', res['Message'] )
else:
totalLumi += lumi
rate = ( '%.1f events/second' % ( nevts / fullDuration ) ) if fullDuration else 'Run duration not available'
totalLumi, lumiUnit = scaleLumi( totalLumi )
print '%s: %.3f %s' % ( 'Total Luminosity'.ljust( tab ), totalLumi, lumiUnit )
print '%s: %.1f hours (%d runs)' % ( 'Run duration'.ljust( tab ), fullDuration / 3600., len( runList ) )
print '%s: %s' % ( 'Trigger rate'.ljust( tab ), rate )
nevts += runList[run][1]
size += runList[run][2]
info = res['Value']
fill = info['FillNumber']
fills.setdefault( fill, [] ).append( str( run ) )
runDuration = ( info['RunEnd'] - info['RunStart'] ).total_seconds() / 3600.
fillDuration[fill] = fillDuration.setdefault( fill, 0 ) + runDuration
fullDuration += runDuration
lumiDict = dict( zip( info['Stream'], info['luminosity'] ) )
statDict = dict( zip( info['Stream'], info['Number of events'] ) )
lumi = info['TotalLuminosity']
if abs( lumi - runList[run][0] / nDatasets ) > 1:
print 'Run and files luminosity mismatch (ignored): run', run, 'runLumi', lumi, 'filesLumi', runList[run][0] / nDatasets
else:
totalLumi += lumi
rate = ( '%.1f events/second' % ( nevts / fullDuration / 3600 ) ) if fullDuration else 'Run duration not available'
totalLumi, lumiUnit = scaleLumi( totalLumi )
print '%s: %.3f %s' % ( 'Total Luminosity'.ljust( tab ), totalLumi, lumiUnit )
print '%s: %.1f hours (%d runs)' % ( 'Run duration'.ljust( tab ), fullDuration, len( runs ) )
print '%s: %s' % ( 'Trigger rate'.ljust( tab ), rate )
rate = ( '%.1f MB/second' % ( size / 1000000. / fullDuration / 3600. ) ) if fullDuration else 'Run duration not available'
print '%s: %s' % ( 'Throughput'.ljust( tab ), rate )
result = getCollidingBunches( min( fills.keys() ), max( fills.keys() ) )
collBunches = 0.
for fill in fillDuration:
if fill not in result:
print "Error: no number of colliding bunches for fill %d" % fillDuration
else:
collBunches += result[fill] * fillDuration[fill]
collBunches /= fullDuration
print '%s: %.1f on average' % ( 'Colliding bunches'.ljust( tab ), collBunches )
if listFills:
print 'List of fills: ', ','.join( ["%d (%d runs, %.1f hours)" % ( fill, len( fills[fill] ), fillDuration[fill] ) for fill in sorted( fills )] )
if listRuns:
for fill in sorted( fills ):
print 'Fill %d (%4d bunches, %.1f hours):' % ( fill, result[fill], fillDuration[fill] ), ','.join( fills[fill] )
print ""
def getCollidingBunches( minFill, maxFill ):
import pycurl
import sys
from urllib import urlencode
from StringIO import StringIO
runDbUrl = 'http://lbrundb.cern.ch/rundb/export_fills_maintable'
data = urlencode( {"fill_id_min": minFill, "fill_id_max" : maxFill, "fill_flist" : "params.nCollidingBunches"} )
buf = StringIO()
c = pycurl.Curl()
c.setopt( pycurl.URL, runDbUrl )
c.setopt( pycurl.POST, 1 )
c.setopt( pycurl.POSTFIELDS, data )
c.setopt( c.WRITEFUNCTION, buf.write )
c.perform()
c.close()
body = buf.getvalue()
from HTMLParser import HTMLParser
class tabHolder( object ):
def __init__( self ):
self.listOfTabs = []
self.curTab = []
def nextTab( self ):
self.listOfTabs.append( self.curTab )
self.curTab = []
return self.curTab
def getTab( self ):
return self.curTab
tb = tabHolder()
# create a subclass and override the handler methods
class MyHTMLParser( HTMLParser ):
def handle_starttag( self, tag, attrs ):
if tag in ( 'th', 'td' ) and len( attrs ) == 1 and "numbers" in attrs[0][1]:
self.toLog = True
elif tag == 'form' and len( attrs ) > 1 and "frmExportFills" in attrs[0][1]:
self.inTheForm = True
def handle_endtag( self, tag ):
if tag == 'tr':
tb.nextTab()
elif tag == 'th':
self.toLog = False
def handle_data( self, data ):
if getattr( self, "toLog", None ) and getattr( self, 'inTheForm', None ):
tb.getTab().append( data )
# instantiate the parser and feed it some HTML
parser = MyHTMLParser()
parser.feed( body )
wantedValues = []
for i, t in enumerate( tb.listOfTabs ):
if i == 0:
continue
elif i == 1:
continue
elif i == len( tb.listOfTabs ) - 1 :
continue
values = []
for v in t:
v = v.replace( '\\n', '' ).replace( '\\t', '' ).replace( ' ', '' )
if not v:
continue
values.append( v )
wantedValues.append( values )
result = {}
for line in wantedValues:
result[int( line[0] )] = int( line[2] )
return result
if __name__ == "__main__":
@@ -266,6 +392,8 @@ if __name__ == "__main__":
dmScript.registerFileSwitches()
Script.registerSwitch( '', 'TriggerRate', ' For RAW files, returns the trigger rate' )
Script.registerSwitch( '', 'ListRuns', ' Give a list of runs (to be used with --Trigger)' )
Script.registerSwitch( '', 'ListFills', ' Give a list of fills (to be used with --Trigger)' )
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'Usage:',
' %s [option|cfgfile]' % Script.scriptName, ] ) )
......
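For the new --TriggerRate branch in the script above, the arithmetic reduces to the sketch below (toy numbers, not real run data): run durations are summed in hours per fill, rate and throughput are normalised to seconds, and the colliding bunches are averaged over fills weighted by fill duration.

# Toy inputs, for illustration only
fillDuration = {4200: 6.0, 4201: 3.0}    # hours of Finished runs per fill
collPerFill = {4200: 1750, 4201: 1400}   # colliding bunches per fill (from the run DB)
nevts = 3.6e9                            # total EventStat of the dataset
size = 7.2e14                            # total FileSize of the dataset, in bytes

fullDuration = sum( fillDuration.values() )                              # hours
rate = nevts / ( fullDuration * 3600. )                                  # events/second
throughput = size / 1000000. / ( fullDuration * 3600. )                  # MB/second
collBunches = sum( collPerFill[f] * fillDuration[f] for f in fillDuration ) / fullDuration
print '%.1f events/second, %.1f MB/second, %.1f colliding bunches on average' % ( rate, throughput, collBunches )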
@@ -4,7 +4,7 @@
use by workflow modules or client tools.
"""
__RCSID__ = "$Id: ProductionEnvironment.py 84919 2015-08-17 14:15:47Z fstagni $"
__RCSID__ = "$Id: ProductionEnvironment.py 86093 2015-10-23 15:29:47Z fstagni $"
import os
import shutil
@@ -358,24 +358,21 @@ def getProjectCommand( location, applicationName, applicationVersion, extraPacka
#############################################################################
def getScriptsLocation():
""" This function determines the location of LbLogin / SetupProject based on the
local site configuration. The order of preference for the local software
location is:
- $VO_LHCB_SW_DIR/lib - typically defined for Grid jobs
- LocalSite/SharedArea - for locally running jobs
If LbLogin / SetupProject are not found in the above locations this function
""" This function determines the location of LbLogin / SetupProject based on the local site configuration.
SharedArea is in local cfg file at /LocalSite/SharedArea and corresponds to $VO_LHCB_SW_DIR/lib for Grid jobs
If LbLogin / SetupProject are not found in the above location this function
returns an error. Otherwise the location of the environment scripts is returned
in a dictionary with the name as the key.
"""
softwareArea = getSharedArea()
softwareArea = gConfig.getValue( '/LocalSite/SharedArea' )
if softwareArea:
gLogger.verbose( 'MYSITEROOT = %s' % softwareArea )
if os.path.exists( os.path.join( softwareArea, groupLogin ) ):
gLogger.info( 'Using %s from the site shared area directory at %s' % ( groupLogin, softwareArea ) )
else:
gLogger.error( '%s not found in local area or shared area %s' % ( groupLogin, softwareArea ) )
return S_ERROR( '%s not found in local or shared areas' % ( groupLogin ) )
gLogger.verbose( 'MYSITEROOT = %s' % softwareArea )
if os.path.exists( os.path.join( softwareArea, groupLogin ) ):
gLogger.info( 'Using %s from the site shared area directory at %s' % ( groupLogin, softwareArea ) )
else:
gLogger.error( '%s not found in local area or shared area %s' % ( groupLogin, softwareArea ) )
return S_ERROR( '%s not found in local or shared areas' % ( groupLogin ) )
gLogger.verbose( 'Using scripts from software area at %s' % softwareArea )
groupLoginPath = os.path.join( softwareArea, groupLogin )
@@ -397,29 +394,28 @@ def getScriptsLocation():
'MYSITEROOT':softwareArea} )
def getSharedArea():
"""
Discover location of Shared SW area. This area is populated by a tool independent from the DIRAC jobs
"""
sharedArea = ''
if os.environ.has_key( 'VO_LHCB_SW_DIR' ):
sharedArea = os.path.join( os.environ[ 'VO_LHCB_SW_DIR' ], 'lib' )
gLogger.debug( 'Using VO_LHCB_SW_DIR at "%s"' % sharedArea )
if os.environ[ 'VO_LHCB_SW_DIR' ] == '.':
if not os.path.isdir( 'lib' ):
os.mkdir( 'lib' )
elif gConfig.getValue( '/LocalSite/SharedArea', '' ):
sharedArea = gConfig.getValue( '/LocalSite/SharedArea' )
gLogger.debug( 'Using CE SharedArea at "%s"' % sharedArea )
if sharedArea:
# if defined, check that it really exists
if not os.path.isdir( sharedArea ):
gLogger.error( 'Missing Shared Area Directory:', sharedArea )
sharedArea = ''
return sharedArea
# def getSharedArea():
# """ Discovers location of Shared SW area. This area is populated by a tool independent from the DIRAC jobs
# """
# sharedArea = ''
# if os.environ.has_key( 'VO_LHCB_SW_DIR' ):
# sharedArea = os.path.join( os.environ[ 'VO_LHCB_SW_DIR' ], 'lib' )
# gLogger.debug( "Using VO_LHCB_SW_DIR at '%s'" % sharedArea )
# if os.environ[ 'VO_LHCB_SW_DIR' ] == '.':
# if not os.path.isdir( 'lib' ):
# os.mkdir( 'lib' )
# elif gConfig.getValue( '/LocalSite/SharedArea', '' ):
# sharedArea = gConfig.getValue( '/LocalSite/SharedArea' )
# gLogger.debug( "Using CE SharedArea at '%s'" % sharedArea )
# else:
# gLogger.warn( "Can't find shared area, forcing it" )
# sharedArea = '/cvmfs/lhcb.cern.ch/lib'
#
# if not os.path.isdir( sharedArea ):
# gLogger.error( "Missing Shared Area Directory", sharedArea )
# sharedArea = ''
#
# return sharedArea
def getPlatformsConfigsDict():
......
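The ProductionEnvironment change above relies on the pilot having already written /LocalSite/SharedArea (see the LHCbConfigureBasics hunk further down), so job-side code only needs the configuration lookup. A minimal illustration, assuming a configured DIRAC client environment where gConfig and gLogger are available:

from DIRAC import gConfig, gLogger

softwareArea = gConfig.getValue( '/LocalSite/SharedArea', '' )
if softwareArea:
    gLogger.verbose( 'MYSITEROOT = %s' % softwareArea )
else:
    gLogger.warn( 'No /LocalSite/SharedArea defined by the pilot' )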
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/DataManagementSystem/scripts/dirac-dms-get-file.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/DataManagementSystem/scripts/dirac-dms-get-file.py $
# File : dirac-dms-get-file
# Author : Stuart Paterson
########################################################################
......
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/DataManagementSystem/scripts/dirac-dms-remove-files.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/DataManagementSystem/scripts/dirac-dms-remove-files.py $
########################################################################
"""
Remove the given file or a list of files from the File Catalog and from the storage
......
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/DataManagementSystem/scripts/dirac-dms-remove-replicas.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/DataManagementSystem/scripts/dirac-dms-remove-replicas.py $
########################################################################
"""
Remove replicas of a (list of) LFNs at a list of sites. It is possible to request a minimum of remaining replicas
......
#!/usr/bin/env python
########################################################################
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/DataManagementSystem/scripts/dirac-dms-replicate-lfn.py $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/DataManagementSystem/scripts/dirac-dms-replicate-lfn.py $
# File : dirac-dms-replicate-lfn
# Author : Stuart Paterson
########################################################################
......
#!/usr/bin/env python
'''
dirac-production-runjoblocal
Module created to run failed jobs locally on a CVMFS-configured machine.
It creates the necessary environment, downloads and modifies the required files, and runs the job.
Usage:
dirac-production-runjoblocal (job ID) (Data input mode) - No parentheses
'''
__RCSID__ = "$transID: dirac-production-runjoblocal.py 61232 2015-09-22 16:20:00 msoares $"
import DIRAC
import LHCbDIRAC
import os
import sys
import errno
#from DIRAC.Core.Utilities import DError
from DIRAC.Core.Base import Script
from DIRAC import S_OK, S_ERROR
Script.registerSwitch( 'D:', 'Download=' , 'Defines data acquisition as DownloadInputData' )
Script.registerSwitch( 'P:', 'Protocol=' , 'Defines data acquisition as InputDataByProtocol' )
Script.parseCommandLine( ignoreErrors = False )
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'\nUsage:',
'dirac-production-runjoblocal [Data input mode] [job ID]'
'\nArguments:',
' Download (Job ID): Defines data acquisition as DownloadInputData',
' Protocol (Job ID): Defines data acquisition as InputDataByProtocol\n'] ) )
_downloadinputdata = False
_jobID = None
for switch in Script.getUnprocessedSwitches():
if switch [ 0 ] in ( 'D', 'Download' ):
_downloadinputdata = True
_jobID = switch[1]
if switch [ 0 ] in ( 'P', 'Protocol' ):
_downloadinputdata = False
_jobID = switch[1]
def __runSystemDefaults(jobID = None):
"""
Creates the environment for running the job and returns
the path for the other functions.
"""
tempdir = "LHCbjob" + str(jobID) + "temp"
os.environ['VO_LHCB_SW_DIR'] = "/cvmfs/lhcb.cern.ch"
try:
os.mkdir(tempdir)
if not sys.exc_info()[1][0]:
S_OK("Temporary directory created.")
pass
except:
if sys.exc_info()[1][0] == 17:
S_OK("Temporary directory already exists.")
elif sys.exc_info()[1][0] == 30:
print sys.exc_info()[1], "Unable to create temporary directory"
# DError(errno.EROFS, "Unable to create temporary directory")
basepath = os.getcwd()
return basepath + "/" + tempdir
def __downloadJobDescriptionXML(jobID, basepath):
"""
Downloads the jobDescription.xml file into the temporary directory
created.
"""
from DIRAC.Interfaces.API.Dirac import Dirac
jdXML = Dirac()
jdXML.getInputSandbox(jobID, basepath)
def __modifyJobDescription(jobID, basepath, downloadinputdata):
"""
Modifies the jobDescription.xml so that it uses InputDataByProtocol instead of DownloadInputData
"""
if not downloadinputdata:
from xml.etree import ElementTree as et
archive = et.parse(basepath + "/InputSandbox" + str(jobID) + "/jobDescription.xml")
for element in archive.getiterator():
if element.text == "DIRAC.WorkloadManagementSystem.Client.DownloadInputData":
element.text = "DIRAC.WorkloadManagementSystem.Client.InputDataByProtocol"
archive.write(basepath + "/InputSandbox" + str(jobID) + "/jobDescription.xml")
return S_OK("Job parameter changed from DownloadInputData to InputDataByProtocol.")
def __downloadPilotScripts(basepath):
"""
Downloads the scripts necessary to configure the pilot
"""
#include retry function
out = os.system("wget -P " + basepath + "/ http://lhcbproject.web.cern.ch/lhcbproject/Operations/VM/pilotscripts/LHCbPilotCommands.py")
if not out:
S_OK("LHCbPilotCommands.py script successfully download.\n")
else:
print "LHCbPilotCommands.py script download error.\n"
#DError(errno.ENETUNREACH, "LHCbPilotCommands.py script download error.\n" )
out = os.system("wget -P " + basepath + "/ http://lhcbproject.web.cern.ch/lhcbproject/Operations/VM/pilotscripts/dirac-pilot.py")
if not out:
S_OK("dirac-pilot.py script successfully download.\n")
else:
print "dirac-pilot.py script download error.\n"
#DError(errno.ENETUNREACH, "dirac-pilot.py script download error.\n" )
out = os.system("wget -P " + basepath + "/ http://lhcbproject.web.cern.ch/lhcbproject/Operations/VM/pilotscripts/pilotCommands.py")
if not out:
S_OK("pilotCommands.py script successfully download.\n")
else:
print "pilotCommands.py script download error.\n"
#DError(errno.ENETUNREACH, "pilotCommands.py script download error.\n" )
out = os.system("wget -P " + basepath + "/ http://lhcbproject.web.cern.ch/lhcbproject/Operations/VM/pilotscripts/pilotTools.py")
if not out:
S_OK("pilotTools.py script successfully download.\n")
else:
print "pilotTools.py script download error.\n"
#DError(errno.ENETUNREACH, "pilotTools.py script download error.\n" )
def __configurePilot(basepath):
"""
Configures the pilot.
"""
out = os.system("python " + basepath + "/dirac-pilot.py -S LHCb-Production -l LHCb -C dips://lbvobox18.cern.ch:9135/Configuration/Server -N ce.debug.ch -Q default -n DIRAC.JobDebugger.ch -M 1 -E LHCbPilot -X LHCbConfigureBasics,LHCbConfigureSite,LHCbConfigureArchitecture,LHCbConfigureCPURequirements -dd")
if not out:
dir = str(os.getcwd())
os.rename(dir + '/.dirac.cfg', dir + '/.dirac.cfg.old')
os.system("cp " + dir + "/pilot.cfg " + dir + "/.dirac.cfg")
return S_OK("Pilot successfully configured.")
# else:
# some DErrno message
def __runJobLocally(jobID, basepath):
"""
Runs the job!
"""
from LHCbDIRAC.Interfaces.API.LHCbJob import LHCbJob
localJob = LHCbJob(basepath + "/InputSandbox" + str(jobID) + "/jobDescription.xml")
localJob.setInputSandbox(os.getcwd()+"/pilot.cfg")
localJob.setConfigArgs(os.getcwd()+"/pilot.cfg")
os.chdir(basepath)
localJob.runLocal()
if __name__ == "__main__":
dir = os.getcwd()
try:
_path = __runSystemDefaults(_jobID)
__downloadJobDescriptionXML(_jobID, _path)
__modifyJobDescription(_jobID, _path, _downloadinputdata)
__downloadPilotScripts(_path)
__configurePilot(_path)
__runJobLocally(_jobID, _path)
finally:
os.chdir(dir)
os.rename(dir + '/.dirac.cfg.old', dir + '/.dirac.cfg')
\ No newline at end of file
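The core of __modifyJobDescription above is a plain ElementTree text substitution. A self-contained sketch with an invented, simplified jobDescription snippet (only the two module names are taken from the script; the surrounding XML structure is made up):

from xml.etree import ElementTree as et

# Invented minimal XML, standing in for the downloaded jobDescription.xml
toyXML = ( '<Workflow><Parameter name="InputDataModule">'
           '<value>DIRAC.WorkloadManagementSystem.Client.DownloadInputData</value>'
           '</Parameter></Workflow>' )
archive = et.ElementTree( et.fromstring( toyXML ) )
for element in archive.getiterator():
    if element.text == "DIRAC.WorkloadManagementSystem.Client.DownloadInputData":
        element.text = "DIRAC.WorkloadManagementSystem.Client.InputDataByProtocol"
print et.tostring( archive.getroot() )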
@@ -112,6 +112,23 @@ class LHCbConfigureBasics( LHCbCommandBase, ConfigureBasics ):
""" Only case here, for now, is if to set or not the CAs and VOMS location, that should be found in CVMFS
"""
def _getBasicsCFG( self ):
super( LHCbConfigureBasics, self )._getBasicsCFG()
# Adding SharedArea (which should be in CVMFS)
if os.environ.has_key( 'VO_LHCB_SW_DIR' ):
sharedArea = os.path.join( os.environ[ 'VO_LHCB_SW_DIR' ], 'lib' )
self.log.debug( "Using VO_LHCB_SW_DIR at '%s'" % sharedArea )
if os.environ[ 'VO_LHCB_SW_DIR' ] == '.':
if not os.path.isdir( 'lib' ):
os.mkdir( 'lib' )
else:
sharedArea = '/cvmfs/lhcb.cern.ch/lib'
self.log.warn( "Can't find shared area, forcing it to %s" % sharedArea )
self.cfg.append( '-o /LocalSite/SharedArea=%s' % sharedArea )
def _getSecurityCFG( self ):
self.log.debug( "self.pp.installEnv: %s" % str( self.pp.installEnv ) )
@@ -190,7 +207,6 @@ class LHCbConfigureBasics( LHCbCommandBase, ConfigureBasics ):
class LHCbConfigureCPURequirements( LHCbCommandBase, ConfigureCPURequirements ):
pass
# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME# FIXME
# FIXME: this is just a copy/paste from DIRAC introducing the line for avoiding too slow CPUs
@@ -216,6 +232,7 @@ class LHCbConfigureCPURequirements( LHCbCommandBase, ConfigureCPURequirements ):
'' ).replace( " HS06", '' ) )
self.log.info( "Current normalized CPU as determined by 'dirac-wms-cpu-normalization' is %f" % cpuNormalizationFactor )
from DIRAC import gConfig
gConfig.forceRefresh()
slowCPU = float( gConfig.getValue( "Resources/Computing/CEDefaults/SlowCPULimit", 3.0 ) )
if cpuNormalizationFactor < slowCPU:
self.log.info( "Current normalized CPU is too slow, exiting" )
......
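The shared-area logic added to LHCbConfigureBasics._getBasicsCFG above boils down to the standalone sketch below (command-object plumbing stripped out; the CVMFS fallback path is the one hard-coded in the diff):

import os

def sharedAreaOption():
    # Resolve the shared area roughly as the pilot command does (sketch only)
    if 'VO_LHCB_SW_DIR' in os.environ:
        sharedArea = os.path.join( os.environ['VO_LHCB_SW_DIR'], 'lib' )
        if os.environ['VO_LHCB_SW_DIR'] == '.' and not os.path.isdir( 'lib' ):
            os.mkdir( 'lib' )
    else:
        # Fallback used when VO_LHCB_SW_DIR is not defined
        sharedArea = '/cvmfs/lhcb.cern.ch/lib'
    return '-o /LocalSite/SharedArea=%s' % sharedArea

print sharedAreaOption()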
@@ -5,7 +5,7 @@ __path__ = extend_path( __path__, __name__ )
majorVersion = 8
minorVersion = 2
patchLevel = 10
patchLevel = 11
preVersion = 0
version = "v%sr%s" % ( majorVersion, minorVersion )
......
@@ -2,6 +2,23 @@
Package LHCbDIRAC
-----------------
Version v8r2p11
---------------
NEW
:::
TransformationSystem
- script for re-running a DIRAC job locally (needs access to software, so e.g. on lxplus)
CHANGE
::::::
Core
- /LocalSite/SharedArea is filled by the pilot
WorkloadManagementSystem
- /LocalSite/SharedArea is filled by the pilot
Version v8r2p10
---------------
......
@@ -2,14 +2,20 @@
Package LHCbDIRAC
-----------------
Version v8r2p10
Version v8r2p11
---------------
BUGFIX
NEW
:::
TransformationSystem
- script for re-running a DIRAC job locally (needs access to software, so e.g. on lxplus)
CHANGE
::::::
ProductionManagementSystem
- Priority of MC testing jobs increased to 10
BookkeepingSystem
- If no file type is provided, the file types table must not be used in the join condition.
Core
- /LocalSite/SharedArea is filled by the pilot
WorkloadManagementSystem
- /LocalSite/SharedArea is filled by the pilot
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p10/versions.cfg $
# $HeadURL: http://svn.cern.ch/guest/dirac/LHCbDIRAC/tags/LHCbDIRAC/v8r2p11/versions.cfg $
Versions
{
@@ -8,7 +8,27 @@ Versions
Next Release (from v8r2)
{
}
v8r2p11
{
AccountingSystem = ac_2015040101
BookkeepingSystem = bk_2015101902
ConfigurationSystem = cs_2015081301
#CHANGE: /LocalSite/SharedArea is filled by the pilot
Core = co_2015102601
DataManagementSystem = dm_2015091701
FrameworkSystem = fw_2015070701
Interfaces = if_2015081801
ProductionManagementSystem = pm_2015101501
Resources = re_2015090801
ResourceStatusSystem = rs_2015090201
#NEW: script for re-running a DIRAC job locally (needs access to software, so e.g. on lxplus)
TransformationSystem = ts_2015102601
Workflow = wo_2015090701
#CHANGE: /LocalSite/SharedArea is filled by the pilot
WorkloadManagementSystem = wm_2015102601
}
v8r2p10
@@ -2572,4 +2592,4 @@ Versions
}
}
packageExtraFiles = __init__.py, versions.cfg, cmt
fileVersion = $Id: versions.cfg 86074 2015-10-23 09:00:12Z zmathe $
fileVersion = $Id: versions.cfg 86116 2015-10-26 10:59:38Z zmathe $
@@ -5,7 +5,7 @@ __path__ = extend_path( __path__, __name__ )
majorVersion = 8
minorVersion = 2
patchLevel = 10
patchLevel = 11
preVersion = 0
version = "v%sr%s" % ( majorVersion, minorVersion )
......
@@ -22,7 +22,7 @@ dest = sys.argv[1]
# decode project version
m = re.search(r"\$[^$]*/tags.*/(v\d+r\d+(p\d+)?)/[^$]* \$",
'''$URL: http://svn.cern.ch/guest/dirac/LHCbDirac/tags/LHCbDiracSys/v8r2p10/cmt/gen_manifest.py $''')
'''$URL: http://svn.cern.ch/guest/dirac/LHCbDirac/tags/LHCbDiracSys/v8r2p11/cmt/gen_manifest.py $''')
if not m:
version = 'head'
else:
......
@@ -11,8 +11,8 @@ branches cmt doc src
include_path none
use LHCbDiracPolicy v8r2p10
use LHCbDiracConfig v8r2p10
use LHCbDiracPolicy v8r2p11
use LHCbDiracConfig v8r2p11
# The following section is only for the check out and build
private
@@ -20,17 +20,17 @@ private
# LHCbDIRAC subsystems
#============================================================================
use AccountingSystem ac_2015040101 LHCbDIRAC
use BookkeepingSystem bk_2015101901 LHCbDIRAC
use BookkeepingSystem bk_2015101902 LHCbDIRAC
use ConfigurationSystem cs_2015081301 LHCbDIRAC
use Core co_2015081801 LHCbDIRAC
use Core co_2015102601 LHCbDIRAC
use DataManagementSystem dm_2015091701 LHCbDIRAC
use FrameworkSystem fw_2015070701 LHCbDIRAC
use Interfaces if_2015081801 LHCbDIRAC
use ProductionManagementSystem pm_2015101501 LHCbDIRAC
use Resources re_2015090801 LHCbDIRAC
use ResourceStatusSystem rs_2015090201 LHCbDIRAC
use TransformationSystem ts_2015100901 LHCbDIRAC
use WorkloadManagementSystem wm_2015100901 LHCbDIRAC
use TransformationSystem ts_2015102601 LHCbDIRAC
use WorkloadManagementSystem wm_2015102601 LHCbDIRAC
use Workflow wo_2015090701 LHCbDIRAC
# Avoid the installation of __init__.py in the parent directory.
......