Commit 84a7c387 authored by Tulay Cuhadar Donszelmann

sweeping MR 12251 to 21.2

Former-commit-id: f171978779011a3987f3127dfba384983b8e08b1
parent e38f5891
Showing with 956 additions and 432 deletions
# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
#
# This module is used to set up the environment for ART inputs
#
# author : Attila Krasznahorkay <attila.krasznahorkay@cern.ch>, Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
# Set the environment variable(s):
set( ARTENVIRONMENT_ENVIRONMENT
APPEND DATAPATH "/eos/atlas/atlascerngroupdisk/data-art/grid-input" )
# Silently declare the module found:
set( ARTENVIRONMENT_FOUND TRUE )
################################################################################
# Package: ART
################################################################################
# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
#
# CMake configuration file for the ART package.
#
# author : Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>, Attila Krasznahorkay <attila.krasznahorkay@cern.ch>
# Declare the package name:
atlas_subdir( ART )
......@@ -13,4 +15,7 @@ atlas_depends_on_subdirs( PRIVATE
atlas_install_python_modules( python/ART/*.py )
atlas_install_scripts( scripts/*.py scripts/*.sh )
# Set up the general runtime environment (by Attila Krasznahorkay <attila.krasznahorkay@cern.ch>):
set( ARTEnvironment_DIR ${CMAKE_CURRENT_SOURCE_DIR}
CACHE PATH "Location of ARTEnvironmentConfig.cmake" )
find_package( ARTEnvironment )
......@@ -7,6 +7,6 @@ Allows one to do:
from ART import ArtBase
"""
from art_base import ArtBase
from art_build import ArtBuild
from art_grid import ArtGrid
from art_base import ArtBase # noqa: F401
from art_build import ArtBuild # noqa: F401
from art_grid import ArtGrid # noqa: F401
......@@ -110,7 +110,7 @@ class ArtBase(object):
#
# Default implementations
#
def compare_ref(self, path, ref_path, entries=-1):
def compare_ref(self, path, ref_path, files, entries=-1, mode='detailed'):
"""TBD."""
result = 0
......@@ -120,7 +120,7 @@ class ArtBase(object):
print err
print out
(exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-root", "--entries=" + str(entries), path, ref_path)))
(exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-root", "--mode=" + mode, "--entries=" + str(entries), (' '.join(('--file=' + s for s in files))), path, ref_path)))
if exit_code != 0:
result |= exit_code
print err
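# A standalone sketch (hypothetical values) of the art-diff.py command that compare_ref
# assembles above, now including the new --mode and --file arguments:
files = ['*AOD*.pool.root', '*ESD*.pool.root']
cmd = ' '.join(("art-diff.py", "--diff-type=diff-root", "--mode=" + 'summary', "--entries=" + str(10), ' '.join('--file=' + s for s in files), './run', './run_ref'))
print cmd  # art-diff.py --diff-type=diff-root --mode=summary --entries=10 --file=*AOD*.pool.root --file=*ESD*.pool.root ./run ./run_ref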
......@@ -171,26 +171,30 @@ class ArtBase(object):
files = os.listdir(directory)
files.sort()
for fname in files:
# is not a test ?
if not fnmatch.fnmatch(fname, 'test_*.sh') and not fnmatch.fnmatch(fname, 'test_*.py'):
continue
test_name = os.path.join(directory, fname)
# is not of correct type
has_art_input = ArtHeader(test_name).get(ArtHeader.ART_INPUT) is not None
has_art_athena_mt = ArtHeader(test_name).get(ArtHeader.ART_ATHENA_MT) > 0
# SKIP if is not of correct type
if job_type is not None and ArtHeader(test_name).get(ArtHeader.ART_TYPE) != job_type:
continue
# is not included in nightly_release, project, platform
# SKIP if is not included in nightly_release, project, platform
if nightly_release is not None and not self.is_included(test_name, nightly_release, project, platform):
continue
# batch and does specify art-input
if index_type == "batch" and ArtHeader(test_name).get(ArtHeader.ART_INPUT) is not None:
# SKIP if batch and does specify art-input or art-athena-mt
if index_type == "batch" and (has_art_input or has_art_athena_mt):
continue
# single and does not specify art-input
if index_type == "single" and ArtHeader(test_name).get(ArtHeader.ART_INPUT) is None:
# SKIP if single and does NOT specify art-input or art-athena-mt
if index_type == "single" and not (has_art_input or has_art_athena_mt):
continue
result.append(fname)
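# Illustration (hypothetical header) of the filter above: a test script containing the line
#   # art-athena-mt: 8
# has has_art_athena_mt True, so it is skipped for index_type "batch" and kept for "single",
# exactly like a test that declares art-input.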
......
......@@ -11,8 +11,9 @@ import json
import logging
import multiprocessing
import os
import socket
from art_misc import run_command, mkdir_p
from art_misc import memory, mkdir, run_command, GByte
from art_base import ArtBase
from art_header import ArtHeader
......@@ -49,7 +50,10 @@ class ArtBuild(ArtBase):
self.project = project
self.platform = platform
self.nightly_tag = nightly_tag
self.max_jobs = multiprocessing.cpu_count() if max_jobs <= 0 else max_jobs
mem = memory(GByte)
max_cores = min(mem / 4, multiprocessing.cpu_count())
max_cores = max_cores if max_cores >= 4 else 1
self.max_jobs = max_cores if max_jobs <= 0 else max_jobs
self.ci = ci
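# Worked example of the job-count heuristic above (hypothetical machines):
#   32 GByte, 8 cores  -> min(32 / 4, 8) = 8  -> 8 parallel jobs
#   16 GByte, 16 cores -> min(16 / 4, 16) = 4 -> 4 parallel jobs
#    2 GByte, 4 cores  -> min(2 / 4, 4) = 0, below 4 -> fall back to 1 job
# An explicit --max-jobs on the command line still overrides this default.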
def task_list(self, job_type, sequence_tag):
......@@ -75,15 +79,13 @@ class ArtBuild(ArtBase):
status['release_info']['nightly_tag'] = self.nightly_tag
status['release_info']['project'] = self.project
status['release_info']['platform'] = self.platform
status['release_info']['hostname'] = socket.gethostname()
# Package information with all tests in each package
for future in concurrent.futures.as_completed(future_set):
(package, test_name, exit_code, out, err, start_time, end_time) = future.result()
log.debug("Handling job for %s %s", package, test_name)
status[package][test_name]['exit_code'] = exit_code
# Removed, seem to give empty lines
# status[package][test_name]['out'] = out
# status[package][test_name]['err'] = err
status[package][test_name]['start_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%S')
status[package][test_name]['end_time'] = end_time.strftime('%Y-%m-%dT%H:%M:%S')
status[package][test_name]['start_epoch'] = start_time.strftime('%s')
......@@ -111,7 +113,7 @@ class ArtBuild(ArtBase):
status[package][test_name]['result'] = result
mkdir_p(sequence_tag)
mkdir(sequence_tag)
with open(os.path.join(sequence_tag, "status.json"), 'w') as outfile:
json.dump(status, outfile, sort_keys=True, indent=4, ensure_ascii=False)
......@@ -161,7 +163,7 @@ class ArtBuild(ArtBase):
test_name = self.get_files(test_directory, job_type, "all", self.nightly_release, self.project, self.platform)[int(job_index)]
work_directory = os.path.join(sequence_tag, package, os.path.splitext(test_name)[0])
mkdir_p(work_directory)
mkdir(work_directory)
log.debug("Work dir %s", work_directory)
# Tests are called with arguments: PACKAGE TEST_NAME SCRIPT_DIRECTORY TYPE
......
This diff is collapsed.
......@@ -17,6 +17,8 @@ MODULE = "art.header"
class ArtHeader(object):
"""Class to handle art-headers."""
# headers in alphabetical order
ART_ATHENA_MT = 'art-athena-mt'
ART_CI = 'art-ci'
ART_CORES = 'art-cores'
ART_DESCRIPTION = 'art-description'
......@@ -52,6 +54,7 @@ class ArtHeader(object):
self.add(ArtHeader.ART_INPUT, StringType, None)
self.add(ArtHeader.ART_INPUT_NFILES, IntType, 1)
self.add(ArtHeader.ART_INPUT_SPLIT, IntType, 0)
self.add(ArtHeader.ART_ATHENA_MT, IntType, 0)
self.read(filename)
......@@ -75,12 +78,9 @@ class ArtHeader(object):
if line_match:
try:
key = line_match.group(1)
value = line_match.group(2)
if key in self.header:
if self.header[key]['type'] == StringType:
value = value.strip()
elif self.header[key]['type'] == IntType:
value = int(value)
value = line_match.group(2).strip()
if key in self.header and self.header[key]['type'] == IntType:
value = int(value)
if self.is_list(key):
# handle list types
......
......@@ -5,7 +5,6 @@
__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
import concurrent.futures
import errno
import logging
import os
import shlex
......@@ -15,6 +14,11 @@ import sys
from datetime import datetime
MODULE = "art.misc"
EOS_MGM_URL = 'root://eosatlas.cern.ch/'
KByte = 1024
MByte = KByte * 1024
GByte = MByte * 1024
def set_log(kwargs):
......@@ -166,15 +170,94 @@ def make_executable(path):
os.chmod(path, mode)
def mkdir_p(path):
def mkdir(path):
"""Make (missing) directories."""
log = logging.getLogger(MODULE)
if path.startswith('/eos'):
mkdir_cmd = 'eos ' + EOS_MGM_URL + ' mkdir -p'
else:
mkdir_cmd = 'mkdir -p'
if mkdir_cmd is not None:
(exit_code, out, err, command, start_time, end_time) = run_command(' '.join((mkdir_cmd, path)))
if exit_code != 0:
log.error("Mkdir Error: %d %s %s", exit_code, out, err)
return exit_code
return 0
def ls(path):
"""List files in directroy."""
if path.startswith('/eos'):
ls_cmd = 'eos ' + EOS_MGM_URL + ' ls ' + path + '/'
else:
ls_cmd = 'ls ' + path + '/'
(exit_code, out, err, command, start_time, end_time) = run_command(ls_cmd)
if exit_code == 0:
print out
print err
return exit_code
def cp(src, dst):
"""Copy files to directory."""
log = logging.getLogger(MODULE)
if dst.startswith('/eos'):
# check which xrdcp we are running
(exit_code, out, err, command, start_time, end_time) = run_command('which xrdcp')
print out
print err
# check which version of xrdcp we are running
(exit_code, out, err, command, start_time, end_time) = run_command('xrdcp --version')
print out
print err
cmd = ' '.join(('xrdcp -f -N -r -p -v', src, EOS_MGM_URL + dst + '/'))
else:
cmd = ' '.join(('xrdcp -f -N -r -p -v', src, dst + '/'))
# run the actual command
log.info("Using: %s", cmd)
(exit_code, exit_out, exit_err, command, start_time, end_time) = run_command(cmd)
if exit_code != 0:
log.error("COPY to DST Error: %d %s %s", exit_code, exit_out, exit_err)
return exit_code
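# Example (hypothetical paths) of the copy command assembled above:
#   cp('./outputs', '/eos/atlas/atlascerngroupdisk/data-art/grid-output/MyPackage')
# runs: xrdcp -f -N -r -p -v ./outputs root://eosatlas.cern.ch//eos/atlas/atlascerngroupdisk/data-art/grid-output/MyPackage/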
def count_files(path):
"""Count number of files."""
log = logging.getLogger(MODULE)
if path.startswith('/eos'):
cmd = ' '.join(('eos', EOS_MGM_URL, 'find', path, '|', 'wc', '-l'))
else:
cmd = ' '.join(('find', path, '|', 'wc', '-l'))
(exit_code, out, err, command, start_time, end_time) = run_command(cmd)
if exit_code == 0:
nFiles = int(out)
return nFiles
log.error("Error retrieving number of files on %s, %s", path, err)
return -1
def touch(fname, times=None):
"""Touch a file."""
with open(fname, 'a'):
os.utime(fname, times)
def rm(fname):
"""Remove a file."""
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
os.remove(fname)
except OSError:
pass
def which(program):
......@@ -191,3 +274,8 @@ def which(program):
return exe_file
return None
def memory(scale=1):
"""Return free memory."""
return os.sysconf('SC_PHYS_PAGES') * os.sysconf('SC_PAGE_SIZE') / scale
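# Usage sketch for the helper above (note: SC_PHYS_PAGES counts total physical pages,
# so memory(GByte) gives the node's total RAM in GByte, which ArtBuild uses to size max_jobs):
#   mem_gb = memory(GByte)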
......@@ -55,7 +55,8 @@ class ArtRucio(object):
self.exit_if_no_rucio()
# rucio downloads cache properly
log.info("Shell = %s", shell)
log.debug("DID = %s", did)
log.debug("Shell = %s", shell)
env = os.environ.copy()
if shell:
cmd = ' '.join((os.path.join(self.art_directory, 'art-download.sh'), did, dst_dir))
......@@ -152,6 +153,7 @@ class ArtRucio(object):
pattern = self.get_outfile_name(user, package, '*', None, nightly_tag)
outfile = None
sequence = None
log.debug("Pattern 1 %s", pattern)
for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
sequence_tag = self.get_sequence_tag(out)
if sequence is None or sequence_tag > sequence:
......@@ -159,14 +161,13 @@ class ArtRucio(object):
sequence = sequence_tag
if outfile is not None:
log.debug("Adding 'batch': %s", outfile)
result.append(outfile)
# look for "single" outfile, deduce sequence_tag
pattern = self.get_outfile_name(user, package, '*', '*', nightly_tag)
log.debug("Trying pattern %s", pattern)
outfile = None
sequence = None
log.debug("Pattern 2 %s", pattern)
for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
sequence_tag = self.get_sequence_tag(out)
if sequence is None or sequence_tag > sequence:
......@@ -179,6 +180,7 @@ class ArtRucio(object):
if sequence_tag is not None:
# found sequence_tag, find all 'single' outfiles
pattern = self.get_outfile_name(user, package, sequence_tag, '*', nightly_tag)
log.debug("Pattern 3 %s", pattern)
for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
outfile = os.path.splitext(out)[0]
log.debug("Adding 'single': %s", outfile)
......@@ -186,7 +188,7 @@ class ArtRucio(object):
return result
def get_table(self, user, package, nightly_tag=None, shell=False):
def get_table(self, user, package, nightly_tag=None, shell=False, tmp=None):
"""Get full table with grid_index, single_index and test_name for particular package and nightly_tag."""
log = logging.getLogger(MODULE)
......@@ -195,6 +197,8 @@ class ArtRucio(object):
self.exit_if_no_rucio()
tmp = tempfile.gettempdir() if tmp is None else tmp
table = []
nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
......@@ -202,61 +206,63 @@ class ArtRucio(object):
outfiles = self.get_outfiles(user, package, nightly_tag)
outfiles_str = [x + ArtRucio.JSON for x in outfiles]
outfiles_str = ' '.join(outfiles_str)
tmp_dir = tempfile.gettempdir()
dst_dir = tmp_dir
log.info("Shell = %s", shell)
exit_code = self.download(outfiles_str, dst_dir, shell)
if exit_code != 0:
log.error("Failed to execute rucio download %d", exit_code)
return table
for outfile in outfiles:
single_index = self.get_single_index(outfile)
json_directory = os.path.join(dst_dir, outfile + ArtRucio.JSON)
if not os.path.isdir(json_directory):
# print single_index, rucio_name
table.append({
'single_index': single_index,
'grid_index': -1,
'file_index': -1,
'job_index': -1,
'outfile': outfile,
'job_name': None
})
continue
for json_file in os.listdir(json_directory):
json_path = os.path.join(json_directory, json_file)
if os.path.isfile(json_path):
with open(json_path) as json_fd:
info = json.load(json_fd)
job_name = os.path.splitext(info['name'])[0]
# Match: user.artprod.13199077.EXT0._000002.art-job.json
# Match: user.artprod.13199077.EXT0._000003.art-job.json.4
# job_index = 13199077, grid_index = 3, file_index = 4
match = re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?", json_file)
if match:
job_index = int(match.group(2))
grid_index = int(match.group(3))
file_index = -1 if match.group(4) is None else int(match.group(4))
else:
job_index = -1
grid_index = -1
file_index = -1
table.append({
'single_index': single_index,
'grid_index': grid_index,
'file_index': file_index,
'job_index': job_index,
'outfile': outfile,
'job_name': job_name
})
if outfiles_str:
outfiles_str = ' '.join(outfiles_str)
dst_dir = tmp
log.debug("Shell = %s", shell)
exit_code = self.download(outfiles_str, dst_dir, shell)
if exit_code != 0:
log.error("Failed to execute rucio download %d", exit_code)
return table
for outfile in outfiles:
single_index = self.get_single_index(outfile)
json_directory = os.path.join(dst_dir, outfile + ArtRucio.JSON)
if not os.path.isdir(json_directory):
log.debug("Adding, single_index: %d, outfile: %s", single_index, outfile)
table.append({
'single_index': single_index,
'grid_index': -1,
'file_index': -1,
'job_index': -1,
'outfile': outfile,
'job_name': None
})
continue
for json_file in os.listdir(json_directory):
if json_file.endswith(".json"):
json_path = os.path.join(json_directory, json_file)
if os.path.isfile(json_path):
with open(json_path) as json_fd:
info = json.load(json_fd)
job_name = os.path.splitext(info['name'])[0]
# Match: user.artprod.13199077.EXT0._000002.art-job.json
# Match: user.artprod.13199077.EXT0._000003.art-job.json.4
# job_index = 13199077, grid_index = 3, file_index = 4
match = re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?", json_file)
if match:
job_index = int(match.group(2))
grid_index = int(match.group(3))
file_index = -1 if match.group(4) is None else int(match.group(4))
else:
job_index = -1
grid_index = -1
file_index = -1
log.debug("Adding, single_index: %d, grid_index: %d, file_index: %d, job_index %d, outfile: %s, job_name: %s", single_index, grid_index, file_index, job_index, outfile, job_name)
table.append({
'single_index': single_index,
'grid_index': grid_index,
'file_index': file_index,
'job_index': job_index,
'outfile': outfile,
'job_name': job_name
})
self.table = table
return table
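# Quick check of the filename pattern documented above:
#   re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?",
#             "user.artprod.13199077.EXT0._000003.art-job.json.4").groups()
#   -> ('artprod', '13199077', '000003', '4'), i.e. job_index 13199077, grid_index 3, file_index 4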
"""Pythonic command-line interface parser that will make you smile.
"""
Pythonic command-line interface parser that will make you smile.
* http://docopt.org
* Repository and issue-tracker: https://github.com/docopt/docopt
......@@ -10,17 +11,15 @@ import sys
import re
__all__ = ['docopt']
__all__ = ('docopt')
__version__ = '0.6.2'
class DocoptLanguageError(Exception):
"""Error in construction of usage-message by developer."""
class DocoptExit(SystemExit):
"""Exit in case user invoked program with incorrect arguments."""
usage = ''
......@@ -97,7 +96,6 @@ def transform(pattern):
class LeafPattern(Pattern):
"""Leaf/terminal node of a pattern tree."""
def __init__(self, name, value=None):
......@@ -131,7 +129,6 @@ class LeafPattern(Pattern):
class BranchPattern(Pattern):
"""Branch/inner node of a pattern tree."""
def __init__(self, *children):
......@@ -239,7 +236,6 @@ class Optional(BranchPattern):
class OptionsShortcut(Optional):
"""Marker/placeholder for [options] shortcut."""
......@@ -299,7 +295,10 @@ class Tokens(list):
def parse_long(tokens, options):
"""long ::= '--' chars [ ( ' ' | '=' ) chars ] ;"""
"""Parse long according to definition.
long ::= '--' chars [ ( ' ' | '=' ) chars ] ;
"""
long, eq, value = tokens.move().partition('=')
assert long.startswith('--')
value = None if eq == value == '' else value
......@@ -332,7 +331,10 @@ def parse_long(tokens, options):
def parse_shorts(tokens, options):
"""shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;"""
"""Parse shorts according to definition.
shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;
"""
token = tokens.move()
assert token.startswith('-') and not token.startswith('--')
left = token.lstrip('-')
......@@ -375,7 +377,10 @@ def parse_pattern(source, options):
def parse_expr(tokens, options):
"""expr ::= seq ( '|' seq )* ;"""
"""Parse expr according to definition.
expr ::= seq ( '|' seq )* ;
"""
seq = parse_seq(tokens, options)
if tokens.current() != '|':
return seq
......@@ -388,7 +393,10 @@ def parse_expr(tokens, options):
def parse_seq(tokens, options):
"""seq ::= ( atom [ '...' ] )* ;"""
"""Parse seq according to definition.
seq ::= ( atom [ '...' ] )* ;
"""
result = []
while tokens.current() not in [None, ']', ')', '|']:
atom = parse_atom(tokens, options)
......@@ -400,8 +408,10 @@ def parse_seq(tokens, options):
def parse_atom(tokens, options):
"""atom ::= '(' expr ')' | '[' expr ']' | 'options'
| long | shorts | argument | command ;
"""Parse atom according to definition.
atom ::= '(' expr ')' | '[' expr ']' | 'options'
| long | shorts | argument | command ;
"""
token = tokens.current()
result = []
......@@ -426,13 +436,12 @@ def parse_atom(tokens, options):
def parse_argv(tokens, options, options_first=False):
"""Parse command-line argument vector.
"""Parse argv argument vector.
If options_first:
argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
else:
argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
"""
parsed = []
while tokens.current() is not None:
......@@ -456,7 +465,7 @@ def parse_defaults(doc):
_, _, s = s.partition(':') # get rid of "options:"
split = re.split('\n[ \t]*(-\S+?)', '\n' + s)[1:]
split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
options = [Option.parse(s) for s in split if s.startswith('-')]
options = [Option.parse(t) for t in split if t.startswith('-')]
defaults += options
return defaults
......@@ -562,7 +571,7 @@ def docopt(doc, argv=None, help=True, version=None, options_first=False):
options = parse_defaults(doc)
pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
# [default] syntax for argument is disabled
#for a in pattern.flat(Argument):
# for a in pattern.flat(Argument):
# same_name = [d for d in arguments if d.name == a.name]
# if same_name:
# a.value = same_name[0].value
......@@ -571,7 +580,7 @@ def docopt(doc, argv=None, help=True, version=None, options_first=False):
for options_shortcut in pattern.flat(OptionsShortcut):
doc_options = parse_defaults(doc)
options_shortcut.children = list(set(doc_options) - pattern_options)
#if any_options:
# if any_options:
# options_shortcut.children += [Option(o.short, o.long, o.argcount)
# for o in argv if type(o) is Option]
extras(help, version, argv, doc)
......
#!/usr/bin/env python
# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
"""
ART - ATLAS Release Tester - Clean.
Usage:
art-clean.py [-v -q --base-dir=<base_dir> --delete --days=<days> --eos --config=<file> --release --package=<package>] [<nightly_release> <project> [<platform>]]
Options:
--base-dir=<base_dir> Start search from basedir [default: /eos/atlas/atlascerngroupdisk/data-art/grid-output]
--config=<file> art-configuration to retrieve packages and days [default: art-configuration.yml]
--delete Actually delete the directories to be cleaned
--days=<days> Number of nightlies to keep [default: 7]
--eos Use eos commands
-h --help Show this screen
--package=<package> Package to be cleaned up, no configuration
--release Clean up full release, no configuration
-q --quiet Show less information, only warnings and errors
-v --verbose Show more information, debug level
--version Show version
Arguments:
nightly_release Name of the nightly release (e.g. 21.0)
project Project to clean (e.g. Athena)
platform Platform to clean [default: x86_64-slc6-gcc62-opt]
Environment:
AtlasBuildBranch Name of the nightly release (e.g. 21.0)
AtlasProject Name of the project (e.g. Athena)
<AtlasProject>_PLATFORM Platform (e.g. x86_64-slc6-gcc62-opt)
"""
__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
import datetime
import logging
import os
import re
import shutil
import sys
from ART.docopt import docopt
from ART.art_misc import get_atlas_env, run_command
from ART.art_configuration import ArtConfiguration
MODULE = "art.clean"
class ArtClean(object):
"""Class to cleanup eos area."""
EOS_MGM_URL = 'root://eosatlas.cern.ch'
def __init__(self, arguments):
"""Clean when more than 'days' old."""
log = logging.getLogger(MODULE)
self.verbose = arguments['--verbose']
self.eos = arguments['--eos']
default_days = int(arguments['--days'])
self.delete = arguments['--delete']
nightly_release = arguments['<nightly_release>']
project = arguments['<project>']
platform = arguments['<platform>']
base_dir = arguments['--base-dir']
# Check for missing arguments
if nightly_release is None:
(nightly_release, project, platform, dummy) = get_atlas_env()
if platform is None:
platform = 'x86_64-slc6-gcc62-opt'
log.info("Defaulting to platform %s", platform)
if self.eos:
# Test if we have access to kerberos
(code, out, err, command, start_time, end_time) = run_command('klist', verbose=self.verbose)
if code != 0:
log.critical("%s", err)
exit(1)
package = arguments['--package']
if package is not None:
self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days, package)
return
if arguments['--release']:
self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days)
return
config_file = arguments['--config']
config = ArtConfiguration(config_file)
for package in config.packages():
copy = config.get(nightly_release, project, platform, package, 'copy', False)
if copy:
days = config.get(nightly_release, project, platform, package, 'days', default_days)
self.clean_release(os.path.join(base_dir, nightly_release, project, platform), days, package)
return
def clean_release(self, release, days, package=None):
"""Clean a release dir."""
log = logging.getLogger(MODULE)
log.debug("Starting to clean up release, keeping %d days, for %s in %s", days, "All" if package is None else package, release)
now = datetime.datetime.now()
count = 0
date = '1970-01-01'
for entry in reversed(self.listdirs(release)):
# Matches 2018-12-05T0345
match = re.match(r"(\d{4}-\d{2}-\d{2})T\d{4}", entry)
if match:
tag = os.path.join(release, entry)
# package_dir = tag for full release cleanup
package_dir = tag if package is None else os.path.join(tag, package)
# only count entries on different dates and where the package actually exists
if self.isdir(package_dir) and match.group(1) != date:
count += 1
date = match.group(1)
if count > days:
# compare times
dir_time = datetime.datetime.strptime(match.group(0), '%Y-%m-%dT%H%M')
time_diff = now - dir_time
if time_diff.days > days:
log.info("- Nightly tag %d days old, removing %s for package %s", time_diff.days, package_dir, package if package is not None else 'All')
self.remove_dir(package_dir)
if package is not None and self.isempty(tag):
log.info("- Nightly tag contains no more packages, removing %s for package %s", tag, package if package is not None else 'All')
self.remove_dir(tag)
else:
log.debug("- Tag within %d days, keeping %s for package %s", days, tag, package if package is not None else 'All')
else:
log.debug("- Tag within %d entries, keeping %s for package %s", days, tag, package if package is not None else 'All')
if package is not None and count == 0:
log.debug("No package %s found in any available nightly tag for %s", package, release)
def listdirs(self, directory):
"""Return list of directories in directory."""
dirs = []
if self.isdir(directory):
for entry in self.listdir(directory):
path = os.path.join(directory, entry)
if self.isdir(path):
dirs.append(entry)
return dirs
def isempty(self, directory):
"""Return true if directory is empty."""
if self.isdir(directory):
for entry in self.listdir(directory):
return False
return True
def listdir(self, directory):
"""Return list of entries in directory."""
log = logging.getLogger(MODULE)
if self.eos:
(code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' ls ' + directory, verbose=self.verbose)
if code == 0:
return out.splitlines()
log.info("eos listdir %d %s", code, err)
return []
else:
return os.listdir(directory)
def isdir(self, path):
"""Return true is path is directory."""
log = logging.getLogger(MODULE)
if self.eos:
(code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' stat -d ' + path, verbose=self.verbose)
if code == 0:
return True
log.debug("eos isdir(%s) %d %s", path, code, err)
return False
else:
return os.path.isdir(path)
def remove_dir(self, directory):
"""Remove directory and all below."""
log = logging.getLogger(MODULE)
if not self.isdir(directory):
return
if self.delete:
if self.eos:
(code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' rm -r ' + directory, verbose=self.verbose)
if code == 0:
return True
log.info("eos rm -r (%s) %d %s", directory, code, err)
else:
shutil.rmtree(directory)
if __name__ == '__main__':
if sys.version_info < (2, 7, 0):
sys.stderr.write("You need python 2.7 or later to run this script\n")
exit(1)
# NOTE: import should be here, to keep the order of the decorators (module first, art last and unused)
from art import __version__
logging.basicConfig()
log = logging.getLogger('art')
arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
log.setLevel(level)
ArtClean(arguments)
......@@ -4,14 +4,16 @@
ART - ATLAS Release Tester - Diff.
Usage:
art-diff.py [--diff-type=<diff_type> --exclude=<pattern>... --platform-ref=<platform> --entries=<entries>] <nightly_release_ref> <project_ref> <nightly_tag_ref> <package>
art-diff.py [--diff-type=<diff_type> --exclude=<pattern>... --entries=<entries>] <path> <ref_path>
art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --platform-ref=<platform> --entries=<entries> --mode=<mode>] <nightly_release_ref> <project_ref> <nightly_tag_ref> <package>
art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --entries=<entries> --mode=<mode>] <path> <ref_path>
Options:
--diff-type=<diff_type> Type of diff (e.g. diff-pool or diff-root) [default: diff-pool]
--entries=<entries> Only diff over number of entries [default: -1]
--exclude=<pattern>... Exclude test files according to pattern
--file=<pattern>... Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
-h --help Show this screen
--mode=<mode> Sets the mode for diff-root {summary, detailed} [default: detailed]
--platform-ref=<platform> Reference Platform [default: x86_64-slc6-gcc62-opt]
--test-name=<test_name> Test name to compare
--version Show version
......@@ -43,9 +45,10 @@ import sys
from ART.docopt import docopt
VERSION = "0.7.8"
VERSION = "0.10.16"
ATHENA_STDOUT = "athena_stdout.txt"
DEFAULT_ENTRIES = -1
DEFAULT_MODE = "detailed"
class ArtDiff(object):
......@@ -55,11 +58,14 @@ class ArtDiff(object):
def __init__(self):
"""Constructor of ArtDiff."""
self.default_file_patterns = ['*AOD*.pool.root', '*ESD*.pool.root', '*HITS*.pool.root', '*RDO*.pool.root', '*TAG*.root']
def parse(self, arguments):
"""Called from comandline."""
diff_type = arguments['--diff-type']
files = self.default_file_patterns if diff_type == 'diff-pool' else list(set(arguments['--file']))
entries = arguments['--entries']
mode = arguments['--mode']
excludes = arguments['--exclude']
if arguments['<nightly_release_ref>'] is not None:
try:
......@@ -67,7 +73,6 @@ class ArtDiff(object):
project = os.environ['AtlasProject']
platform = os.environ[project + '_PLATFORM']
nightly_tag = os.environ['AtlasBuildStamp']
return (nightly_release, project, platform, nightly_tag)
except KeyError, e:
print "Environment variable not set", e
sys.exit(1)
......@@ -78,8 +83,8 @@ class ArtDiff(object):
nightly_tag_ref = arguments['<nightly_tag_ref>']
package = arguments['<package>']
print nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref
exit(self.diff(nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, excludes, entries=entries))
print nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref
exit(self.diff(nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes, entries=entries, mode=mode))
# directory compare
path = arguments['<path>']
......@@ -91,7 +96,7 @@ class ArtDiff(object):
print "Error: <ref_path> should be a file, if <path> is a file."
sys.exit(1)
exit(self.diff_file(path, ref_path, diff_type, entries=entries))
exit(self.diff_file(path, ref_path, diff_type, entries=entries, mode=mode))
if os.path.isfile(ref_path):
print "Error: <ref_path> should be a directory, if <path> is a directory."
......@@ -100,18 +105,18 @@ class ArtDiff(object):
# check if path contains "test_" entries
if len(glob.glob(os.path.join(path, 'test_*'))) > 0:
# directory compare
exit(self.diff_dirs(path, ref_path, diff_type, excludes, entries=entries))
exit(self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode))
# single test compare
exit(self.diff_test(path, ref_path, diff_type, entries=entries))
exit(self.diff_test(path, ref_path, diff_type, files, entries=entries, mode=mode))
def diff(self, nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, excludes=[], entries=DEFAULT_ENTRIES):
def diff(self, nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
"""Run difference between two results."""
path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release, project, platform, nightly_tag, package)
ref_path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package)
return self.diff_dirs(path, ref_path, diff_type, excludes, entries=entries)
return self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode)
def diff_dirs(self, path, ref_path, diff_type, excludes=[], entries=DEFAULT_ENTRIES):
def diff_dirs(self, path, ref_path, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
"""Run difference between two directories."""
print " path: %s" % path
print "ref_path: %s" % ref_path
......@@ -131,7 +136,7 @@ class ArtDiff(object):
print "******************************************"
print "Test: %s" % test_name
print "******************************************"
stat_per_chain[test_name] = self.diff_test(os.path.join(path, test_name), os.path.join(ref_path, test_name), diff_type, entries=entries)
stat_per_chain[test_name] = self.diff_test(os.path.join(path, test_name), os.path.join(ref_path, test_name), diff_type, files, entries=entries, mode=mode)
result = 0
for test_name, status in stat_per_chain.iteritems():
......@@ -143,7 +148,7 @@ class ArtDiff(object):
return result
def diff_test(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES):
def diff_test(self, path, ref_path, diff_type, files, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
"""Run differences between two directories."""
result = self.get_result(path)
ref_result = self.get_result(ref_path)
......@@ -152,10 +157,9 @@ class ArtDiff(object):
print "%-10s: ref: %d events, val: %d events" % (key, int(ref_result[key][1]), int(result[key][1]))
test_dir = path
test_patterns = ['*AOD*.pool.root', '*ESD*.pool.root', '*HITS*.pool.root', '*RDO*.pool.root', '*TAG*.root']
# get files in all patterns
test_files = []
for test_pattern in test_patterns:
for test_pattern in files:
test_files.extend(glob.glob(os.path.join(test_dir, test_pattern)))
# run test over all files
result = 0
......@@ -166,11 +170,11 @@ class ArtDiff(object):
print "val_file: %s" % val_file
print "ref_file: %s" % ref_file
result |= self.diff_file(val_file, ref_file, diff_type, entries=entries)
result |= self.diff_file(val_file, ref_file, diff_type, entries=entries, mode=mode)
return result
def diff_file(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES):
def diff_file(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
"""Compare two files."""
if not os.path.exists(ref_path):
print "no test found in ref_dir to compare: %s" % ref_path
......@@ -182,7 +186,7 @@ class ArtDiff(object):
if diff_type == 'diff-pool':
return self.diff_pool(path, ref_path)
return self.diff_root(path, ref_path, entries)
return self.diff_root(path, ref_path, entries, mode)
def get_result(self, directory):
"""
......@@ -226,10 +230,10 @@ class ArtDiff(object):
return stat
def diff_root(self, file_name, ref_file, entries):
def diff_root(self, file_name, ref_file, entries, mode):
"""TBD."""
# diff-root
(code, out, err) = self.run_command("acmd.py diff-root " + file_name + " " + ref_file + " --error-mode resilient --ignore-leaves RecoTimingObj_p1_HITStoRDO_timings RecoTimingObj_p1_RAWtoESD_mems RecoTimingObj_p1_RAWtoESD_timings RAWtoESD_mems RAWtoESD_timings ESDtoAOD_mems ESDtoAOD_timings HITStoRDO_timings RAWtoALL_mems RAWtoALL_timings RecoTimingObj_p1_RAWtoALL_mems RecoTimingObj_p1_RAWtoALL_timings RecoTimingObj_p1_EVNTtoHITS_timings --entries " + str(entries))
(code, out, err) = self.run_command("acmd.py diff-root " + file_name + " " + ref_file + " --error-mode resilient --ignore-leaves RecoTimingObj_p1_HITStoRDO_timings RecoTimingObj_p1_RAWtoESD_mems RecoTimingObj_p1_RAWtoESD_timings RAWtoESD_mems RAWtoESD_timings ESDtoAOD_mems ESDtoAOD_timings HITStoRDO_timings RAWtoALL_mems RAWtoALL_timings RecoTimingObj_p1_RAWtoALL_mems RecoTimingObj_p1_RAWtoALL_timings RecoTimingObj_p1_EVNTtoHITS_timings --entries " + str(entries) + " --mode " + mode)
if code != 0:
print "Error: %d" % code
print err
......
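# Example invocation (hypothetical paths) of art-diff.py with the options added above,
# comparing only AOD files in summary mode over the first 10 entries:
#   art-diff.py --diff-type=diff-root --file='*AOD*.pool.root' --mode=summary --entries=10 ./run ./run_ref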
......@@ -25,6 +25,7 @@ source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
unset ALRB_noGridMW
lsetup -f rucio
lsetup -f "xrootd 4.7.1"
echo "Name: ${NAME}"
echo "Directory: ${DIRECTORY}"
......
......@@ -5,8 +5,8 @@ ART-internal - ATLAS Release Tester (internal command).
Usage:
art-internal.py build job [-v -q] <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
art-internal.py grid batch [-v -q --skip-setup -n] <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
art-internal.py grid single [-v -q --skip-setup --in=<in_file> -n] <script_directory> <sequence_tag> <package> <outfile> <job_name>
art-internal.py grid batch [-v -q --skip-setup -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <job_index>
art-internal.py grid single [-v -q --skip-setup --in=<in_file> -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
Options:
-h --help Show this screen.
......@@ -18,6 +18,7 @@ Options:
--version Show version.
Arguments:
inform_panda Inform Big Panda about job
job_index Index of the test inside the package
job_name Index of the test (batch), or its name (single)
job_type Type of job (e.g. grid, ci, build)
......@@ -63,7 +64,7 @@ def build_job(script_directory, sequence_tag, package, outfile, job_type, job_in
@dispatch.on('grid', 'batch')
def grid_batch(script_directory, sequence_tag, package, outfile, job_type, job_index, **kwargs):
def grid_batch(script_directory, sequence_tag, package, outfile, inform_panda, job_type, job_index, **kwargs):
"""Run a batch job, given a particular index.
Tests are called with the following parameters:
......@@ -73,11 +74,11 @@ def grid_batch(script_directory, sequence_tag, package, outfile, job_type, job_i
art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
(nightly_release, project, platform, nightly_tag) = get_atlas_env()
skip_setup = kwargs['skip_setup']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).batch(sequence_tag, package, outfile, job_type, job_index))
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).batch(sequence_tag, package, outfile, inform_panda, job_type, job_index))
@dispatch.on('grid', 'single')
def grid_single(script_directory, sequence_tag, package, outfile, job_name, **kwargs):
def grid_single(script_directory, sequence_tag, package, outfile, inform_panda, job_name, **kwargs):
"""Run a single job, given a particular name.
Tests are called with the following parameters:
......@@ -88,7 +89,7 @@ def grid_single(script_directory, sequence_tag, package, outfile, job_name, **kw
(nightly_release, project, platform, nightly_tag) = get_atlas_env()
skip_setup = kwargs['skip_setup']
in_file = kwargs['in']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).single(sequence_tag, package, outfile, job_name, in_file))
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).single(sequence_tag, package, outfile, inform_panda, job_name, in_file))
if __name__ == '__main__':
......
......@@ -170,7 +170,7 @@ if __name__ == '__main__':
from art import __version__
logging.basicConfig()
log = logging.getLogger(MODULE)
log = logging.getLogger('art')
arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
......
......@@ -21,7 +21,7 @@ else
fi
export ATLAS_LOCAL_ROOT_BASE="${ATLAS_LOCAL_ROOT_BASE:-/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase}"
# shellcheck source=/dev/null
# shellcheck source=/dev/null
source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
if [ "${BRANCH}" == "master" ]; then
lsetup -a testing asetup
......@@ -48,6 +48,17 @@ ART_DIRECTORY=$(command -v art.py)
ART_VERSION=$(art.py --version)
echo "INFO: Using ART version ${ART_VERSION} in ${ART_DIRECTORY} directory"
# automatic clean-up build-output EOS area
art-clean.py --eos --release --base-dir=/eos/atlas/atlascerngroupdisk/data-art/build-output --delete "${AtlasBuildBranch}" "${AtlasProject}" "${PLATFORM}" || true &
# configure EOS_MGM_URL
if [ -z "${EOS_MGM_URL}" ]; then
echo "WARNING: EOS_MGM_URL variable is empty, setting it to root://eosatlas.cern.ch"
export EOS_MGM_URL="root://eosatlas.cern.ch"
else
echo "EOS_MGM_URL variable contains", ${EOS_MGM_URL}
fi
# run build tests
SUBDIR=${AtlasBuildBranch}/${AtlasProject}/${PLATFORM}/${AtlasBuildStamp}
OUTDIR="${RELEASE_BASE}/art-build/${SUBDIR}"
......@@ -57,13 +68,6 @@ RESULT=$(eval "${CMD}")
echo "${RESULT}"
# copy the test results to EOS area
if [ -z "${EOS_MGM_URL}" ]; then
echo "WARNING: EOS_MGM_URL variable is empty, setting it to root://eosatlas.cern.ch"
export EOS_MGM_URL="root://eosatlas.cern.ch"
else
echo "EOS_MGM_URL variable contains", ${EOS_MGM_URL}
fi
TARGETDIR=/eos/atlas/atlascerngroupdisk/data-art/build-output/${SUBDIR}
if [[ ! -e ${TARGETDIR} ]]; then
echo Target directory "${TARGETDIR}"
......
......@@ -5,9 +5,9 @@
#
# Example command lines for three types:
#
# art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_type> <number_of_tests>
# art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <number_of_tests>
#
# art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split>] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_name>
# art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split>] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
#
# env: ART_GRID_OPTIONS
#
......@@ -50,6 +50,7 @@ case ${TYPE} in
if [ "$1" == "--n-files" ]; then
NFILES="--nFiles $2"
NFILES_PER_JOB="--nFilesPerJob $2"
NCORE_NFILES_PER_JOB="--nFilesPerJob $2"
shift
shift
fi
......@@ -70,7 +71,15 @@ case ${TYPE} in
NCORES=""
if [ "$1" == "--ncore" ]; then
NCORES="--nCore $2"
NFILES_PER_JOB=""
NFILES_PER_JOB="${NCORE_NFILES_PER_JOB}"
LARGE_JOB=""
shift
shift
fi
ATHENA_MT=""
if [ "$1" == "--athena_mt" ]; then
NCORES="--nCore $2"
LARGE_JOB=""
shift
shift
fi
......@@ -101,6 +110,10 @@ OUTFILE=$1
shift
echo "OUTFILE=${OUTFILE}"
INFORM_PANDA=$1
shift
echo "INFORM_PANDA=${INFORM_PANDA}"
case ${TYPE} in
'batch')
......@@ -123,7 +136,7 @@ case ${TYPE} in
esac
# general options
PATHENA_OPTIONS="--destSE=CERN-PROD_SCRATCHDISK"
PATHENA_OPTIONS="--noBuild --expertOnly_skipScout --noEmail --maxAttempt 2"
OUT="%OUT.tar"
# we seem to have to copy the env variables locally
......@@ -144,7 +157,7 @@ case ${TYPE} in
'single')
# <script_directory> <sequence_tag> <package> <outfile> <job_name>
INTERNAL_COMMAND="grid single"
PATHENA_TYPE_OPTIONS="${LARGE_JOB} ${INDS} ${NFILES} ${NFILES_PER_JOB} ${NCORES}"
PATHENA_TYPE_OPTIONS="${LARGE_JOB} ${INDS} ${NFILES} ${NFILES_PER_JOB} ${NCORES} ${ATHENA_MT}"
ARGS="${JOB_NAME}"
echo "PATHENA_TYPE_OPTIONS=${PATHENA_TYPE_OPTIONS}"
echo "ARGS=${ARGS}"
......@@ -154,13 +167,12 @@ esac
# NOTE: for art-internal.py the current dir can be used as it is copied there
cd "${SUBMIT_DIRECTORY}"/"${PACKAGE}"/run
SUBCOMMAND="./art-internal.py ${INTERNAL_COMMAND} ${IN_FILE} ${SCRIPT_DIRECTORY} ${SEQUENCE_TAG} ${PACKAGE} ${OUT} ${ARGS}"
CMD="pathena ${GRID_OPTIONS} ${PATHENA_OPTIONS} ${PATHENA_TYPE_OPTIONS} --noBuild --expertOnly_skipScout --trf \"${SUBCOMMAND}\" ${SPLIT} --outDS ${OUTFILE} --extOutFile art-job.json"
SUBCOMMAND="./art-internal.py ${INTERNAL_COMMAND} ${IN_FILE} ${SCRIPT_DIRECTORY} ${SEQUENCE_TAG} ${PACKAGE} ${OUT} ${INFORM_PANDA} ${ARGS}"
CMD="pathena ${GRID_OPTIONS} ${PATHENA_OPTIONS} ${PATHENA_TYPE_OPTIONS} --trf \"${SUBCOMMAND}\" ${SPLIT} --outDS ${OUTFILE} --extOutFile art-job.json"
#--disableAutoRetry
#--excludedSite=ANALY_TECHNION-HEP-CREAM
#--site=ANALY_NIKHEF-ELPROD_SHORT,ANALY_NIKHEF-ELPROD"
#--site=ANALY_FZK,ANALY_BNL,ANALY_RAL"
echo "Command: ${CMD}"
......
......@@ -7,13 +7,13 @@ You need to setup for an ATLAS release before using ART.
Usage:
art.py run [-v -q --type=<T> --max-jobs=<N> --ci] <script_directory> <sequence_tag>
art.py grid [-v -q --type=<T> --max-jobs=<N> --config=<file> --copy -n] <script_directory> <sequence_tag>
art.py grid [-v -q --type=<T> --max-jobs=<N> -n] <script_directory> <sequence_tag>
art.py submit [-v -q --type=<T> --max-jobs=<N> --config=<file> -n] <sequence_tag> [<package>]
art.py copy [-v -q --user=<user> --dst=<dir>] <indexed_package>
art.py copy [-v -q --user=<user> --dst=<dir> --no-unpack --tmp=<dir> --seq=<N> --keep-tmp] <indexed_package>
art.py validate [-v -q] <script_directory>
art.py included [-v -q --type=<T> --test-type=<TT>] <script_directory>
art.py compare grid [-v -q --days=<D> --user=<user> --entries=<entries>] <package> <test_name>
art.py compare ref [-v -q --entries=<entries>] <path> <ref_path>
art.py compare grid [-v -q --days=<D> --user=<user> --entries=<entries> --file=<pattern>... --mode=<mode>] <package> <test_name>
art.py compare ref [-v -q --entries=<entries> --file=<pattern>... --mode=<mode>] <path> <ref_path>
art.py list grid [-v -q --user=<user> --json --test-type=<TT>] <package>
art.py log grid [-v -q --user=<user>] <package> <test_name>
art.py output grid [-v -q --user=<user>] <package> <test_name>
......@@ -23,16 +23,21 @@ Usage:
Options:
--ci Run Continuous Integration tests only (using env: AtlasBuildBranch)
--config=<file> Use specific config file [default: art-configuration.yml]
--copy Run the copy after running the jobs
--days=<D> Number of days ago to pick up reference for compare [default: 1]
--dst=<dir> Destination directory for downloaded files
--entries=<entries> Number of entries to compare [default: 10]
--file=<pattern>... Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
-h --help Show this screen.
--json Output in json format
--keep-tmp Keep temporary directory while copying
--max-jobs=<N> Maximum number of concurrent jobs to run [default: 0]
--mode=<mode> Sets the mode for diff-root {summary, detailed} [default: detailed]
-n --no-action No real submit will be done
--no-unpack Do not unpack downloaded tar files
-q --quiet Show less information, only warnings and errors
--seq=<N> Use N as postfix on destination nightly-tag (for retries) [default: 0]
--test-type=<TT> Type of test (e.g. all, batch or single) [default: all]
--tmp=<dir> Temporary directory for downloaded files and caching of EXT0
--type=<T> Type of job (e.g. grid, build)
--user=<user> User to use for RUCIO
-v --verbose Show more information, debug level
......@@ -41,7 +46,7 @@ Options:
Sub-commands:
run Run jobs from a package in a local build (needs release and grid setup)
grid Run jobs from a package on the grid (needs release and grid setup)
submit Submit nightly jobs to the grid (NOT for users)
submit Submit nightly jobs to the grid and informs big panda (NOT for users)
copy Copy outputs and logs from RUCIO
validate Check headers in tests
included Show list of files which will be included for art submit/art grid
......@@ -73,7 +78,7 @@ Tests are called with:
"""
__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
__version__ = '0.9.5'
__version__ = '0.10.21'
import logging
import os
......@@ -97,8 +102,10 @@ def compare_ref(path, ref_path, **kwargs):
"""Compare the output of a job."""
set_log(kwargs)
art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
files = kwargs['file']
entries = kwargs['entries']
exit(ArtBase(art_directory).compare_ref(path, ref_path, entries))
mode = kwargs['mode']
exit(ArtBase(art_directory).compare_ref(path, ref_path, files, entries, mode))
@dispatch.on('compare', 'grid')
......@@ -108,9 +115,11 @@ def compare_grid(package, test_name, **kwargs):
art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
(nightly_release, project, platform, nightly_tag) = get_atlas_env()
days = int(kwargs['days'])
entries = kwargs['entries']
user = kwargs['user']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).compare(package, test_name, days, user, entries=entries, shell=True))
files = kwargs['file']
entries = kwargs['entries']
mode = kwargs['mode']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).compare(package, test_name, days, user, files, entries=entries, mode=mode, shell=True))
@dispatch.on('list', 'grid')
......@@ -153,11 +162,13 @@ def submit(sequence_tag, **kwargs):
art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
(nightly_release, project, platform, nightly_tag) = get_atlas_env()
job_type = 'grid' if kwargs['type'] is None else kwargs['type']
user = os.getenv('USER', 'artprod')
inform_panda = user == 'artprod'
package = kwargs['package']
config = kwargs['config']
no_action = kwargs['no_action']
wait_and_copy = True
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, package, no_action, wait_and_copy, config))
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
@dispatch.on('grid')
......@@ -167,11 +178,12 @@ def grid(script_directory, sequence_tag, **kwargs):
art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
(nightly_release, project, platform, nightly_tag) = get_atlas_env()
job_type = 'grid' if kwargs['type'] is None else kwargs['type']
inform_panda = False
package = None
config = kwargs['config']
config = None
no_action = kwargs['no_action']
wait_and_copy = kwargs['copy']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory=script_directory, skip_setup=True, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, package, no_action, wait_and_copy, config))
wait_and_copy = False
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory=script_directory, skip_setup=True, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
@dispatch.on('run')
......@@ -193,7 +205,11 @@ def copy(indexed_package, **kwargs):
# NOTE: default depends on USER, not set it here but in ArtGrid.copy
dst = kwargs['dst']
user = kwargs['user']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).copy(indexed_package, dst=dst, user=user))
no_unpack = kwargs['no_unpack']
tmp = kwargs['tmp']
seq = int(kwargs['seq'])
keep_tmp = kwargs['keep_tmp']
exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).copy(indexed_package, dst=dst, user=user, no_unpack=no_unpack, tmp=tmp, seq=seq, keep_tmp=keep_tmp))
@dispatch.on('validate')
......