Commit 03521d8f authored by Luke Grazette

Merge branch 'master' into 'lugrazet-BW-spruce-ydayshlt2'

# Conflicts from !362:
#   python/MooreTests/run_bandwidth_test_jobs.py
parents d3be07ce f4c873bf
1 merge request: !361 [RTADPA BW Tests] New Sprucing bandwidth test using latest-available HLT2 input.
Pipeline #6547086 passed
@@ -27,7 +27,7 @@ DEFAULT_CACHE_DIRS = {'default': '.'}
 # prefer XDG_RUNTIME_DIR which should be on tmpfs
 FALLBACK_CACHE_DIR = os.getenv('XDG_RUNTIME_DIR', tempfile.gettempdir())
-FILE_TO_COPY = "mdf:root://eoslhcb.cern.ch//eos/lhcb/storage/lhcbpr/www/UpgradeRateTest/hlt2_bw_testing__production__full.mdf"
+FILE_TO_COPY = "mdf:root://eoslhcb.cern.ch//eos/lhcb/storage/lhcbpr/www/UpgradeRateTest/current_hlt2_output/hlt2_bw_testing__production__full.mdf"
 def default_cache_dirs():
...
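For context, the updated FILE_TO_COPY keeps the Gaudi-style "mdf:" technology prefix in front of the xrootd URL and now points at the current_hlt2_output directory. A minimal, hypothetical sketch of how a consumer could separate the prefix from the bare URL (split_technology_prefix is illustrative and not part of this repository):

    # Illustrative only: FILE_TO_COPY combines a "mdf:" technology prefix with
    # an xrootd URL; a consumer wanting the bare URL would strip the prefix.
    FILE_TO_COPY = (
        "mdf:root://eoslhcb.cern.ch//eos/lhcb/storage/lhcbpr/www/UpgradeRateTest/"
        "current_hlt2_output/hlt2_bw_testing__production__full.mdf")


    def split_technology_prefix(url, prefix="mdf:"):
        """Return the bare xrootd URL with the leading technology prefix removed."""
        return url[len(prefix):] if url.startswith(prefix) else url


    print(split_technology_prefix(FILE_TO_COPY))
    # -> root://eoslhcb.cern.ch//eos/lhcb/.../current_hlt2_output/hlt2_bw_testing__production__full.mdf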
@@ -28,6 +28,7 @@ import atexit
 import shutil
 import yaml
 from PRConfig.bandwidth_helpers import FileNameHelper
+from datetime import datetime
 # Default cache dir is the current working directory as this is most convenient for the machine
 # that the test runs on periodically. It assumes the working directory is not cleaned up often,
@@ -37,9 +38,6 @@ DEFAULT_CACHE_DIRS = {'default': ['.']}
 # prefer XDG_RUNTIME_DIR which should be on tmpfs
 FALLBACK_CACHE_DIR = os.getenv('XDG_RUNTIME_DIR', tempfile.gettempdir())
-# Limit size of output log if many options files
-MAX_NFILES_TO_PRINT_TO_LOG = 10
 def default_cache_dirs():
     hostname = socket.getfqdn()
@@ -203,7 +201,6 @@ if __name__ == '__main__':
     job_inputs = [
         inputs_fns
     ]  # This is a list to allow for possible NUMA extension: see discussion on !316.
-    logging.info(inputs_fns[:MAX_NFILES_TO_PRINT_TO_LOG])
     # Set up local directories where inputs are cached
     if args.download_input_files:
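The comment kept above explains that job_inputs stays a list of input lists so that a future NUMA extension (see the discussion on !316) could give each NUMA domain its own job. A hypothetical sketch of such a split, assuming a simple round-robin over domains (chunk_per_numa_domain is illustrative, not code from this merge):

    # Hypothetical illustration of the NUMA extension hinted at above:
    # divide the input files into one sub-list per NUMA domain.
    def chunk_per_numa_domain(files, n_domains):
        """Round-robin the input files over n_domains sub-lists."""
        return [files[i::n_domains] for i in range(n_domains)]


    inputs_fns = [f"file_{i}.mdf" for i in range(6)]
    job_inputs = chunk_per_numa_domain(inputs_fns, n_domains=2)
    # -> [['file_0.mdf', 'file_2.mdf', 'file_4.mdf'],
    #     ['file_1.mdf', 'file_3.mdf', 'file_5.mdf']]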
@@ -223,22 +220,25 @@
         # Now download files
         for i, inputs in enumerate(job_inputs):
-            logging.info(
-                f'Downloading input files {inputs[:MAX_NFILES_TO_PRINT_TO_LOG]}'
-            )
             if all(is_remote(url) for url in inputs):
                 from Moore.qmtest.context import download_mdf_inputs_locally
                 # download_mdf_inputs_locally only downloads if files
                 # are not already available locally on the machine
+                before_copy = datetime.now()
+                logging.info(
+                    f'Downloading inputs for bandwidth job to {args.cache_dirs[i]}'
+                )
                 logging.info(
-                    'Downloading inputs for bandwidth job to {}'.format(
-                        args.cache_dirs[i]))
+                    f'There are {len(inputs)} input files: [{inputs[0]}, {inputs[1]}, ... ]'
+                )
                 kB_to_GB = 1e3
                 job_inputs[i] = download_mdf_inputs_locally(
                     inputs,
                     args.cache_dirs[i],
                     max_size=args.avg_evt_size * kB_to_GB * args.events)
-                logging.info(inputs)
+                logging.info(
+                    f"Finished file downloads. This took: {datetime.now() - before_copy}"
+                )
             elif any(is_remote(url) for url in inputs_fns):
                 parser.error('inputs must either be all xrootd or all local')
             else:
...
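The added lines above wrap the download in a simple datetime-based timer and summarise the inputs before passing them, together with a size budget of args.avg_evt_size * kB_to_GB * args.events, to download_mdf_inputs_locally. A self-contained sketch of that timing-and-logging pattern, with a stand-in download function in place of the real helper from Moore.qmtest.context (all names below are illustrative):

    import logging
    import time
    from datetime import datetime

    logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")


    def timed_download(inputs, cache_dir, download_fn):
        """Log a summary of the inputs, run the download, and log the elapsed time.

        download_fn stands in for Moore.qmtest.context.download_mdf_inputs_locally;
        everything else here is a sketch rather than the repository's code.
        """
        before_copy = datetime.now()
        logging.info(f"Downloading inputs for bandwidth job to {cache_dir}")
        logging.info(
            f"There are {len(inputs)} input files: [{inputs[0]}, {inputs[1]}, ... ]")
        downloaded = download_fn(inputs, cache_dir)
        logging.info(
            f"Finished file downloads. This took: {datetime.now() - before_copy}")
        return downloaded


    # Dummy usage: a stand-in download function that just sleeps briefly.
    def fake_download(inputs, cache_dir):
        time.sleep(0.1)
        return list(inputs)


    timed_download(["a.mdf", "b.mdf", "c.mdf"], ".", fake_download)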