Commit 77f3c503 authored by Rafal Bielski, committed by Walter Lampl

Fix/improve Trigger ART timeout handling

Fix: decode bytes from subprocess.check_output to string
Improvements:
* drop LCG96 workaround
* catch exception when killing process or producing backtrace
parent f58c75cf
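
For context, a minimal standalone sketch (not part of this commit, using a placeholder echo command rather than the real gdb-backtrace.sh call) of the bytes-vs-string issue behind the fix: on Python 3, subprocess.check_output returns bytes, so concatenating its result onto a str fails with a TypeError unless the output is decoded first.

import subprocess

backtrace = ''
# check_output returns bytes on Python 3; decode before concatenating with a str
out = subprocess.check_output('echo hello', stderr=subprocess.STDOUT, shell=True)
backtrace += out.decode('utf-8')
print(backtrace)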
@@ -6,7 +6,6 @@ import re
 import argparse
 import sys
 import os
-import six
 
 desc = 'Tool to check for error messages in a log file. By default ERROR, FATAL \
 and CRITICAL messages are considered. The config file may be used to \
@@ -131,8 +130,7 @@ def scanLogfile():
     msgLevels = re.compile('|'.join(pattern))
     igLevels = re.compile('|'.join(ignorePattern))
     logFileAddress = args.logfile
-    encargs = {} if six.PY2 else {'encoding' : 'utf-8'}
-    with open(logFileAddress,'r',**encargs) as logFile:
+    with open(logFileAddress,'r', encoding='utf-8') as logFile:
         tracing = False
         for line in logFile:
             #Tracing only makes sense for errors
......
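
Two Python-3-only replacements recur throughout the files below: six.iteritems(d) becomes d.items(), and the six.PY2 encargs workaround becomes a plain encoding='utf-8' keyword argument to open(). A tiny standalone illustration of both (the dictionary and file name are arbitrary):

d = {'ERROR': 2, 'WARNING': 5}

# previously: for k, v in six.iteritems(d): ...
for k, v in d.items():
    print(k, v)

# previously: encargs = {} if six.PY2 else {'encoding' : 'utf-8'}; open(name, **encargs)
with open('example.log', 'w', encoding='utf-8') as f:
    f.write('hello\n')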
@@ -16,7 +16,6 @@ import logging
 import argparse
 import json
 from collections import OrderedDict
-import six
 
 default_ignore_patterns = [
@@ -106,11 +105,11 @@ def make_summary(result):
 def print_result(summary, full_result, print_messages=False):
     summary_str = 'Found the following number of messages:\n'
-    for p, n in six.iteritems(summary):
+    for p, n in summary.items():
         summary_str += '{:8d} {:s} messages\n'.format(n, p)
     logging.info(summary_str)
     if print_messages:
-        for p, lines in six.iteritems(full_result):
+        for p, lines in full_result.items():
             logging.info('##### The following %s messages were found #####', p)
             for line in lines:
                 print(line, end='') # noqa: ATL901
@@ -144,8 +143,7 @@ def main():
             logging.error('Cannot open file %s, skipping', fname)
             continue
         logging.info('Analysing file %s', fname)
-        encargs = {} if six.PY2 else {'encoding' : 'utf-8'}
-        with open(fname, **encargs) as f:
+        with open(fname, encoding='utf-8') as f:
             messages = extract_messages(f, start, end, ignore)
             summary = make_summary(messages)
             print_result(summary, messages, args.printMessages)
......
@@ -10,7 +10,6 @@ import argparse
 import shutil
 import subprocess
 import json
-import six
 from TrigValTools.TrigARTUtils import package_prefix, find_scripts, remember_cwd
@@ -121,7 +120,7 @@ def analyse_results(all_test_results):
     max_len_col1 = len(max(table.keys(), key=len))
     max_len_col2 = len(max(table.values(), key=len))
     logging.info('-'*(max_len_col1+max_len_col2+7))
-    for k, v in six.iteritems(table):
+    for k, v in table.items():
         logging.info('| {col1:<{width1}} | {col2:<{width2}} |'.format(
             col1=k, width1=max_len_col1,
             col2=v, width2=max_len_col2))
......
@@ -11,7 +11,6 @@ import logging
 import argparse
 import subprocess
 import errno
-from six import iteritems
 from collections import OrderedDict
 from TrigValTools.TrigARTUtils import find_scripts, remember_cwd
@@ -117,7 +116,7 @@ def main():
     logging.info('RESULTS SUMMARY:')
     logging.info('='*(max_name_len+11))
     final_code = 0
-    for script, result in iteritems(results):
+    for script, result in results.items():
         logging.info('| %s : %4d |', '{:{width}s}'.format(script, width=max_name_len), result)
         if abs(result) > final_code:
             final_code = abs(result)
......
@@ -12,7 +12,6 @@ import re
 import sys
 import logging
 import os
-import six
 from collections import OrderedDict
 from TrigValTools.TrigARTUtils import first_existing_file, newest_file
@@ -84,7 +83,7 @@ def convert_to_megabytes(number, unit):
         "GB": 1024,
         'TB': 1024**2
     }
-    for unit_name, mult in six.iteritems(multipliers):
+    for unit_name, mult in multipliers.items():
         if unit_name == unit:
             return float(number)*mult
     logging.error("Unit conversion failed from {} to MB".format(unit))
......
@@ -10,7 +10,6 @@ import os
 import re
 import subprocess
 import json
-import six
 import glob
 from TrigValTools.TrigValSteering.Step import Step, get_step_from_list
@@ -147,9 +146,8 @@ class LogMergeStep(Step):
                 self.log_files.append(f)
 
     def merge_logs(self):
-        encargs = {} if six.PY2 else {'encoding' : 'utf-8'}
         try:
-            with open(self.merged_name, 'w', **encargs) as merged_file:
+            with open(self.merged_name, 'w', encoding='utf-8') as merged_file:
                 for log_name in self.log_files:
                     if not os.path.isfile(log_name):
                         if self.warn_if_missing:
@@ -157,7 +155,7 @@ class LogMergeStep(Step):
                             merged_file.write(
                                 '### WARNING Missing {} ###\n'.format(log_name))
                         continue
-                    with open(log_name, **encargs) as log_file:
+                    with open(log_name, encoding='utf-8') as log_file:
                         merged_file.write('### {} ###\n'.format(log_name))
                         for line in log_file:
                             merged_file.write(line)
@@ -305,11 +303,10 @@ class RegTestStep(RefComparisonStep):
         if not os.path.isfile(log_file):
             self.log.error('%s input file %s is missing', self.name, log_file)
             return False
-        encargs = {} if six.PY2 else {'encoding' : 'utf-8'}
-        with open(log_file, **encargs) as f_in:
+        with open(log_file, encoding='utf-8') as f_in:
             matches = re.findall('({}.*).*$'.format(self.regex),
                                  f_in.read(), re.MULTILINE)
-            with open(self.input_file, 'w', **encargs) as f_out:
+            with open(self.input_file, 'w', encoding='utf-8') as f_out:
                 for line in matches:
                     linestr = str(line[0]) if type(line) is tuple else line
                     f_out.write(linestr+'\n')
@@ -563,8 +560,7 @@ class ZeroCountsStep(Step):
                              self.name, input_file)
             return -1
         lines_checked = 0
-        encargs = {} if six.PY2 else {'encoding' : 'utf-8'}
-        with open(input_file, **encargs) as f_in:
+        with open(input_file, encoding='utf-8') as f_in:
             for line in f_in.readlines():
                 split_line = line.split()
                 lines_checked += 1
@@ -648,7 +644,7 @@ class MessageCountStep(Step):
                 self.log.warning('%s cannot open file %s', self.name, json_file)
             with open(json_file) as f:
                 summary = json.load(f)
-            for level, threshold in six.iteritems(self.thresholds):
+            for level, threshold in self.thresholds.items():
                 if summary[level] > threshold:
                     self.result += 1
                     self.log.info(
......
@@ -12,6 +12,7 @@ import signal
 import subprocess
 import time
 import re
+import psutil
 from enum import Enum
 from threading import Timer
 from TrigValTools.TrigValSteering.Common import get_logger, art_result, running_in_CI
@@ -86,24 +87,24 @@ class Step(object):
         where the first is filled with the backtrace by this function
         (it has to be a list to be mutable).
         '''
-        # Produce backtrace for the parent and all children
         try:
-            import psutil
+            # Produce backtrace for the parent and all children
             parent = psutil.Process(pid)
             backtrace = ''
             for proc in [parent] + parent.children(recursive=True):
                 backtrace += '\nTraceback for {} PID {}:\n'.format(proc.name(), proc.pid)
                 backtrace += subprocess.check_output('$ROOTSYS/etc/gdb-backtrace.sh {}'.format(proc.pid),
-                                                     stderr=subprocess.STDOUT, shell=True)
-        except ImportError:
-            # psutil is missing in LCG_96 python3
-            backtrace = 'psutil not available; no backtrace generated'
-        backtrace_list[0] = backtrace
-        # Kill the process
-        os.killpg(pid, signal)
+                                                     stderr=subprocess.STDOUT, shell=True).decode('utf-8')
+            backtrace_list[0] = backtrace
+            # Kill the process
+            os.killpg(pid, signal)
+        except Exception as e:
+            # This may happen e.g. if one of the processes finishes before we generate backtrace
+            msg = 'Caught exception while generating backtrace: ' + str(e)
+            backtrace_list[0] = msg
+            self.log.error(msg)
 
     def __execute_with_timeout(self, cmd, timeout_sec):
         '''
......
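
For readers unfamiliar with the pattern in the last hunk: the timeout handler walks the process tree with psutil, collects a per-process report into a string, stores it in a one-element list (so the caller, which hands the list to a threading.Timer callback, can read it afterwards), and finally kills the process group; the whole body is wrapped in try/except because a child can exit while it is being inspected. Below is a self-contained sketch under those assumptions, with hypothetical names and a plain ps call substituted for the $ROOTSYS/etc/gdb-backtrace.sh script so it runs without a ROOT installation.

import os
import signal
import subprocess
import psutil

def trace_and_kill(pid, sig, backtrace_list):
    '''Hypothetical stand-in for the timeout handler shown above.'''
    try:
        # Collect a report for the parent and all of its children
        parent = psutil.Process(pid)
        report = ''
        for proc in [parent] + parent.children(recursive=True):
            report += '\nProcess {} PID {}:\n'.format(proc.name(), proc.pid)
            # check_output returns bytes, hence the decode
            report += subprocess.check_output(
                'ps -o pid,stat,cmd -p {}'.format(proc.pid),
                stderr=subprocess.STDOUT, shell=True).decode('utf-8')
        backtrace_list[0] = report
        # Kill the whole process group
        os.killpg(pid, sig)
    except Exception as e:
        # e.g. a child finished between listing and inspecting it
        backtrace_list[0] = 'Caught exception while generating report: ' + str(e)

if __name__ == '__main__':
    # Demo: start a sleeping child in its own process group, report on it, then kill it
    child = subprocess.Popen(['sleep', '60'], preexec_fn=os.setsid)
    result = ['']
    trace_and_kill(child.pid, signal.SIGTERM, result)
    child.wait()
    print(result[0])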