diff --git a/package_alerts/package_alerts b/package_alerts/package_alerts
index f06a4dd758098299fdccba5f6f9f7c2083b2e86b..c116e47fcdbbc41b6eb4df26876c1f922c92cab9 100755
--- a/package_alerts/package_alerts
+++ b/package_alerts/package_alerts
@@ -1,4 +1,5 @@
 #!/usr/bin/python3
+""" script to alert us via mm on new 'interesting' packages """
 
 import os
 import glob
@@ -9,7 +10,6 @@ import signal
 import subprocess
 import sys
 import tempfile
-import time
 from collections import ChainMap
 from datetime import datetime
 import dnf
@@ -32,7 +32,7 @@ if (
     )
     sys.exit(1)
 
-with open(f"/root/{interesting_packages_file}") as f:
+with open(f"/root/{interesting_packages_file}", encoding='utf-8') as f:
     interesting_packages = yaml.safe_load(f)
     try:
         interesting_packages = interesting_packages["packages"]
@@ -44,7 +44,7 @@ with open(f"/root/{interesting_packages_file}") as f:
 
 # In case somebody screws up the input format, make sure it's a dict and not a list of dicts
 for package in interesting_packages:
-    if type(interesting_packages[package]) is list:
+    if isinstance(interesting_packages[package], list):
         interesting_packages[package] = dict(ChainMap(*interesting_packages[package]))
 
 feeds = unformatted_feeds.split(',')
@@ -52,7 +52,7 @@
 logfile_retain_days = 30
 logfilename = f"/work/{datetime.today().strftime('%Y%m%d-%H%M')}.log"
 # pylint: disable=consider-using-with
-logfile = open(logfilename, "w")
+logfile = open(logfilename, "w", encoding='utf-8')
 
 def generate_temp_file(prefix):
     """Generate a temp file"""
@@ -100,12 +100,11 @@ def get_changelog(token, payload):
         attempts += 1
         result = request_url(token, url, stream=True)
         if result.status_code == 200:
-            # TODO: try and do this with dnf 'add_remote_rpms', although we would need to auth
             with open(temp_file, "wb") as f:
                 try:
                     for chunk in result.iter_content(chunk_size=1024):
                         if chunk:
-                                f.write(chunk)
+                            f.write(chunk)
                             rpm_downloaded = True
                 except:
                     print(f"Saving changelog appeared to have failed. Retrying {attempts}/{max_attempts}", flush=True)
@@ -115,7 +114,7 @@ def get_changelog(token, payload):
         else:
             print(f"API call failed, Retrying {attempts}/{max_attempts}", flush=True)
     if not rpm_downloaded:
-        print(f"Changelog failed to download, exiting", flush=True)
+        print("Changelog failed to download, exiting", flush=True)
         sys.exit(1)
     # TODO: maybe have some better logic instead of the head command
     changelog = do_execute(f"rpm -qp {temp_file} --changelog | head -20", return_output=True)
@@ -160,7 +159,7 @@ def request_url(token, url, params=None, stream=False):
 def get_upstream(token, feed):
     """retrieve full list of packages from a cset / package url"""
     packages = []
-    matches = re.match(f"(\w+)-([0-9])-\w+-(\w+)-(\w+)", feed)
+    matches = re.match(r"(\w+)-([0-9])-\w+-(\w+)-(\w+)", feed)
     arch = matches.group(3)
     # almalinux is simplier, so we process it earlier
     if 'almalinux' in feed:
@@ -173,9 +172,9 @@ def get_upstream(token, feed):
     # When querying 8 from a 9 host, libdnf spews warnings about modularity
     # to stderr. We don't care about that so let's silence it.
     original_stderr = sys.stderr
-    f = open('/dev/null', 'w')
-    sys.stderr = f
-    base.fill_sack(load_system_repo=False)
+    with open('/dev/null', 'w', encoding='utf-8') as f:
+        sys.stderr = f
+        base.fill_sack(load_system_repo=False)
     # We can care about stderr again
     sys.stderr = original_stderr
     q = base.sack.query()
@@ -227,13 +226,13 @@ def get_local(feed):
     """read previously saved packages into a list"""
     try:
         with open(f"/work/{feed}.txt", "rb") as f:
-            local_packages = []
+            lp = []
             for line in f:
                 x = line[:-1]
-                local_packages.append(x.decode())
+                lp.append(x.decode())
     except FileNotFoundError:
-        local_packages = []
-    return local_packages
+        lp = []
+    return lp
 
 
 def set_local(packages, feed):
@@ -300,13 +299,12 @@ def format_release(p, f, token):
     )
     print(f" -- Payload sent: {payload}", flush=True)
 
-def timeout_handler(num, stack):
+def timeout_handler(_num, _stack):
     """ function to handle timeouts for long running function """
     print("Received SIGALRM")
     raise TimeoutError
 
 if __name__ == "__main__":
-    """this is the main class, it's amazing."""
     for feed in feeds:
         if 'rhel' in feed:
             token = get_token(offline_token)