From ebd38185ac4462f668271764441d7f448336e384 Mon Sep 17 00:00:00 2001 From: Andrea Waltlova Date: Thu, 27 Jun 2024 15:07:29 +0200 Subject: [PATCH] Update sync script Signed-off-by: Andrea Waltlova --- Makefile | 6 + misc/sync_scripts.py | 111 ++++++++--------- playbooks/leapp_preupgrade_script.yml | 172 +++++++++++++++++++++----- playbooks/leapp_upgrade_script.yml | 172 +++++++++++++++++++++----- 4 files changed, 344 insertions(+), 117 deletions(-) diff --git a/Makefile b/Makefile index 841085a..880a58f 100644 --- a/Makefile +++ b/Makefile @@ -30,3 +30,9 @@ install: install-deps pre-commit tests: install-deps . $(PYTHON_VENV)/bin/activate; \ $(PYTEST_CALL) + +sync: install-deps + python misc/sync_scripts.py worker + +sync-advisor: install-deps + python misc/sync_scripts.py advisor diff --git a/misc/sync_scripts.py b/misc/sync_scripts.py index 1b64a0a..514af83 100644 --- a/misc/sync_scripts.py +++ b/misc/sync_scripts.py @@ -1,49 +1,50 @@ +import sys import os -import argparse import ruamel.yaml +# Get the last argument used in the commandline if available, otherwise, use +# "worker" as the default value. 
+SYNC_PROJECT = sys.argv[1:][-1] if sys.argv[1:] else "worker" + # Scripts located in this project SCRIPT_PATH = "scripts/leapp_script.py" -REPO_PRE_UPGRADE_YAML_PATH = os.path.join(".", "playbooks/leapp_preupgrade_script.yml") -REPO_UPGRADE_YAML_PATH = os.path.join(".", "playbooks/leapp_upgrade_script.yml") - -WORKER_PRE_UPGRADE_YAML_PATH = os.path.join( - "..", "rhc-worker-script/development/nginx/data/leapp_preupgrade.yml" -) -WORKER_UPGRADE_YAML_PATH = os.path.join( - "..", "rhc-worker-script/development/nginx/data/leapp_upgrade.yml" -) - -DEFAULT_YAML_ENVELOPE = """ -- name: LEAPP - vars: - insights_signature: | - ascii_armored gpg signature - insights_signature_exclude: /vars/insights_signature - interpreter: /usr/bin/python - content: | - placeholder - content_vars: - # variables that will be handed to the script as environment vars - # will be prefixed with RHC_WORKER_* - LEAPP_SCRIPT_TYPE: type -""" +SCRIPTS_YAML_PATH = { + # TODO(r0x0d): Deprecate this in the future + "worker": ( + os.path.join( + "..", "rhc-worker-script/development/nginx/data/leapp_preupgrade_script.yml" + ), + os.path.join( + "..", "rhc-worker-script/development/nginx/data/leapp_upgrade_script.yml" + ), + ), + "tasks": ( + "playbooks/leapp_preupgrade_script.yml", + "playbooks/leapp_upgrade_script.yml", + ), + "advisor": ( + os.path.join( + "..", + "advisor-backend/api/advisor/tasks/playbooks/leapp_preupgrade_script.yml", + ), + os.path.join( + "..", + "advisor-backend/api/advisor/tasks/playbooks/leapp_upgrade_script.yml", + ), + ), +} def _get_updated_yaml_content(yaml_path, script_path): if not os.path.exists(yaml_path): - yaml = ruamel.yaml.YAML() - config = yaml.load(DEFAULT_YAML_ENVELOPE) - mapping = 2 - offset = 0 - else: - config, mapping, offset = ruamel.yaml.util.load_yaml_guess_indent( - open(yaml_path) - ) - print(mapping, offset) + raise SystemExit(f"Couldn't find yaml file: {yaml_path}") + + config, mapping, offset = ruamel.yaml.util.load_yaml_guess_indent( + 
open(yaml_path, encoding="utf-8") + ) - with open(script_path) as script: + with open(script_path, encoding="utf-8") as script: content = script.read() script_type = "PREUPGRADE" if "preupgrade" in yaml_path else "UPGRADE" @@ -57,36 +58,24 @@ def _write_content(config, path, mapping=None, offset=None): yaml = ruamel.yaml.YAML() if mapping and offset: yaml.indent(mapping=mapping, sequence=mapping, offset=offset) - with open(path, "w") as handler: + with open(path, "w", encoding="utf-8") as handler: yaml.dump(config, handler) def main(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--target", - choices=["repo", "worker"], - help="Target to sync scripts to", - default="worker", - ) - args = parser.parse_args() - - if args.target == "repo": - print("Syncing scripts to repo") - pre_upgrade_path = REPO_PRE_UPGRADE_YAML_PATH - upgrade_path = REPO_UPGRADE_YAML_PATH - - elif args.target == "worker": - print("Syncing scripts to worker") - pre_upgrade_path = WORKER_PRE_UPGRADE_YAML_PATH - upgrade_path = WORKER_UPGRADE_YAML_PATH - - config, mapping, offset = _get_updated_yaml_content(pre_upgrade_path, SCRIPT_PATH) - print("Writing new content to %s" % pre_upgrade_path) - _write_content(config, pre_upgrade_path, mapping, offset) - config, mapping, offset = _get_updated_yaml_content(upgrade_path, SCRIPT_PATH) - print("Writing new content to %s" % upgrade_path) - _write_content(config, upgrade_path, mapping, offset) + if SYNC_PROJECT not in ("worker", "advisor", "tasks"): + raise SystemExit( + f"'{SYNC_PROJECT}' not recognized. 
Valid values are 'worker', 'tasks' or 'advisor'" + ) + + analysis_script, conversion_script = SCRIPTS_YAML_PATH[SYNC_PROJECT] + + config, mapping, offset = _get_updated_yaml_content(analysis_script, SCRIPT_PATH) + print(f"Writing new content to {analysis_script}") + _write_content(config, analysis_script, mapping, offset) + config, mapping, offset = _get_updated_yaml_content(conversion_script, SCRIPT_PATH) + print(f"Writing new content to {conversion_script}") + _write_content(config, conversion_script, mapping, offset) if __name__ == "__main__": diff --git a/playbooks/leapp_preupgrade_script.yml b/playbooks/leapp_preupgrade_script.yml index 6bce4b0..d6dd0ca 100644 --- a/playbooks/leapp_preupgrade_script.yml +++ b/playbooks/leapp_preupgrade_script.yml @@ -6,9 +6,15 @@ interpreter: /usr/bin/python content: | import json + import logging import os + import shutil + import sys import subprocess + from time import gmtime, strftime + + # SCRIPT_TYPE is either 'PREUPGRADE' or 'UPGRADE' # Value is set in signed yaml envelope in content_vars (RHC_WORKER_LEAPP_SCRIPT_TYPE) SCRIPT_TYPE = os.environ.get("RHC_WORKER_LEAPP_SCRIPT_TYPE", "None") @@ -37,8 +43,24 @@ } - # Both classes taken from: - # https://github.com/oamg/convert2rhel-worker-scripts/blob/main/scripts/preconversion_assessment_script.py + # Path to store the script logs + LOG_DIR = "/var/log/leapp-insights-tasks" + # Log filename for the script. It will be created based on the script type of + # execution. 
+ LOG_FILENAME = "leapp-insights-tasks-%s.log" % ( + "upgrade" if IS_UPGRADE else "preupgrade" + ) + + # Path to the sos extras folder + SOS_REPORT_FOLDER = "/etc/sos.extras.d" + # Name of the file based on the task type for sos report + SOS_REPORT_FILE = "leapp-insights-tasks-%s-logs" % ( + "upgrade" if IS_UPGRADE else "preupgrade" + ) + + logger = logging.getLogger(__name__) + + class ProcessError(Exception): """Custom exception to report errors during setup and run of leapp""" @@ -94,9 +116,100 @@ } + def setup_sos_report(): + """Setup sos report log collection.""" + if not os.path.exists(SOS_REPORT_FOLDER): + os.makedirs(SOS_REPORT_FOLDER) + + script_log_file = os.path.join(LOG_DIR, LOG_FILENAME) + sosreport_link_file = os.path.join(SOS_REPORT_FOLDER, SOS_REPORT_FILE) + # In case the file for sos report does not exist, lets create one and add + # the log file path to it. + if not os.path.exists(sosreport_link_file): + with open(sosreport_link_file, mode="w") as handler: + handler.write(":%s\n" % script_log_file) + + + def setup_logger_handler(): + """ + Setup custom logging levels, handlers, and so on. Call this method from + your application's main start point. + """ + # Receive the log level from the worker and try to parse it. If the log + # level is not compatible with what the logging library expects, set the + # log level to INFO automatically. 
+ log_level = os.getenv("RHC_WORKER_LOG_LEVEL", "INFO").upper() + log_level = logging.getLevelName(log_level) + if isinstance(log_level, str): + log_level = logging.INFO + + # enable raising exceptions + logging.raiseExceptions = True + logger.setLevel(log_level) + + # create sys.stdout handler for info/debug + stdout_handler = logging.StreamHandler(sys.stdout) + formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") + stdout_handler.setFormatter(formatter) + + # Create the directory if it don't exist + if not os.path.exists(LOG_DIR): + os.makedirs(LOG_DIR) + + log_filepath = os.path.join(LOG_DIR, LOG_FILENAME) + file_handler = logging.FileHandler(log_filepath) + file_handler.setFormatter(formatter) + + # can flush logs to the file that were logged before initializing the file handler + logger.addHandler(stdout_handler) + logger.addHandler(file_handler) + + + def archive_old_logger_files(): + """ + Archive the old log files to not mess with multiple runs outputs. Every + time a new run begins, this method will be called to archive the previous + logs if there is a `convert2rhel.log` file there, it will be archived using + the same name for the log file, but having an appended timestamp to it. + For example: + /var/log/leapp-insights-tasks/archive/leapp-insights-tasks-1635162445070567607.log + /var/log/leapp-insights-tasks/archive/leapp-insights-tasks-1635162478219820043.log + This way, the user can track the logs for each run individually based on + the timestamp. 
+ """ + + current_log_file = os.path.join(LOG_DIR, LOG_FILENAME) + archive_log_dir = os.path.join(LOG_DIR, "archive") + + # No log file found, that means it's a first run or it was manually deleted + if not os.path.exists(current_log_file): + return + + stat = os.stat(current_log_file) + + # Get the last modified time in UTC + last_modified_at = gmtime(stat.st_mtime) + + # Format time to a human-readable format + formatted_time = strftime("%Y%m%dT%H%M%SZ", last_modified_at) + + # Create the directory if it don't exist + if not os.path.exists(archive_log_dir): + os.makedirs(archive_log_dir) + + file_name, suffix = tuple(LOG_FILENAME.rsplit(".", 1)) + archive_log_file = "%s/%s-%s.%s" % ( + archive_log_dir, + file_name, + formatted_time, + suffix, + ) + shutil.move(current_log_file, archive_log_file) + + def get_rhel_version(): """Currently we execute the task only for RHEL 7 or 8""" - print("Checking OS distribution and version ID ...") + logger.info("Checking OS distribution and version ID ...") try: distribution_id = None version_id = None @@ -107,13 +220,13 @@ elif line.startswith("VERSION_ID="): version_id = line.split("=")[1].strip().strip('"') except IOError: - print("Couldn't read /etc/os-release") + logger.warning("Couldn't read /etc/os-release") return distribution_id, version_id def is_non_eligible_releases(release): """Check if the release is eligible for upgrade or preupgrade.""" - print("Exit if not RHEL 7 or RHEL 8 ...") + logger.info("Exit if not RHEL 7 or RHEL 8 ...") major_version, _ = release.split(".") if release is not None else (None, None) return release is None or major_version not in ALLOWED_RHEL_RELEASES @@ -136,7 +249,7 @@ raise TypeError("cmd should be a list, not a str") if print_cmd: - print("Calling command '%s'" % " ".join(cmd)) + logger.info("Calling command '%s'", " ".join(cmd)) process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, env=env @@ -232,9 +345,9 @@ def setup_leapp(version): 
leapp_install_command, rhel_rhui_packages = _get_leapp_command_and_packages(version) if _check_if_package_installed("leapp-upgrade"): - print("'leapp-upgrade' already installed, skipping ...") + logger.info("'leapp-upgrade' already installed, skipping ...") else: - print("Installing leapp ...") + logger.info("Installing leapp ...") output, returncode = run_subprocess(leapp_install_command) if returncode: raise ProcessError( @@ -243,7 +356,7 @@ % (returncode, output.rstrip("\n")), ) - print("Check installed rhui packages ...") + logger.info("Check installed rhui packages ...") for pkg in rhel_rhui_packages: if _check_if_package_installed(pkg["src_pkg"]): pkg["installed"] = True @@ -251,7 +364,7 @@ def should_use_no_rhsm_check(rhui_installed, command): - print("Checking if subscription manager and repositories are available ...") + logger.info("Checking if subscription manager and repositories are available ...") rhsm_repo_check_fail = True rhsm_installed_check = _check_if_package_installed("subscription-manager") if rhsm_installed_check: @@ -265,10 +378,9 @@ ) if rhui_installed and not rhsm_repo_check_fail: - print( - "RHUI packages detected, adding --no-rhsm flag to {} command".format( - SCRIPT_TYPE.title() - ) + logger.info( + "RHUI packages detected, adding --no-rhsm flag to %s command", + SCRIPT_TYPE.title() ) command.append("--no-rhsm") return True @@ -276,7 +388,7 @@ def install_leapp_pkg_corresponding_to_installed_rhui(rhui_pkgs): - print("Installing leapp package corresponding to installed rhui packages") + logger.info("Installing leapp package corresponding to installed rhui packages") for pkg in rhui_pkgs: install_pkg = pkg["leapp_pkg"] install_output, returncode = run_subprocess( @@ -291,7 +403,7 @@ def remove_previous_reports(): - print("Removing previous leapp reports at /var/log/leapp/leapp-report.* ...") + logger.info("Removing previous leapp reports at /var/log/leapp/leapp-report.* ...") if os.path.exists(JSON_REPORT_PATH): os.remove(JSON_REPORT_PATH) 
@@ -301,7 +413,7 @@ def execute_operation(command): - print("Executing {} ...".format(SCRIPT_TYPE.title())) + logger.info("Executing %s ...", SCRIPT_TYPE.title()) output, _ = run_subprocess(command) return output @@ -311,7 +423,7 @@ """ Gather status codes from entries. """ - print("Collecting and combining report status.") + logger.info("Collecting and combining report status.") action_level_combined = [value["severity"] for value in entries] valid_action_levels = [ @@ -322,14 +434,14 @@ def parse_results(output, reboot_required=False): - print("Processing {} results ...".format(SCRIPT_TYPE.title())) + logger.info("Processing %s results ...", SCRIPT_TYPE.title()) report_json = "Not found" message = "Can't open json report at " + JSON_REPORT_PATH alert = True status = "ERROR" - print("Reading JSON report") + logger.info("Reading JSON report") if os.path.exists(JSON_REPORT_PATH): with open(JSON_REPORT_PATH, mode="r") as handler: report_json = json.load(handler) @@ -386,7 +498,7 @@ output.alert = alert output.message = message - print("Reading TXT report") + logger.info("Reading TXT report") report_txt = "Not found" if os.path.exists(TXT_REPORT_PATH): with open(TXT_REPORT_PATH, mode="r") as handler: @@ -397,24 +509,28 @@ def update_insights_inventory(output): """Call insights-client to update insights inventory.""" - print("Updating system status in Red Hat Insights.") + logger.info("Updating system status in Red Hat Insights.") _, returncode = run_subprocess(cmd=["/usr/bin/insights-client"]) if returncode: - print("System registration failed with exit code %s." % returncode) + logger.info("System registration failed with exit code %s.", returncode) output.message += " Failed to update Insights Inventory." 
output.alert = True return - print("System registered with insights-client successfully.") + logger.info("System registered with insights-client successfully.") def reboot_system(): - print("Rebooting system in 1 minute.") + logger.info("Rebooting system in 1 minute.") run_subprocess(["/usr/sbin/shutdown", "-r", "1"], wait=False) def main(): + """Main entrypoint for the script.""" + setup_sos_report() + archive_old_logger_files() + setup_logger_handler() try: # Exit if invalid value for SCRIPT_TYPE if SCRIPT_TYPE not in ["PREUPGRADE", "UPGRADE"]: @@ -452,11 +568,11 @@ upgrade_reboot_required = REBOOT_GUIDANCE_MESSAGE in leapp_output parse_results(output, upgrade_reboot_required) update_insights_inventory(output) - print("Operation {} finished successfully.".format(SCRIPT_TYPE.title())) + logger.info("Operation %s finished successfully.", SCRIPT_TYPE.title()) if upgrade_reboot_required: reboot_system() except ProcessError as exception: - print(exception.report) + logger.error(exception.report) output = OutputCollector( status="ERROR", alert=True, @@ -465,7 +581,7 @@ report=exception.report, ) except Exception as exception: - print(str(exception)) + logger.critical(str(exception)) output = OutputCollector( status="ERROR", alert=True, diff --git a/playbooks/leapp_upgrade_script.yml b/playbooks/leapp_upgrade_script.yml index d134e4a..66807e4 100644 --- a/playbooks/leapp_upgrade_script.yml +++ b/playbooks/leapp_upgrade_script.yml @@ -6,9 +6,15 @@ interpreter: /usr/bin/python content: | import json + import logging import os + import shutil + import sys import subprocess + from time import gmtime, strftime + + # SCRIPT_TYPE is either 'PREUPGRADE' or 'UPGRADE' # Value is set in signed yaml envelope in content_vars (RHC_WORKER_LEAPP_SCRIPT_TYPE) SCRIPT_TYPE = os.environ.get("RHC_WORKER_LEAPP_SCRIPT_TYPE", "None") @@ -37,8 +43,24 @@ } - # Both classes taken from: - # https://github.com/oamg/convert2rhel-worker-scripts/blob/main/scripts/preconversion_assessment_script.py + # 
Path to store the script logs + LOG_DIR = "/var/log/leapp-insights-tasks" + # Log filename for the script. It will be created based on the script type of + # execution. + LOG_FILENAME = "leapp-insights-tasks-%s.log" % ( + "upgrade" if IS_UPGRADE else "preupgrade" + ) + + # Path to the sos extras folder + SOS_REPORT_FOLDER = "/etc/sos.extras.d" + # Name of the file based on the task type for sos report + SOS_REPORT_FILE = "leapp-insights-tasks-%s-logs" % ( + "upgrade" if IS_UPGRADE else "preupgrade" + ) + + logger = logging.getLogger(__name__) + + class ProcessError(Exception): """Custom exception to report errors during setup and run of leapp""" @@ -94,9 +116,100 @@ } + def setup_sos_report(): + """Setup sos report log collection.""" + if not os.path.exists(SOS_REPORT_FOLDER): + os.makedirs(SOS_REPORT_FOLDER) + + script_log_file = os.path.join(LOG_DIR, LOG_FILENAME) + sosreport_link_file = os.path.join(SOS_REPORT_FOLDER, SOS_REPORT_FILE) + # In case the file for sos report does not exist, lets create one and add + # the log file path to it. + if not os.path.exists(sosreport_link_file): + with open(sosreport_link_file, mode="w") as handler: + handler.write(":%s\n" % script_log_file) + + + def setup_logger_handler(): + """ + Setup custom logging levels, handlers, and so on. Call this method from + your application's main start point. + """ + # Receive the log level from the worker and try to parse it. If the log + # level is not compatible with what the logging library expects, set the + # log level to INFO automatically. 
+ log_level = os.getenv("RHC_WORKER_LOG_LEVEL", "INFO").upper() + log_level = logging.getLevelName(log_level) + if isinstance(log_level, str): + log_level = logging.INFO + + # enable raising exceptions + logging.raiseExceptions = True + logger.setLevel(log_level) + + # create sys.stdout handler for info/debug + stdout_handler = logging.StreamHandler(sys.stdout) + formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") + stdout_handler.setFormatter(formatter) + + # Create the directory if it don't exist + if not os.path.exists(LOG_DIR): + os.makedirs(LOG_DIR) + + log_filepath = os.path.join(LOG_DIR, LOG_FILENAME) + file_handler = logging.FileHandler(log_filepath) + file_handler.setFormatter(formatter) + + # can flush logs to the file that were logged before initializing the file handler + logger.addHandler(stdout_handler) + logger.addHandler(file_handler) + + + def archive_old_logger_files(): + """ + Archive the old log files to not mess with multiple runs outputs. Every + time a new run begins, this method will be called to archive the previous + logs if there is a `convert2rhel.log` file there, it will be archived using + the same name for the log file, but having an appended timestamp to it. + For example: + /var/log/leapp-insights-tasks/archive/leapp-insights-tasks-1635162445070567607.log + /var/log/leapp-insights-tasks/archive/leapp-insights-tasks-1635162478219820043.log + This way, the user can track the logs for each run individually based on + the timestamp. 
+ """ + + current_log_file = os.path.join(LOG_DIR, LOG_FILENAME) + archive_log_dir = os.path.join(LOG_DIR, "archive") + + # No log file found, that means it's a first run or it was manually deleted + if not os.path.exists(current_log_file): + return + + stat = os.stat(current_log_file) + + # Get the last modified time in UTC + last_modified_at = gmtime(stat.st_mtime) + + # Format time to a human-readable format + formatted_time = strftime("%Y%m%dT%H%M%SZ", last_modified_at) + + # Create the directory if it don't exist + if not os.path.exists(archive_log_dir): + os.makedirs(archive_log_dir) + + file_name, suffix = tuple(LOG_FILENAME.rsplit(".", 1)) + archive_log_file = "%s/%s-%s.%s" % ( + archive_log_dir, + file_name, + formatted_time, + suffix, + ) + shutil.move(current_log_file, archive_log_file) + + def get_rhel_version(): """Currently we execute the task only for RHEL 7 or 8""" - print("Checking OS distribution and version ID ...") + logger.info("Checking OS distribution and version ID ...") try: distribution_id = None version_id = None @@ -107,13 +220,13 @@ elif line.startswith("VERSION_ID="): version_id = line.split("=")[1].strip().strip('"') except IOError: - print("Couldn't read /etc/os-release") + logger.warning("Couldn't read /etc/os-release") return distribution_id, version_id def is_non_eligible_releases(release): """Check if the release is eligible for upgrade or preupgrade.""" - print("Exit if not RHEL 7 or RHEL 8 ...") + logger.info("Exit if not RHEL 7 or RHEL 8 ...") major_version, _ = release.split(".") if release is not None else (None, None) return release is None or major_version not in ALLOWED_RHEL_RELEASES @@ -136,7 +249,7 @@ raise TypeError("cmd should be a list, not a str") if print_cmd: - print("Calling command '%s'" % " ".join(cmd)) + logger.info("Calling command '%s'", " ".join(cmd)) process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, env=env @@ -232,9 +345,9 @@ def setup_leapp(version): 
leapp_install_command, rhel_rhui_packages = _get_leapp_command_and_packages(version) if _check_if_package_installed("leapp-upgrade"): - print("'leapp-upgrade' already installed, skipping ...") + logger.info("'leapp-upgrade' already installed, skipping ...") else: - print("Installing leapp ...") + logger.info("Installing leapp ...") output, returncode = run_subprocess(leapp_install_command) if returncode: raise ProcessError( @@ -243,7 +356,7 @@ % (returncode, output.rstrip("\n")), ) - print("Check installed rhui packages ...") + logger.info("Check installed rhui packages ...") for pkg in rhel_rhui_packages: if _check_if_package_installed(pkg["src_pkg"]): pkg["installed"] = True @@ -251,7 +364,7 @@ def should_use_no_rhsm_check(rhui_installed, command): - print("Checking if subscription manager and repositories are available ...") + logger.info("Checking if subscription manager and repositories are available ...") rhsm_repo_check_fail = True rhsm_installed_check = _check_if_package_installed("subscription-manager") if rhsm_installed_check: @@ -265,10 +378,9 @@ ) if rhui_installed and not rhsm_repo_check_fail: - print( - "RHUI packages detected, adding --no-rhsm flag to {} command".format( - SCRIPT_TYPE.title() - ) + logger.info( + "RHUI packages detected, adding --no-rhsm flag to %s command", + SCRIPT_TYPE.title() ) command.append("--no-rhsm") return True @@ -276,7 +388,7 @@ def install_leapp_pkg_corresponding_to_installed_rhui(rhui_pkgs): - print("Installing leapp package corresponding to installed rhui packages") + logger.info("Installing leapp package corresponding to installed rhui packages") for pkg in rhui_pkgs: install_pkg = pkg["leapp_pkg"] install_output, returncode = run_subprocess( @@ -291,7 +403,7 @@ def remove_previous_reports(): - print("Removing previous leapp reports at /var/log/leapp/leapp-report.* ...") + logger.info("Removing previous leapp reports at /var/log/leapp/leapp-report.* ...") if os.path.exists(JSON_REPORT_PATH): os.remove(JSON_REPORT_PATH) 
@@ -301,7 +413,7 @@ def execute_operation(command): - print("Executing {} ...".format(SCRIPT_TYPE.title())) + logger.info("Executing %s ...", SCRIPT_TYPE.title()) output, _ = run_subprocess(command) return output @@ -311,7 +423,7 @@ """ Gather status codes from entries. """ - print("Collecting and combining report status.") + logger.info("Collecting and combining report status.") action_level_combined = [value["severity"] for value in entries] valid_action_levels = [ @@ -322,14 +434,14 @@ def parse_results(output, reboot_required=False): - print("Processing {} results ...".format(SCRIPT_TYPE.title())) + logger.info("Processing %s results ...", SCRIPT_TYPE.title()) report_json = "Not found" message = "Can't open json report at " + JSON_REPORT_PATH alert = True status = "ERROR" - print("Reading JSON report") + logger.info("Reading JSON report") if os.path.exists(JSON_REPORT_PATH): with open(JSON_REPORT_PATH, mode="r") as handler: report_json = json.load(handler) @@ -386,7 +498,7 @@ output.alert = alert output.message = message - print("Reading TXT report") + logger.info("Reading TXT report") report_txt = "Not found" if os.path.exists(TXT_REPORT_PATH): with open(TXT_REPORT_PATH, mode="r") as handler: @@ -397,24 +509,28 @@ def update_insights_inventory(output): """Call insights-client to update insights inventory.""" - print("Updating system status in Red Hat Insights.") + logger.info("Updating system status in Red Hat Insights.") _, returncode = run_subprocess(cmd=["/usr/bin/insights-client"]) if returncode: - print("System registration failed with exit code %s." % returncode) + logger.info("System registration failed with exit code %s.", returncode) output.message += " Failed to update Insights Inventory." 
output.alert = True return - print("System registered with insights-client successfully.") + logger.info("System registered with insights-client successfully.") def reboot_system(): - print("Rebooting system in 1 minute.") + logger.info("Rebooting system in 1 minute.") run_subprocess(["/usr/sbin/shutdown", "-r", "1"], wait=False) def main(): + """Main entrypoint for the script.""" + setup_sos_report() + archive_old_logger_files() + setup_logger_handler() try: # Exit if invalid value for SCRIPT_TYPE if SCRIPT_TYPE not in ["PREUPGRADE", "UPGRADE"]: @@ -452,11 +568,11 @@ upgrade_reboot_required = REBOOT_GUIDANCE_MESSAGE in leapp_output parse_results(output, upgrade_reboot_required) update_insights_inventory(output) - print("Operation {} finished successfully.".format(SCRIPT_TYPE.title())) + logger.info("Operation %s finished successfully.", SCRIPT_TYPE.title()) if upgrade_reboot_required: reboot_system() except ProcessError as exception: - print(exception.report) + logger.error(exception.report) output = OutputCollector( status="ERROR", alert=True, @@ -465,7 +581,7 @@ report=exception.report, ) except Exception as exception: - print(str(exception)) + logger.critical(str(exception)) output = OutputCollector( status="ERROR", alert=True,