#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @linux-aarhus - root.nix.dk
# SPDX-FileCopyrightText: 2024 Frede Hundewadt <fh at manjaro dot org>
# SPDX-License-Identifier: MIT
import argparse
import os
import shutil
import subprocess
import sys
import tempfile

# requests is the only third-party dependency; importing it (and its
# submodule) inside the guard means a missing package produces the
# friendly hint below instead of an unhandled ImportError.
try:
    import requests
    import requests.exceptions
except ImportError:
    print("Please install requests package")
    sys.exit(1)

PROG_VERSION = "0.20"
PROG_NAME = os.path.basename(__file__)
ISO_RELEASE_URL = \
    "https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
LICENSE_URL = "https://opensource.org/license/mit"
REL_REVIEW_URL = \
    "https://api.github.com/repos/manjaro/release-review/releases/latest"
DEV_PREVIEW_URL = \
    "https://api.github.com/repos/manjaro-edition/download/releases/latest"

REVIEW_EDITIONS = ["gnome", "plasma", "xfce"]
HOME = os.path.expanduser("~")
# created eagerly at import time; removed later by clean_work_dir()
SYS_TMP = tempfile.mkdtemp()
USR_TMP = f"{HOME}/.cache/get-iso"
# ANSI colour escape codes used by the message helpers
RED = "\033[1;31m"
GREEN = "\033[1;32m"
YELLOW = "\033[1;33m"
BLUE = "\033[1;34m"
WHITE = "\033[1;37m"
RESET = "\033[1;m"


def msg(message: str, end: str = "\n") -> None:
    """Print a top-level status line with a green '==>' prefix."""
    prefix = f"{GREEN}==> {RESET}"
    print(prefix + message, end=end)


def msg2(message: str, end: str = "\n") -> None:
    """Print a second-level status line with a blue '-->' prefix."""
    prefix = f"{BLUE} --> {RESET}"
    print(prefix + message, end=end)


def info(message: str, end: str = "\n") -> None:
    """Print a detail/progress line with a yellow '->' prefix."""
    prefix = f"{YELLOW}   -> {RESET}"
    print(prefix + message, end=end)


def warn(message: str, end: str = "\n") -> None:
    """Print a warning line with a yellow '<!!!>' prefix."""
    prefix = f"{YELLOW}<!!!> {RESET}"
    print(prefix + message, end=end)


def error(message: str, end: str = "\n") -> None:
    """Print an error line with a red '<###>' prefix."""
    prefix = f"{RED}<###> {RESET}"
    print(prefix + message, end=end)


def get_definitions(url: str) -> dict:
    """
    Fetch and decode the JSON document at the given URL.

    Sends a GET request (10 s timeout) and returns the decoded JSON body.
    On any network/HTTP failure, or an unparsable JSON body, an error is
    printed and the program terminates.

    :param url: The URL to fetch the JSON data from.
    :type url: str
    :return: A dictionary containing the JSON data from the response.
    :rtype: dict
    :raises SystemExit: exits with status 1 if the request or decode fails.
    """
    try:
        resp = requests.get(url=url, timeout=10)
        resp.raise_for_status()
        return resp.json()
    except (requests.exceptions.RequestException, ValueError) as e:
        # RequestException covers timeouts, connection and HTTP errors;
        # ValueError covers JSON decode failures on older requests versions.
        # Anything else is a real bug and should propagate.
        error(f"Network Error: {e}")
        msg2("Terminated!")
        sys.exit(1)


def get_download_size(url: str) -> int:
    """
    Return the size in bytes of the resource at the given URL.

    Issues a HEAD request (10 s timeout, following redirects) and reads
    the "Content-Length" header, defaulting to 0 when the header is absent.
    On any network/HTTP failure the program terminates.

    :param url: A string representing the URL of the resource to check.
    :return: An integer representing the size of the resource in bytes.
    :raises SystemExit: exits with status 1 if the request fails.
    """
    try:
        resp = requests.head(url=url, timeout=10, allow_redirects=True)
        resp.raise_for_status()
        return int(resp.headers.get("content-length", 0))
    except (requests.exceptions.RequestException, ValueError) as e:
        # RequestException covers timeouts/HTTP errors; ValueError covers a
        # malformed Content-Length header. Other exceptions should propagate.
        error(f"Network Error: {e}")
        msg2("Terminated!")
        sys.exit(1)


def init_iso_list(url: str, review: bool = False, preview: bool = False) -> list:
    """
    Build the ISO list for the requested mode of operation.

    ``preview`` takes precedence over ``review``; when neither flag is
    set, the stable release ISO list is produced.

    :param url: The URL used to fetch the desired type of ISO list.
    :type url: str
    :param review: Build the review ISO list.
    :type review: bool
    :param preview: Build the developer preview ISO list.
    :type preview: bool
    :return: A list of edition dictionaries for the selected mode.
    :rtype: list
    """
    if preview:
        builder = init_preview_iso_list
    elif review:
        builder = init_review_iso_list
    else:
        builder = init_release_iso_list
    return builder(url)


def init_release_iso_list(url: str) -> list:
    """
    Build the list of stable release ISO descriptions.

    Fetches the release definitions from *url* and aggregates metadata
    (image links, signatures, checksums) for both official and community
    editions. Community editions flagged with a truthy ``custom`` key are
    skipped. Missing top-level sections are treated as empty rather than
    crashing.

    :param url: The URL from which to fetch release definitions.
    :type url: str
    :return: A list of edition dictionaries, each holding "full" and
        "minimal" variant details (image paths, signatures, checksums).
    :rtype: list
    """
    result_releases = []
    data = get_definitions(url)

    # parse official editions; default to {} so a missing section is a no-op
    for o_name, o_data in data.get("official", {}).items():
        result_releases.append(extract_iso_info(o_name, [o_data], review=False))

    # parse community editions, skipping entries marked as custom builds
    for c_name, c_data in data.get("community", {}).items():
        if c_data.get("custom"):
            continue
        result_releases.append(extract_iso_info(c_name, [c_data], review=False))

    return result_releases


def init_review_iso_list(url: str) -> list:
    """
    Build the review ISO list from a single GitHub release.

    Fetches the latest release data from *url*, reduces each asset to its
    download URL and size, then extracts the ISO description for every
    review edition. Unlike ``init_preview_iso_list``, one URL serves all
    editions.

    :param url: The URL containing the release/asset data.
    :return: A list of per-edition dictionaries with ISO parts and
        SHA256 checksum URLs.
    :rtype: list
    """
    data = get_definitions(url)
    # reduce every asset to the two fields we care about
    available_assets = [
        {"url": asset["browser_download_url"], "size": asset["size"]}
        for asset in data.get("assets")
    ]
    # one structured entry per review edition
    return [
        extract_iso_info(edition, available_assets, review=True)
        for edition in REVIEW_EDITIONS
    ]


def init_preview_iso_list(url: str) -> list:
    """
    Build the developer preview ISO list, one release per edition.

    The base *url* contains the literal placeholder ``edition``; it is
    substituted with each review edition name to locate that edition's
    latest release. Each release's assets are reduced to URL and size and
    then organized by ``extract_iso_info``. Unlike ``init_review_iso_list``,
    a different URL is queried per edition.

    :param url: Base URL containing an "edition" placeholder.
    :return: A list of per-edition dictionaries with full/minimal ISO
        parts and their SHA256 checksum URLs.
    """
    result_preview_iso = []

    for edition in REVIEW_EDITIONS:
        # substitute the placeholder to get this edition's release URL
        edition_url = url.replace("edition", edition)
        payload = get_definitions(edition_url)

        # reduce every asset to its download URL and size
        assets = [
            {"url": entry["browser_download_url"], "size": entry["size"]}
            for entry in payload.get("assets")
        ]

        result_preview_iso.append(extract_iso_info(edition, assets, review=True))

    return result_preview_iso


def extract_iso_info(edition: str, iso_data: list, review: bool) -> dict:
    """
    Dispatch ISO extraction to the appropriate backend.

    GitHub-hosted (review/preview) assets are handled by
    ``extract_iso_info_github``; stable releases by
    ``extract_iso_info_manjaro``.

    :param edition: The edition name to process.
    :type edition: str
    :param iso_data: The ISO data to process.
    :type iso_data: list
    :param review: True for GitHub-hosted review/preview data.
    :type review: bool
    :return: A dictionary containing the processed ISO information.
    :rtype: dict
    """
    handler = extract_iso_info_github if review else extract_iso_info_manjaro
    return handler(edition, iso_data)


def select_kernel(available_kernels: list) -> str:
    """
    Interactively prompt the user to pick one kernel from a list.

    Shows a numbered menu and loops until the user enters a valid index.
    Ctrl-C / EOF aborts the program.

    :param available_kernels: Kernel names to choose from (e.g. ['linux618']).
    :type available_kernels: list
    :return: The chosen kernel name (e.g. 'linux618').
    :rtype: str
    """
    msg("Multiple kernels available:")
    for number, name in enumerate(available_kernels, start=1):
        msg2(f"{number}. {name}")

    while True:
        try:
            raw = input(f"{BLUE} --> {RESET}Select kernel number: ")
            choice = int(raw)
        except ValueError:
            warn("Invalid input. Please enter a number.")
            continue
        except (KeyboardInterrupt, EOFError):
            error("\nSelection cancelled by user.")
            sys.exit(1)

        if 1 <= choice <= len(available_kernels):
            chosen = available_kernels[choice - 1]
            info(f"Selected: {chosen}")
            return chosen
        warn(f"Invalid selection. Please enter a number between 1 and {len(available_kernels)}")


def extract_kernel_name(filename: str) -> str:
    """
    Return the kernel identifier embedded in an ISO filename.

    ISO filenames follow the pattern
    ``manjaro-$EDITION-$RELEASE-$DATE-$KERNEL.iso``, so the kernel id
    ('linux' plus digits, e.g. linux618) is the text after the final
    hyphen, up to the first dot.

    :param filename: ISO file name (or URL ending in one).
    :return: Kernel identifier such as 'linux618'.
    """
    tail = filename.rsplit("-", 1)[-1]
    return tail.partition(".")[0]


def extract_iso_info_manjaro(edition: str, iso_data: list) -> dict:
    """
    Build the structured ISO metadata for a stable Manjaro edition.

    Each entry in ``iso_data`` describes one kernel variant and must
    provide ``image``, ``signature`` and ``checksum`` keys for the full
    ISO plus a nested ``minimal`` dict with the same keys. Download sizes
    are resolved with HEAD requests (the program terminates on network
    errors).

    :param edition: The edition name (e.g. 'xfce').
    :type edition: str
    :param iso_data: List of dicts with image/signature/checksum data.
    :type iso_data: list
    :return: Dict with the edition ``name`` and a ``variants`` list; each
        variant holds ``kernel`` plus ``full``/``minimal`` dicts with
        ``checksum``, ``parts`` (url and size), ``signature`` and ``size``.
    :rtype: dict
    """
    # use a distinct name for the result to avoid shadowing the
    # 'edition' parameter
    result = {
        "name": edition,
        "variants": []
    }
    for data in iso_data:
        kernel = extract_kernel_name(data["image"])
        # sizes are fetched via HTTP HEAD requests
        size_full = get_download_size(data["image"])
        size_minimal = get_download_size(data["minimal"]["image"])
        variant = {
            "full": {
                "checksum": data["checksum"],
                "parts": [
                    {
                        "size": size_full,
                        "url": data["image"],
                    }
                ],
                "signature": data["signature"],
                "size": size_full,
            },
            "kernel": kernel,
            "minimal": {
                "checksum": data["minimal"]["checksum"],
                "parts": [
                    {
                        "size": size_minimal,
                        "url": data["minimal"]["image"]
                    }
                ],
                "signature": data["minimal"]["signature"],
                "size": size_minimal,
            }
        }
        result["variants"].append(variant)

    return result


def extract_iso_info_github(edition: str, iso_data: list) -> dict:
    """
    Build the structured ISO metadata for a GitHub-hosted edition.

    Filters the asset list down to the requested edition, discovers the
    available kernels from the ``.iso.zip`` archive names, and groups the
    multi-part archive files and SHA256 checksum URL for both the full
    and minimal variants of each kernel.

    :param edition: The edition to process (e.g. 'plasma', 'xfce').
    :type edition: str
    :param iso_data: Dicts with asset ``url`` and ``size`` entries.
    :type iso_data: list
    :return: Dict with the edition ``name`` and a ``variants`` list; each
        variant holds ``kernel`` plus ``full``/``minimal`` dicts with
        ``checksum``, ``parts``, ``signature`` and ``size``.
    :rtype: dict
    """
    checksum_suffix = ".iso.sha256"
    part_marker = ".iso.z"
    # plasma assets are published under the historical "kde" name
    match_token = "kde" if edition == "plasma" else edition
    edition_assets = [a for a in iso_data if match_token in a["url"]]

    # collect the distinct kernels advertised by the .iso.zip archives
    kernels = []
    for asset in edition_assets:
        if asset["url"].endswith(".iso.zip"):
            name = extract_kernel_name(asset["url"])
            if name not in kernels:
                kernels.append(name)

    def describe(assets):
        # group one variant's partial archives and its checksum URL
        parts = [a for a in assets if part_marker in a["url"]]
        checksum = [a for a in assets if checksum_suffix in a["url"]]
        return {
            "checksum": checksum[0]["url"],
            "parts": parts,
            "signature": "",
            "size": sum(p["size"] for p in parts),
        }

    result = {
        "name": edition,
        "variants": []
    }
    for kernel in kernels:
        grouped = [a for a in edition_assets if kernel in a["url"]]
        variant = {
            "full": describe([a for a in grouped if "minimal" not in a["url"]]),
            "kernel": kernel,
            "minimal": describe([a for a in grouped if "minimal" in a["url"]]),
        }
        result["variants"].append(variant)
    return result


def download(url: str, out_dir: str, binary=True) -> bool:
    """
    Announce and perform the download of a single file.

    Prints the target file name, then delegates the actual transfer to
    ``download_file``.

    :param url: The URL of the file to be downloaded.
    :type url: str
    :param out_dir: The output directory where the file will be saved.
    :type out_dir: str
    :param binary: Download in binary mode when True (the default).
    :type binary: bool
    :return: True if the file was downloaded successfully, else False.
    :rtype: bool
    """
    filename = url.split("/")[-1]
    msg2(f"Download: {filename}")
    return download_file(url, out_dir, binary=binary)


def download_file(url: str, folder_name: str, binary=True) -> bool:
    """
    Download a file from *url* into *folder_name*.

    Binary downloads are streamed in 1 MiB chunks with a progress line;
    text downloads are written in one pass. Any network or file error is
    reported and the function returns False instead of raising.

    :param url: The URL from where the file will be downloaded.
    :type url: str
    :param folder_name: The directory where the file will be saved.
    :type folder_name: str
    :param binary: Download in binary mode when True (the default).
    :type binary: bool
    :return: True when the download succeeded, False otherwise.
    :rtype: bool
    """
    filename: str = url.split("/")[-1]
    # join the directory and file name properly (two arguments, not a
    # pre-formatted single string)
    path = os.path.join(folder_name, filename)
    try:
        if binary:
            response = requests.get(url, stream=True, timeout=10)
            response.raise_for_status()
            total_size_in_bytes = int(response.headers.get("content-length", 0))
            block_size = 1024 * 1024
            # shrink the chunk only for small known sizes; keep the default
            # when the server sent no Content-Length (a chunk size of 0
            # would break iter_content)
            if 0 < total_size_in_bytes < block_size:
                block_size = total_size_in_bytes

            with open(path, "wb") as f:
                progress = 0
                for data in response.iter_content(block_size):
                    f.write(data)
                    # the final chunk may be short; len(data) is always right
                    progress += len(data)
                    info(f"Downloading {round(progress / 1024 / 1024)}MiB of "
                         f"{round(total_size_in_bytes / 1024 / 1024)}MiB", end="\r")
        else:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            with open(path, "w") as f:
                f.write(response.text)
    except Exception as e:
        # deliberately broad: network errors and local file errors alike
        # are reported to the caller as a failed download
        error(f"Network Error: {e}")
        return False
    return True


def check_directory_writable(path: str) -> bool:
    """
    Return True when *path* is an existing, writable directory.

    Write access is probed by creating and deleting a temporary ``.keep``
    file inside the directory; any failure (or a non-directory path)
    yields False.

    :param path: Directory path to check for writability.
    :type path: str
    :return: True if the directory is writable, otherwise False.
    :rtype: bool
    """
    if not os.path.isdir(path):
        return False
    probe = os.path.join(path, ".keep")
    try:
        # probe write access with a throwaway file
        with open(probe, "w") as handle:
            handle.write("")
        os.remove(probe)
    except IOError:
        return False
    return True


def check_7zip():
    """
    Verify that the 7-Zip executable (`7z`) is available on PATH.

    If the executable cannot be found, a warning is printed and the
    program terminates.

    :raises SystemExit: exits with status 1 if `7z` is not found.
    """
    if shutil.which("7z") is None:
        warn("7z is missing. Please install using your package manager.")
        # use sys.exit for consistency with the rest of the script
        sys.exit(1)


def clean_work_dir(path: str, keep: bool = False):
    """
    Remove a temporary working directory.

    The directory is deleted only when it lives under ``/tmp``; any
    user-defined location is left untouched with a warning. When *keep*
    is True nothing is removed at all.

    :param path: Path to the temporary directory to clean up.
    :type path: str
    :param keep: When True, leave all temporary files in place.
    :type keep: bool
    :return: None
    """
    if keep:
        warn("keep flag active. Keeping temp files in place.")
        return
    if not path.startswith("/tmp"):
        warn("User defined temp folder. Leaving temp files in place.")
        return
    # shutil.rmtree replaces the previous `rm -r` subprocess call;
    # ignore_errors mirrors the unchecked subprocess exit status
    shutil.rmtree(path, ignore_errors=True)


def check_work_dir(path: str, min_req: int) -> str:
    """
    Validate *path* as a working directory or fall back to the cache dir.

    The path is accepted when it is usable and has at least *min_req*
    bytes free: a ``/tmp`` location is assumed writable, anything else is
    probed for write access. If the path is unusable, a missing
    directory is created on a best-effort basis, and the user's
    ``~/.cache/get-iso`` folder is returned as the fallback.

    :param path: The directory path to validate.
    :type path: str
    :param min_req: Minimum required free space in bytes.
    :type min_req: int
    :return: A directory path that satisfies the criteria.
    :rtype: str
    """
    # /tmp is always writable, only the free space matters there
    if path.startswith("/tmp") and check_space_required(path, min_req):
        return path
    # a user-defined directory must be writable AND large enough
    if check_directory_writable(path) and check_space_required(path, min_req):
        return path
    # best effort: create a missing user directory for next time
    if not os.path.isdir(path):
        try:
            os.makedirs(path, exist_ok=True)
        except PermissionError:
            warn(f"Working dir '{path}' is not writable.")

    # fall back to the cache folder in the user's home
    os.makedirs(USR_TMP, exist_ok=True)
    msg2(f"Fall back to '{USR_TMP}'")
    return f"{USR_TMP}"


def check_space_required(path: str, size: int) -> bool:
    """
    Report whether the filesystem holding *path* has enough free space.

    :param path: File system path whose free space is checked.
    :type path: str
    :param size: Minimum required free space in bytes.
    :type size: int
    :return: True when more than *size* bytes are free, else False.
    :rtype: bool
    """
    free_bytes = shutil.disk_usage(path).free
    return free_bytes > size


def terminate_on_fail(exit_message: str, message_type: str, working_dir: str = None, keep = False) -> None:
    """
    Log a message, optionally clean the working directory, and exit(1).

    :param exit_message: The message to log before termination.
    :param message_type: Either "error" or "warning"; any other value
        logs nothing.
    :param working_dir: Optional working directory to clean up before
        exiting; None skips cleanup.
    :param keep: When True the working directory's files are kept in
        place during cleanup.
    :return: None (the function never returns; it always exits).
    """
    # the two message types are mutually exclusive strings
    if message_type == "error":
        error(exit_message)
    elif message_type == "warning":
        warn(exit_message)

    if working_dir is not None:
        clean_work_dir(working_dir, keep)

    sys.exit(1)


def main():
    """
    Main function to handle the downloading of a specified Manjaro ISO edition. This function
    performs tasks including parsing command-line arguments, deciding download options, and
    verifying the downloaded files through checksum and signature verification.

    The flow of the function includes:
    - Setting up the working directory.
    - Initializing the ISO's data from predefined sources.
    - Parsing and validating command-line arguments.
    - Handling normal, review, and developer preview downloads.
    - Downloading ISO files or parts, along with associated checksum/signature files.
    - Performing validation and cleanup tasks.

    The function ensures that the working directory is writable before proceeding. Based on the
    download mode (normal, review, or developer preview), it selects appropriate data sources
    and validates options accordingly.

    :raises SystemExit: Exits the program on various invalid conditions such as:
        - Non-writable working directory.
        - Invalid 'edition' input not matching available choices.
        - Runtime errors during downloading or file validation.

    :return: No return value, terminates the program execution with exit code 0 on success.
    """
    # initialize a list of release ISO
    msg("Loading Manjaro ISO editions metadata... Please wait.")
    edition_isos = init_iso_list(ISO_RELEASE_URL, review=False)
    # the edition names become the valid choices for the positional argument
    choices = [iso["name"] for iso in edition_isos]

    parser = argparse.ArgumentParser(
        prog=f"{PROG_NAME}",
        description="This tool will download the latest ISO for a named Manjaro Edition (default: minimal)",
        epilog=f"{PROG_NAME} v. {PROG_VERSION} - MIT License <{LICENSE_URL}>")
    parser.add_argument("edition",
                        type=str,
                        help=f"Edition name (only {', '.join(REVIEW_EDITIONS)} is valid for (p)review)",
                        choices=choices)
    parser.add_argument("-f", "--full",
                        required=False,
                        action="store_true",
                        help="Download full ISO")
    parser.add_argument("-k", "--keep",
                        required=False,
                        action="store_true",
                        help="Keep temporary files after downloading")
    parser.add_argument("-o", "--out-dir",
                        type=str,
                        default=os.getcwd(),
                        help="Folder to store downloaded ISO files (default: current directory)")
    parser.add_argument("-t", "--temp-dir",
                        type=str,
                        default=SYS_TMP,
                        help=f"Location to store temporary files (random tmp folder: {SYS_TMP})")
    previews = parser.add_argument_group("(P)review")
    preview = previews.add_mutually_exclusive_group()
    preview.add_argument("-p", "--preview",
                         required=False,
                         action="store_true",
                         help="[Github] Developer Preview")
    preview.add_argument("-r", "--review",
                         required=False,
                         action="store_true",
                         help="[Github] Release Review ISO")

    args = parser.parse_args()

    msg(f"Storage dir: {args.out_dir}")
    msg(f"Working dir: {args.temp_dir}")

    storage_dir = args.out_dir
    working_dir = args.temp_dir
    keep_files = args.keep
    # the ISO is moved from working dir to storage dir and the working dir is
    # removed afterwards, so the two must not be the same folder
    if storage_dir == working_dir:
        terminate_on_fail("Storage dir must be different from working dir.", "error")

    if not check_directory_writable(storage_dir):
        warn(f"Storage dir '{storage_dir}' is not writable.")
        msg2(f"Changing to '{HOME}'")
        storage_dir = HOME

    if args.review or args.preview:
        # review/preview ISO exist only for a subset of the editions
        if args.edition in REVIEW_EDITIONS:
            if args.review:
                msg2("Downloading Review ISO edition metadata...")
                edition_isos = init_iso_list(REL_REVIEW_URL, review=True)
            else:
                msg2("Downloading Preview ISO edition metadata...")
                edition_isos = init_iso_list(DEV_PREVIEW_URL, preview=True)
        else:
            terminate_on_fail(f"Invalid review edition. Valid editions: {', '.join(REVIEW_EDITIONS)}", "error")

    if len(edition_isos) == 0:
        terminate_on_fail("Could not get iso file list.", "error")

    # extracting information for the selected ISO
    info(f"Processing {args.edition} ISO")
    edition_data = [x for x in edition_isos if x["name"] == args.edition][0]["variants"]
    # check if we have multiple kernels
    available_kernels = [variant["kernel"] for variant in edition_data]
    # select a kernel
    if len(available_kernels) > 1:
        # prompt user for kernel
        selected_kernel = select_kernel(available_kernels)
    else:
        # take the only kernel
        selected_kernel = available_kernels[0]
    # extract the requested kernel variant
    iso_info = [x for x in edition_data if x["kernel"] == selected_kernel][0]
    iso_data = iso_info["full"] if args.full else iso_info["minimal"]

    # preview and review
    if args.review or args.preview:
        # hosted on Github and therefore the required storage
        # needs to be doubled for unpacking the archive
        # temp space required
        temp_space = round(iso_data['size'] / 1024 / 1024 * 2) + 1
        msg2(f"Required space on tmpfs: {temp_space} MiB")
        # storage space required
        # BUGFIX: divisor was 1040 (typo) which understated the MiB figure;
        # every other size computation here uses 1024 / 1024
        storage_space = round(iso_data['size'] / 1024 / 1024) + 1
        msg2(f"Required space on storage: {storage_space} MiB")
        # verify temp dir has the required space
        working_dir = check_work_dir(working_dir, temp_space)
        # verify storage dir has the required space
        if not check_space_required(storage_dir, storage_space):
            terminate_on_fail("Not enough space on storage device.",
                              "error")
        # initialize variables and flags
        iso_zip = [x for x in iso_data["parts"] if ".iso.zip" in x["url"]]
        zipfile = iso_zip[0]["url"].split("/")[-1]
        # the ISO filename is the archive filename with '.zip' stripped
        iso_file = zipfile[:-4]
        # ensure 7zip is available
        check_7zip()  # the check will abort execution with an error message if 7z is not found
        # download checksum file
        checksum_ok = download(iso_data["checksum"], out_dir=working_dir, binary=False)
        if not checksum_ok:
            terminate_on_fail("Download failed... cleaning up and exiting!",
                              "error",
                              working_dir,
                              keep_files)
        # download the parts
        for part in iso_data["parts"]:
            zip_ok = download(part["url"], out_dir=working_dir)
            if not zip_ok:
                terminate_on_fail("Download failed...",
                                  "error",
                                  working_dir,
                                  keep_files)
        # extract sha filename
        sha_file = iso_data["checksum"].split("/")[-1]
        # test archive
        info("Testing archive integrity...")
        result = subprocess.run(["7z", "-bso0", "-y", "t", zipfile], cwd=working_dir)
        if result.returncode != 0:
            terminate_on_fail("Archive integrity check failed...",
                              "error",
                              working_dir,
                              keep_files)
        # extract archive
        info(f"Unpacking ISO to {working_dir}...")
        result = subprocess.run(["7z", "-bso0", "-y", "x", f"-o{working_dir}", zipfile], cwd=working_dir)
        if result.returncode != 0:
            terminate_on_fail("Unpacking archive failed...",
                              "error",
                              working_dir,
                              keep_files)

        # move the checksum file to storage_dir
        info(f"Moving '{sha_file}' to '{storage_dir}' ...", end="\r")
        subprocess.run(["mv", sha_file, storage_dir], cwd=working_dir)
        msg2(f"Moved '{sha_file}' to '{storage_dir}'      ", end="\n")

        # move iso file to storage_dir
        info(f"Moving '{iso_file}' to '{storage_dir}' ...", end="\r")
        subprocess.run(["mv", iso_file, storage_dir], cwd=working_dir)
        msg2(f"Moved '{iso_file}' to '{storage_dir}'      ", end="\n")

        # verify iso checksum on storage_dir
        info("Wait for checksum to complete...")
        # BUGFIX: reuse sha_file instead of re-splitting the URL inside an
        # f-string with nested double quotes (a syntax error on Python < 3.12)
        result = subprocess.run(["sha256sum", "-c", sha_file],
                                cwd=storage_dir, capture_output=True)
        if result.returncode != 0:
            terminate_on_fail("Checksum verification failed...",
                              "error",
                              working_dir,
                              keep_files)
        msg2(f"Checksum verified. {result.stdout.decode('utf-8')}", end="\r")

    # manjaro web site link
    else:
        # standard ISO listed on manjaro.org/download
        # storage space requirement
        storage_space = round(iso_data['size'] / 1024 / 1024) + 1
        msg2(f"Required space on tmpfs: {storage_space} MiB")
        msg2(f"Required space on storage: {storage_space} MiB")
        # check temp dir for adequate storage
        working_dir = check_work_dir(working_dir, storage_space)
        # check storage dir is matching requirements
        if not check_space_required(storage_dir, storage_space):
            terminate_on_fail("Not enough space on storage device...",
                              "error")
        # the checksum algorithm follows from the checksum file extension; the
        # download call itself was identical in both former branches
        sha256 = not iso_data["checksum"].endswith(".sha512")
        # download checksum file
        checksum_ok = download(iso_data["checksum"], out_dir=working_dir, binary=False)
        if not checksum_ok:
            terminate_on_fail("Download failed...",
                              "error",
                              working_dir,
                              keep_files)
        # download image file
        image_ok = download(iso_data["parts"][0]["url"], out_dir=working_dir)
        if not image_ok:
            terminate_on_fail("Download failed...",
                              "error",
                              working_dir,
                              keep_files)
        # download signature file
        signature_ok = download(iso_data["signature"], out_dir=working_dir)
        if not signature_ok:
            terminate_on_fail("Download failed...",
                              "error",
                              working_dir,
                              keep_files)
        # extract iso filename
        iso_file = iso_data["parts"][0]["url"].split("/")[-1]
        # extract sha sum filename
        sha_file = iso_data["checksum"].split("/")[-1]
        # verify signature
        info("Wait for signature verification to complete...")
        sigfile = iso_data["signature"].split("/")[-1]
        # NOTE(review): gpg normally writes its verification report to stderr,
        # so result.stdout below may be empty — confirm intended output
        result = subprocess.run(["gpg", "--verify", sigfile], cwd=working_dir, capture_output=True)
        if result.returncode != 0:
            terminate_on_fail("Signature verification failed...",
                              "error",
                              working_dir,
                              keep_files)
        msg2(f"Signature verified. {result.stdout.decode('utf-8')}", end="\n")

        # move the checksum file to storage_dir
        info(f"Moving '{sha_file}' to '{storage_dir}' ...", end="\r")
        subprocess.run(["mv", sha_file, storage_dir], cwd=working_dir)
        msg2(f"Moved '{sha_file}' to '{storage_dir}'      ", end="\n")

        # move the signature file to storage_dir
        info(f"Moving '{sigfile}' to '{storage_dir}' ...", end="\r")
        subprocess.run(["mv", sigfile, storage_dir], cwd=working_dir)
        msg2(f"Moved '{sigfile}' to '{storage_dir}'      ", end="\n")

        # move iso file to storage_dir
        info(f"Moving '{iso_file}' to '{storage_dir}' ...", end="\r")
        subprocess.run(["mv", iso_file, storage_dir], cwd=working_dir)
        msg2(f"Moved '{iso_file}' to '{storage_dir}'      ", end="\n")

        # verify iso checksum on storage_dir
        info("Wait for checksum to complete...")
        checksum_cmd = "sha256sum" if sha256 else "sha512sum"
        result = subprocess.run([checksum_cmd, "-c", sha_file],
                                cwd=storage_dir, capture_output=True)
        if result.returncode != 0:
            terminate_on_fail("Checksum verification failed...",
                              "error",
                              working_dir,
                              keep_files)
        msg2(f"Checksum verified. {result.stdout.decode('utf-8')}", end="\r")

    # remove temp dir
    info("Cleaning up...")
    clean_work_dir(working_dir, keep_files)

    msg(f"ISO file: {iso_file}")
    msg(f"Storage : {storage_dir}")
    sys.exit(0)


if __name__ == '__main__':
    # Script entry point: run the downloader and translate a Ctrl+C
    # (KeyboardInterrupt) into a clean message plus a non-zero exit status.
    try:
        main()
    except KeyboardInterrupt:
        print("\n" + "Exit: interrupted by the user.")
        sys.exit(1)
