From 6916b3f09ebea13c2e2d4229871446dc256da6b2 Mon Sep 17 00:00:00 2001
From: olemorud <ole.kristian.morud@cern.ch>
Date: Tue, 28 Feb 2023 10:41:52 +0100
Subject: [PATCH] [CI] Major refactor of the Python build script

---
 .../workflows/root-ci-config/build_root.py    | 301 ++++++++++--------
 .../workflows/root-ci-config/build_utils.py   |  38 ++-
 2 files changed, 203 insertions(+), 136 deletions(-)

diff --git a/.github/workflows/root-ci-config/build_root.py b/.github/workflows/root-ci-config/build_root.py
index 76616ec1e67..f645744f567 100755
--- a/.github/workflows/root-ci-config/build_root.py
+++ b/.github/workflows/root-ci-config/build_root.py
@@ -21,219 +21,255 @@ import tarfile
 from hashlib import sha1
 
 import openstack
+import ctypes
+
 from build_utils import (
     cmake_options_from_dict,
     die,
     download_latest,
+    github_log_group,
+    print_info,
     load_config,
     print_shell_log,
     subprocess_with_log,
     upload_file,
-    warning,
+    print_warning,
 )
 
 S3CONTAINER = 'ROOT-build-artifacts'  # Used for uploads
 S3URL = 'https://s3.cern.ch/swift/v1/' + S3CONTAINER  # Used for downloads
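+# The 'envvars' cloud makes openstacksdk read credentials from the OS_* environment
+# variables, so only connect when they are present (OS_REGION_NAME is used as the indicator).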
+CONNECTION = openstack.connect(cloud='envvars') if os.getenv('OS_REGION_NAME') else None
 
+WINDOWS = (os.name == 'nt')
+WORKDIR = '/tmp/workspace' if not WINDOWS else 'C:/ROOT-CI'
+COMPRESSIONLEVEL = 6 if not WINDOWS else 1
 
 def main():
     # openstack.enable_logging(debug=True)
-    shell_log = ''
+
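+    # os.getuid() only exists on POSIX; on Windows, fall back to the shell32 IsUserAnAdmin() check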
+    try:
+        sudo = (os.getuid() == 0)
+    except AttributeError:
+        sudo = ctypes.windll.shell32.IsUserAnAdmin() != 0
+    
+    if sudo:
+        # 64 == EX_USAGE; the os.EX_* constants are POSIX-only and unavailable on Windows
+        die(64, "Script should not be run with sudo/admin")
+
+    # accumulates commands executed so they can be displayed as a script on build failure
+    shell_log = ""
+
+    # used to name uploaded artifacts; calculated early because build durations vary
     yyyy_mm_dd = datetime.datetime.today().strftime('%Y-%m-%d')
-    this_script_dir = os.path.dirname(os.path.abspath(__file__))
 
+    # GitHub Actions makes it awkward to pass boolean flags, so boolean arguments
+    # are passed as true/false strings instead.
     parser = argparse.ArgumentParser()
-    parser.add_argument("--platform",    default="centos8", help="Platform to build on")
-    parser.add_argument("--incremental", default=False,     help="Do incremental build")
-    parser.add_argument("--buildtype",   default="Release", help="Release, Debug or RelWithDebInfo")
-    parser.add_argument("--base_ref",    default=None,      help="Ref to target branch")
-    parser.add_argument("--head_ref",    default=None,      help="Ref to feature branch")
-    parser.add_argument("--repository",  default="https://github.com/root-project/root.git",
+    parser.add_argument("--platform",     default="centos8", help="Platform to build on")
+    parser.add_argument("--incremental",  default="false",   help="Do incremental build")
+    parser.add_argument("--buildtype",    default="Release", help="Release|Debug|RelWithDebInfo")
+    parser.add_argument("--base_ref",     default=None,      help="Ref to target branch")
+    parser.add_argument("--head_ref",     default=None,      help="Ref to feature branch")
+    parser.add_argument("--architecture", default=None,      help="Windows only, target arch")
+    parser.add_argument("--repository",   default="https://github.com/root-project/root.git",
                         help="url to repository")
 
     args = parser.parse_args()
 
-    platform    = args.platform
-    incremental = args.incremental.lower() in ('yes', 'true', '1', 'on')
-    buildtype   = args.buildtype
-    base_ref    = args.base_ref
-    head_ref    = args.head_ref
-    repository  = args.repository
+    args.incremental = args.incremental.lower() in ('yes', 'true', '1', 'on')
 
-    if not base_ref:
+    if not args.base_ref:
         die(os.EX_USAGE, "base_ref not specified")
 
-    if not head_ref or (head_ref == base_ref):
-        warning("head_ref same as base_ref, assuming non-PR build")
-        pull_request = False
-        print("\nEstablishing s3 connection")
-        connection = openstack.connect(cloud='envvars')
-    else:
-        pull_request = True
-        connection = None
+    pull_request = args.head_ref and args.head_ref != args.base_ref
 
-    if os.name == 'nt':
-        # windows
-        compressionlevel = 1
-        workdir = 'C:/ROOT-CI'
-        os.environ['COMSPEC'] = 'powershell.exe'
-        result, shell_log = subprocess_with_log(f"""
-            Remove-Item -Recurse -Force -Path {workdir}
-            New-Item -Force -Type directory -Path {workdir}
-            Set-Location -LiteralPath {workdir}
-        """, shell_log)
-    else:
-        # mac/linux/POSIX
-        compressionlevel = 6
-        workdir = '/tmp/workspace'
-        result, shell_log = subprocess_with_log(f"""
-            mkdir -p {workdir}
-            rm -rf {workdir}/*
-            cd {workdir}
-        """, shell_log)
+    if not pull_request:
+        print_info("head_ref missing or same as base_ref, assuming non-PR build")
 
-    if result != 0:
-        die(result, "Failed to clean up previous artifacts", shell_log)
+    shell_log = cleanup_previous_build(shell_log)
 
-    os.chdir(workdir)
+    # Load CMake options from .github/workflows/root-ci-config/buildconfig/[platform].txt
+    this_script_dir = os.path.dirname(os.path.abspath(__file__))
 
-    # Load CMake options from file
     options_dict = {
         **load_config(f'{this_script_dir}/buildconfig/global.txt'),
-        # below has precedence
-        **load_config(f'{this_script_dir}/buildconfig/{platform}.txt')
+        # file below overwrites values from above
+        **load_config(f'{this_script_dir}/buildconfig/{args.platform}.txt')
     }
+
     options = cmake_options_from_dict(options_dict)
 
-    option_hash = sha1(options.encode('utf-8')).hexdigest()
-    obj_prefix = f'{platform}/{base_ref}/{buildtype}/{option_hash}'
+    if WINDOWS:
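+        # -Thost=<arch> selects the host architecture of the MSVC toolset;
+        # -A Win32 makes the Visual Studio generator target a 32-bit build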
+        options = f"-Thost={args.architecture} " + options
+
+        if args.architecture == 'x86':
+            options = "-AWin32 " + options
 
-    # to make testing of CI in forks not impact artifacts
-    if 'root-project/root' not in repository:
-        obj_prefix = f"ci-testing/{repository.split('/')[-2]}/" + obj_prefix
+    # The sha1 of the build option string is used to find existing artifacts
+    # with matching build options on s3 storage.
+    option_hash = sha1(options.encode('utf-8')).hexdigest()
+    obj_prefix = f'{args.platform}/{args.base_ref}/{args.buildtype}/{option_hash}'
 
-    # Download and extract previous build artifacts
-    if incremental:
-        print("::group::Download previous build artifacts")
-        print("Attempting incremental build")
+    # Make testing of CI in forks not impact artifacts
+    if 'root-project/root' not in args.repository:
+        obj_prefix = f"ci-testing/{args.repository.split('/')[-2]}/" + obj_prefix
 
+    if args.incremental:
+        print_info("Attempting incremental build")
         try:
-            tar_path, shell_log = download_latest(S3URL, obj_prefix, workdir, shell_log)
+            shell_log = download_artifacts(obj_prefix, shell_log)
+        except Exception as err:
+            print_warning(f'Failed to download: {err}')
+            args.incremental = False
 
-            print(f"\nExtracting archive {tar_path}")
+    shell_log = git_pull(args.repository, args.base_ref, shell_log)
 
-            with tarfile.open(tar_path) as tar:
-                tar.extractall()
+    if pull_request:
+        shell_log = rebase(args.base_ref, args.head_ref, shell_log)
 
-            shell_log += f'\ntar -xf {tar_path}\n'
+    shell_log = build(options, args.buildtype, shell_log)
 
-        except Exception as err:
-            warning("failed to download/extract:", err)
-            shutil.rmtree(f'{workdir}/src', ignore_errors=True)
-            shutil.rmtree(f'{workdir}/build', ignore_errors=True)
+    testing: bool = options_dict['testing'].lower() == "on" and options_dict['roottest'].lower() == "on"
 
-            incremental = False
+    if testing:
+        extra_ctest_flags = ""
+        
+        if WINDOWS:
+            extra_ctest_flags += " -C" + args.buildtype
+
+        shell_log = run_ctest(shell_log, extra_ctest_flags)
 
-        print("::endgroup::")
+    archive_and_upload(yyyy_mm_dd, obj_prefix)
+
+    print_shell_log(shell_log)
 
-    print(f"::group::Pull {base_ref}")
 
-    if not incremental:
-        print("Doing non-incremental build")
+@github_log_group("Clean up from previous runs")
+def cleanup_previous_build(shell_log):
+    # guard against wiping the filesystem root if WORKDIR is misconfigured;
+    # runners should never have root permissions, but be on the safe side
+    if WORKDIR == "" or WORKDIR == "/":
+        die(1, "WORKDIR not set", "")
 
+    if WINDOWS:
+        # windows
+        os.environ['COMSPEC'] = 'powershell.exe'
         result, shell_log = subprocess_with_log(f"""
-            git clone --branch {base_ref} --single-branch {repository} "{workdir}/src"
+            $ErrorActionPreference = 'Stop'
+            if (Test-Path {WORKDIR}) {{
+                Remove-Item -Recurse -Force -Path {WORKDIR}
+            }}
+            New-Item -Force -Type directory -Path {WORKDIR}
         """, shell_log)
     else:
+        # mac/linux/POSIX
         result, shell_log = subprocess_with_log(f"""
-            cd '{workdir}/src'      || exit 1
-            git checkout {base_ref} || exit 2
-            git fetch               || exit 3
-            git reset --hard @{{u}} || exit 4
+            rm -rf {WORKDIR}
+            mkdir -p {WORKDIR}
         """, shell_log)
 
-    print("::endgroup::")
-
     if result != 0:
-        die(result, f"Failed to pull {base_ref}", shell_log)
+        die(result, "Failed to clean up previous artifacts", shell_log)
+    
+    return shell_log
 
 
-    extra_ctest_flags = f"-C {buildtype}" if os.name == "nt" else ""
+@github_log_group("Pull/clone branch")
+def git_pull(repository: str, branch: str, shell_log: str) -> str:
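+    # Try up to five times: reuse and update an existing checkout if one is
+    # found in WORKDIR/src, otherwise do a fresh single-branch clone.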
+    returncode = 1
+
+    for attempts in range(5):
+        if returncode == 0:
+            break
+
+        if os.path.exists(f"{WORKDIR}/src/.git"):
+            returncode, shell_log = subprocess_with_log(f"""
+                cd '{WORKDIR}/src'
+                git checkout {branch}
+                git fetch
+                git reset --hard @{{u}}
+            """, shell_log)
+        else:
+            returncode, shell_log = subprocess_with_log(f"""
+                git clone --branch {branch} --single-branch {repository} "{WORKDIR}/src"
+            """, shell_log)
+
+    if returncode != 0:
+        die(returncode, f"Failed to pull {branch}", shell_log)
+    
+    return shell_log
 
-    if pull_request:
-        print("::group::Rebase {base_ref} onto {head_ref}")
-        shell_log = rebase(base_ref, head_ref, workdir, shell_log)
-        print("::endgroup::")
-
-        print("::group::Build")
-        shell_log = build(workdir, options, buildtype, shell_log)
-        print("::endgroup::")
-
-        if(    options_dict['testing'].lower() == "on"
-           and options_dict['roottest'].lower() == "on"):
-            print("::group::Run tests")
-            shell_log = test(workdir, shell_log, extra_ctest_flags)
-            print("::endgroup::")
-    else:
-        print(f"::group::Build release branch {base_ref}")
-        shell_log = build(workdir, options, buildtype, shell_log)
-        print("::endgroup::")
 
-        if(    options_dict['testing'].lower() == "on"
-           and options_dict['roottest'].lower() == "on"):
-            print("::group::Run tests")
-            shell_log = test(workdir, shell_log, extra_ctest_flags)
-            print("::endgroup::")
+@github_log_group("Download previous build artifacts")
+def download_artifacts(obj_prefix: str, shell_log: str):
+    try:
+        tar_path, shell_log = download_latest(S3URL, obj_prefix, WORKDIR, shell_log)
 
-        print("::group::Archive and upload")
-        archive_and_upload(yyyy_mm_dd, workdir, connection, compressionlevel, obj_prefix)
-        print("::endgroup::")
+        print(f"\nExtracting archive {tar_path}")
 
-    print_shell_log(shell_log)
+        with tarfile.open(tar_path) as tar:
+            tar.extractall(WORKDIR)
 
+        shell_log += f'\ntar -xf {tar_path}\n'
+
+    except Exception as err:
+        print_warning("failed to download/extract:", err)
+        shutil.rmtree(f'{WORKDIR}/src', ignore_errors=True)
+        shutil.rmtree(f'{WORKDIR}/build', ignore_errors=True)
+        raise err
+    
+    return shell_log
 
-def test(workdir: str, shell_log: str, extra_ctest_flags: str):
 
+@github_log_group("Run tests")
+def run_ctest(shell_log: str, extra_ctest_flags: str) -> str:
     result, shell_log = subprocess_with_log(f"""
-        cd '{workdir}/build'
+        cd '{WORKDIR}/build'
         ctest -j{os.cpu_count()} --output-junit TestResults.xml {extra_ctest_flags}
     """, shell_log)
     
     if result != 0:
-        warning("Some tests failed")
+        print_warning("Some tests failed")
     
     return shell_log
 
 
-def archive_and_upload(archive_name, workdir, connection, compressionlevel, prefix):
+@github_log_group("Archive and upload")
+def archive_and_upload(archive_name, prefix):
     new_archive = f"{archive_name}.tar.gz"
 
-    with tarfile.open(f"{workdir}/{new_archive}", "x:gz", compresslevel=compressionlevel) as targz:
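+    # chdir into WORKDIR so that 'src' and 'build' end up as top-level entries in the archive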
+    os.chdir(WORKDIR)
+
+    with tarfile.open(f"{WORKDIR}/{new_archive}", "x:gz", compresslevel=COMPRESSIONLEVEL) as targz:
         targz.add("src")
         targz.add("build")
 
     upload_file(
-        connection=connection,
+        connection=CONNECTION,
         container=S3CONTAINER,
         dest_object=f"{prefix}/{new_archive}",
-        src_file=f"{workdir}/{new_archive}"
+        src_file=f"{WORKDIR}/{new_archive}"
     )
 
 
-def build(workdir, options, buildtype, shell_log):
-    if not os.path.exists(f'{workdir}/build/CMakeCache.txt'):
+@github_log_group("Build")
+def build(options, buildtype, shell_log):
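+    # on Windows, '-verbosity:minimal' is forwarded to MSBuild (everything after '--'
+    # goes to the underlying build tool) to reduce log output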
+    generator_flags = "-- '-verbosity:minimal'" if WINDOWS else ""
+
+    if not os.path.isdir(f'{WORKDIR}/build'):
+        result, shell_log = subprocess_with_log(f"mkdir {WORKDIR}/build", shell_log)
+
+        if result != 0:
+            die(result, "Failed to create build directory", shell_log)
+
+    if not os.path.exists(f'{WORKDIR}/build/CMakeCache.txt'):
         result, shell_log = subprocess_with_log(f"""
-            mkdir -p '{workdir}/build'
-            cmake -S '{workdir}/src' -B '{workdir}/build' {options} \\
-                -DCMAKE_BUILD_TYPE={buildtype}
+            cmake -S '{WORKDIR}/src' -B '{WORKDIR}/build' {options} -DCMAKE_BUILD_TYPE={buildtype}
         """, shell_log)
 
         if result != 0:
             die(result, "Failed cmake generation step", shell_log)
 
     result, shell_log = subprocess_with_log(f"""
-        mkdir '{workdir}/build'
-        cmake --build '{workdir}/build' --config '{buildtype}' --parallel '{os.cpu_count()}'
+        cmake --build '{WORKDIR}/build' --config '{buildtype}' --parallel '{os.cpu_count()}' {generator_flags}
     """, shell_log)
 
     if result != 0:
@@ -242,21 +278,24 @@ def build(workdir, options, buildtype, shell_log):
     return shell_log
 
 
-def rebase(base_ref, head_ref, workdir, shell_log) -> str:
-    """rebases head_ref on base_ref, returns shell log"""
-
+@github_log_group("Rebase")
+def rebase(base_ref, head_ref, shell_log) -> str:
+    # These mental gymnastics are necessary because the CMake build fetches
+    # roottest based on the current branch name of ROOT.
+    #
+    # The rebase fails unless user.email and user.name are set.
     result, shell_log = subprocess_with_log(f"""
-        cd '{workdir}/src' || exit 1
+        cd '{WORKDIR}/src'
         
         git config user.email "rootci@root.cern"
         git config user.name 'ROOT Continous Integration'
         
-        git fetch origin {head_ref}:__tmp || exit 2
-        git checkout __tmp || exit 3
+        git fetch origin {head_ref}:__tmp
+        git checkout __tmp
         
-        git rebase {base_ref} || exit 5
-        git checkout {base_ref} || exit 6
-        git reset --hard __tmp || exit 7
+        git rebase {base_ref}
+        git checkout {base_ref}
+        git reset --hard __tmp
     """, shell_log)
 
     if result != 0:
diff --git a/.github/workflows/root-ci-config/build_utils.py b/.github/workflows/root-ci-config/build_utils.py
index dfd610e51eb..c2c7f77d9f9 100755
--- a/.github/workflows/root-ci-config/build_utils.py
+++ b/.github/workflows/root-ci-config/build_utils.py
@@ -5,13 +5,37 @@ import os
 import subprocess
 import sys
 import textwrap
+from functools import wraps
 from http import HTTPStatus
-from typing import Dict, Tuple
+from typing import Callable, Dict, Tuple
 
 from openstack.connection import Connection
 from requests import get
 
 
+def github_log_group(title: str):
+    """ decorator that places function's stdout/stderr output in a
+        dropdown group when running on github workflows """
+    def group(func: Callable):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            print("::group::" + title)
+
+            try:
+                result = func(*args, **kwargs)
+            except Exception as e:
+                print("::endgroup::")
+                raise e
+            
+            print("::endgroup::")
+
+            return result
+
+        return wrapper if os.getenv("GITHUB_ACTIONS") else func
+
+    return group
+
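+# Example usage (as in build_root.py): the decorated function's output is
+# wrapped in ::group:: / ::endgroup:: markers when GITHUB_ACTIONS is set,
+# and the function runs unmodified otherwise:
+#
+#     @github_log_group("Build")
+#     def build(options, buildtype, shell_log):
+#         ...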
+
 def print_fancy(*values, sgr=1, **kwargs) -> None:
     """prints message using select graphic rendition, defaults to bold text
        https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_(Select_Graphic_Rendition)_parameters"""
@@ -21,11 +45,15 @@ def print_fancy(*values, sgr=1, **kwargs) -> None:
     print("\033[0m", **kwargs)
 
 
-def warning(*values, **kwargs):
+def print_info(*values, **kwargs):
+    print_fancy("Info: ", *values, sgr=90, **kwargs)
+
+
+def print_warning(*values, **kwargs):
     print_fancy("Warning: ", *values, sgr=33, **kwargs)
 
 
-def error(*values, **kwargs):
+def print_error(*values, **kwargs):
     print_fancy("Fatal error: ", *values, sgr=31, **kwargs)
 
 
@@ -45,7 +73,7 @@ def subprocess_with_log(command: str, log="") -> Tuple[int, str]:
 
 
 def die(code: int = 1, msg: str = "", log: str = "") -> None:
-    error(f"({code}) {msg}")
+    print_error(f"({code}) {msg}")
 
     print_shell_log(log)
 
@@ -73,7 +101,7 @@ def load_config(filename) -> dict:
     try:
         file = open(filename, 'r', encoding='utf-8')
     except OSError as err:
-        warning(f"couldn't load {filename}: {err.strerror}")
+        print_warning(f"couldn't load {filename}: {err.strerror}")
         return {}
 
     with file:
-- 
GitLab