xref: /openbmc/openbmc-build-scripts/scripts/build-unit-test-docker (revision 3a7693c054e876a4bf93f3c1b3cee70511f6daa3)
1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
6#   DOCKER_IMG_NAME:  <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#   FORCE_DOCKER_BUILD: <optional, a non-zero value with force all Docker
10#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
21#                     instead of our default (public.ecr.aws/ubuntu)
22#                     (ex. docker.io)
23#   http_proxy        The HTTP address of the proxy server to connect to.
24#                     Default: "", proxy is not setup if this is not set
25
26import json
27import os
28import re
29import sys
30import threading
31import urllib.request
32from datetime import date
33from hashlib import sha256
34
35# typing.Dict is used for type-hints.
36from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401
37
38from sh import git, nproc  # type: ignore
39
40try:
41    # System may have docker or it may have podman, try docker first
42    from sh import docker
43
44    container = docker
45except ImportError:
46    try:
47        from sh import podman
48
49        container = podman
50    except Exception:
51        print("No docker or podman found on system")
52        exit(1)
53
54try:
55    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
56    from typing import TypedDict
57except Exception:
58
59    class TypedDict(dict):  # type: ignore
60        # We need to do this to eat the 'total' argument.
61        def __init_subclass__(cls, **kwargs: Any) -> None:
62            super().__init_subclass__()
63
64
65# Declare some variables used in package definitions.
66prefix = "/usr/local"
67proc_count = nproc().strip()
68
69
70class PackageDef(TypedDict, total=False):
71    """Package Definition for packages dictionary."""
72
73    # rev [optional]: Revision of package to use.
74    rev: str
75    # url [optional]: lambda function to create URL: (package, rev) -> url.
76    url: Callable[[str, str], str]
77    # depends [optional]: List of package dependencies.
78    depends: Iterable[str]
79    # build_type [required]: Build type used for package.
80    #   Currently supported: autoconf, cmake, custom, make, meson
81    build_type: str
82    # build_steps [optional]: Steps to run for 'custom' build_type.
83    build_steps: Iterable[str]
84    # config_flags [optional]: List of options to pass configuration tool.
85    config_flags: Iterable[str]
86    # config_env [optional]: List of environment variables to set for config.
87    config_env: Iterable[str]
88    # custom_post_dl [optional]: List of steps to run after download, but
89    #   before config / build / install.
90    custom_post_dl: Iterable[str]
91    # custom_post_install [optional]: List of steps to run after install.
92    custom_post_install: Iterable[str]
93
94    # __tag [private]: Generated Docker tag name for package stage.
95    __tag: str
96    # __package [private]: Package object associated with this package.
97    __package: Any  # Type is Package, but not defined yet.
98
99
100# Packages to include in image.
101packages = {
102    "boost": PackageDef(
103        rev="1.88.0",
104        url=(
105            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}-cmake.tar.gz"
106        ),
107        build_type="custom",
108        build_steps=[
109            (
110                "./bootstrap.sh"
111                f" --prefix={prefix} --with-libraries=atomic,context,coroutine,filesystem,process,url"
112            ),
113            "./b2",
114            f"./b2 install --prefix={prefix} valgrind=on",
115        ],
116    ),
117    "USCiLab/cereal": PackageDef(
118        rev="v1.3.2",
119        build_type="custom",
120        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
121    ),
122    "danmar/cppcheck": PackageDef(
123        rev="2.12.1",
124        build_type="cmake",
125    ),
126    "DMTF/libspdm": PackageDef(
127        rev="3.7.0",
128        url=lambda pkg, rev: f"https://github.com/DMTF/libspdm/archive/{rev}.tar.gz",
129        build_type="cmake",
130        config_flags=(
131            lambda: (
132                lambda arch_mapping={
133                    "x86_64": "x64",
134                    "i586": "ia32",
135                    "i686": "ia32",
136                    "arm": "arm",
137                    "aarch64": "aarch64",
138                    "arm64": "aarch64",
139                    "riscv32": "riscv32",
140                    "riscv64": "riscv64",
141                    "ppc64le": "ppc64le",
142                }: [
143                    f"-DARCH={arch_mapping.get(__import__('platform').machine(), 'x64')}",
144                    "-DTOOLCHAIN=GCC",
145                    "-DTARGET=Release",
146                    "-DCRYPTO=openssl",
147                    "-DBUILD_LINUX_SHARED_LIB=ON",
148                    "-DENABLE_BINARY_BUILD=1",
149                    "-DDISABLE_TESTS=1",
150                    f"-DCOMPILED_LIBCRYPTO_PATH={prefix}/lib",
151                    f"-DCOMPILED_LIBSSL_PATH={prefix}/lib",
152                ]
153            )()
154        )(),
155    ),
156    "CLIUtils/CLI11": PackageDef(
157        rev="v2.3.2",
158        build_type="cmake",
159        config_flags=[
160            "-DBUILD_TESTING=OFF",
161            "-DCLI11_BUILD_DOCS=OFF",
162            "-DCLI11_BUILD_EXAMPLES=OFF",
163        ],
164    ),
165    "fmtlib/fmt": PackageDef(
166        rev="11.2.0",
167        build_type="cmake",
168        config_flags=[
169            "-DFMT_DOC=OFF",
170            "-DFMT_TEST=OFF",
171        ],
172    ),
173    "Naios/function2": PackageDef(
174        rev="4.2.4",
175        build_type="custom",
176        build_steps=[
177            f"mkdir {prefix}/include/function2",
178            f"cp include/function2/function2.hpp {prefix}/include/function2/",
179        ],
180    ),
181    "google/googletest": PackageDef(
182        rev="v1.16.0",
183        build_type="cmake",
184        config_env=["CXXFLAGS=-std=c++20"],
185        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
186    ),
187    "nghttp2/nghttp2": PackageDef(
188        rev="v1.65.0",
189        build_type="cmake",
190        config_env=["CXXFLAGS=-std=c++20"],
191        config_flags=[
192            "-DENABLE_LIB_ONLY=ON",
193            "-DENABLE_STATIC_LIB=ON",
194        ],
195    ),
196    "nlohmann/json": PackageDef(
197        rev="v3.12.0",
198        build_type="cmake",
199        config_flags=["-DJSON_BuildTests=OFF"],
200        custom_post_install=[
201            (
202                f"ln -s {prefix}/include/nlohmann/json.hpp"
203                f" {prefix}/include/json.hpp"
204            ),
205        ],
206    ),
207    "json-c/json-c": PackageDef(
208        rev="json-c-0.18-20240915",
209        build_type="cmake",
210    ),
211    "LibVNC/libvncserver": PackageDef(
212        rev="LibVNCServer-0.9.14",
213        build_type="cmake",
214    ),
215    "leethomason/tinyxml2": PackageDef(
216        rev="11.0.0",
217        build_type="cmake",
218    ),
219    "tristanpenman/valijson": PackageDef(
220        rev="v1.0.5",
221        build_type="cmake",
222        config_flags=[
223            "-Dvalijson_BUILD_TESTS=0",
224            "-Dvalijson_INSTALL_HEADERS=1",
225        ],
226    ),
227    "libgpiod": PackageDef(
228        rev="1.6.5",
229        url=(
230            lambda pkg, rev: f"https://git.kernel.org/pub/scm/libs/{pkg}/{pkg}.git/snapshot/{pkg}-{rev}.tar.gz"
231        ),
232        build_type="autogen",
233        config_flags=["--enable-bindings-cxx"],
234    ),
235    "NVIDIA/stdexec": PackageDef(
236        rev="36a92fd776c835abd4dc5e62d43cf040c20a9add",
237        build_type="meson",
238    ),
239    "open-power/pdbg": PackageDef(build_type="autoconf"),
240    "openbmc/gpioplus": PackageDef(
241        build_type="meson",
242        config_flags=[
243            "-Dexamples=false",
244            "-Dtests=disabled",
245        ],
246    ),
247    "openbmc/phosphor-dbus-interfaces": PackageDef(
248        depends=["openbmc/sdbusplus"],
249        build_type="meson",
250        config_flags=["-Dgenerate_md=false"],
251    ),
252    "openbmc/phosphor-logging": PackageDef(
253        depends=[
254            "USCiLab/cereal",
255            "openbmc/phosphor-dbus-interfaces",
256            "openbmc/sdbusplus",
257            "openbmc/sdeventplus",
258        ],
259        build_type="meson",
260        config_flags=[
261            "-Dlibonly=true",
262            "-Dtests=disabled",
263        ],
264    ),
265    "openbmc/phosphor-objmgr": PackageDef(
266        depends=[
267            "CLIUtils/CLI11",
268            "boost",
269            "leethomason/tinyxml2",
270            "openbmc/phosphor-dbus-interfaces",
271            "openbmc/phosphor-logging",
272            "openbmc/sdbusplus",
273        ],
274        build_type="meson",
275        config_flags=[
276            "-Dtests=disabled",
277        ],
278    ),
279    "openbmc/libpeci": PackageDef(
280        build_type="meson",
281        config_flags=[
282            "-Draw-peci=disabled",
283        ],
284    ),
285    "openbmc/libpldm": PackageDef(
286        build_type="meson",
287        config_flags=[
288            "-Dabi=deprecated,stable",
289            "-Dtests=false",
290            "-Dabi-compliance-check=false",
291        ],
292    ),
293    "openbmc/sdbusplus": PackageDef(
294        depends=[
295            "NVIDIA/stdexec",
296            "nlohmann/json",
297        ],
298        build_type="meson",
299        custom_post_dl=[
300            "cd tools",
301            "python3 -m pip install --break-system-packages --root-user-action ignore .",
302            "cd ..",
303        ],
304        config_flags=[
305            "-Dexamples=disabled",
306            "-Dtests=disabled",
307        ],
308    ),
309    "openbmc/sdeventplus": PackageDef(
310        depends=[
311            "openbmc/stdplus",
312        ],
313        build_type="meson",
314        config_flags=[
315            "-Dexamples=false",
316            "-Dtests=disabled",
317        ],
318    ),
319    "openbmc/stdplus": PackageDef(
320        depends=[
321            "fmtlib/fmt",
322            "google/googletest",
323            "Naios/function2",
324        ],
325        build_type="meson",
326        config_flags=[
327            "-Dexamples=false",
328            "-Dtests=disabled",
329            "-Dgtest=enabled",
330        ],
331    ),
332}  # type: Dict[str, PackageDef]
333
334# Define common flags used for builds
335configure_flags = " ".join(
336    [
337        f"--prefix={prefix}",
338    ]
339)
340cmake_flags = " ".join(
341    [
342        "-DBUILD_SHARED_LIBS=ON",
343        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
344        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
345        "-GNinja",
346        "-DCMAKE_MAKE_PROGRAM=ninja",
347    ]
348)
349meson_flags = " ".join(
350    [
351        "--wrap-mode=nodownload",
352        f"-Dprefix={prefix}",
353    ]
354)
355
356
357class Package(threading.Thread):
358    """Class used to build the Docker stages for each package.
359
360    Generally, this class should not be instantiated directly but through
361    Package.generate_all().
362    """
363
364    # Copy the packages dictionary.
365    packages = packages.copy()
366
367    # Lock used for thread-safety.
368    lock = threading.Lock()
369
370    def __init__(self, pkg: str):
371        """pkg - The name of this package (ex. foo/bar )"""
372        super(Package, self).__init__()
373
374        self.package = pkg
375        self.exception = None  # type: Optional[Exception]
376
377        # Reference to this package's
378        self.pkg_def = Package.packages[pkg]
379        self.pkg_def["__package"] = self
380
381    def run(self) -> None:
382        """Thread 'run' function.  Builds the Docker stage."""
383
384        # In case this package has no rev, fetch it from Github.
385        self._update_rev()
386
387        # Find all the Package objects that this package depends on.
388        #   This section is locked because we are looking into another
389        #   package's PackageDef dict, which could be being modified.
390        Package.lock.acquire()
391        deps: Iterable[Package] = [
392            Package.packages[deppkg]["__package"]
393            for deppkg in self.pkg_def.get("depends", [])
394        ]
395        Package.lock.release()
396
397        # Wait until all the depends finish building.  We need them complete
398        # for the "COPY" commands.
399        for deppkg in deps:
400            deppkg.join()
401
402        # Generate this package's Dockerfile.
403        dockerfile = f"""
404FROM {docker_base_img_name}
405{self._df_copycmds()}
406{self._df_build()}
407"""
408
409        # Generate the resulting tag name and save it to the PackageDef.
410        #   This section is locked because we are modifying the PackageDef,
411        #   which can be accessed by other threads.
412        Package.lock.acquire()
413        tag = Docker.tagname(self._stagename(), dockerfile)
414        self.pkg_def["__tag"] = tag
415        Package.lock.release()
416
417        # Do the build / save any exceptions.
418        try:
419            Docker.build(self.package, tag, dockerfile)
420        except Exception as e:
421            self.exception = e
422
423    @classmethod
424    def generate_all(cls) -> None:
425        """Ensure a Docker stage is created for all defined packages.
426
427        These are done in parallel but with appropriate blocking per
428        package 'depends' specifications.
429        """
430
431        # Create a Package for each defined package.
432        pkg_threads = [Package(p) for p in cls.packages.keys()]
433
434        # Start building them all.
435        #   This section is locked because threads depend on each other,
436        #   based on the packages, and they cannot 'join' on a thread
437        #   which is not yet started.  Adding a lock here allows all the
438        #   threads to start before they 'join' their dependencies.
439        Package.lock.acquire()
440        for t in pkg_threads:
441            t.start()
442        Package.lock.release()
443
444        # Wait for completion.
445        for t in pkg_threads:
446            t.join()
447            # Check if the thread saved off its own exception.
448            if t.exception:
449                print(f"Package {t.package} failed!", file=sys.stderr)
450                raise t.exception
451
452    @staticmethod
453    def df_all_copycmds() -> str:
454        """Formulate the Dockerfile snippet necessary to copy all packages
455        into the final image.
456        """
457        return Package.df_copycmds_set(Package.packages.keys())
458
459    @classmethod
460    def depcache(cls) -> str:
461        """Create the contents of the '/tmp/depcache'.
462        This file is a comma-separated list of "<pkg>:<rev>".
463        """
464
465        # This needs to be sorted for consistency.
466        depcache = ""
467        for pkg in sorted(cls.packages.keys()):
468            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
469        return depcache
470
471    def _check_gerrit_topic(self) -> bool:
472        if not gerrit_topic:
473            return False
474        if not self.package.startswith("openbmc/"):
475            return False
476        if gerrit_project == self.package and gerrit_rev:
477            return False
478
479        # URL escape any spaces.  Gerrit uses pluses.
480        gerrit_topic_escape = urllib.parse.quote_plus(gerrit_topic)
481
482        try:
483            commits = json.loads(
484                urllib.request.urlopen(
485                    f'https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:"{gerrit_topic_escape}"'
486                )
487                .read()
488                .splitlines()[-1]
489            )
490
491            if len(commits) == 0:
492                return False
493            if len(commits) > 1:
494                print(
495                    f"{self.package} has more than 1 commit under {gerrit_topic}; using lastest upstream: {len(commits)}",
496                    file=sys.stderr,
497                )
498                return False
499
500            change_id = commits[0]["id"]
501
502            commit = json.loads(
503                urllib.request.urlopen(
504                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
505                )
506                .read()
507                .splitlines()[-1]
508            )["commit"]
509
510            print(
511                f"Using {commit} from {gerrit_topic} for {self.package}",
512                file=sys.stderr,
513            )
514            self.pkg_def["rev"] = commit
515            return True
516
517        except urllib.error.HTTPError as e:
518            print(
519                f"Error loading topic {gerrit_topic} for {self.package}: ",
520                e,
521                file=sys.stderr,
522            )
523            return False
524
525    def _update_rev(self) -> None:
526        """Look up the HEAD for missing a static rev."""
527
528        if "rev" in self.pkg_def:
529            return
530
531        if self._check_gerrit_topic():
532            return
533
534        # Check if Jenkins/Gerrit gave us a revision and use it.
535        if gerrit_project == self.package and gerrit_rev:
536            print(
537                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
538                file=sys.stderr,
539            )
540            self.pkg_def["rev"] = gerrit_rev
541            return
542
543        # Ask Github for all the branches.
544        lookup = git(
545            "ls-remote", "--heads", f"https://github.com/{self.package}"
546        )
547
548        # Find the branch matching {branch} (or fallback to master).
549        #   This section is locked because we are modifying the PackageDef.
550        Package.lock.acquire()
551        for line in lookup.split("\n"):
552            if re.fullmatch(f".*{branch}$", line.strip()):
553                self.pkg_def["rev"] = line.split()[0]
554                break
555            elif (
556                "refs/heads/master" in line or "refs/heads/main" in line
557            ) and "rev" not in self.pkg_def:
558                self.pkg_def["rev"] = line.split()[0]
559        Package.lock.release()
560
561    def _stagename(self) -> str:
562        """Create a name for the Docker stage associated with this pkg."""
563        return self.package.replace("/", "-").lower()
564
565    def _url(self) -> str:
566        """Get the URL for this package."""
567        rev = self.pkg_def["rev"]
568
569        # If the lambda exists, call it.
570        if "url" in self.pkg_def:
571            return self.pkg_def["url"](self.package, rev)
572
573        # Default to the github archive URL.
574        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"
575
576    def _cmd_download(self) -> str:
577        """Formulate the command necessary to download and unpack to source."""
578
579        url = self._url()
580        if ".tar." not in url:
581            raise NotImplementedError(
582                f"Unhandled download type for {self.package}: {url}"
583            )
584
585        cmd = f"curl -L {url} | tar -x"
586
587        if url.endswith(".bz2"):
588            cmd += "j"
589        elif url.endswith(".gz"):
590            cmd += "z"
591        else:
592            raise NotImplementedError(
593                f"Unknown tar flags needed for {self.package}: {url}"
594            )
595
596        return cmd
597
598    def _cmd_cd_srcdir(self) -> str:
599        """Formulate the command necessary to 'cd' into the source dir."""
600        return f"cd {self.package.split('/')[-1]}*"
601
602    def _df_copycmds(self) -> str:
603        """Formulate the dockerfile snippet necessary to COPY all depends."""
604
605        if "depends" not in self.pkg_def:
606            return ""
607        return Package.df_copycmds_set(self.pkg_def["depends"])
608
609    @staticmethod
610    def df_copycmds_set(pkgs: Iterable[str]) -> str:
611        """Formulate the Dockerfile snippet necessary to COPY a set of
612        packages into a Docker stage.
613        """
614
615        copy_cmds = ""
616
617        # Sort the packages for consistency.
618        for p in sorted(pkgs):
619            tag = Package.packages[p]["__tag"]
620            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
621            # Workaround for upstream docker bug and multiple COPY cmds
622            # https://github.com/moby/moby/issues/37965
623            copy_cmds += "RUN true\n"
624
625        return copy_cmds
626
627    def _df_build(self) -> str:
628        """Formulate the Dockerfile snippet necessary to download, build, and
629        install a package into a Docker stage.
630        """
631
632        # Download and extract source.
633        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "
634
635        # Handle 'custom_post_dl' commands.
636        custom_post_dl = self.pkg_def.get("custom_post_dl")
637        if custom_post_dl:
638            result += " && ".join(custom_post_dl) + " && "
639
640        # Build and install package based on 'build_type'.
641        build_type = self.pkg_def["build_type"]
642        if build_type == "autoconf":
643            result += self._cmd_build_autoconf()
644        elif build_type == "autogen":
645            result += self._cmd_build_autogen()
646        elif build_type == "cmake":
647            result += self._cmd_build_cmake()
648        elif build_type == "custom":
649            result += self._cmd_build_custom()
650        elif build_type == "make":
651            result += self._cmd_build_make()
652        elif build_type == "meson":
653            result += self._cmd_build_meson()
654        else:
655            raise NotImplementedError(
656                f"Unhandled build type for {self.package}: {build_type}"
657            )
658
659        # Handle 'custom_post_install' commands.
660        custom_post_install = self.pkg_def.get("custom_post_install")
661        if custom_post_install:
662            result += " && " + " && ".join(custom_post_install)
663
664        return result
665
666    def _cmd_build_autoconf(self) -> str:
667        options = " ".join(self.pkg_def.get("config_flags", []))
668        env = " ".join(self.pkg_def.get("config_env", []))
669        result = "./bootstrap.sh && "
670        result += f"{env} ./configure {configure_flags} {options} && "
671        result += f"make -j{proc_count} && make install"
672        return result
673
674    def _cmd_build_autogen(self) -> str:
675        options = " ".join(self.pkg_def.get("config_flags", []))
676        env = " ".join(self.pkg_def.get("config_env", []))
677        result = f"{env} ./autogen.sh {configure_flags} {options} && "
678        result += "make && make install"
679        return result
680
681    def _cmd_build_cmake(self) -> str:
682        options = " ".join(self.pkg_def.get("config_flags", []))
683        env = " ".join(self.pkg_def.get("config_env", []))
684        result = "mkdir builddir && cd builddir && "
685        result += f"{env} cmake {cmake_flags} {options} .. && "
686        result += "cmake --build . --target all && "
687        result += "cmake --build . --target install && "
688        result += "cd .."
689        return result
690
691    def _cmd_build_custom(self) -> str:
692        return " && ".join(self.pkg_def.get("build_steps", []))
693
694    def _cmd_build_make(self) -> str:
695        return f"make -j{proc_count} && make install"
696
697    def _cmd_build_meson(self) -> str:
698        options = " ".join(self.pkg_def.get("config_flags", []))
699        env = " ".join(self.pkg_def.get("config_env", []))
700        result = f"{env} meson setup builddir {meson_flags} {options} && "
701        result += "ninja -C builddir && ninja -C builddir install"
702        return result
703
704
705class Docker:
706    """Class to assist with Docker interactions.  All methods are static."""
707
708    @staticmethod
709    def timestamp() -> str:
710        """Generate a timestamp for today using the ISO week."""
711        today = date.today().isocalendar()
712        return f"{today[0]}-W{today[1]:02}"
713
714    @staticmethod
715    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
716        """Generate a tag name for a package using a hash of the Dockerfile."""
717        result = docker_image_name
718        if pkgname:
719            result += "-" + pkgname
720
721        result += ":" + Docker.timestamp()
722        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]
723
724        return result
725
726    @staticmethod
727    def build(pkg: str, tag: str, dockerfile: str) -> None:
728        """Build a docker image using the Dockerfile and tagging it with 'tag'."""
729
730        # If we're not forcing builds, check if it already exists and skip.
731        if not force_build:
732            if container.image.ls(
733                tag, "--format", '"{{.Repository}}:{{.Tag}}"'
734            ):
735                print(
736                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
737                )
738                return
739
740        # Build it.
741        #   Capture the output of the 'docker build' command and send it to
742        #   stderr (prefixed with the package name).  This allows us to see
743        #   progress but not pollute stdout.  Later on we output the final
744        #   docker tag to stdout and we want to keep that pristine.
745        #
746        #   Other unusual flags:
747        #       --no-cache: Bypass the Docker cache if 'force_build'.
748        #       --force-rm: Clean up Docker processes if they fail.
749        container.build(
750            proxy_args,
751            "--network=host",
752            "--force-rm",
753            "--no-cache=true" if force_build else "--no-cache=false",
754            "-t",
755            tag,
756            "-",
757            _in=dockerfile,
758            _out=(
759                lambda line: print(
760                    pkg + ":", line, end="", file=sys.stderr, flush=True
761                )
762            ),
763            _err_to_out=True,
764        )
765
766
767# Read a bunch of environment variables.
768docker_image_name = os.environ.get(
769    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
770)
771force_build = os.environ.get("FORCE_DOCKER_BUILD")
772is_automated_ci_build = os.environ.get("BUILD_URL", False)
773distro = os.environ.get("DISTRO", "ubuntu:plucky")
774branch = os.environ.get("BRANCH", "master")
775ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
776docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
777http_proxy = os.environ.get("http_proxy")
778
779gerrit_project = os.environ.get("GERRIT_PROJECT")
780gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
781gerrit_topic = os.environ.get("GERRIT_TOPIC")
782
783# Ensure appropriate docker build output to see progress and identify
784# any issues
785os.environ["BUILDKIT_PROGRESS"] = "plain"
786
787# Set up some common variables.
788username = os.environ.get("USER", "root")
789homedir = os.environ.get("HOME", "/root")
790gid = os.getgid()
791uid = os.getuid()
792
793# Use well-known constants if user is root
794if username == "root":
795    homedir = "/root"
796    gid = 0
797    uid = 0
798
799# Special flags if setting up a deb mirror.
800mirror = ""
801if "ubuntu" in distro and ubuntu_mirror:
802    mirror = f"""
803RUN echo "deb {ubuntu_mirror} \
804        $(. /etc/os-release && echo $VERSION_CODENAME) \
805        main restricted universe multiverse" > /etc/apt/sources.list && \\
806    echo "deb {ubuntu_mirror} \
807        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
808            main restricted universe multiverse" >> /etc/apt/sources.list && \\
809    echo "deb {ubuntu_mirror} \
810        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
811            main restricted universe multiverse" >> /etc/apt/sources.list && \\
812    echo "deb {ubuntu_mirror} \
813        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
814            main restricted universe multiverse" >> /etc/apt/sources.list && \\
815    echo "deb {ubuntu_mirror} \
816        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
817            main restricted universe multiverse" >> /etc/apt/sources.list
818"""
819
820# Special flags for proxying.
821proxy_cmd = ""
822proxy_keyserver = ""
823proxy_args = []
824if http_proxy:
825    proxy_cmd = f"""
826RUN echo "[http]" >> {homedir}/.gitconfig && \
827    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
828COPY <<EOF_WGETRC {homedir}/.wgetrc
829https_proxy = {http_proxy}
830http_proxy = {http_proxy}
831use_proxy = on
832EOF_WGETRC
833"""
834    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"
835
836    proxy_args.extend(
837        [
838            "--build-arg",
839            f"http_proxy={http_proxy}",
840            "--build-arg",
841            f"https_proxy={http_proxy}",
842        ]
843    )
844
845# Create base Dockerfile.
846dockerfile_base = f"""
847FROM {docker_reg}/{distro}
848
849{mirror}
850
851ENV DEBIAN_FRONTEND noninteractive
852
853ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"
854
855# Sometimes the ubuntu key expires and we need a way to force an execution
856# of the apt-get commands for the dbgsym-keyring.  When this happens we see
857# an error like: "Release: The following signatures were invalid:"
858# Insert a bogus echo that we can change here when we get this error to force
859# the update.
860RUN echo "ubuntu keyserver rev as of 2025-11-17"
861
862# We need the keys to be imported for dbgsym repos
863# New releases have a package, older ones fall back to manual fetching
864# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
865# Known issue with gpg to get keys via proxy -
866# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
867# curl to get keys.
868RUN apt-get update && apt-get dist-upgrade -yy && \
869    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
870        ( apt-get install -yy dirmngr curl && \
871          curl -sSL \
872          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
873          | apt-key add - ))
874
875# Parse the current repo list into a debug repo list
876RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
877        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list
878
879# Remove non-existent debug repos
880RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list
881
882RUN cat /etc/apt/sources.list.d/debug.list
883
884RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
885    abi-compliance-checker \
886    abi-dumper \
887    autoconf \
888    autoconf-archive \
889    bison \
890    cmake \
891    curl \
892    dbus \
893    device-tree-compiler \
894    doxygen \
895    flex \
896    git \
897    glib-2.0 \
898    gnupg \
899    iproute2 \
900    iputils-ping \
901    libaudit-dev \
902    libc6-dbg \
903    libc6-dev \
904    libcjson-dev \
905    libconfig++-dev \
906    libcryptsetup-dev \
907    libcurl4-openssl-dev \
908    libdbus-1-dev \
909    libevdev-dev \
910    libi2c-dev \
911    libjpeg-dev \
912    libjson-perl \
913    libldap2-dev \
914    libmimetic-dev \
915    libmpfr-dev \
916    libnl-3-dev \
917    libnl-genl-3-dev \
918    libpam0g-dev \
919    libpciaccess-dev \
920    libperlio-gzip-perl \
921    libpng-dev \
922    libprotobuf-dev \
923    libsnmp-dev \
924    libssl-dev \
925    libsystemd-dev \
926    libtool \
927    liburing-dev \
928    libxml2-utils \
929    libxml-simple-perl \
930    lsb-release \
931    ninja-build \
932    npm \
933    pkg-config \
934    protobuf-compiler \
935    python3 \
936    python3-dev\
937    python3-git \
938    python3-mako \
939    python3-pip \
940    python3-protobuf \
941    python3-setuptools \
942    python3-socks \
943    python3-yaml \
944    rsync \
945    shellcheck \
946    socat \
947    software-properties-common \
948    sudo \
949    systemd \
950    systemd-dev \
951    valgrind \
952    vim \
953    wget \
954    xxd
955
956# Add the ubuntu-toolchain-r repository for later versions of GCC and install.
957RUN add-apt-repository ppa:ubuntu-toolchain-r/ppa && \
958    apt-get update && \
959    apt-get install -y \
960        gcc-15 \
961        g++-15 \
962        libstdc++-15-dev
963
964RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-15 15 \
965  --slave /usr/bin/g++ g++ /usr/bin/g++-15 \
966  --slave /usr/bin/gcov gcov /usr/bin/gcov-15 \
967  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-15 \
968  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-15
969RUN update-alternatives --remove cpp /usr/bin/cpp && \
970    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-15 15
971
972# Set up LLVM apt repository.
973RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" -- 21 -m https://apt.llvm.org
974
975# Install extra clang tools
976RUN apt-get install -y \
977        clang-21 \
978        clang-format-21 \
979        clang-tidy-21 \
980        lld-21
981
982RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-21 1000 \
983  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-21 \
984  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-21 \
985  --slave /usr/bin/clang-apply-replacements clang-apply-replacements \
986        /usr/bin/clang-apply-replacements-21 \
987  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-21 \
988  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
989        /usr/bin/run-clang-tidy-21 \
990  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-21 \
991  --slave /usr/bin/lld lld /usr/bin/lld-21
992
993"""
994
995if is_automated_ci_build:
996    dockerfile_base += f"""
997# Run an arbitrary command to pollute the docker cache regularly force us
998# to re-run `apt-get update` daily.
999RUN echo {Docker.timestamp()}
1000RUN apt-get update && apt-get dist-upgrade -yy
1001
1002"""
1003
1004dockerfile_base += """
1005RUN pip3 install --break-system-packages \
1006        beautysh==6.2.1 \
1007        black \
1008        codespell \
1009        flake8 \
1010        gcovr \
1011        gitlint \
1012        inflection \
1013        isoduration \
1014        isort \
1015        jsonschema \
1016        meson==1.9.0 \
1017        referencing \
1018        requests
1019
1020ENV NODE_PATH="/usr/local/lib/node_modules"
1021RUN npm install -g \
1022        eslint@latest eslint-plugin-json@latest \
1023        markdownlint-cli@latest \
1024        prettier@latest
1025"""
1026
1027# Build the base and stage docker images.
1028docker_base_img_name = Docker.tagname("base", dockerfile_base)
1029Docker.build("base", docker_base_img_name, dockerfile_base)
1030Package.generate_all()
1031
1032# Create the final Dockerfile.
1033dockerfile = f"""
1034# Build the final output image
1035FROM {docker_base_img_name}
1036{Package.df_all_copycmds()}
1037
1038# Some of our infrastructure still relies on the presence of this file
1039# even though it is no longer needed to rebuild the docker environment
1040# NOTE: The file is sorted to ensure the ordering is stable.
1041RUN echo '{Package.depcache()}' > /tmp/depcache
1042
1043# Ensure the group, user, and home directory are created (or rename them if
1044# they already exist).
1045RUN if grep -q ":{gid}:" /etc/group ; then \
1046        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
1047    else \
1048        groupadd -f -g {gid} {username} ; \
1049    fi
1050RUN mkdir -p "{os.path.dirname(homedir)}"
1051RUN if grep -q ":{uid}:" /etc/passwd ; then \
1052        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
1053    else \
1054        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
1055    fi
1056RUN sed -i '1iDefaults umask=000' /etc/sudoers
1057RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers
1058
1059# Ensure user has ability to write to /usr/local for different tool
1060# and data installs
1061RUN chown -R {username}:{username} /usr/local/share
1062
1063# Update library cache
1064RUN ldconfig
1065
1066{proxy_cmd}
1067
1068RUN /bin/bash
1069"""
1070
1071# Do the final docker build
1072docker_final_img_name = Docker.tagname(None, dockerfile)
1073Docker.build("final", docker_final_img_name, dockerfile)
1074
1075# Print the tag of the final image.
1076print(docker_final_img_name)
1077