#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-empty value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if the input
#                     branch is not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
#                     instead of our default (public.ecr.aws/ubuntu)
#                     (ex. docker.io)>
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set.

import json
import os
import platform
import re
import sys
import threading
import urllib.error
import urllib.parse
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import git, nproc  # type: ignore

try:
    # System may have docker or it may have podman; try docker first.
    from sh import docker

    container = docker
except ImportError:
    try:
        from sh import podman

        container = podman
    except Exception:
        print("No docker or podman found on system", file=sys.stderr)
        sys.exit(1)

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, autogen, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.

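# A minimal entry needs only build_type; everything else is optional.  For
# illustration, a hypothetical package definition could look like:
#
#   "example/project": PackageDef(
#       rev="v1.0.0",
#       build_type="meson",
#       config_flags=["-Dtests=disabled"],
#   ),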

# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.88.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}-cmake.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=atomic,context,coroutine,filesystem,process,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "DMTF/libspdm": PackageDef(
        rev="3.7.0",
        url=lambda pkg, rev: f"https://github.com/DMTF/libspdm/archive/{rev}.tar.gz",
        build_type="cmake",
        config_flags=[
            # Map the host architecture onto libspdm's -DARCH values,
            # defaulting to x64 for anything unrecognized.
            "-DARCH="
            + {
                "x86_64": "x64",
                "i586": "ia32",
                "i686": "ia32",
                "arm": "arm",
                "aarch64": "aarch64",
                "arm64": "aarch64",
                "riscv32": "riscv32",
                "riscv64": "riscv64",
                "ppc64le": "ppc64le",
            }.get(platform.machine(), "x64"),
            "-DTOOLCHAIN=GCC",
            "-DTARGET=Release",
            "-DCRYPTO=openssl",
            "-DBUILD_LINUX_SHARED_LIB=ON",
            "-DENABLE_BINARY_BUILD=1",
            "-DDISABLE_TESTS=1",
            f"-DCOMPILED_LIBCRYPTO_PATH={prefix}/lib",
            f"-DCOMPILED_LIBSSL_PATH={prefix}/lib",
        ],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="11.2.0",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.16.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.65.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.12.0",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.18-20240915",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="11.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.5",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "libgpiod": PackageDef(
        rev="1.6.5",
        url=(
            lambda pkg, rev: f"https://git.kernel.org/pub/scm/libs/{pkg}/{pkg}.git/snapshot/{pkg}-{rev}.tar.gz"
        ),
        build_type="autogen",
        config_flags=["--enable-bindings-cxx"],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Dtests=false",
            "-Dabi-compliance-check=false",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            "python3 -m pip install --break-system-packages --root-user-action ignore .",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from GitHub.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache' file.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

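        # An illustrative result (sorted, so capitalized package names sort
        # ahead of lowercase ones):
        #   "CLIUtils/CLI11:v2.3.2,DMTF/libspdm:3.7.0,...,boost:1.88.0,"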
        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        # URL escape any spaces.  Gerrit uses pluses.
        gerrit_topic_escape = urllib.parse.quote_plus(gerrit_topic)

        try:
            # Gerrit prefixes its JSON responses with a ")]}'" line to
            # prevent XSSI; taking the last line skips past it.
            commits = json.loads(
                urllib.request.urlopen(
                    f'https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:"{gerrit_topic_escape}"'
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has more than 1 commit ({len(commits)}) under {gerrit_topic}; using latest upstream",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the HEAD revision for packages missing a static rev."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask GitHub for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

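        # Each line of 'git ls-remote --heads' output has the form:
        #   <40-hex-sha>\trefs/heads/<branch-name>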
        # Find the branch matching {branch} (or fall back to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
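        # e.g. "openbmc/sdbusplus" -> "openbmc-sdbusplus"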
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the GitHub archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

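        # e.g. for "boost" this evaluates to roughly:
        #   curl -L https://github.com/boostorg/boost/.../boost-1.88.0-cmake.tar.gz | tar -xz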
        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
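        # e.g. "openbmc/sdbusplus" -> "cd sdbusplus*"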
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the Dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

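        # For each package this emits two lines, roughly (tag abbreviated):
        #   COPY --from=openbmc/ubuntu-unit-test-boost:<week>-<hash> /usr/local /usr/local
        #   RUN true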
        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "autogen":
            result += self._cmd_build_autogen()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_autogen(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} ./autogen.sh {configure_flags} {options} && "
        result += "make && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
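        # Composes, e.g. for "openbmc/gpioplus", roughly:
        #   meson setup builddir --wrap-mode=nodownload -Dprefix=/usr/local
        #       -Dexamples=false -Dtests=disabled
        #   && ninja -C builddir && ninja -C builddir install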
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
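        # e.g. 2025-06-25 falls in ISO week 26, giving "2025-W26".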
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
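        # e.g. tagname("base", df) gives something shaped like
        #   "openbmc/ubuntu-unit-test-base:2025-W26-<16-hex-digits>"
        # (values here are illustrative).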
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if container.image.ls(
                tag, "--format", '"{{.Repository}}:{{.Tag}}"'
            ):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        container.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:plucky")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
COPY <<EOF_WGETRC {homedir}/.wgetrc
https_proxy = {http_proxy}
http_proxy = {http_proxy}
use_proxy = on
EOF_WGETRC
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
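
# e.g. a hypothetical http_proxy=http://proxy.example.com:3128 would add
# "--build-arg http_proxy=http://proxy.example.com:3128" (and the matching
# https_proxy argument) to every container build below.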

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_reg}/{distro}

{mirror}

ENV DEBIAN_FRONTEND=noninteractive

ENV PYTHONPATH="/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2025-06-25"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    doxygen \
    flex \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libcurl4-openssl-dev \
    libdbus-1-dev \
    libevdev-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libmpfr-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    systemd-dev \
    valgrind \
    vim \
    wget \
    xxd

# Add the ubuntu-toolchain-r repository for later versions of GCC and install.
RUN add-apt-repository ppa:ubuntu-toolchain-r/ppa && \
    apt-get update && \
    apt-get install -y \
        gcc-15 \
        g++-15 \
        libstdc++-15-dev

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-15 15 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-15 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-15 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-15 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-15
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-15 15

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" -- 21 -m https://apt.llvm.org

# Install extra clang tools
RUN apt-get install -y \
        clang-21 \
        clang-format-21 \
        clang-tidy-21 \
        lld-21

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-21 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-21 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-21 \
  --slave /usr/bin/clang-apply-replacements clang-apply-replacements \
        /usr/bin/clang-apply-replacements-21 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-21 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-21 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-21 \
  --slave /usr/bin/lld lld /usr/bin/lld-21

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly and force
# us to re-run `apt-get update` each week (the timestamp is per ISO week).
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isoduration \
        isort \
        jsonschema \
        meson==1.9.0 \
        referencing \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)

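# Callers typically capture the tag from stdout, e.g. (illustrative):
#   IMG=$(./scripts/build-unit-test-docker)
#   docker run --rm -it ${IMG} bash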