#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if input branch not
#                     provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
#                     instead of our default (public.ecr.aws/ubuntu)
#                     (ex. docker.io)>
#   http_proxy        The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set
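#
# Example invocation (assumed typical usage; the script prints the final
# image tag on stdout):
#   DISTRO=ubuntu:noble FORCE_DOCKER_BUILD=1 ./scripts/build-unit-test-docker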

import json
import os
import platform
import re
import sys
import threading
import urllib.error
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import git, nproc  # type: ignore

try:
    # The system may have docker or it may have podman; try docker first.
    from sh import docker

    container = docker
except ImportError:
    try:
        from sh import podman

        container = podman
    except Exception:
        print("Neither docker nor podman found on system", file=sys.stderr)
        exit(1)

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()

class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, autogen, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.

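# ex. (illustrative) a minimal entry; 'rev' may be omitted, in which case it
# is resolved from the branch HEAD at build time:
#   "org/pkg": PackageDef(rev="v1.0", build_type="meson")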
# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.88.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}-cmake.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=atomic,context,coroutine,filesystem,process,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "DMTF/libspdm": PackageDef(
        rev="3.7.0",
        url=lambda pkg, rev: f"https://github.com/DMTF/libspdm/archive/{rev}.tar.gz",
        build_type="cmake",
        config_flags=[
            # Map the host architecture onto the ARCH names libspdm
            # expects, defaulting to x64.
            "-DARCH="
            + {
                "x86_64": "x64",
                "i586": "ia32",
                "i686": "ia32",
                "arm": "arm",
                "aarch64": "aarch64",
                "arm64": "aarch64",
                "riscv32": "riscv32",
                "riscv64": "riscv64",
                "ppc64le": "ppc64le",
            }.get(platform.machine(), "x64"),
            "-DTOOLCHAIN=GCC",
            "-DTARGET=Release",
            "-DCRYPTO=openssl",
            "-DBUILD_LINUX_SHARED_LIB=ON",
            "-DENABLE_BINARY_BUILD=1",
            "-DDISABLE_TESTS=1",
            f"-DCOMPILED_LIBCRYPTO_PATH={prefix}/lib",
            f"-DCOMPILED_LIBSSL_PATH={prefix}/lib",
        ],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="11.2.0",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.16.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.65.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.12.0",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.18-20240915",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="11.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.5",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "libgpiod": PackageDef(
        rev="1.6.5",
        url=(
            lambda pkg, rev: f"https://git.kernel.org/pub/scm/libs/{pkg}/{pkg}.git/snapshot/{pkg}-{rev}.tar.gz"
        ),
        build_type="autogen",
        config_flags=["--enable-bindings-cxx"],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Dtests=false",
            "-Dabi-compliance-check=false",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
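
# Most packages above omit 'url' and are fetched via Package._url()'s GitHub
# fallback, ex. (with a hypothetical rev abc123):
#   https://github.com/openbmc/sdbusplus/archive/abc123.tar.gz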

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
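
# With the defaults above these expand to, for example:
#   meson_flags == "--wrap-mode=nodownload -Dprefix=/usr/local"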


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which another thread may be modifying.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""
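
        # ex. (illustrative) a generated stage looks roughly like:
        #   FROM openbmc/ubuntu-unit-test-base:<week>-<hash>
        #   COPY --from=<dep-tag> /usr/local /usr/local
        #   RUN true
        #   RUN curl -L <url> | tar -xz && cd <pkg>* && ...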

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """
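        # ex. (illustrative) "CLIUtils/CLI11:v2.3.2,DMTF/libspdm:3.7.0,..."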

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _check_gerrit_topic(self) -> bool:
        """Use a commit from the Gerrit topic for this package, if one exists."""
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
            # Gerrit prepends an XSSI-protection line to JSON responses, so
            # parse only the last line of the body.
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has more than 1 commit ({len(commits)}) under {gerrit_topic}; using latest upstream",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the HEAD revision if a static rev is missing."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fall back to master/main).
        #   Each ls-remote output line looks like "<sha>\trefs/heads/<name>".
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        # ex. "openbmc/sdbusplus" -> "openbmc-sdbusplus"
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""
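
        # ex. for the default GitHub archive URL this produces:
        #   curl -L https://github.com/<org>/<pkg>/archive/<rev>.tar.gz | tar -xz
        # (the 'z'/'j' flag is appended below based on the URL suffix)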

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "autogen":
            result += self._cmd_build_autogen()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_autogen(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} ./autogen.sh {configure_flags} {options} && "
        result += "make && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
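        # ex. a date in ISO week 3 of 2024 yields "2024-W03".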
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
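        # ex. (illustrative)
        #   openbmc/ubuntu-unit-test-base:2024-W03-0123456789abcdef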
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tag it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if container.image.ls(
                tag, "--format", '"{{.Repository}}:{{.Tag}}"'
            ):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        container.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:plucky")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues.
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root.
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
COPY <<EOF_WGETRC {homedir}/.wgetrc
https_proxy = {http_proxy}
http_proxy = {http_proxy}
use_proxy = on
EOF_WGETRC
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
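
# ex. with http_proxy=http://proxy.example.com:3128 (hypothetical), the
# container build receives:
#   --build-arg http_proxy=http://proxy.example.com:3128
#   --build-arg https_proxy=http://proxy.example.com:3128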

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_reg}/{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2025-06-25"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    doxygen \
    flex \
    g++-15 \
    gcc-15 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libcurl4-openssl-dev \
    libdbus-1-dev \
    libevdev-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libmpfr-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    systemd-dev \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-15 15 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-15 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-15 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-15 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-15
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-15 15

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" -- 20

# Install extra clang tools
RUN apt-get install -y \
        clang-20 \
        clang-format-20 \
        clang-tidy-20 \
        lld-20

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-20 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-20 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-20 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-20 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-20 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-20 \
  --slave /usr/bin/lld lld /usr/bin/lld-20

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to regularly pollute the docker cache and force
# us to re-run `apt-get update` (Docker.timestamp() changes each ISO week).
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isoduration \
        isort \
        jsonschema \
        meson==1.8.2 \
        referencing \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment.
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)