xref: /openbmc/openbmc-build-scripts/scripts/build-unit-test-docker (revision c7e719f9b58e2e27a13586b3ef33b14fa5e2941d)
1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
6#   DOCKER_IMG_NAME:  <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#   FORCE_DOCKER_BUILD: <optional, a non-zero value with force all Docker
10#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
21#                     instead of our default (public.ecr.aws/ubuntu)
22#                     (ex. docker.io)
23#   http_proxy        The HTTP address of the proxy server to connect to.
24#                     Default: "", proxy is not setup if this is not set
25
26import json
27import os
28import re
29import sys
30import threading
31import urllib.request
32from datetime import date
33from hashlib import sha256
34
35# typing.Dict is used for type-hints.
36from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401
37
38from sh import git, nproc  # type: ignore
39
40try:
41    # System may have docker or it may have podman, try docker first
42    from sh import docker
43
44    container = docker
45except ImportError:
46    try:
47        from sh import podman
48
49        container = podman
50    except Exception:
51        print("No docker or podman found on system")
52        exit(1)
53
54try:
55    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
56    from typing import TypedDict
57except Exception:
58
59    class TypedDict(dict):  # type: ignore
60        # We need to do this to eat the 'total' argument.
61        def __init_subclass__(cls, **kwargs: Any) -> None:
62            super().__init_subclass__()
63
64
65# Declare some variables used in package definitions.
66prefix = "/usr/local"
67proc_count = nproc().strip()
68
69
70class PackageDef(TypedDict, total=False):
71    """Package Definition for packages dictionary."""
72
73    # rev [optional]: Revision of package to use.
74    rev: str
75    # url [optional]: lambda function to create URL: (package, rev) -> url.
76    url: Callable[[str, str], str]
77    # depends [optional]: List of package dependencies.
78    depends: Iterable[str]
79    # build_type [required]: Build type used for package.
80    #   Currently supported: autoconf, cmake, custom, make, meson
81    build_type: str
82    # build_steps [optional]: Steps to run for 'custom' build_type.
83    build_steps: Iterable[str]
84    # config_flags [optional]: List of options to pass configuration tool.
85    config_flags: Iterable[str]
86    # config_env [optional]: List of environment variables to set for config.
87    config_env: Iterable[str]
88    # custom_post_dl [optional]: List of steps to run after download, but
89    #   before config / build / install.
90    custom_post_dl: Iterable[str]
91    # custom_post_install [optional]: List of steps to run after install.
92    custom_post_install: Iterable[str]
93
94    # __tag [private]: Generated Docker tag name for package stage.
95    __tag: str
96    # __package [private]: Package object associated with this package.
97    __package: Any  # Type is Package, but not defined yet.
98
99
100# Packages to include in image.
101packages = {
102    "boost": PackageDef(
103        rev="1.88.0",
104        url=(
105            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}-cmake.tar.gz"
106        ),
107        build_type="custom",
108        build_steps=[
109            (
110                "./bootstrap.sh"
111                f" --prefix={prefix} --with-libraries=atomic,context,coroutine,filesystem,process,url"
112            ),
113            "./b2",
114            f"./b2 install --prefix={prefix} valgrind=on",
115        ],
116    ),
117    "USCiLab/cereal": PackageDef(
118        rev="v1.3.2",
119        build_type="custom",
120        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
121    ),
122    "danmar/cppcheck": PackageDef(
123        rev="2.12.1",
124        build_type="cmake",
125    ),
126    "CLIUtils/CLI11": PackageDef(
127        rev="v2.3.2",
128        build_type="cmake",
129        config_flags=[
130            "-DBUILD_TESTING=OFF",
131            "-DCLI11_BUILD_DOCS=OFF",
132            "-DCLI11_BUILD_EXAMPLES=OFF",
133        ],
134    ),
135    "fmtlib/fmt": PackageDef(
136        rev="11.2.0",
137        build_type="cmake",
138        config_flags=[
139            "-DFMT_DOC=OFF",
140            "-DFMT_TEST=OFF",
141        ],
142    ),
143    "Naios/function2": PackageDef(
144        rev="4.2.4",
145        build_type="custom",
146        build_steps=[
147            f"mkdir {prefix}/include/function2",
148            f"cp include/function2/function2.hpp {prefix}/include/function2/",
149        ],
150    ),
151    "google/googletest": PackageDef(
152        rev="v1.16.0",
153        build_type="cmake",
154        config_env=["CXXFLAGS=-std=c++20"],
155        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
156    ),
157    "nghttp2/nghttp2": PackageDef(
158        rev="v1.65.0",
159        build_type="cmake",
160        config_env=["CXXFLAGS=-std=c++20"],
161        config_flags=[
162            "-DENABLE_LIB_ONLY=ON",
163            "-DENABLE_STATIC_LIB=ON",
164        ],
165    ),
166    "nlohmann/json": PackageDef(
167        rev="v3.12.0",
168        build_type="cmake",
169        config_flags=["-DJSON_BuildTests=OFF"],
170        custom_post_install=[
171            (
172                f"ln -s {prefix}/include/nlohmann/json.hpp"
173                f" {prefix}/include/json.hpp"
174            ),
175        ],
176    ),
177    "json-c/json-c": PackageDef(
178        rev="json-c-0.18-20240915",
179        build_type="cmake",
180    ),
181    "LibVNC/libvncserver": PackageDef(
182        rev="LibVNCServer-0.9.14",
183        build_type="cmake",
184    ),
185    "leethomason/tinyxml2": PackageDef(
186        rev="11.0.0",
187        build_type="cmake",
188    ),
189    "tristanpenman/valijson": PackageDef(
190        rev="v1.0.5",
191        build_type="cmake",
192        config_flags=[
193            "-Dvalijson_BUILD_TESTS=0",
194            "-Dvalijson_INSTALL_HEADERS=1",
195        ],
196    ),
197    "libgpiod": PackageDef(
198        rev="1.6.5",
199        url=(
200            lambda pkg, rev: f"https://git.kernel.org/pub/scm/libs/{pkg}/{pkg}.git/snapshot/{pkg}-{rev}.tar.gz"
201        ),
202        build_type="autogen",
203        config_flags=["--enable-bindings-cxx"],
204    ),
205    "open-power/pdbg": PackageDef(build_type="autoconf"),
206    "openbmc/gpioplus": PackageDef(
207        build_type="meson",
208        config_flags=[
209            "-Dexamples=false",
210            "-Dtests=disabled",
211        ],
212    ),
213    "openbmc/phosphor-dbus-interfaces": PackageDef(
214        depends=["openbmc/sdbusplus"],
215        build_type="meson",
216        config_flags=["-Dgenerate_md=false"],
217    ),
218    "openbmc/phosphor-logging": PackageDef(
219        depends=[
220            "USCiLab/cereal",
221            "openbmc/phosphor-dbus-interfaces",
222            "openbmc/sdbusplus",
223            "openbmc/sdeventplus",
224        ],
225        build_type="meson",
226        config_flags=[
227            "-Dlibonly=true",
228            "-Dtests=disabled",
229        ],
230    ),
231    "openbmc/phosphor-objmgr": PackageDef(
232        depends=[
233            "CLIUtils/CLI11",
234            "boost",
235            "leethomason/tinyxml2",
236            "openbmc/phosphor-dbus-interfaces",
237            "openbmc/phosphor-logging",
238            "openbmc/sdbusplus",
239        ],
240        build_type="meson",
241        config_flags=[
242            "-Dtests=disabled",
243        ],
244    ),
245    "openbmc/libpeci": PackageDef(
246        build_type="meson",
247        config_flags=[
248            "-Draw-peci=disabled",
249        ],
250    ),
251    "openbmc/libpldm": PackageDef(
252        build_type="meson",
253        config_flags=[
254            "-Dabi=deprecated,stable",
255            "-Dtests=false",
256            "-Dabi-compliance-check=false",
257        ],
258    ),
259    "openbmc/sdbusplus": PackageDef(
260        depends=[
261            "nlohmann/json",
262        ],
263        build_type="meson",
264        custom_post_dl=[
265            "cd tools",
266            f"./setup.py install --root=/ --prefix={prefix}",
267            "cd ..",
268        ],
269        config_flags=[
270            "-Dexamples=disabled",
271            "-Dtests=disabled",
272        ],
273    ),
274    "openbmc/sdeventplus": PackageDef(
275        depends=[
276            "openbmc/stdplus",
277        ],
278        build_type="meson",
279        config_flags=[
280            "-Dexamples=false",
281            "-Dtests=disabled",
282        ],
283    ),
284    "openbmc/stdplus": PackageDef(
285        depends=[
286            "fmtlib/fmt",
287            "google/googletest",
288            "Naios/function2",
289        ],
290        build_type="meson",
291        config_flags=[
292            "-Dexamples=false",
293            "-Dtests=disabled",
294            "-Dgtest=enabled",
295        ],
296    ),
297}  # type: Dict[str, PackageDef]
298
299# Define common flags used for builds
300configure_flags = " ".join(
301    [
302        f"--prefix={prefix}",
303    ]
304)
305cmake_flags = " ".join(
306    [
307        "-DBUILD_SHARED_LIBS=ON",
308        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
309        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
310        "-GNinja",
311        "-DCMAKE_MAKE_PROGRAM=ninja",
312    ]
313)
314meson_flags = " ".join(
315    [
316        "--wrap-mode=nodownload",
317        f"-Dprefix={prefix}",
318    ]
319)
320
321
322class Package(threading.Thread):
323    """Class used to build the Docker stages for each package.
324
325    Generally, this class should not be instantiated directly but through
326    Package.generate_all().
327    """
328
329    # Copy the packages dictionary.
330    packages = packages.copy()
331
332    # Lock used for thread-safety.
333    lock = threading.Lock()
334
335    def __init__(self, pkg: str):
336        """pkg - The name of this package (ex. foo/bar )"""
337        super(Package, self).__init__()
338
339        self.package = pkg
340        self.exception = None  # type: Optional[Exception]
341
342        # Reference to this package's
343        self.pkg_def = Package.packages[pkg]
344        self.pkg_def["__package"] = self
345
346    def run(self) -> None:
347        """Thread 'run' function.  Builds the Docker stage."""
348
349        # In case this package has no rev, fetch it from Github.
350        self._update_rev()
351
352        # Find all the Package objects that this package depends on.
353        #   This section is locked because we are looking into another
354        #   package's PackageDef dict, which could be being modified.
355        Package.lock.acquire()
356        deps: Iterable[Package] = [
357            Package.packages[deppkg]["__package"]
358            for deppkg in self.pkg_def.get("depends", [])
359        ]
360        Package.lock.release()
361
362        # Wait until all the depends finish building.  We need them complete
363        # for the "COPY" commands.
364        for deppkg in deps:
365            deppkg.join()
366
367        # Generate this package's Dockerfile.
368        dockerfile = f"""
369FROM {docker_base_img_name}
370{self._df_copycmds()}
371{self._df_build()}
372"""
373
374        # Generate the resulting tag name and save it to the PackageDef.
375        #   This section is locked because we are modifying the PackageDef,
376        #   which can be accessed by other threads.
377        Package.lock.acquire()
378        tag = Docker.tagname(self._stagename(), dockerfile)
379        self.pkg_def["__tag"] = tag
380        Package.lock.release()
381
382        # Do the build / save any exceptions.
383        try:
384            Docker.build(self.package, tag, dockerfile)
385        except Exception as e:
386            self.exception = e
387
388    @classmethod
389    def generate_all(cls) -> None:
390        """Ensure a Docker stage is created for all defined packages.
391
392        These are done in parallel but with appropriate blocking per
393        package 'depends' specifications.
394        """
395
396        # Create a Package for each defined package.
397        pkg_threads = [Package(p) for p in cls.packages.keys()]
398
399        # Start building them all.
400        #   This section is locked because threads depend on each other,
401        #   based on the packages, and they cannot 'join' on a thread
402        #   which is not yet started.  Adding a lock here allows all the
403        #   threads to start before they 'join' their dependencies.
404        Package.lock.acquire()
405        for t in pkg_threads:
406            t.start()
407        Package.lock.release()
408
409        # Wait for completion.
410        for t in pkg_threads:
411            t.join()
412            # Check if the thread saved off its own exception.
413            if t.exception:
414                print(f"Package {t.package} failed!", file=sys.stderr)
415                raise t.exception
416
417    @staticmethod
418    def df_all_copycmds() -> str:
419        """Formulate the Dockerfile snippet necessary to copy all packages
420        into the final image.
421        """
422        return Package.df_copycmds_set(Package.packages.keys())
423
424    @classmethod
425    def depcache(cls) -> str:
426        """Create the contents of the '/tmp/depcache'.
427        This file is a comma-separated list of "<pkg>:<rev>".
428        """
429
430        # This needs to be sorted for consistency.
431        depcache = ""
432        for pkg in sorted(cls.packages.keys()):
433            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
434        return depcache
435
436    def _check_gerrit_topic(self) -> bool:
437        if not gerrit_topic:
438            return False
439        if not self.package.startswith("openbmc/"):
440            return False
441        if gerrit_project == self.package and gerrit_rev:
442            return False
443
444        try:
445            commits = json.loads(
446                urllib.request.urlopen(
447                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
448                )
449                .read()
450                .splitlines()[-1]
451            )
452
453            if len(commits) == 0:
454                return False
455            if len(commits) > 1:
456                print(
457                    f"{self.package} has more than 1 commit under {gerrit_topic}; using lastest upstream: {len(commits)}",
458                    file=sys.stderr,
459                )
460                return False
461
462            change_id = commits[0]["id"]
463
464            commit = json.loads(
465                urllib.request.urlopen(
466                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
467                )
468                .read()
469                .splitlines()[-1]
470            )["commit"]
471
472            print(
473                f"Using {commit} from {gerrit_topic} for {self.package}",
474                file=sys.stderr,
475            )
476            self.pkg_def["rev"] = commit
477            return True
478
479        except urllib.error.HTTPError as e:
480            print(
481                f"Error loading topic {gerrit_topic} for {self.package}: ",
482                e,
483                file=sys.stderr,
484            )
485            return False
486
487    def _update_rev(self) -> None:
488        """Look up the HEAD for missing a static rev."""
489
490        if "rev" in self.pkg_def:
491            return
492
493        if self._check_gerrit_topic():
494            return
495
496        # Check if Jenkins/Gerrit gave us a revision and use it.
497        if gerrit_project == self.package and gerrit_rev:
498            print(
499                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
500                file=sys.stderr,
501            )
502            self.pkg_def["rev"] = gerrit_rev
503            return
504
505        # Ask Github for all the branches.
506        lookup = git(
507            "ls-remote", "--heads", f"https://github.com/{self.package}"
508        )
509
510        # Find the branch matching {branch} (or fallback to master).
511        #   This section is locked because we are modifying the PackageDef.
512        Package.lock.acquire()
513        for line in lookup.split("\n"):
514            if re.fullmatch(f".*{branch}$", line.strip()):
515                self.pkg_def["rev"] = line.split()[0]
516                break
517            elif (
518                "refs/heads/master" in line or "refs/heads/main" in line
519            ) and "rev" not in self.pkg_def:
520                self.pkg_def["rev"] = line.split()[0]
521        Package.lock.release()
522
523    def _stagename(self) -> str:
524        """Create a name for the Docker stage associated with this pkg."""
525        return self.package.replace("/", "-").lower()
526
527    def _url(self) -> str:
528        """Get the URL for this package."""
529        rev = self.pkg_def["rev"]
530
531        # If the lambda exists, call it.
532        if "url" in self.pkg_def:
533            return self.pkg_def["url"](self.package, rev)
534
535        # Default to the github archive URL.
536        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"
537
538    def _cmd_download(self) -> str:
539        """Formulate the command necessary to download and unpack to source."""
540
541        url = self._url()
542        if ".tar." not in url:
543            raise NotImplementedError(
544                f"Unhandled download type for {self.package}: {url}"
545            )
546
547        cmd = f"curl -L {url} | tar -x"
548
549        if url.endswith(".bz2"):
550            cmd += "j"
551        elif url.endswith(".gz"):
552            cmd += "z"
553        else:
554            raise NotImplementedError(
555                f"Unknown tar flags needed for {self.package}: {url}"
556            )
557
558        return cmd
559
560    def _cmd_cd_srcdir(self) -> str:
561        """Formulate the command necessary to 'cd' into the source dir."""
562        return f"cd {self.package.split('/')[-1]}*"
563
564    def _df_copycmds(self) -> str:
565        """Formulate the dockerfile snippet necessary to COPY all depends."""
566
567        if "depends" not in self.pkg_def:
568            return ""
569        return Package.df_copycmds_set(self.pkg_def["depends"])
570
571    @staticmethod
572    def df_copycmds_set(pkgs: Iterable[str]) -> str:
573        """Formulate the Dockerfile snippet necessary to COPY a set of
574        packages into a Docker stage.
575        """
576
577        copy_cmds = ""
578
579        # Sort the packages for consistency.
580        for p in sorted(pkgs):
581            tag = Package.packages[p]["__tag"]
582            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
583            # Workaround for upstream docker bug and multiple COPY cmds
584            # https://github.com/moby/moby/issues/37965
585            copy_cmds += "RUN true\n"
586
587        return copy_cmds
588
589    def _df_build(self) -> str:
590        """Formulate the Dockerfile snippet necessary to download, build, and
591        install a package into a Docker stage.
592        """
593
594        # Download and extract source.
595        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "
596
597        # Handle 'custom_post_dl' commands.
598        custom_post_dl = self.pkg_def.get("custom_post_dl")
599        if custom_post_dl:
600            result += " && ".join(custom_post_dl) + " && "
601
602        # Build and install package based on 'build_type'.
603        build_type = self.pkg_def["build_type"]
604        if build_type == "autoconf":
605            result += self._cmd_build_autoconf()
606        elif build_type == "autogen":
607            result += self._cmd_build_autogen()
608        elif build_type == "cmake":
609            result += self._cmd_build_cmake()
610        elif build_type == "custom":
611            result += self._cmd_build_custom()
612        elif build_type == "make":
613            result += self._cmd_build_make()
614        elif build_type == "meson":
615            result += self._cmd_build_meson()
616        else:
617            raise NotImplementedError(
618                f"Unhandled build type for {self.package}: {build_type}"
619            )
620
621        # Handle 'custom_post_install' commands.
622        custom_post_install = self.pkg_def.get("custom_post_install")
623        if custom_post_install:
624            result += " && " + " && ".join(custom_post_install)
625
626        return result
627
628    def _cmd_build_autoconf(self) -> str:
629        options = " ".join(self.pkg_def.get("config_flags", []))
630        env = " ".join(self.pkg_def.get("config_env", []))
631        result = "./bootstrap.sh && "
632        result += f"{env} ./configure {configure_flags} {options} && "
633        result += f"make -j{proc_count} && make install"
634        return result
635
636    def _cmd_build_autogen(self) -> str:
637        options = " ".join(self.pkg_def.get("config_flags", []))
638        env = " ".join(self.pkg_def.get("config_env", []))
639        result = f"{env} ./autogen.sh {configure_flags} {options} && "
640        result += "make && make install"
641        return result
642
643    def _cmd_build_cmake(self) -> str:
644        options = " ".join(self.pkg_def.get("config_flags", []))
645        env = " ".join(self.pkg_def.get("config_env", []))
646        result = "mkdir builddir && cd builddir && "
647        result += f"{env} cmake {cmake_flags} {options} .. && "
648        result += "cmake --build . --target all && "
649        result += "cmake --build . --target install && "
650        result += "cd .."
651        return result
652
653    def _cmd_build_custom(self) -> str:
654        return " && ".join(self.pkg_def.get("build_steps", []))
655
656    def _cmd_build_make(self) -> str:
657        return f"make -j{proc_count} && make install"
658
659    def _cmd_build_meson(self) -> str:
660        options = " ".join(self.pkg_def.get("config_flags", []))
661        env = " ".join(self.pkg_def.get("config_env", []))
662        result = f"{env} meson setup builddir {meson_flags} {options} && "
663        result += "ninja -C builddir && ninja -C builddir install"
664        return result
665
666
667class Docker:
668    """Class to assist with Docker interactions.  All methods are static."""
669
670    @staticmethod
671    def timestamp() -> str:
672        """Generate a timestamp for today using the ISO week."""
673        today = date.today().isocalendar()
674        return f"{today[0]}-W{today[1]:02}"
675
676    @staticmethod
677    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
678        """Generate a tag name for a package using a hash of the Dockerfile."""
679        result = docker_image_name
680        if pkgname:
681            result += "-" + pkgname
682
683        result += ":" + Docker.timestamp()
684        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]
685
686        return result
687
688    @staticmethod
689    def build(pkg: str, tag: str, dockerfile: str) -> None:
690        """Build a docker image using the Dockerfile and tagging it with 'tag'."""
691
692        # If we're not forcing builds, check if it already exists and skip.
693        if not force_build:
694            if container.image.ls(
695                tag, "--format", '"{{.Repository}}:{{.Tag}}"'
696            ):
697                print(
698                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
699                )
700                return
701
702        # Build it.
703        #   Capture the output of the 'docker build' command and send it to
704        #   stderr (prefixed with the package name).  This allows us to see
705        #   progress but not pollute stdout.  Later on we output the final
706        #   docker tag to stdout and we want to keep that pristine.
707        #
708        #   Other unusual flags:
709        #       --no-cache: Bypass the Docker cache if 'force_build'.
710        #       --force-rm: Clean up Docker processes if they fail.
711        container.build(
712            proxy_args,
713            "--network=host",
714            "--force-rm",
715            "--no-cache=true" if force_build else "--no-cache=false",
716            "-t",
717            tag,
718            "-",
719            _in=dockerfile,
720            _out=(
721                lambda line: print(
722                    pkg + ":", line, end="", file=sys.stderr, flush=True
723                )
724            ),
725            _err_to_out=True,
726        )
727
728
729# Read a bunch of environment variables.
730docker_image_name = os.environ.get(
731    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
732)
733force_build = os.environ.get("FORCE_DOCKER_BUILD")
734is_automated_ci_build = os.environ.get("BUILD_URL", False)
735distro = os.environ.get("DISTRO", "ubuntu:plucky")
736branch = os.environ.get("BRANCH", "master")
737ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
738docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
739http_proxy = os.environ.get("http_proxy")
740
741gerrit_project = os.environ.get("GERRIT_PROJECT")
742gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
743gerrit_topic = os.environ.get("GERRIT_TOPIC")
744
745# Ensure appropriate docker build output to see progress and identify
746# any issues
747os.environ["BUILDKIT_PROGRESS"] = "plain"
748
749# Set up some common variables.
750username = os.environ.get("USER", "root")
751homedir = os.environ.get("HOME", "/root")
752gid = os.getgid()
753uid = os.getuid()
754
755# Use well-known constants if user is root
756if username == "root":
757    homedir = "/root"
758    gid = 0
759    uid = 0
760
761# Special flags if setting up a deb mirror.
762mirror = ""
763if "ubuntu" in distro and ubuntu_mirror:
764    mirror = f"""
765RUN echo "deb {ubuntu_mirror} \
766        $(. /etc/os-release && echo $VERSION_CODENAME) \
767        main restricted universe multiverse" > /etc/apt/sources.list && \\
768    echo "deb {ubuntu_mirror} \
769        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
770            main restricted universe multiverse" >> /etc/apt/sources.list && \\
771    echo "deb {ubuntu_mirror} \
772        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
773            main restricted universe multiverse" >> /etc/apt/sources.list && \\
774    echo "deb {ubuntu_mirror} \
775        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
776            main restricted universe multiverse" >> /etc/apt/sources.list && \\
777    echo "deb {ubuntu_mirror} \
778        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
779            main restricted universe multiverse" >> /etc/apt/sources.list
780"""
781
782# Special flags for proxying.
783proxy_cmd = ""
784proxy_keyserver = ""
785proxy_args = []
786if http_proxy:
787    proxy_cmd = f"""
788RUN echo "[http]" >> {homedir}/.gitconfig && \
789    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
790COPY <<EOF_WGETRC {homedir}/.wgetrc
791https_proxy = {http_proxy}
792http_proxy = {http_proxy}
793use_proxy = on
794EOF_WGETRC
795"""
796    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"
797
798    proxy_args.extend(
799        [
800            "--build-arg",
801            f"http_proxy={http_proxy}",
802            "--build-arg",
803            f"https_proxy={http_proxy}",
804        ]
805    )
806
807# Create base Dockerfile.
808dockerfile_base = f"""
809FROM {docker_reg}/{distro}
810
811{mirror}
812
813ENV DEBIAN_FRONTEND noninteractive
814
815ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"
816
817# Sometimes the ubuntu key expires and we need a way to force an execution
818# of the apt-get commands for the dbgsym-keyring.  When this happens we see
819# an error like: "Release: The following signatures were invalid:"
820# Insert a bogus echo that we can change here when we get this error to force
821# the update.
822RUN echo "ubuntu keyserver rev as of 2025-06-25"
823
824# We need the keys to be imported for dbgsym repos
825# New releases have a package, older ones fall back to manual fetching
826# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
827# Known issue with gpg to get keys via proxy -
828# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
829# curl to get keys.
830RUN apt-get update && apt-get dist-upgrade -yy && \
831    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
832        ( apt-get install -yy dirmngr curl && \
833          curl -sSL \
834          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
835          | apt-key add - ))
836
837# Parse the current repo list into a debug repo list
838RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
839        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list
840
841# Remove non-existent debug repos
842RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list
843
844RUN cat /etc/apt/sources.list.d/debug.list
845
846RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
847    abi-compliance-checker \
848    abi-dumper \
849    autoconf \
850    autoconf-archive \
851    bison \
852    cmake \
853    curl \
854    dbus \
855    device-tree-compiler \
856    doxygen \
857    flex \
858    g++-15 \
859    gcc-15 \
860    git \
861    glib-2.0 \
862    gnupg \
863    iproute2 \
864    iputils-ping \
865    libaudit-dev \
866    libc6-dbg \
867    libc6-dev \
868    libcjson-dev \
869    libconfig++-dev \
870    libcryptsetup-dev \
871    libcurl4-openssl-dev \
872    libdbus-1-dev \
873    libevdev-dev \
874    libi2c-dev \
875    libjpeg-dev \
876    libjson-perl \
877    libldap2-dev \
878    libmimetic-dev \
879    libmpfr-dev \
880    libnl-3-dev \
881    libnl-genl-3-dev \
882    libpam0g-dev \
883    libpciaccess-dev \
884    libperlio-gzip-perl \
885    libpng-dev \
886    libprotobuf-dev \
887    libsnmp-dev \
888    libssl-dev \
889    libsystemd-dev \
890    libtool \
891    liburing-dev \
892    libxml2-utils \
893    libxml-simple-perl \
894    lsb-release \
895    ninja-build \
896    npm \
897    pkg-config \
898    protobuf-compiler \
899    python3 \
900    python3-dev\
901    python3-git \
902    python3-mako \
903    python3-pip \
904    python3-protobuf \
905    python3-setuptools \
906    python3-socks \
907    python3-yaml \
908    rsync \
909    shellcheck \
910    socat \
911    software-properties-common \
912    sudo \
913    systemd \
914    systemd-dev \
915    valgrind \
916    vim \
917    wget \
918    xxd
919
920RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-15 15 \
921  --slave /usr/bin/g++ g++ /usr/bin/g++-15 \
922  --slave /usr/bin/gcov gcov /usr/bin/gcov-15 \
923  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-15 \
924  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-15
925RUN update-alternatives --remove cpp /usr/bin/cpp && \
926    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-15 15
927
928# Set up LLVM apt repository.
929RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" -- 20
930
931# Install extra clang tools
932RUN apt-get install -y \
933        clang-20 \
934        clang-format-20 \
935        clang-tidy-20 \
936        lld-20
937
938RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-20 1000 \
939  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-20 \
940  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-20 \
941  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-20 \
942  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
943        /usr/bin/run-clang-tidy-20 \
944  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-20 \
945  --slave /usr/bin/lld lld /usr/bin/lld-20
946
947"""
948
949if is_automated_ci_build:
950    dockerfile_base += f"""
951# Run an arbitrary command to pollute the docker cache regularly force us
952# to re-run `apt-get update` daily.
953RUN echo {Docker.timestamp()}
954RUN apt-get update && apt-get dist-upgrade -yy
955
956"""
957
958dockerfile_base += """
959RUN pip3 install --break-system-packages \
960        beautysh \
961        black \
962        codespell \
963        flake8 \
964        gcovr \
965        gitlint \
966        inflection \
967        isoduration \
968        isort \
969        jsonschema \
970        meson==1.8.2 \
971        referencing \
972        requests
973
974RUN npm install -g \
975        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
976        markdownlint-cli@latest \
977        prettier@latest
978"""
979
980# Build the base and stage docker images.
981docker_base_img_name = Docker.tagname("base", dockerfile_base)
982Docker.build("base", docker_base_img_name, dockerfile_base)
983Package.generate_all()
984
985# Create the final Dockerfile.
986dockerfile = f"""
987# Build the final output image
988FROM {docker_base_img_name}
989{Package.df_all_copycmds()}
990
991# Some of our infrastructure still relies on the presence of this file
992# even though it is no longer needed to rebuild the docker environment
993# NOTE: The file is sorted to ensure the ordering is stable.
994RUN echo '{Package.depcache()}' > /tmp/depcache
995
996# Ensure the group, user, and home directory are created (or rename them if
997# they already exist).
998RUN if grep -q ":{gid}:" /etc/group ; then \
999        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
1000    else \
1001        groupadd -f -g {gid} {username} ; \
1002    fi
1003RUN mkdir -p "{os.path.dirname(homedir)}"
1004RUN if grep -q ":{uid}:" /etc/passwd ; then \
1005        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
1006    else \
1007        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
1008    fi
1009RUN sed -i '1iDefaults umask=000' /etc/sudoers
1010RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers
1011
1012# Ensure user has ability to write to /usr/local for different tool
1013# and data installs
1014RUN chown -R {username}:{username} /usr/local/share
1015
1016# Update library cache
1017RUN ldconfig
1018
1019{proxy_cmd}
1020
1021RUN /bin/bash
1022"""
1023
1024# Do the final docker build
1025docker_final_img_name = Docker.tagname(None, dockerfile)
1026Docker.build("final", docker_final_img_name, dockerfile)
1027
1028# Print the tag of the final image.
1029print(docker_final_img_name)
1030