#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-empty value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which is used if the input branch is
#                     not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
#                     instead of our default (public.ecr.aws/ubuntu)
#                     (ex. docker.io)>
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", no proxy is set up if this is not set
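#
# Example invocation (hypothetical values; any subset of the variables above
# may be set):
#   DISTRO=ubuntu:noble UBUNTU_MIRROR=http://mirror.example.com/ubuntu \
#       FORCE_DOCKER_BUILD=1 ./scripts/build-unit-test-docker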

import json
import os
import re
import sys
import threading
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import git, nproc  # type: ignore

try:
    # System may have docker or it may have podman, try docker first
    from sh import docker

    container = docker
except ImportError:
    try:
        from sh import podman

        container = podman
    except Exception:
        print("No docker or podman found on system")
        exit(1)

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.

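# A minimal PackageDef for illustration only (hypothetical repository), using
# the default Github archive URL and a meson build:
#
#   "openbmc/example": PackageDef(
#       rev="v1.0",
#       build_type="meson",
#       config_flags=["-Dtests=disabled"],
#   )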

# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.86.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}-cmake.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=atomic,context,coroutine,filesystem,process,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Dtests=false",
            "-Dabi-compliance-check=false",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition in the 'packages' dictionary.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which another thread may be modifying.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

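    # Illustrative output prefix (keys sort with ASCII uppercase first):
    #   "CLIUtils/CLI11:v2.3.2,LibVNC/libvncserver:LibVNCServer-0.9.14,..."
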
    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has more than 1 commit under {gerrit_topic}; using latest upstream: {len(commits)}",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the HEAD revision when a static rev is missing."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fall back to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

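    # For the default Github archive URL this yields, e.g. (rev elided):
    #   curl -L https://github.com/openbmc/sdbusplus/archive/<rev>.tar.gz | tar -xz
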
    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the Dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

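    # Each entry in the generated snippet looks like (hypothetical tag):
    #   COPY --from=openbmc/ubuntu-unit-test-openbmc-sdbusplus:<tag> /usr/local /usr/local
    #   RUN true
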
    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result

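    # For a typical meson package this yields a command along the lines of:
    #   meson setup builddir --wrap-mode=nodownload -Dprefix=/usr/local \
    #       -Dtests=disabled && ninja -C builddir && ninja -C builddir install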

class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

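    # For example, 2021-04-21 falls in ISO week 16 of 2021, so timestamp()
    # returns "2021-W16" for any day of that week.
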
    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

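    # An illustrative result for the "base" stage (hypothetical hash suffix):
    #   openbmc/ubuntu-unit-test-base:2021-W16-0123456789abcdef
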
    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if container.image.ls(
                tag, "--format", '"{{.Repository}}:{{.Tag}}"'
            ):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        container.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


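# The container.build(...) call above is sh's rendering of the equivalent CLI
# invocation, roughly (hypothetical tag; proxy args prepended when set):
#   docker build --network=host --force-rm --no-cache=true -t <tag> - < Dockerfile
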
# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:oracular")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
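
# With http_proxy=http://proxy.example.com:3128 (hypothetical), proxy_args
# expands to:
#   ["--build-arg", "http_proxy=http://proxy.example.com:3128",
#    "--build-arg", "https_proxy=http://proxy.example.com:3128"]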

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_reg}/{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-14 \
    gcc-14 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libcurl4-openssl-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    systemd-dev \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-14 14 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-14 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-14 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-14 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-14
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-14 14

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" -- 19

# Install extra clang tools
RUN apt-get install -y \
        clang-19 \
        clang-format-19 \
        clang-tidy-19

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-19 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-19 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-19 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-19 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-19 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-19

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly, forcing us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isoduration \
        isort \
        jsonschema \
        meson==1.5.1 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment.
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)