#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if the input
#                     branch is not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy        The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set
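#
# A hypothetical invocation for illustration only (the variable values and the
# script path shown here are assumptions, not part of this script):
#   DISTRO=ubuntu:lunar BRANCH=master FORCE_DOCKER_BUILD=1 \
#       ./build-unit-test-docker
# On success the script prints the tag of the final docker image on stdout.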

import os
import sys
import threading
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc, uname  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.

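# A minimal, hypothetical entry for illustration only (not added to the
# 'packages' dict below): only 'build_type' is required; the revision falls
# back to the branch HEAD and the URL to the default GitHub archive.
#
#   example = PackageDef(build_type="meson")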

# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.82.0",
        url=(
            lambda pkg, rev: f"https://boostorg.jfrog.io/artifactory/main/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.gz"  # noqa: E501
        ),
        build_type="custom",
        build_steps=[
            (
                "curl --remote-name"
                " https://github.com/williamspatrick/beast/commit/98f8b1fbd059a35754c2c7b2841769cf8d021272.patch"
                " && patch -p2 <"
                " 98f8b1fbd059a35754c2c7b2841769cf8d021272.patch &&"
                " ./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine"
            ),
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.9",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="9.1.0",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.1",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # release-1.12.1
    "google/googletest": PackageDef(
        rev="58d77fa8070e8cec2dc1ed015d66b454c8d78850",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.16-20220414",
        build_type="cmake",
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v0.7",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "Naios/function2",
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)."""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from GitHub.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which may be modified concurrently.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the dependencies finish building.  We need them
        # complete for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache
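
    # An illustrative depcache fragment; the revisions shown are examples
    # only, not the pinned values from the 'packages' dict:
    #   "CLIUtils/CLI11:v1.9.1,USCiLab/cereal:v1.3.2,boost:1.82.0,..."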

    def _update_rev(self) -> None:
        """Look up the branch HEAD when no static rev is pinned."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask GitHub for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fall back to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if f"refs/heads/{branch}" in line:
                self.pkg_def["rev"] = line.split()[0]
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()
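
    # For example (hypothetical input, following the rule above):
    #   "openbmc/sdbusplus" -> "openbmc-sdbusplus"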

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the GitHub archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd
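
    # An illustrative result for a hypothetical package and rev (the URL shape
    # follows the default GitHub archive rule in _url()):
    #   curl -L https://github.com/openbmc/sdbusplus/archive/<rev>.tar.gz | tar -xz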

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds
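
    # Each dependency therefore contributes a pair of Dockerfile lines like
    # the following (the tag value is purely illustrative):
    #   COPY --from=openbmc/ubuntu-unit-test-boost:2023-W04-0123456789abcdef /usr/local /usr/local
    #   RUN true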

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result
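
    # The assembled result is a single Dockerfile RUN instruction.  A sketch
    # of the shape for a hypothetical meson package (flags elided, wrapped
    # here only for readability):
    #   RUN curl -L <url> | tar -xz && cd <srcdir>* &&
    #       meson builddir ... && ninja -C builddir && ninja -C builddir install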

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"
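
    # For example, a date in late January 2023 would yield something like
    # "2023-W04" (year, then zero-padded ISO week number).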

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result
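
    # A sketch of the resulting tag, assuming the default image name and an
    # arbitrary Dockerfile hash (both values illustrative):
    #   openbmc/ubuntu-unit-test-base:2023-W04-0123456789abcdef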

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:lunar")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture ({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    clang-16 \
    clang-format-16 \
    clang-tidy-16 \
    clang-tools-16 \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    sudo \
    systemd \
    valgrind \
    valgrind-dbg \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-16 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-16 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-16 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-16 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-16 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-16

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly and force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.0.1 \
        protobuf \
        requests

RUN npm install -g \
        eslint@latest eslint-plugin-json@latest \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)
