#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#                     default is ubuntu:noble
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if input branch not
#                     provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy:       <optional, the HTTP address of the proxy server to
#                     connect to>
#                     default is empty, and no proxy is set up.

import os
import re
import sys
import threading
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc, uname  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.84.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache
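
    # For illustration, the generated depcache is a single line roughly like
    # the following (revisions track the packages dict above):
    #   CLIUtils/CLI11:v2.3.2,LibVNC/libvncserver:LibVNCServer-0.9.14,...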

    def _update_rev(self) -> None:
        """Look up the remote HEAD when a static rev is not provided."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fall back to master/main).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()
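
    # For reference, each line of the 'git ls-remote --heads' output has the
    # form "<sha>\trefs/heads/<branch>", so line.split()[0] above picks out
    # the commit sha of the matching branch.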

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()
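
    # e.g. "openbmc/sdbusplus" becomes "openbmc-sdbusplus", since '/' is not
    # valid in a Docker stage or tag name.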

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"
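
    # e.g. a package "openbmc/sdbusplus" pinned at rev "abc123" (an
    # illustrative sha) resolves to:
    #   https://github.com/openbmc/sdbusplus/archive/abc123.tar.gz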

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd
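
    # For the default GitHub archive URL this composes to, for example:
    #   curl -L https://github.com/<pkg>/archive/<rev>.tar.gz | tar -xz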

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"
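
    # GitHub archives unpack into a "<repo>-<rev>/" directory, so a glob such
    # as 'cd sdbusplus*' (for example) lands in the extracted source tree.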

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds
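
    # Each package contributes a pair of lines like the following, where the
    # tag is illustrative:
    #   COPY --from=openbmc/ubuntu-unit-test-openbmc-sdbusplus:<tag> /usr/local /usr/local
    #   RUN true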

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result
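
    # A sketch of the composed snippet for a meson package (URL and directory
    # are illustrative); the real output is a single RUN line:
    #   RUN curl -L <url> | tar -xz && cd sdbusplus* &&
    #       meson setup builddir --wrap-mode=nodownload ... &&
    #       ninja -C builddir && ninja -C builddir install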

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"
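
    # e.g. "2024-W23"; embedding the ISO week in every tag gives the images a
    # natural weekly cache-expiry cadence.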

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result
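
    # e.g. with the default image name, a "base" stage might be tagged
    # (hash is illustrative):
    #   openbmc/ubuntu-unit-test-base:2024-W23-0123456789abcdef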

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:noble")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" 18
# Install extra clang tools
RUN apt-get install -yy \
        clang-18 \
        clang-format-18 \
        clang-tidy-18

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-18 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-18 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-18 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-18 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-18

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to regularly pollute the docker cache and force
# us to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.3.0 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)