1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
6#   DOCKER_IMG_NAME:  <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#   FORCE_DOCKER_BUILD: <optional, a non-zero value with force all Docker
10#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
31try:
32    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
33    from typing import TypedDict
34except:
35
36    class TypedDict(dict):  # type: ignore
37        # We need to do this to eat the 'total' argument.
38        def __init_subclass__(cls, **kwargs):
39            super().__init_subclass__()
40
41
42# Declare some variables used in package definitions.
43prefix = "/usr/local"
44proc_count = nproc().strip()
45
46
47class PackageDef(TypedDict, total=False):
48    """ Package Definition for packages dictionary. """
49
50    # rev [optional]: Revision of package to use.
51    rev: str
52    # url [optional]: lambda function to create URL: (package, rev) -> url.
53    url: Callable[[str, str], str]
54    # depends [optional]: List of package dependencies.
55    depends: Iterable[str]
56    # build_type [required]: Build type used for package.
57    #   Currently supported: autoconf, cmake, custom, make, meson
58    build_type: str
59    # build_steps [optional]: Steps to run for 'custom' build_type.
60    build_steps: Iterable[str]
61    # config_flags [optional]: List of options to pass configuration tool.
62    config_flags: Iterable[str]
63    # config_env [optional]: List of environment variables to set for config.
64    config_env: Iterable[str]
65    # custom_post_dl [optional]: List of steps to run after download, but
66    #   before config / build / install.
67    custom_post_dl: Iterable[str]
68    # custom_post_install [optional]: List of steps to run after install.
69    custom_post_install: Iterable[str]
70
71    # __tag [private]: Generated Docker tag name for package stage.
72    __tag: str
73    # __package [private]: Package object associated with this package.
74    __package: Any  # Type is Package, but not defined yet.
75
76
77# Packages to include in image.
78packages = {
79    # Install OpenSSL 3.x.
80    # Generally we want to rely on the version of OpenSSL from the OS, but v3.x
81    # was a major API change.  It is included in Yocto but not Ubuntu until
82    # 22.04.  Install it manually so that our CI can target the OpenSSL 3.x
83    # APIs.
84    "openssl/openssl": PackageDef(
85        rev="openssl-3.0.1",
86        build_type="custom",
87        build_steps=[
88            f"./Configure --prefix={prefix} --libdir=lib",
89            f"make -j{proc_count}",
90            f"make -j{proc_count} install"
91        ],
92    ),
93    "boost": PackageDef(
94        rev="1.78.0",
95        url=(
96            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
97        ),
98        build_type="custom",
99        build_steps=[
100            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
101            "./b2",
102            f"./b2 install --prefix={prefix}",
103        ],
104    ),
105    "USCiLab/cereal": PackageDef(
106        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
107        build_type="custom",
108        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
109    ),
110    "catchorg/Catch2": PackageDef(
111        rev="v2.13.6",
112        build_type="cmake",
113        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
114    ),
115    "CLIUtils/CLI11": PackageDef(
116        rev="v1.9.1",
117        build_type="cmake",
118        config_flags=[
119            "-DBUILD_TESTING=OFF",
120            "-DCLI11_BUILD_DOCS=OFF",
121            "-DCLI11_BUILD_EXAMPLES=OFF",
122        ],
123    ),
124    "fmtlib/fmt": PackageDef(
125        rev="8.1.1",
126        build_type="cmake",
127        config_flags=[
128            "-DFMT_DOC=OFF",
129            "-DFMT_TEST=OFF",
130        ],
131    ),
132    "Naios/function2": PackageDef(
133        rev="4.1.0",
134        build_type="custom",
135        build_steps=[
136            f"mkdir {prefix}/include/function2",
137            f"cp include/function2/function2.hpp {prefix}/include/function2/",
138        ],
139    ),
140    # Release 2021-06-12
141    "google/googletest": PackageDef(
142        rev="9e712372214d75bb30ec2847a44bf124d48096f3",
143        build_type="cmake",
144        config_env=["CXXFLAGS=-std=c++20"],
145        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
146    ),
147    # Release 2020-08-06
148    "nlohmann/json": PackageDef(
149        rev="v3.10.4",
150        build_type="cmake",
151        config_flags=["-DJSON_BuildTests=OFF"],
152        custom_post_install=[
153            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
154        ],
155    ),
156    # Snapshot from 2019-05-24
157    "linux-test-project/lcov": PackageDef(
158        rev="v1.15",
159        build_type="make",
160    ),
161    # dev-5.8 2021-01-11
162    "openbmc/linux": PackageDef(
163        rev="3cc95ae40716e56f81b69615781f54c78079042d",
164        build_type="custom",
165        build_steps=[
166            f"make -j{proc_count} defconfig",
167            f"make INSTALL_HDR_PATH={prefix} headers_install",
168        ],
169    ),
170    # Snapshot from 2020-06-13
171    "LibVNC/libvncserver": PackageDef(
172        rev="LibVNCServer-0.9.13",
173        build_type="cmake",
174    ),
175    "martinmoene/span-lite": PackageDef(
176        rev="v0.9.2",
177        build_type="cmake",
178        config_flags=[
179            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
180        ],
181    ),
182    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
183    "leethomason/tinyxml2": PackageDef(
184        rev="8.0.0",
185        build_type="cmake",
186    ),
187    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
188    "CPPAlliance/url": PackageDef(
189        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
190        build_type="custom",
191        build_steps=[f"cp -a include/** {prefix}/include/"],
192    ),
193    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
194    "tristanpenman/valijson": PackageDef(
195        rev="v0.6",
196        build_type="cmake",
197        config_flags=[
198            "-Dvalijson_BUILD_TESTS=0",
199            "-Dvalijson_INSTALL_HEADERS=1",
200        ],
201    ),
202    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
203    "nlohmann/fifo_map": PackageDef(
204        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
205        build_type="custom",
206        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
207    ),
208    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
209    "facebookexperimental/libunifex": PackageDef(
210        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
211        build_type="cmake",
212        config_flags=[
213            "-DBUILD_SHARED_LIBS=ON",
214            "-DBUILD_TESTING=OFF",
215            "-DCMAKE_CXX_STANDARD=20",
216            "-DUNIFEX_BUILD_EXAMPLES=OFF",
217        ],
218    ),
219    "open-power/pdbg": PackageDef(build_type="autoconf"),
220    "openbmc/gpioplus": PackageDef(
221        depends=["openbmc/stdplus"],
222        build_type="meson",
223        config_flags=[
224            "-Dexamples=false",
225            "-Dtests=disabled",
226        ],
227    ),
228    "openbmc/phosphor-dbus-interfaces": PackageDef(
229        depends=["openbmc/sdbusplus"],
230        build_type="meson",
231    ),
232    "openbmc/phosphor-logging": PackageDef(
233        depends=[
234            "USCiLab/cereal",
235            "nlohmann/fifo_map",
236            "openbmc/phosphor-dbus-interfaces",
237            "openbmc/sdbusplus",
238            "openbmc/sdeventplus",
239        ],
240        build_type="meson",
241        config_flags=[
242            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
243        ],
244    ),
245    "openbmc/phosphor-objmgr": PackageDef(
246        depends=[
247            "boost",
248            "leethomason/tinyxml2",
249            "openbmc/phosphor-logging",
250            "openbmc/sdbusplus",
251        ],
252        build_type="meson",
253        config_flags=[
254            "-Dtests=disabled",
255        ],
256    ),
257    "openbmc/pldm": PackageDef(
258        depends=[
259            "CLIUtils/CLI11",
260            "boost",
261            "nlohmann/json",
262            "openbmc/phosphor-dbus-interfaces",
263            "openbmc/phosphor-logging",
264            "openbmc/sdbusplus",
265            "openbmc/sdeventplus",
266        ],
267        build_type="meson",
268        config_flags=[
269            "-Dlibpldm-only=enabled",
270            "-Doem-ibm=enabled",
271            "-Dtests=disabled",
272        ],
273    ),
274    "openbmc/sdbusplus": PackageDef(
275        depends=[
276            "facebookexperimental/libunifex",
277        ],
278        build_type="meson",
279        custom_post_dl=[
280            "cd tools",
281            f"./setup.py install --root=/ --prefix={prefix}",
282            "cd ..",
283        ],
284        config_flags=[
285            "-Dexamples=disabled",
286            "-Dtests=disabled",
287        ],
288    ),
289    "openbmc/sdeventplus": PackageDef(
290        depends=["Naios/function2", "openbmc/stdplus"],
291        build_type="meson",
292        config_flags=[
293            "-Dexamples=false",
294            "-Dtests=disabled",
295        ],
296    ),
297    "openbmc/stdplus": PackageDef(
298        depends=["fmtlib/fmt", "martinmoene/span-lite"],
299        build_type="meson",
300        config_flags=[
301            "-Dexamples=false",
302            "-Dtests=disabled",
303        ],
304    ),
305}  # type: Dict[str, PackageDef]
306
307# Define common flags used for builds
308configure_flags = " ".join(
309    [
310        f"--prefix={prefix}",
311    ]
312)
313cmake_flags = " ".join(
314    [
315        "-DBUILD_SHARED_LIBS=ON",
316        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
317        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
318        "-GNinja",
319        "-DCMAKE_MAKE_PROGRAM=ninja",
320    ]
321)
322meson_flags = " ".join(
323    [
324        "--wrap-mode=nodownload",
325        f"-Dprefix={prefix}",
326    ]
327)
328
329
330class Package(threading.Thread):
331    """Class used to build the Docker stages for each package.
332
333    Generally, this class should not be instantiated directly but through
334    Package.generate_all().
335    """
336
337    # Copy the packages dictionary.
338    packages = packages.copy()
339
340    # Lock used for thread-safety.
341    lock = threading.Lock()
342
343    def __init__(self, pkg: str):
344        """ pkg - The name of this package (ex. foo/bar ) """
345        super(Package, self).__init__()
346
347        self.package = pkg
348        self.exception = None  # type: Optional[Exception]
349
350        # Reference to this package's
351        self.pkg_def = Package.packages[pkg]
352        self.pkg_def["__package"] = self
353
354    def run(self) -> None:
355        """ Thread 'run' function.  Builds the Docker stage. """
356
357        # In case this package has no rev, fetch it from Github.
358        self._update_rev()
359
360        # Find all the Package objects that this package depends on.
361        #   This section is locked because we are looking into another
362        #   package's PackageDef dict, which could be being modified.
363        Package.lock.acquire()
364        deps: Iterable[Package] = [
365            Package.packages[deppkg]["__package"]
366            for deppkg in self.pkg_def.get("depends", [])
367        ]
368        Package.lock.release()
369
370        # Wait until all the depends finish building.  We need them complete
371        # for the "COPY" commands.
372        for deppkg in deps:
373            deppkg.join()
374
375        # Generate this package's Dockerfile.
376        dockerfile = f"""
377FROM {docker_base_img_name}
378{self._df_copycmds()}
379{self._df_build()}
380"""
381
382        # Generate the resulting tag name and save it to the PackageDef.
383        #   This section is locked because we are modifying the PackageDef,
384        #   which can be accessed by other threads.
385        Package.lock.acquire()
386        tag = Docker.tagname(self._stagename(), dockerfile)
387        self.pkg_def["__tag"] = tag
388        Package.lock.release()
389
390        # Do the build / save any exceptions.
391        try:
392            Docker.build(self.package, tag, dockerfile)
393        except Exception as e:
394            self.exception = e
395
396    @classmethod
397    def generate_all(cls) -> None:
398        """Ensure a Docker stage is created for all defined packages.
399
400        These are done in parallel but with appropriate blocking per
401        package 'depends' specifications.
402        """
403
404        # Create a Package for each defined package.
405        pkg_threads = [Package(p) for p in cls.packages.keys()]
406
407        # Start building them all.
408        #   This section is locked because threads depend on each other,
409        #   based on the packages, and they cannot 'join' on a thread
410        #   which is not yet started.  Adding a lock here allows all the
411        #   threads to start before they 'join' their dependencies.
412        Package.lock.acquire()
413        for t in pkg_threads:
414            t.start()
415        Package.lock.release()
416
417        # Wait for completion.
418        for t in pkg_threads:
419            t.join()
420            # Check if the thread saved off its own exception.
421            if t.exception:
422                print(f"Package {t.package} failed!", file=sys.stderr)
423                raise t.exception
424
425    @staticmethod
426    def df_all_copycmds() -> str:
427        """Formulate the Dockerfile snippet necessary to copy all packages
428        into the final image.
429        """
430        return Package.df_copycmds_set(Package.packages.keys())
431
432    @classmethod
433    def depcache(cls) -> str:
434        """Create the contents of the '/tmp/depcache'.
435        This file is a comma-separated list of "<pkg>:<rev>".
436        """
437
438        # This needs to be sorted for consistency.
439        depcache = ""
440        for pkg in sorted(cls.packages.keys()):
441            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
442        return depcache
443
444    def _update_rev(self) -> None:
445        """ Look up the HEAD for missing a static rev. """
446
447        if "rev" in self.pkg_def:
448            return
449
450        # Check if Jenkins/Gerrit gave us a revision and use it.
451        if gerrit_project == self.package and gerrit_rev:
452            print(
453                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
454                file=sys.stderr,
455            )
456            self.pkg_def["rev"] = gerrit_rev
457            return
458
459        # Ask Github for all the branches.
460        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")
461
462        # Find the branch matching {branch} (or fallback to master).
463        #   This section is locked because we are modifying the PackageDef.
464        Package.lock.acquire()
465        for line in lookup.split("\n"):
466            if f"refs/heads/{branch}" in line:
467                self.pkg_def["rev"] = line.split()[0]
468            elif f"refs/heads/master" in line and "rev" not in self.pkg_def:
469                self.pkg_def["rev"] = line.split()[0]
470        Package.lock.release()
471
472    def _stagename(self) -> str:
473        """ Create a name for the Docker stage associated with this pkg. """
474        return self.package.replace("/", "-").lower()
475
476    def _url(self) -> str:
477        """ Get the URL for this package. """
478        rev = self.pkg_def["rev"]
479
480        # If the lambda exists, call it.
481        if "url" in self.pkg_def:
482            return self.pkg_def["url"](self.package, rev)
483
484        # Default to the github archive URL.
485        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"
486
487    def _cmd_download(self) -> str:
488        """Formulate the command necessary to download and unpack to source."""
489
490        url = self._url()
491        if ".tar." not in url:
492            raise NotImplementedError(
493                f"Unhandled download type for {self.package}: {url}"
494            )
495
496        cmd = f"curl -L {url} | tar -x"
497
498        if url.endswith(".bz2"):
499            cmd += "j"
500        elif url.endswith(".gz"):
501            cmd += "z"
502        else:
503            raise NotImplementedError(
504                f"Unknown tar flags needed for {self.package}: {url}"
505            )
506
507        return cmd
508
509    def _cmd_cd_srcdir(self) -> str:
510        """ Formulate the command necessary to 'cd' into the source dir. """
511        return f"cd {self.package.split('/')[-1]}*"
512
513    def _df_copycmds(self) -> str:
514        """ Formulate the dockerfile snippet necessary to COPY all depends. """
515
516        if "depends" not in self.pkg_def:
517            return ""
518        return Package.df_copycmds_set(self.pkg_def["depends"])
519
520    @staticmethod
521    def df_copycmds_set(pkgs: Iterable[str]) -> str:
522        """Formulate the Dockerfile snippet necessary to COPY a set of
523        packages into a Docker stage.
524        """
525
526        copy_cmds = ""
527
528        # Sort the packages for consistency.
529        for p in sorted(pkgs):
530            tag = Package.packages[p]["__tag"]
531            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
532            # Workaround for upstream docker bug and multiple COPY cmds
533            # https://github.com/moby/moby/issues/37965
534            copy_cmds += "RUN true\n"
535
536        return copy_cmds
537
538    def _df_build(self) -> str:
539        """Formulate the Dockerfile snippet necessary to download, build, and
540        install a package into a Docker stage.
541        """
542
543        # Download and extract source.
544        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "
545
546        # Handle 'custom_post_dl' commands.
547        custom_post_dl = self.pkg_def.get("custom_post_dl")
548        if custom_post_dl:
549            result += " && ".join(custom_post_dl) + " && "
550
551        # Build and install package based on 'build_type'.
552        build_type = self.pkg_def["build_type"]
553        if build_type == "autoconf":
554            result += self._cmd_build_autoconf()
555        elif build_type == "cmake":
556            result += self._cmd_build_cmake()
557        elif build_type == "custom":
558            result += self._cmd_build_custom()
559        elif build_type == "make":
560            result += self._cmd_build_make()
561        elif build_type == "meson":
562            result += self._cmd_build_meson()
563        else:
564            raise NotImplementedError(
565                f"Unhandled build type for {self.package}: {build_type}"
566            )
567
568        # Handle 'custom_post_install' commands.
569        custom_post_install = self.pkg_def.get("custom_post_install")
570        if custom_post_install:
571            result += " && " + " && ".join(custom_post_install)
572
573        return result
574
575    def _cmd_build_autoconf(self) -> str:
576        options = " ".join(self.pkg_def.get("config_flags", []))
577        env = " ".join(self.pkg_def.get("config_env", []))
578        result = "./bootstrap.sh && "
579        result += f"{env} ./configure {configure_flags} {options} && "
580        result += f"make -j{proc_count} && make install"
581        return result
582
583    def _cmd_build_cmake(self) -> str:
584        options = " ".join(self.pkg_def.get("config_flags", []))
585        env = " ".join(self.pkg_def.get("config_env", []))
586        result = "mkdir builddir && cd builddir && "
587        result += f"{env} cmake {cmake_flags} {options} .. && "
588        result += "cmake --build . --target all && "
589        result += "cmake --build . --target install && "
590        result += "cd .."
591        return result
592
593    def _cmd_build_custom(self) -> str:
594        return " && ".join(self.pkg_def.get("build_steps", []))
595
596    def _cmd_build_make(self) -> str:
597        return f"make -j{proc_count} && make install"
598
599    def _cmd_build_meson(self) -> str:
600        options = " ".join(self.pkg_def.get("config_flags", []))
601        env = " ".join(self.pkg_def.get("config_env", []))
602        result = f"{env} meson builddir {meson_flags} {options} && "
603        result += "ninja -C builddir && ninja -C builddir install"
604        return result
605
606
607class Docker:
608    """Class to assist with Docker interactions.  All methods are static."""
609
610    @staticmethod
611    def timestamp() -> str:
612        """ Generate a timestamp for today using the ISO week. """
613        today = date.today().isocalendar()
614        return f"{today[0]}-W{today[1]:02}"
615
616    @staticmethod
617    def tagname(pkgname: str, dockerfile: str) -> str:
618        """ Generate a tag name for a package using a hash of the Dockerfile. """
619        result = docker_image_name
620        if pkgname:
621            result += "-" + pkgname
622
623        result += ":" + Docker.timestamp()
624        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]
625
626        return result
627
628    @staticmethod
629    def build(pkg: str, tag: str, dockerfile: str) -> None:
630        """Build a docker image using the Dockerfile and tagging it with 'tag'."""
631
632        # If we're not forcing builds, check if it already exists and skip.
633        if not force_build:
634            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
635                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
636                return
637
638        # Build it.
639        #   Capture the output of the 'docker build' command and send it to
640        #   stderr (prefixed with the package name).  This allows us to see
641        #   progress but not polute stdout.  Later on we output the final
642        #   docker tag to stdout and we want to keep that pristine.
643        #
644        #   Other unusual flags:
645        #       --no-cache: Bypass the Docker cache if 'force_build'.
646        #       --force-rm: Clean up Docker processes if they fail.
647        docker.build(
648            proxy_args,
649            "--network=host",
650            "--force-rm",
651            "--no-cache=true" if force_build else "--no-cache=false",
652            "-t",
653            tag,
654            "-",
655            _in=dockerfile,
656            _out=(
657                lambda line: print(
658                    pkg + ":", line, end="", file=sys.stderr, flush=True
659                )
660            ),
661        )
662
663
664# Read a bunch of environment variables.
665docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
666force_build = os.environ.get("FORCE_DOCKER_BUILD")
667is_automated_ci_build = os.environ.get("BUILD_URL", False)
668distro = os.environ.get("DISTRO", "ubuntu:impish")
669branch = os.environ.get("BRANCH", "master")
670ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
671http_proxy = os.environ.get("http_proxy")
672
673gerrit_project = os.environ.get("GERRIT_PROJECT")
674gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
675
676# Set up some common variables.
677username = os.environ.get("USER", "root")
678homedir = os.environ.get("HOME", "/root")
679gid = os.getgid()
680uid = os.getuid()
681
682# Use well-known constants if user is root
683if username == "root":
684    homedir = "/root"
685    gid = 0
686    uid = 0
687
688# Determine the architecture for Docker.
689arch = uname("-m").strip()
690if arch == "ppc64le":
691    docker_base = "ppc64le/"
692elif arch == "x86_64":
693    docker_base = ""
694elif arch == "aarch64":
695    docker_base = "arm64v8/"
696else:
697    print(
698        f"Unsupported system architecture({arch}) found for docker image",
699        file=sys.stderr,
700    )
701    sys.exit(1)
702
703# Special flags if setting up a deb mirror.
704mirror = ""
705if "ubuntu" in distro and ubuntu_mirror:
706    mirror = f"""
707RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
708    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
709    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
710    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
711    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
712"""
713
714# Special flags for proxying.
715proxy_cmd = ""
716proxy_keyserver = ""
717proxy_args = []
718if http_proxy:
719    proxy_cmd = f"""
720RUN echo "[http]" >> {homedir}/.gitconfig && \
721    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
722"""
723    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"
724
725    proxy_args.extend(
726        [
727            "--build-arg",
728            f"http_proxy={http_proxy}",
729            "--build-arg",
730            f"https_proxy={http_proxy}",
731        ]
732    )
733
734# Create base Dockerfile.
735dockerfile_base = f"""
736FROM {docker_base}{distro}
737
738{mirror}
739
740ENV DEBIAN_FRONTEND noninteractive
741
742ENV PYTHONPATH "/usr/local/lib/python3.9/site-packages/"
743
744# Sometimes the ubuntu key expires and we need a way to force an execution
745# of the apt-get commands for the dbgsym-keyring.  When this happens we see
746# an error like: "Release: The following signatures were invalid:"
747# Insert a bogus echo that we can change here when we get this error to force
748# the update.
749RUN echo "ubuntu keyserver rev as of 2021-04-21"
750
751# We need the keys to be imported for dbgsym repos
752# New releases have a package, older ones fall back to manual fetching
753# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
754RUN apt-get update && apt-get dist-upgrade -yy && \
755    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
756        ( apt-get install -yy dirmngr && \
757          apt-key adv --keyserver keyserver.ubuntu.com \
758                      {proxy_keyserver} \
759                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )
760
761# Parse the current repo list into a debug repo list
762RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list
763
764# Remove non-existent debug repos
765RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list
766
767RUN cat /etc/apt/sources.list.d/debug.list
768
769RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
770    gcc-11 \
771    g++-11 \
772    libc6-dbg \
773    libc6-dev \
774    libtool \
775    bison \
776    libdbus-1-dev \
777    flex \
778    cmake \
779    python3 \
780    python3-dev\
781    python3-yaml \
782    python3-mako \
783    python3-pip \
784    python3-setuptools \
785    python3-git \
786    python3-socks \
787    pkg-config \
788    autoconf \
789    autoconf-archive \
790    libsystemd-dev \
791    systemd \
792    libssl-dev \
793    libevdev-dev \
794    libjpeg-dev \
795    libpng-dev \
796    ninja-build \
797    sudo \
798    curl \
799    git \
800    dbus \
801    iputils-ping \
802    clang-13 \
803    clang-format-13 \
804    clang-tidy-13 \
805    clang-tools-13 \
806    shellcheck \
807    npm \
808    iproute2 \
809    libnl-3-dev \
810    libnl-genl-3-dev \
811    libconfig++-dev \
812    libsnmp-dev \
813    valgrind \
814    valgrind-dbg \
815    libpam0g-dev \
816    xxd \
817    libi2c-dev \
818    wget \
819    libldap2-dev \
820    libprotobuf-dev \
821    liburing-dev \
822    liburing1-dbgsym \
823    libperlio-gzip-perl \
824    libjson-perl \
825    protobuf-compiler \
826    libgpiod-dev \
827    device-tree-compiler \
828    cppcheck \
829    libpciaccess-dev \
830    libmimetic-dev \
831    libxml2-utils \
832    libxml-simple-perl \
833    rsync \
834    libcryptsetup-dev
835
836# Apply autoconf-archive-v2022.02.11 file ax_cxx_compile_stdcxx for C++20.
837RUN curl "http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_cxx_compile_stdcxx.m4;hb=3311b6bdeff883c6a13952594a9dcb60bce6ba80" \
838  > /usr/share/aclocal/ax_cxx_compile_stdcxx.m4
839
840RUN npm install -g eslint@latest eslint-plugin-json@latest
841
842RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
843  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
844  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
845  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
846  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11
847
848RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
849  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
850  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
851  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
852  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
853  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13
854
855"""
856
857if is_automated_ci_build:
858    dockerfile_base += f"""
859# Run an arbitrary command to polute the docker cache regularly force us
860# to re-run `apt-get update` daily.
861RUN echo {Docker.timestamp()}
862RUN apt-get update && apt-get dist-upgrade -yy
863
864"""
865
866dockerfile_base += f"""
867RUN pip3 install inflection
868RUN pip3 install pycodestyle
869RUN pip3 install jsonschema
870RUN pip3 install meson==0.61.3
871RUN pip3 install protobuf
872RUN pip3 install codespell
873RUN pip3 install requests
874"""
875
876# Build the base and stage docker images.
877docker_base_img_name = Docker.tagname("base", dockerfile_base)
878Docker.build("base", docker_base_img_name, dockerfile_base)
879Package.generate_all()
880
881# Create the final Dockerfile.
882dockerfile = f"""
883# Build the final output image
884FROM {docker_base_img_name}
885{Package.df_all_copycmds()}
886
887# Some of our infrastructure still relies on the presence of this file
888# even though it is no longer needed to rebuild the docker environment
889# NOTE: The file is sorted to ensure the ordering is stable.
890RUN echo '{Package.depcache()}' > /tmp/depcache
891
892# Final configuration for the workspace
893RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
894RUN mkdir -p "{os.path.dirname(homedir)}"
895RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
896RUN sed -i '1iDefaults umask=000' /etc/sudoers
897RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers
898
899# Ensure user has ability to write to /usr/local for different tool
900# and data installs
901RUN chown -R {username}:{username} /usr/local/share
902
903{proxy_cmd}
904
905RUN /bin/bash
906"""
907
908# Do the final docker build
909docker_final_img_name = Docker.tagname(None, dockerfile)
910Docker.build("final", docker_final_img_name, dockerfile)
911
912# Print the tag of the final image.
913print(docker_final_img_name)
914