1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
10#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only the missing-name case should trigger the fallback; a bare 'except'
    # would also swallow KeyboardInterrupt/SystemExit.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by all package builds below
proc_count = nproc().strip()  # host CPU count, used for 'make -j' parallelism
45
46
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    total=False makes every key optional at the type-checker level;
    'build_type' is the one key the build logic always reads.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   (Written by Package.run, not by the definitions below.)
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#   Keys are GitHub "org/repo" names; the download URL defaults to the GitHub
#   archive tarball for 'rev' unless a 'url' lambda overrides it
#   (see Package._url / Package._cmd_download).
packages = {
    # Install OpenSSL 3.x.
    # Generally we want to rely on the version of OpenSSL from the OS, but v3.x
    # was a major API change.  It is included in Yocto but not Ubuntu until
    # 22.04.  Install it manually so that our CI can target the OpenSSL 3.x
    # APIs.
    "openssl/openssl": PackageDef(
        rev="openssl-3.0.1",
        build_type="custom",
        build_steps=[
            f"./Configure --prefix={prefix} --libdir=lib",
            f"make -j{proc_count}",
            f"make -j{proc_count} install"
        ],
    ),
    # boost tarballs come from the Yocto mirror, not GitHub (see 'url').
    "boost": PackageDef(
        rev="1.78.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="8.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Release 2021-06-12
    "google/googletest": PackageDef(
        rev="9e712372214d75bb30ec2847a44bf124d48096f3",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    # No 'rev': HEAD of {branch} (or master) is looked up at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
299
# Define common flags used for builds.
#   These are plain space-separated option strings, substituted verbatim
#   into the shell commands of the per-build-type helpers.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
321
322
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Class-level copy of the packages dictionary; worker threads read and
    # update the PackageDef entries in this copy.
    packages = packages.copy()

    # Lock used for thread-safety.  Always taken via 'with' so an exception
    # inside a critical section cannot leave it held (which would deadlock
    # every other package thread).
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar )"""
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions for generate_all to re-raise.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with cls.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """Look up the HEAD revision for a package missing a static rev."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif (
                    "refs/heads/master" in line
                    and "rev" not in self.pkg_def
                ):
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        # The tarball unpacks to '<repo>-<rev>'; the glob covers the suffix.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """Shell fragment for an autoconf-based build."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """Shell fragment for a cmake-based (Ninja generator) build."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """Shell fragment joining the package's explicit 'build_steps'."""
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """Shell fragment for a plain Makefile build."""
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """Shell fragment for a meson/ninja build."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
598
599
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week (YYYY-Www)."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None (used for the final image, which takes no
        per-package suffix).
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
655
656
# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:impish")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root.
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the Docker image-name prefix for the host architecture.
_docker_base_by_arch = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
    "aarch64": "arm64v8/",
}
arch = uname("-m").strip()
if arch not in _docker_base_by_arch:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = _docker_base_by_arch[arch]
695
# Special flags if setting up a deb mirror.
#   When set, the stock sources.list is replaced so the release pocket and
#   the -updates/-security/-proposed/-backports pockets all come from the
#   mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
#   proxy_cmd      - Dockerfile snippet configuring git's http proxy.
#   proxy_keyserver - extra apt-key option (used in dockerfile_base below).
#   proxy_args     - extra 'docker build' --build-arg options.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
726
# Create base Dockerfile.
#   The Python substitutions in this template are {docker_base}{distro},
#   {mirror} and {proxy_keyserver}; everything else is literal Dockerfile
#   content.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.9/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

# Apply autoconf-archive-v2022.02.11 file ax_cxx_compile_stdcxx for C++20.
RUN curl "http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_cxx_compile_stdcxx.m4;hb=3311b6bdeff883c6a13952594a9dcb60bce6ba80" \
  > /usr/share/aclocal/ax_cxx_compile_stdcxx.m4

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""
849
# Under CI (BUILD_URL set), embed a value that changes with Docker.timestamp()
# (ISO-week granularity) so the layer cache is busted and 'apt-get update'
# actually re-runs instead of being served from cache indefinitely.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
858
# Python tooling used by CI; meson is pinned to a specific version.
# (Plain string, not an f-string: there are no substitutions here.)
dockerfile_base += """
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.61.3
RUN pip3 install protobuf
RUN pip3 install codespell
RUN pip3 install requests
"""
868
# Build the base and stage docker images.
#   generate_all() builds every package stage (in parallel threads) on top
#   of the base image.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; all build progress goes to stderr so
#   callers can capture the tag cleanly.
print(docker_final_img_name)
907