1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-empty value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
# Declare some variables used in package definitions.
# prefix: install location shared by every package stage; COPY commands later
#   copy this tree between Docker stages.
prefix = "/usr/local"
# proc_count: parallelism for 'make -j'; nproc (via the 'sh' wrapper) returns
#   its output with a trailing newline, hence the strip().
proc_count = nproc().strip()
45
46
class PackageDef(TypedDict, total=False):
    """ Package Definition for packages dictionary.

    total=False: every key is optional at runtime; the dict is plain data
    consumed by the Package class below.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # The private keys below are filled in at runtime by Package (which
    # accesses them via literal "__tag"/"__package" strings; TypedDict does
    # no runtime key checking, so that works even with name mangling of the
    # annotations here).
    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#
# Keys are usually a GitHub "org/repo" path (downloaded from the GitHub
# archive URL by default); a plain name is used when a custom 'url' lambda
# is supplied (e.g. "boost").  Entries without a 'rev' are resolved to the
# branch/master HEAD at build time by Package._update_rev.
packages = {
    "boost": PackageDef(
        rev="1.77.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="4f712ed69a04a344957d22efa5dc111b415b3aff",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    # No 'rev': built from the branch/master HEAD resolved at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
296
# Define common flag strings shared by every build type.  Each is a single
# space-separated option string injected into the generated RUN commands.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON "
    "-DCMAKE_BUILD_TYPE=RelWithDebInfo "
    f"-DCMAKE_INSTALL_PREFIX:PATH={prefix} "
    "-GNinja "
    "-DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
318
319
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread that builds one package's Docker stage, after
    waiting ('join') for the threads of all its 'depends' packages.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef; we also record ourselves in
        # it so that dependent packages can look up (and 'join' on) this
        # thread via the "__package" key.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        #   docker_base_img_name is a module-level global assigned after the
        #   base image is built, before generate_all() is called.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        #   Exceptions cannot propagate out of a thread, so generate_all()
        #   re-raises them after 'join'.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        #   A {branch} match always wins: the elif only applies when no rev
        #   has been recorded yet.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if f"refs/heads/{branch}" in line:
                self.pkg_def["rev"] = line.split()[0]
            elif f"refs/heads/master" in line and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive extension.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Build 'RUN' tail for an autoconf-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Build 'RUN' tail for a cmake-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Build 'RUN' tail from the package's explicit 'build_steps'. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Build 'RUN' tail for a plain-make package. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Build 'RUN' tail for a meson-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
595
596
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None (used for the final image), in which case the
        stage suffix is omitted.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'.

        Reads the module-level 'force_build' and 'proxy_args' globals.
        """

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
652
653
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# NOTE(review): any non-empty string is truthy here, so FORCE_DOCKER_BUILD=0
# would still force a rebuild -- confirm that is the intended contract.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:impish")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Jenkins/Gerrit provide these when building a specific change; used by
# Package._update_rev to pin the repo under review to the patchset revision.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
671
# Determine the architecture for Docker.
#   Only x86_64 (no prefix) and ppc64le (arch-specific image prefix) are
#   supported; anything else is a hard error.
arch = uname("-m").strip()
if arch == "x86_64":
    docker_base = ""
elif arch == "ppc64le":
    docker_base = "ppc64le/"
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
684
# Special flags if setting up a deb mirror.
#   Rewrites sources.list to point every suite at UBUNTU_MIRROR; only applies
#   to ubuntu-based distros.  The string bytes feed the image-tag hash, so
#   the formatting here is significant.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
695
# Special flags for proxying.
#   proxy_cmd: Dockerfile snippet configuring git's http proxy.  The single
#     backslashes inside the f-string are Python line continuations, so the
#     RUN collapses to one logical line in the emitted Dockerfile.
#   proxy_keyserver: extra option for apt-key in the base image.
#   proxy_args: build-args passed to every 'docker build'.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
715
# Create base Dockerfile.
#   This is the shared parent of every package stage and the final image.
#   Its exact bytes are hashed into the tag by Docker.tagname, so editing
#   any content below produces a brand-new image.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""
834
# On CI builds, embed the ISO-week timestamp so the Docker cache (and the
# tag hash) changes at least weekly, forcing a fresh apt-get update.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
843
# Python tools used by CI scripts; meson is pinned to an exact version.
# (This string has no interpolations, so the 'f' prefix was extraneous.)
dockerfile_base += """
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.1
RUN pip3 install protobuf
RUN pip3 install codespell
"""
852
# Build the base and stage docker images.
#   docker_base_img_name must be assigned before generate_all(), since each
#   Package thread references it in its generated Dockerfile.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
857
# Create the final Dockerfile.
#   Copies every package stage's {prefix} tree into one image and sets up
#   the CI user/workspace.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build.
#   tagname is passed None so the tag has no stage suffix.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only thing written to stdout; callers capture it.
print(docker_final_img_name)
891