1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:hirsute
#   FORCE_DOCKER_BUILD: <optional, a non-empty value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only the missing-name case should fall back; a bare 'except:' would
    # also swallow KeyboardInterrupt/SystemExit and unrelated errors.

    class TypedDict(dict):  # type: ignore
        """Minimal stand-in for typing.TypedDict on Python < 3.8."""

        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by every package build
proc_count = nproc().strip()  # host CPU count, used for make/meson -j parallelism
46
47
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    All keys are optional at the type level (total=False); the per-field
    comments note which ones a package is expected to provide.
    """

    # rev [optional]: Revision of package to use.
    #   When absent, Package._update_rev() fills it from Gerrit or GitHub.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    #   When absent, the default GitHub archive URL is used.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies (keys of 'packages').
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   Filled in by Package.run(); read back via the literal "__tag" key.
    __tag: str
    # __package [private]: Package object associated with this package.
    #   Filled in by Package.__init__().
    __package: Any  # Type is Package, but not defined yet.
76
77
78# Packages to include in image.
# Packages to include in image.
#
# Keys are either a GitHub "org/repo" path (fetched from the default
# github.com archive URL) or a plain name whose 'url' lambda supplies the
# download location (see Package._url).
packages = {
    "boost": PackageDef(
        rev="1.76.0",
        # Not hosted on GitHub; fetched from the Yocto source mirror.
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No rev given: Package._update_rev() resolves the branch HEAD at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        # Install the sdbus++ tooling before building the library itself.
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
285
# Define common flags used for builds.
#
# Each build system gets one pre-joined option string; per-package
# config_flags are appended to these by the _cmd_build_* helpers.
configure_flags = f"--prefix={prefix}"
cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
307
308
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread that builds one package's Docker stage and
    blocks on the threads of the packages it depends on.  Generally, this
    class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary so the module-level definition stays
    # pristine while threads annotate entries with __tag / __package.
    packages = packages.copy()

    # Lock used for thread-safety when reading/writing PackageDef entries.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        # Exceptions raised during run() are stored here because a thread
        # cannot propagate them through join(); generate_all() re-raises.
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry; register ourselves
        # so dependents can find this thread via the "__package" key.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions for generate_all() to re-raise.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.

        Raises the first exception recorded by any package thread.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".

        Expected to be called after generate_all(), when every package
        has a 'rev' filled in.
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD revision when no static rev was declared. """

        # Nothing to do if the package pinned a revision.
        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        #   The master rev is only kept if no {branch} match was seen first.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if f"refs/heads/{branch}" in line:
                self.pkg_def["rev"] = line.split()[0]
            elif f"refs/heads/master" in line and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        # Docker stage names cannot contain '/' or uppercase characters.
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source.

        Raises NotImplementedError for URLs that are not tar archives.
        """

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # The glob absorbs the archive's directory suffix (e.g. repo-<rev>).
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.

        Requires each package in 'pkgs' to already have its '__tag' set.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.

        Everything is emitted as one big '&&'-chained RUN command.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Build steps for an autoconf-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Build steps for a cmake-based package (out-of-tree builddir). """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Build steps supplied verbatim by the package's 'build_steps'. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Build steps for a plain Makefile package. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Build steps for a meson/ninja-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
584
585
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        # ex. "2021-W17" - stable for a whole week so cached images last
        # that long before the tag changes.
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None for the final (package-less) image.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        # Tag combines the ISO-week timestamp with a short Dockerfile hash,
        # so any Dockerfile change produces a new tag.
        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",  # read the Dockerfile from stdin (_in below)
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
641
642
# Read a bunch of environment variables (documented in the file header).
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")  # any non-empty value is truthy
is_automated_ci_build = os.environ.get("BUILD_URL", False)  # set under CI (ex. Jenkins)
distro = os.environ.get("DISTRO", "ubuntu:hirsute")
branch = os.environ.get("BRANCH", "master")  # preferred branch for openbmc/ repos
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Revision supplied by Gerrit/Jenkins for the repository under test, if any.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables for the in-image user account.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
660
# Determine the architecture for Docker.
#
# Only ppc64le and x86_64 hosts are supported; anything else aborts the
# script with a diagnostic on stderr.
arch = uname("-m").strip()
docker_base_by_arch = {"ppc64le": "ppc64le/", "x86_64": ""}
if arch not in docker_base_by_arch:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = docker_base_by_arch[arch]
673
# Special flags if setting up a deb mirror.
#
# When DISTRO is an Ubuntu image and UBUNTU_MIRROR is set, overwrite
# /etc/apt/sources.list so every pocket (release, -updates, -security,
# -proposed, -backports) points at the mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
684
# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet pointing git at the proxy
proxy_keyserver = ""  # extra apt-key option for keyserver access
proxy_args = []  # '--build-arg' options forwarded to 'docker build'
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    # The same proxy is used for both http and https traffic.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
704
# Create base Dockerfile.
#   This stage installs the toolchain and all distro packages; the
#   per-package stages and the final image all build FROM it.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-11 \
    clang-format-11 \
    clang-tidy-11 \
    clang-tools-11 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-11 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-11 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-11 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-11.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-11

"""

if is_automated_ci_build:
    # The Docker.timestamp() value changes weekly, busting the build cache
    # so CI images re-run apt-get update regularly.
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling installed via pip (not available/new enough in apt).
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.0
RUN pip3 install protobuf
RUN pip3 install codespell
"""
838
# Build the base and stage docker images.
#   The base image must exist first; every package stage builds FROM it.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
843
# Create the final Dockerfile.
#   Copies every package stage's /usr/local into one image and configures
#   the user account matching the invoking user.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build (tagname(None, ...) => no package suffix).
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only thing written to stdout, so callers can capture it.
print(docker_final_img_name)
877