1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:hirsute
#   FORCE_DOCKER_BUILD: <optional, a non-empty value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only catch the missing-name case; a bare 'except:' would also swallow
    # KeyboardInterrupt/SystemExit and hide unrelated import failures.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by every package build step
proc_count = nproc().strip()  # host CPU count, used for parallel 'make -j'

47
class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary.

    total=False makes every key optional at the type level; the per-field
    comments below state which keys a package entry must actually supply.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   NOTE: double-underscore names are mangled in the annotation map, but
    #   the code only ever uses the literal string key "__tag", so this is
    #   harmless at runtime.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are either a bare tarball name (ex. "boost") or a GitHub "org/repo"
#   path; entries without a 'rev' are resolved against GitHub at build time.
packages = {
    "boost": PackageDef(
        rev="1.75.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.12.2",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Snapshot from 2020-01-03
    "Naios/function2": PackageDef(
        rev="3a0746bf5f601dfed05330aefcb6854354fce07d",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.8.1",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev': HEAD of the default/requested branch is looked up at runtime.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
285
# Common command-line flags shared by every package build of each type.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON "
    "-DCMAKE_BUILD_TYPE=RelWithDebInfo "
    f"-DCMAKE_INSTALL_PREFIX:PATH={prefix} "
    "-GNinja "
    "-DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
307
308
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a worker thread that resolves its package's revision,
    waits for its dependencies' stages to finish, and then builds its own
    Docker stage.  Generally, this class should not be instantiated
    directly but through Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition; also save a back-reference
        # so dependents can find this thread from the PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   'with' guarantees the lock is released even if this raises,
        #   which a bare acquire()/release() pair would not.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions for generate_all() to re-raise.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        #   A later 'branch' match intentionally overrides an earlier
        #   'master' match.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Shell command for an autoconf-based configure/make/install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Shell command for a cmake out-of-tree build and install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Shell command chaining the package's custom 'build_steps'. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Shell command for a plain make/make-install build. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Shell command for a meson setup plus ninja build/install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
584
585
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Name of the package stage, or None for the final image
                  (the annotation was 'str' but callers pass None).
        dockerfile - Dockerfile contents; hashed into the tag so any change
                     produces a distinct tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        # Tag is "<name>:<iso-week>-<sha256 prefix>"; the weekly timestamp
        # forces a periodic rebuild even when content is unchanged.
        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
641
642
# Read a bunch of environment variables (see the header comment for docs).
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); its presence marks an automated build.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:hirsute")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit/Jenkins supply these when building against a specific patchset.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
660
# Determine the architecture for Docker.
#   Map the host machine type to the Docker Hub image-name prefix; anything
#   not listed here is unsupported.
arch_prefixes = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
}

arch = uname("-m").strip()
if arch not in arch_prefixes:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = arch_prefixes[arch]
673
# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    # Rewrite sources.list so every Ubuntu pocket points at the mirror.
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet configuring git to use the proxy.
proxy_args = []  # Extra 'docker build' args passing the proxy through.
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
701
# Create base Dockerfile.
#   This stage installs the toolchain and distro packages shared by every
#   per-package build stage and the final image.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-11 \
    clang-format-11 \
    clang-tidy-11 \
    clang-tools-11 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-11 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-11 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-11 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-11.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-11

"""

# On CI builds, embed the weekly timestamp so the Docker cache is busted
# periodically and 'apt-get update' actually re-runs.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling used by the unit-test scripts.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.57.1
RUN pip3 install protobuf
RUN pip3 install codespell
"""
832
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
#   Copies every package stage's /usr/local into one image and sets up the
#   local user so the container can be run as the invoking user.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; everything else goes to stderr.
print(docker_final_img_name)
871