1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:hirsute
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:

    class TypedDict(dict):  # type: ignore
        """Minimal stand-in for typing.TypedDict on Python < 3.8.

        Behaves like a plain dict; subclassing with keyword arguments
        (e.g. total=False) must not raise, hence the override below.
        """

        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
# Install prefix shared by every package build; also used for COPY paths.
prefix = "/usr/local"
# Parallelism for make builds; output of the host's 'nproc' (sh wrapper).
proc_count = nproc().strip()
46
47
class PackageDef(TypedDict, total=False):
    """ Package Definition for packages dictionary.

    total=False makes every field optional at the type level; see the
    per-field comments for which fields each build_type expects.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   Written by Package.run() while holding Package.lock.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are either a GitHub "org/repo" path (fetched from the GitHub archive
#   URL by default) or a bare name paired with a custom 'url' lambda.
packages = {
    "boost": PackageDef(
        rev="1.76.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # valijson v0.4, which contains the nlohmann/json.hpp include fix:
    # 66262bafb82c ("Include nlohmann/json.hpp instead of json.hpp")
    "tristanpenman/valijson": PackageDef(
        rev="v0.4",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev' here: Package._update_rev() resolves it from the Git remote.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
288
# Define common flags used for builds.
# Passed to ./configure for autoconf packages.
configure_flags = f"--prefix={prefix}"

# Passed to cmake for cmake packages; Ninja is used as the generator.
cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

# Passed to meson for meson packages; subproject downloads are disallowed.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
310
311
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread which resolves the package's revision, waits
    for its dependencies' stages to finish, and then builds a Docker stage
    for the package itself.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary so mutations stay local to this class.
    packages = packages.copy()

    # Lock used for thread-safety of the shared PackageDef entries.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   Exceptions are re-raised from generate_all() on the main thread.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.

        Raises the first exception recorded by any package thread.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        # Docker stage names must be lowercase and cannot contain '/'.
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source.

        Raises NotImplementedError for URLs that are not bz2/gz tarballs.
        """

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # Archives extract to '<repo>-<rev>'; glob to avoid naming the rev.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.

        Raises NotImplementedError for an unknown 'build_type'.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Bootstrap/configure/make/install sequence for autoconf pkgs. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Out-of-tree cmake configure/build/install sequence. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Chain the package's own 'build_steps' commands. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Plain make/install sequence. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Meson configure + ninja build/install sequence. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
587
588
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None (or empty) for the final image, in which case
        no package component is added to the tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
644
645
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# Any non-empty value forces rebuilds instead of reusing cached images.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); used purely as a truthy flag here.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:hirsute")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit-supplied project/revision; used to pin the package under review.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
663
# Determine the architecture for Docker.
#   ppc64le needs an arch-prefixed base image name; x86_64 uses the default.
#   Any other architecture is unsupported and aborts the script.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
676
# Special flags if setting up a deb mirror.
#   The $(...) substitutions are intentionally left unexpanded here; they are
#   evaluated by the shell inside the container at 'docker build' time.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
687
# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    # Dockerfile snippet that routes git traffic through the proxy.
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # apt-key needs the proxy passed via --keyserver-options.
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    # Extra 'docker build' arguments so in-container fetches use the proxy.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
707
# Create base Dockerfile.
#   NOTE: This string is hashed by Docker.tagname() to derive the image tag,
#   so any change to it (even whitespace) yields a new tag and a rebuild.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-12 \
    clang-format-12 \
    clang-tidy-12 \
    clang-tools-12 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-12 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-12 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-12 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-12 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-12.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-12

"""
823
# Under CI, poke the Docker cache weekly (via the ISO-week timestamp) so the
# apt-get update/upgrade layer is re-run instead of served from cache.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling used by the unit-test scripts, installed via pip.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.1
RUN pip3 install protobuf
RUN pip3 install codespell
"""
841
# Build the base and stage docker images.
#   Every per-package stage (Package.generate_all) builds FROM this base.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
846
# Create the final Dockerfile.
#   Combines the base image, COPYs from every package stage, and the
#   workspace/user configuration.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; build logs go to stderr (see Docker.build).
print(docker_final_img_name)
880