1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#                     default is ubuntu:groovy
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only catch the missing-name case; a bare 'except:' would also swallow
    # KeyboardInterrupt/SystemExit during startup.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by every package build stage
proc_count = nproc().strip()  # parallel job count for make/ninja (via `nproc`)
46
47
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    All keys are optional (total=False) except 'build_type', which every
    entry is expected to supply.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   (Filled in at runtime by Package; not part of authored definitions.)
    __tag: str
    # __package [private]: Package object associated with this package.
    #   (Filled in at runtime by Package.__init__.)
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are either a bare tarball name ("boost") or a Github "org/repo"
#   pair; values describe how to download and build each one (see PackageDef).
packages = {
    "boost": PackageDef(
        rev="1.75.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.0",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.12.2",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Snapshot from 2020-01-03
    "Naios/function2": PackageDef(
        rev="3a0746bf5f601dfed05330aefcb6854354fce07d",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.8.1",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No rev: HEAD of the default branch is looked up at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="autoconf",
        config_flags=[
            "--enable-metadata-processing",
            f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
286
# Common option strings shared by every build of the given build-system type.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
308
309
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Class-level copy of the packages dictionary; the private keys
    # (__tag, __package) are added to the PackageDefs as builds progress.
    packages = packages.copy()

    # Lock guarding all cross-thread reads/writes of the PackageDefs.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   'with' guarantees the lock is released even if the lookup
        #   raises; a bare acquire/release would deadlock peer threads.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            # Stash the exception; generate_all() re-raises it after join().
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Autoconf bootstrap/configure/make/install command string. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ CMake out-of-tree configure/build/install command string. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Join the package's custom 'build_steps' into one shell command. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Plain 'make && make install' command string. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Meson configure plus ninja build/install command string. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
585
586
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        # "(ISO-year)-W(ISO-week)", ex. "2021-W16"; rolls over weekly so
        # tags naturally expire and force periodic rebuilds.
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Optional stage name appended to the image name; the final
                  image passes None to get the unsuffixed name.
        dockerfile - Dockerfile contents; a SHA256 prefix of it becomes part
                  of the tag so any content change yields a distinct tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
642
643
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# NOTE(review): any non-empty string (including "0") enables the force —
# confirm that matches the documented "non-zero value" contract.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:groovy")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Provided by Jenkins/Gerrit when building a specific patchset; consumed by
# Package._update_rev to pin the matching repository revision.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
661
# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"  # ppc64le images live under the ppc64le/ namespace
elif arch == "x86_64":
    docker_base = ""  # official images need no registry prefix on x86_64
else:
    # Unknown host architecture: fail fast rather than build a broken image.
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
674
# Special flags if setting up a deb mirror.
#   Only applied for Ubuntu distros with UBUNTU_MIRROR set; rewrites
#   sources.list so every pocket (release/updates/security/proposed/
#   backports) points at the mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
685
# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet configuring git inside the image
proxy_args = []  # extra 'docker build' args passing the proxy through
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # Pass the same proxy for both http and https build args.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
702
# Create base Dockerfile.
#   This stage installs the distro toolchain and -dev packages that every
#   package stage (and the final image) builds on top of.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-11 \
    clang-format-11 \
    clang-tidy-11 \
    clang-tools-11 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-11 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-11 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-11 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-11.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-11

"""
815
# CI runs (detected via BUILD_URL) get a cache-busting echo so the apt data
# is refreshed at least weekly (Docker.timestamp changes per ISO week).
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
824
# Append the Python tooling used by repository CI scripts, one pip install
# per Dockerfile layer (matching the original layer layout exactly).
dockerfile_base += "\n" + "".join(
    f"RUN pip3 install {tool}\n"
    for tool in (
        "inflection",
        "pycodestyle",
        "jsonschema",
        "meson==0.57.1",
        "protobuf",
        "codespell",
    )
)
833
# Build the base and stage docker images.
#   The base image is built first; every package stage then builds FROM it
#   in its own thread (see Package.generate_all).
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
838
# Create the final Dockerfile.
#   Copies every built package stage into one image and configures the
#   calling user's account inside it.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local

{proxy_cmd}

RUN /bin/bash
"""
865
# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the script's only stdout output; callers capture it to use the
#   resulting image (all build progress went to stderr).
print(docker_final_img_name)
872