xref: /openbmc/openbmc-build-scripts/scripts/build-unit-test-docker (revision de3ee7624036e1c8d7cd71f0bd8e2bb4a8e002ec)
1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Narrowed from a bare 'except:' — we only want to handle the missing
    # symbol on old interpreters, not mask unrelated errors.

    # Shim for pre-3.8 interpreters: behaves like a plain dict while
    # accepting (and discarding) the 'total' keyword argument used by the
    # PackageDef class definition below.
    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
# Declare some variables used in package definitions.
# Install prefix shared by every package build below.
prefix = "/usr/local"
# Parallelism for 'make -j' style invocations; host CPU count via sh's nproc.
proc_count = nproc().strip()
45
46
class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary.

    All fields are optional at construction time (total=False); fields
    marked [required] below must be present before the package is built.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   Written by Package.run() under Package.lock.
    __tag: str
    # __package [private]: Package object associated with this package.
    #   Written by Package.__init__ so dependents can 'join' this thread.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#   Keyed by "org/repo" on GitHub (or a bare name like "boost" when the
#   'url' lambda overrides the default GitHub archive download).
packages = {
    "boost": PackageDef(
        rev="1.77.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        # Only the UAPI headers are installed, not a full kernel build.
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="4f712ed69a04a344957d22efa5dc111b415b3aff",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev' here: Package._update_rev() resolves the branch HEAD at build.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        # Install the sdbus++ Python tooling before building the library.
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
282
# Common flag strings shared by the per-build-type command builders below.

# autoconf: only the install prefix is shared.
configure_flags = f"--prefix={prefix}"

# cmake: shared libraries, debuggable release build, and the Ninja generator.
cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON "
    "-DCMAKE_BUILD_TYPE=RelWithDebInfo "
    f"-DCMAKE_INSTALL_PREFIX:PATH={prefix} "
    "-GNinja "
    "-DCMAKE_MAKE_PROGRAM=ninja"
)

# meson: never download subproject wraps inside the container build.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
304
305
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().

    Each instance is a thread that builds one Docker stage for one entry of
    'packages'.  A thread 'join's the threads of its dependencies before
    generating its own Dockerfile, so dependency tags exist when the COPY
    commands are emitted.
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef; also record a back-pointer
        # so dependent packages can find this thread via the shared dict.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        #   Exceptions are re-raised in the main thread by generate_all().
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        # NOTE(review): these are substring matches, so a branch whose name
        # merely contains {branch} (e.g. 'foo' vs 'foobar') would also match.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if f"refs/heads/{branch}" in line:
                self.pkg_def["rev"] = line.split()[0]
            elif f"refs/heads/master" in line and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the decompression flag from the archive extension.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # Glob because archives unpack to '<repo>-<rev>' style directories.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Shell command for an autoconf-based build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Shell command for a cmake-based (Ninja generator) build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Shell command chaining the package's custom 'build_steps'. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Shell command for a plain Makefile build. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Shell command for a meson/ninja build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
581
582
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Return a timestamp for today keyed on the ISO calendar week."""
        year, week, _day = date.today().isocalendar()
        return f"{year}-W{week:02}"

    @staticmethod
    def tagname(pkgname: str, dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be falsy (e.g. None for the final image), in which case
        the bare image name is used.
        """
        name = docker_image_name
        if pkgname:
            name = f"{name}-{pkgname}"

        digest = sha256(dockerfile.encode()).hexdigest()[0:16]
        return f"{name}:{Docker.timestamp()}-{digest}"

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, skip anything already present locally.
        if not force_build and docker.image.ls(
            tag, "--format", '"{{.Repository}}:{{.Tag}}"'
        ):
            print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
            return

        # Send 'docker build' output to stderr, prefixed with the package
        # name, so progress is visible while stdout stays pristine for the
        # final tag that the script prints at the end.
        def _emit(line: str) -> None:
            print(pkg + ":", line, end="", file=sys.stderr, flush=True)

        # Unusual flags:
        #   --no-cache: Bypass the Docker cache if 'force_build'.
        #   --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=_emit,
        )
638
639
# Read a bunch of environment variables.
env = os.environ

# Image naming / build behavior knobs.
docker_image_name = env.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = env.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = env.get("BUILD_URL", False)

# Distro / branch / network configuration.
distro = env.get("DISTRO", "ubuntu:impish")
branch = env.get("BRANCH", "master")
ubuntu_mirror = env.get("UBUNTU_MIRROR")
http_proxy = env.get("http_proxy")

# Gerrit-supplied revision information (set under CI).
gerrit_project = env.get("GERRIT_PROJECT")
gerrit_rev = env.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables describing the invoking user.
username = env.get("USER", "root")
homedir = env.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
657
# Determine the architecture for Docker.
#   Map the host machine type to the Docker Hub image-name prefix.
arch = uname("-m").strip()
_arch_prefixes = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
}
if arch not in _arch_prefixes:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = _arch_prefixes[arch]
670
# Special flags if setting up a deb mirror.
#   When UBUNTU_MIRROR is set (and the distro is Ubuntu), this Dockerfile
#   snippet rewrites /etc/apt/sources.list so every suite (release, -updates,
#   -security, -proposed, -backports) points at the mirror.  The codename is
#   resolved at image-build time from /etc/os-release.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
681
# Special flags for proxying.
#   proxy_cmd:       Dockerfile snippet configuring git's HTTP proxy.
#   proxy_keyserver: extra apt-key option used in the base Dockerfile.
#   proxy_args:      'docker build' arguments forwarding the proxy settings.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    # Forward the same proxy for both http and https inside the build.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
701
# Create base Dockerfile.
#   This is the shared parent image: distro base + toolchains + distro
#   packages.  Every per-package stage (class Package) builds FROM the
#   image produced from this Dockerfile.  Note: its exact text feeds the
#   sha256 in Docker.tagname(), so any change produces a new tag.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""
819
# Under CI (BUILD_URL set), bust Docker's layer cache once per timestamp
# period so 'apt-get update' actually re-runs instead of replaying a stale
# cached layer.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
828
# Python tooling installed via pip on top of the distro packages.
_pip_packages = [
    "inflection",
    "pycodestyle",
    "jsonschema",
    "meson==0.58.1",
    "protobuf",
    "codespell",
]
dockerfile_base += "\n" + "".join(
    f"RUN pip3 install {pkg}\n" for pkg in _pip_packages
)
837
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
# Build every package stage (in parallel threads) on top of the base image.
Package.generate_all()

# Create the final Dockerfile.
#   Copies all package install trees into one image and configures the
#   invoking user so the container can write to the mounted workspace.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
# NOTE: tagname() is passed None (not a str) for the final image so no
# package suffix is appended.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; callers capture it to use the image.
print(docker_final_img_name)
876