xref: /openbmc/openbmc-build-scripts/scripts/build-unit-test-docker (revision ee3c9eeb293302b3245fc9ae7c56e00883a792d5)
1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:focal
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Minimal stand-in for older Pythons: behaves like 'dict' but accepts
    # (and ignores) TypedDict's class keyword arguments such as 'total',
    # so the PackageDef definition below still parses.
    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
# Install prefix shared by every package build below.
prefix = "/usr/local"
# Parallelism for 'make -j' style invocations; taken from the host via nproc.
proc_count = nproc().strip()
46
47
class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary.

    All fields are optional at the type level (total=False); the per-field
    comments note which ones each package entry must actually provide.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   Populated at runtime by Package.run(); not set in the literals below.
    __tag: str
    # __package [private]: Package object associated with this package.
    #   Populated at runtime by Package.__init__().
    __package: Any  # Type is Package, but not defined yet.
74
75
# Packages to include in image.
#   Keys are either a bare name ("boost") or a Github "org/repo" path; the
#   latter also derives the default download URL and the Docker stage name.
#   See PackageDef for the meaning of each field.
packages = {
    "boost": PackageDef(
        rev="1.75.0",
        # NOTE: dl.bintray.com was shut down in May 2021; fetch release
        # tarballs from the boostorg JFrog Artifactory mirror instead.
        url=(
            lambda pkg, rev: f"https://boostorg.jfrog.io/artifactory/main/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.0",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.12.2",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Snapshot from 2020-01-03
    "Naios/function2": PackageDef(
        rev="3a0746bf5f601dfed05330aefcb6854354fce07d",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/nlohmann",
            f"cp single_include/nlohmann/json.hpp {prefix}/include/nlohmann",
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.8.1",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="autoconf",
        config_flags=[
            "--enable-metadata-processing",
            f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
285
# Define common flags used for builds

# Flags passed to every './configure' invocation.
configure_flags = f"--prefix={prefix}"

# Flags passed to every 'cmake' configuration step.
cmake_flags = " ".join(
    (
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    )
)

# Flags passed to every 'meson' configuration step.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
307
308
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry; register ourselves
        # in it so dependents can find this thread and join on it.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   Use a 'with' block so the lock is released even if a lookup
        #   raises (a bare acquire/release pair would deadlock the other
        #   threads in that case).
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        for t in pkg_threads:
            t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        #   'with' guarantees release even if parsing raises.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Bootstrap + configure + make for autoconf-based packages. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Out-of-tree cmake configure + build + install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Chain the package's own 'build_steps' together. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Plain 'make && make install'. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ meson configure + ninja build + install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
564
565
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        # (year, week, weekday); formatted as e.g. "2021-W02" so the value
        # changes once per ISO week and cached images expire on that cadence.
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None, in which case no package suffix is appended
        (used for the final image tag).
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        # <image>[-<pkg>]:<iso-week>-<first 16 hex chars of sha256(Dockerfile)>
        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not polute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
621
622
# Read a bunch of environment variables (see the header comment at the top
# of this script for their meanings and defaults).
_env = os.environ
docker_image_name = _env.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = _env.get("FORCE_DOCKER_BUILD")          # truthy => rebuild all
is_automated_ci_build = _env.get("BUILD_URL", False)  # set by Jenkins et al.
distro = _env.get("DISTRO", "ubuntu:focal")
branch = _env.get("BRANCH", "master")
ubuntu_mirror = _env.get("UBUNTU_MIRROR")
http_proxy = _env.get("http_proxy")

# Identity of the invoking user; mirrored into the image later on.
username = _env.get("USER", "root")
homedir = _env.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
637
# Determine the architecture for Docker.
arch = uname("-m").strip()

# Map each supported host architecture onto the Docker Hub namespace prefix
# for its base images; anything else is unsupported.
_docker_bases = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
}
if arch not in _docker_bases:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = _docker_bases[arch]
650
# Special flags if setting up a deb mirror.
#   When UBUNTU_MIRROR is set (and the distro is Ubuntu), emit a Dockerfile
#   snippet that rewrites /etc/apt/sources.list so every pocket (release,
#   -updates, -security, -proposed, -backports) points at the mirror.
#   The $(. /etc/os-release ...) runs inside the container to pick up the
#   image's own codename.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
661
# Special flags for proxying.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    # Route git traffic inside the container through the proxy.
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # Pass the proxy through to 'docker build' as build args.
    #   BUG FIX: the https_proxy entry was missing its f-prefix, so the
    #   literal text "{https_proxy}" was passed instead of a URL (and no
    #   'https_proxy' variable exists).  Reuse the http_proxy URL for
    #   https traffic, consistent with the rest of this script.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
678
# Create base Dockerfile.
#   This text is interpolated now (f-string) and later hashed by
#   Docker.tagname(), so any edit here yields a new base image tag.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libevdev2-dbgsym \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-10 \
    clang-format-10 \
    clang-tidy-10 \
    clang-tools-10 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py

"""

# CI builds deliberately bust the Docker cache once per ISO week (via the
# changing timestamp) so 'apt-get update' re-runs on a regular cadence.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling used by the unit-test infrastructure.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.54.3
RUN pip3 install protobuf
"""
801
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
# Build one Docker stage per entry in 'packages' (threaded, dependency-aware).
Package.generate_all()

# Create the final Dockerfile.
#   Copies every package's install tree from its stage onto the base image.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
# None => the final image's tag carries no package-name suffix.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; callers capture it to select the image.
print(docker_final_img_name)
836