1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:focal
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only the missing-name case should fall through to the shim; a bare
    # 'except:' would also hide unrelated errors (even KeyboardInterrupt).

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix used by every package stage
proc_count = nproc().strip()  # host CPU count, used for 'make -j' parallelism
46
47
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    total=False: every field is optional at the type level; which fields a
    package actually needs depends on its build_type (see field comments).
    """

    # rev [optional]: Revision of package to use.
    #   When absent, Package._update_rev() fills it from the GitHub HEAD.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    #   When absent, a github.com archive URL is derived from the dict key.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies (keys of 'packages').
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # The two private fields below are written at runtime via string keys
    # (pkg_def["__tag"]), so class-body name mangling of the annotations
    # does not affect their use.
    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are either a GitHub "org/repo" (downloaded from github.com archives)
#   or a plain name paired with a 'url' lambda.  Entries without 'rev' are
#   resolved to a branch HEAD at build time (see Package._update_rev).
packages = {
    "boost": PackageDef(
        rev="1.75.0",
        # NOTE(review): dl.bintray.com was sunset in 2021 — verify this
        # mirror still resolves before relying on a rebuild.
        url=(
            lambda pkg, rev: f"https://dl.bintray.com/boostorg/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.0",
        build_type="custom",
        # Header-only; just copy the headers into the prefix.
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.12.2",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Snapshot from 2020-01-03
    "Naios/function2": PackageDef(
        rev="3a0746bf5f601dfed05330aefcb6854354fce07d",
        build_type="custom",
        # Header-only; install the single header manually.
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        # Compatibility symlink for code including <json.hpp> directly.
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        # Only the sanitized kernel headers are installed, not the kernel.
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.8.1",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        # Header-only; install the single header manually.
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev': built from the branch HEAD resolved at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="autoconf",
        config_flags=[
            "--enable-metadata-processing",
            f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        # Install the sdbus++ python tooling before building the library.
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
286
# Define common flags used for builds.

# autoconf ./configure flags (single option, no join needed).
configure_flags = f"--prefix={prefix}"

# cmake configuration flags: Ninja generator, shared libs, install prefix.
cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

# meson setup flags: no subproject downloads, install prefix.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
308
309
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread that generates a single-package Dockerfile
    stage and builds it, blocking on its dependencies' threads first.
    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary so module-level state is untouched.
    packages = packages.copy()

    # Lock used for thread-safety of PackageDef reads/writes.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   'with' guarantees the lock is released even if the lookup
        #   raises (a bare acquire/release pair would leak the lock and
        #   deadlock every other package thread).
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   Exceptions cannot propagate out of a thread; generate_all()
        #   re-raises them after join().
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        for t in pkg_threads:
            t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the branch HEAD for packages missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        #   A {branch} match always wins: it overwrites any earlier master
        #   match, while master only fills in a still-missing rev.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # Archives unpack to '<repo>-<rev>'; glob on the repo basename.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Build command for autoconf packages: bootstrap/configure/make. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Build command for cmake packages: out-of-tree Ninja build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Build command for custom packages: chain the given steps. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Build command for plain-make packages. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Build command for meson packages: setup + ninja build/install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
570
571
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week.

        ex. "2021-W02"; changes weekly so cached images expire weekly.
        """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Name of the package stage, or None for the final image
                  (the caller passes None when tagging the final build, so
                  the annotation must be Optional rather than plain str).
        dockerfile - Dockerfile content; its sha256 makes the tag unique.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not polute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",  # read the Dockerfile from stdin (_in below)
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
627
628
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); its mere presence marks a CI build.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:focal")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Set up some common variables.
#   These describe the invoking user so the image can mirror their account.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Determine the architecture for Docker.
#   Only x86_64 and ppc64le hosts are supported; anything else aborts.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
656
# Special flags if setting up a deb mirror.
#   Rewrites sources.list so every suite points at the given mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    # Route git traffic inside the image through the proxy.
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # Pass the proxy to 'docker build' as build args.
    #   BUG FIX: the https_proxy entry was a plain string missing its 'f'
    #   prefix (and referenced an undefined 'https_proxy' name), so the
    #   literal text "https_proxy={https_proxy}" was handed to docker.
    #   Use http_proxy for both protocols, as intended.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
684
# Create base Dockerfile.
#   This stage installs the toolchain and all distro-provided dependencies;
#   every per-package stage builds FROM the image it produces.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libevdev2-dbgsym \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-10 \
    clang-format-10 \
    clang-tidy-10 \
    clang-tools-10 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py

"""

# Under CI, bust the Docker cache weekly (via timestamp()) so apt packages
# get refreshed instead of reusing a stale cached layer.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling that is unavailable (or too old) as distro packages.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.54.3
RUN pip3 install protobuf
"""
807
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
#   Copies every package stage's install tree onto the base image.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
#   The final image has no stage name, hence 'None' for tagname's pkgname.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; build logs all went to stderr.
print(docker_final_img_name)
842