#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which is used if the input branch is
#                     not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is unset.
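#
# Example invocation (illustrative; the script name is assumed and the tag of
# the final image is printed to stdout on success):
#   DISTRO=ubuntu:impish BRANCH=master ./build-unit-test-docker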

import os
import sys
import threading
from datetime import date
from hashlib import sha256
from typing import Any, Callable, Dict, Iterable, Optional

from sh import docker, git, nproc, uname  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """ Package Definition for packages dictionary. """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
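#   Keys are generally GitHub "org/repo" paths; the source is downloaded from
#   the GitHub archive URL for "rev" unless a "url" lambda overrides it (see
#   Package._url below).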
packages = {
    # Install OpenSSL 3.x.
    # Generally we want to rely on the version of OpenSSL from the OS, but v3.x
    # was a major API change.  It is included in Yocto but not Ubuntu until
    # 22.04.  Install it manually so that our CI can target the OpenSSL 3.x
    # APIs.
    "openssl/openssl": PackageDef(
        rev="openssl-3.0.1",
        build_type="custom",
        build_steps=[
            f"./Configure --prefix={prefix} --libdir=lib",
            f"make -j{proc_count}",
            f"make -j{proc_count} install",
        ],
    ),
    "boost": PackageDef(
        rev="1.78.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="8.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Release 2021-06-12
    "google/googletest": PackageDef(
        rev="release-1.11.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
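
# For illustration, a "meson" package ends up configured roughly as
# (see Package._cmd_build_meson below):
#   meson builddir --wrap-mode=nodownload -Dprefix=/usr/local <config_flags>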


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from GitHub.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """
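        # e.g. "CLIUtils/CLI11:v1.9.1,boost:1.78.0,..." with one
        # "<pkg>:<rev>" entry per package.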

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD revision when a static rev is missing. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask GitHub for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fall back to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
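        # e.g. "openbmc/sdbusplus" becomes stage "openbmc-sdbusplus".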
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the GitHub archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""
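        # The generated command has the form "curl -L <url> | tar -xz"
        # (or "tar -xj" for .bz2 tarballs).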

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
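        # GitHub archives unpack into "<repo>-<rev>/", so glob on the bare
        # repository name (e.g. "cd sdbusplus*").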
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """
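        # Each package contributes a snippet like:
        #   COPY --from=<tag> /usr/local /usr/local
        #   RUN true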

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
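        # e.g. "2021-W16" for the 16th ISO week of 2021.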
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile. """
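        # e.g. "openbmc/ubuntu-unit-test-base:2021-W16-<16 hex digits>"
        # when pkgname is "base".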
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
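        #
        #   For reference, the call below is roughly equivalent to the CLI
        #   invocation (illustrative):
        #       docker build <proxy args> --network=host --force-rm \
        #           --no-cache=<true|false> -t <tag> - < Dockerfile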
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:impish")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture ({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the Ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev \
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly, forcing us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.1
RUN pip3 install protobuf
RUN pip3 install codespell
RUN pip3 install requests
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)