1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28
29# typing.Dict is used for type-hints.
30from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401
31
32from sh import docker, git, nproc, uname  # type: ignore
33
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        #   Subclasses are declared as 'class X(TypedDict, total=False)';
        #   plain dict's __init_subclass__ would reject that keyword.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()
43
44
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by every package build
proc_count = nproc().strip()  # host CPU count, used for 'make -j' parallelism
48
49
class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary.

    All keys are optional at the type level ('total=False'); the comments
    below record which ones a package entry is expected to provide.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    #   Written by Package.run() under Package.lock.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
78
79
# Packages to include in image.
#   Keyed by "<github-org>/<repo>" (used to form the default GitHub archive
#   URL) or a plain name when a 'url' lambda overrides the download location.
packages = {
    "boost": PackageDef(
        rev="1.80.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"  # noqa: E501
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine"
            ),
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.9",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="9.1.0",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.1",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # release-1.12.1
    "google/googletest": PackageDef(
        rev="58d77fa8070e8cec2dc1ed015d66b454c8d78850",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-6.0 2022-11-28
    "openbmc/linux": PackageDef(
        rev="1b16243b004ce4d977a9f3b9d9e715cf5028f867",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb # noqa: E501
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v0.7",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # No 'rev': Package._update_rev() resolves the branch HEAD at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "Naios/function2",
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
271
# Define common flags used for builds.
#   These are interpolated into the per-package build commands; the
#   resulting strings are identical to joining the individual options
#   with single spaces.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
293
294
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread that builds one package's Docker stage once
    all of the stages it depends on have completed.  Generally, this class
    should not be instantiated directly but through Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    #   All cross-thread reads/writes of PackageDef entries must hold this.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar )"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition in the shared dictionary.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   Using 'with' (instead of bare acquire/release) guarantees the
        #   lock is released even if an exception is raised while held,
        #   which would otherwise deadlock every sibling thread.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for dep in deps:
            dep.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   Exceptions are stashed (not raised) so generate_all() can report
        #   them on the main thread.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.

        Raises the first exception saved by any package thread.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Holding the lock here allows all the
        #   threads to start before any of them 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """Look up the HEAD for missing a static rev."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fallback to master/main).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif (
                    "refs/heads/master" in line or "refs/heads/main" in line
                ) and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """Build/install command for 'autoconf' build_type packages."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """Build/install command for 'cmake' build_type packages."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """Build/install command for 'custom' build_type packages."""
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """Build/install command for 'make' build_type packages."""
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """Build/install command for 'meson' build_type packages."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
574
575
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        year, week, _ = date.today().isocalendar()
        return f"{year}-W{week:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        # "<image>[-<pkg>]:<iso-week>-<sha256-prefix>"
        name = docker_image_name
        if pkgname:
            name = f"{name}-{pkgname}"

        digest = sha256(dockerfile.encode()).hexdigest()[0:16]
        return f"{name}:{Docker.timestamp()}-{digest}"

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'.
        """

        # If we're not forcing builds, check if it already exists and skip.
        #   (When force_build is set the 'ls' probe is never issued.)
        if not force_build and docker.image.ls(
            tag, "--format", '"{{.Repository}}:{{.Tag}}"'
        ):
            print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
            return

        def _echo(line: str) -> None:
            # Prefix each build-output line with the package name and send
            # it to stderr so stdout stays pristine for the final tag.
            print(pkg + ":", line, end="", file=sys.stderr, flush=True)

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=_echo,
        )
634
635
# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
# Any non-empty value forces every image to be rebuilt with --no-cache.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by Jenkins; its presence implies a CI context.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:kinetic")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit-supplied revision override, consumed by Package._update_rev().
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the architecture for Docker (image-name prefix per arch).
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
676
# Special flags if setting up a deb mirror.
#   Only applied for Ubuntu distros when UBUNTU_MIRROR is set; the snippet
#   rewrites /etc/apt/sources.list to point all pockets at the mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
#   proxy_cmd is appended to the final Dockerfile, proxy_keyserver is used
#   by the apt-key call in the base image, and proxy_args are extra
#   arguments passed to 'docker build'.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
717
# Create base Dockerfile.
#   This is the shared parent stage: distro + toolchain + distro packages.
#   Every package stage and the final image are built FROM it, so any change
#   here (including to the interpolated mirror/proxy values) changes its
#   sha256-derived tag.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-12 \
    g++-12 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-15 \
    clang-format-15 \
    clang-tidy-15 \
    clang-tools-15 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing2-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

# Kinetic comes with GCC-12, so skip this.
#RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12 \
#  --slave /usr/bin/g++ g++ /usr/bin/g++-12 \
#  --slave /usr/bin/gcov gcov /usr/bin/gcov-12 \
#  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-12 \
#  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-12
#RUN update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-12 12

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-15 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-15 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-15 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-15 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-15 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-15

"""
837
# In CI, inject a once-per-ISO-week echo so the Docker layer cache is
# busted regularly and 'apt-get update' actually re-runs.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
846
# Python and JavaScript tooling installed on top of the distro packages.
dockerfile_base += """
RUN pip3 install beautysh
RUN pip3 install black
RUN pip3 install codespell
RUN pip3 install flake8
RUN pip3 install gitlint
RUN pip3 install inflection
RUN pip3 install isort
RUN pip3 install jsonschema
RUN pip3 install meson==0.63.0
RUN pip3 install protobuf
RUN pip3 install requests

RUN npm install -g \
        eslint@latest eslint-plugin-json@latest \
        markdownlint-cli@latest \
        prettier@latest
"""
865
# Build the base and stage docker images.
#   The base image must exist before the package stages are generated,
#   since each package Dockerfile starts FROM docker_base_img_name.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
870
# Create the final Dockerfile.
#   Combines the base image with every package stage's /usr/local output
#   and performs the per-user workspace configuration.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || \
        useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image (the script's only stdout output).
print(docker_final_img_name)
905