#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which is used if the input branch is
#                     not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", no proxy is set up if this is not set.

import os
import sys
import threading
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc, uname  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.

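
# As an illustrative sketch only (the package name, rev, and flag below are
# hypothetical, not part of the image), a minimal cmake-based entry in the
# 'packages' dictionary would look like:
#
#   "example-org/example-pkg": PackageDef(
#       rev="v1.0.0",
#       build_type="cmake",
#       config_flags=["-DBUILD_TESTING=OFF"],
#   ),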

# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.83.0",
        url=(
            lambda pkg, rev: f"https://boostorg.jfrog.io/artifactory/main/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.gz"  # noqa: E501
        ),
        build_type="custom",
        build_steps=[
            (
                "curl --remote-name"
                " https://github.com/williamspatrick/beast/commit/98f8b1fbd059a35754c2c7b2841769cf8d021272.patch"
                " && patch -p2 <"
                " 98f8b1fbd059a35754c2c7b2841769cf8d021272.patch &&"
                " ./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.14.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.57.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "linux-test-project/lcov": PackageDef(
        rev="v1.16",
        build_type="make",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v0.7",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "Naios/function2",
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
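
# With the default prefix these evaluate to plain strings, e.g.:
#   meson_flags == "--wrap-mode=nodownload -Dprefix=/usr/local"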


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # If this package has no rev, fetch it from GitHub.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which may be modified concurrently.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache
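
    # Illustrative depcache contents, assuming only two hypothetical
    # packages "a/b" at rev "v1" and "c/d" at rev "v2" were defined:
    #     "a/b:v1,c/d:v2,"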

    def _update_rev(self) -> None:
        """Look up the HEAD revision for packages missing a static rev."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask GitHub for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fall back to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if f"refs/heads/{branch}" in line:
                self.pkg_def["rev"] = line.split()[0]
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()
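
    # For example, "openbmc/sdbusplus" yields the stage name
    # "openbmc-sdbusplus".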

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the GitHub archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the
        source.
        """

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd
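
    # Continuing the hypothetical "org/pkg" example above, the generated
    # command would be:
    #     curl -L https://github.com/org/pkg/archive/v1.0.tar.gz | tar -xz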

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the Dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds
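
    # Illustrative snippet produced for a single dependency (the tag shown
    # is hypothetical; the COPY is one line, wrapped here for readability):
    #     COPY --from=openbmc/ubuntu-unit-test-org-pkg:2024-W01-0123456789abcdef \
    #         /usr/local /usr/local
    #     RUN true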

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
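
    # With empty config_env and config_flags, the meson variant produces
    # (modulo whitespace) the single shell command:
    #     meson setup builddir --wrap-mode=nodownload -Dprefix=/usr/local \
    #         && ninja -C builddir && ninja -C builddir install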


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result
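
    # Example (hash suffix is illustrative): with the default image name,
    # tagname("base", dockerfile) might return:
    #     openbmc/ubuntu-unit-test-base:2024-W08-0123456789abcdef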

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tag it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:mantic")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Ensure appropriate docker build output to see progress and identify
# any issues.
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root.
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture ({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
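
# For example, with http_proxy set to the hypothetical
# "http://proxy.example.com:3128", proxy_args becomes:
#   ["--build-arg", "http_proxy=http://proxy.example.com:3128",
#    "--build-arg", "https_proxy=http://proxy.example.com:3128"]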

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    clang-17 \
    clang-format-17 \
    clang-tidy-17 \
    clang-tools-17 \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    sudo \
    systemd \
    valgrind \
    valgrind-dbgsym \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-17 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-17 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-17 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-17 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-17

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache and regularly force
# us to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.2.3 \
        protobuf \
        requests

RUN npm install -g \
        eslint@latest eslint-plugin-json@latest \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment.
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs.
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)