#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#                     default is ubuntu:noble
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if input branch not
#                     provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set.
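#
# Example invocation (the filename is illustrative; the variables are the
# ones documented above):
#   DISTRO=ubuntu:noble BRANCH=master ./build-unit-test-docker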

import json
import os
import re
import sys
import threading
import urllib.error
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc, uname  # type: ignore
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.84.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
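
# For illustration only: with prefix="/usr/local", a cmake package's
# configure step expands to roughly the following (any <config_flags> come
# from the package's PackageDef):
#   cmake -DBUILD_SHARED_LIBS=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo
#       -DCMAKE_INSTALL_PREFIX:PATH=/usr/local -GNinja
#       -DCMAKE_MAKE_PROGRAM=ninja <config_flags> ..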


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which may be modified concurrently.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache
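
    # Illustrative depcache contents (revisions shown are examples only):
    #   "CLIUtils/CLI11:v2.3.2,Naios/function2:4.2.4,boost:1.84.0,..."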

    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
            # Gerrit prefixes JSON responses with a ")]}'" line, so parse
            # only the last line of the body.
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has {len(commits)} commits under"
                    f" {gerrit_topic}; using latest upstream instead",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the HEAD revision for packages missing a static rev."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()
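
    # e.g. "openbmc/sdbusplus" yields the stage name "openbmc-sdbusplus".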

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"
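
    # e.g. "https://github.com/openbmc/sdbusplus/archive/<rev>.tar.gz"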

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd
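
    # e.g. "curl -L https://github.com/<pkg>/archive/<rev>.tar.gz | tar -xz"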

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds
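
    # Emitted per dependency (illustrative):
    #   COPY --from=<dep-tag> /usr/local /usr/local
    #   RUN true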

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
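
    # Putting it together, a meson package's whole stage collapses into a
    # single RUN instruction, roughly (illustrative):
    #   RUN curl -L <url> | tar -xz && cd <srcdir>* &&
    #       meson setup builddir --wrap-mode=nodownload -Dprefix=/usr/local
    #       <config_flags> && ninja -C builddir && ninja -C builddir install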


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"
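
    # e.g. "2024-W07" (ISO year and zero-padded ISO week number).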

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result
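
    # e.g. "openbmc/ubuntu-unit-test-base:2024-W07-<digest>", where <digest>
    # is the leading 16 hex chars of the Dockerfile's sha256.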

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:noble")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
elif arch == "aarch64":
    docker_base = "arm64v8/"
else:
    print(
        f"Unsupported system architecture ({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
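
# e.g. with http_proxy="http://proxy.example.com:3128" (hypothetical), the
# docker build is invoked with --build-arg http_proxy=... and
# --build-arg https_proxy=... set to that URL.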

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" 18

# Install extra clang tools
RUN apt-get install \
        clang-18 \
        clang-format-18 \
        clang-tidy-18

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-18 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-18 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-18 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-18 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-18

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly and force
# us to re-run `apt-get update` at least weekly (the echoed timestamp
# changes with the ISO week).
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.3.0 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)
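
# At this point stdout contains exactly one line, the final image tag
# (e.g. "openbmc/ubuntu-unit-test:2024-W07-<digest>"), since all build
# output above was routed to stderr.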