#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if input branch not
#                     provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
#                     instead of our default (public.ecr.aws/ubuntu)
#                     (ex. docker.io)>
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set
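#
# Example invocation (the script name and values here are illustrative):
#   DISTRO=ubuntu:noble FORCE_DOCKER_BUILD=1 ./build-unit-test-docker
# On success, the tag of the final image is printed to stdout.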

import json
import os
import re
import sys
import threading
import urllib.error
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.84.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Dtests=false",
            "-Dabi-compliance-check=false",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from GitHub.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which another thread may be modifying.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache' file.
        This file is a comma-separated list of "<pkg>:<rev>".
        """
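        # The resulting string looks like, e.g. (entries illustrative):
        #   "CLIUtils/CLI11:v2.3.2,boost:1.84.0,danmar/cppcheck:2.12.1,..."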

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
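            # Gerrit's REST API prepends a ")]}'" XSSI-protection line to
            # its JSON responses; the JSON body follows on a single line,
            # so splitlines()[-1] extracts just the JSON.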
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has more than 1 commit ({len(commits)}) under {gerrit_topic}; using latest upstream",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the remote HEAD when a static rev is missing."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask GitHub for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )
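        # Each line of 'ls-remote --heads' output pairs a commit hash with
        # a ref, tab-separated, e.g. (hash illustrative):
        #   1234abcd... refs/heads/master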

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
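        # e.g. "openbmc/phosphor-logging" -> "openbmc-phosphor-logging"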
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""
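        # The generated command looks like, e.g. (URL illustrative):
        #   curl -L https://github.com/org/pkg/archive/rev.tar.gz | tar -xz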

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """
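        # Each package contributes two lines; the tag here is illustrative:
        #   COPY --from=openbmc/ubuntu-unit-test-boost:2024-W07-... /usr/local /usr/local
        #   RUN true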

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """
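        # The result is a single Dockerfile RUN instruction chaining
        # download, configure, build, and install, e.g. (illustrative):
        #   RUN curl -L https://... | tar -xz && cd fmt* && mkdir builddir && ...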

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
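        # e.g. 2024-02-14 falls in ISO week 7 of 2024 -> "2024-W07"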
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
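        # e.g. "openbmc/ubuntu-unit-test-boost:2024-W07-0123456789abcdef"
        # (image name, week, and hash digits here are illustrative)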
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:noble")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""
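
# With UBUNTU_MIRROR=http://mirror.example.com/ubuntu (a hypothetical
# mirror), the first generated line would be, e.g.:
#   deb http://mirror.example.com/ubuntu noble main restricted universe multiverse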

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_reg}/{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.12/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" 18

# Install extra clang tools
RUN apt-get install -yy \
        clang-18 \
        clang-format-18 \
        clang-tidy-18

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-18 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-18 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-18 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-18 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-18

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to regularly pollute the docker cache and force
# us to re-run `apt-get update`; the timestamp below changes each ISO week.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isoduration \
        isort \
        jsonschema \
        meson==1.3.0 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment.
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)