#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which is used if the input branch is
#                     not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of an Ubuntu mirror to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used
#   DOCKER_REG:       <optional, the URL of a docker registry to utilize
#                     instead of our default (public.ecr.aws/ubuntu)
#                     (ex. docker.io)>
#   http_proxy:       The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set
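#
# Example invocation (illustrative; the script path is hypothetical):
#   DISTRO=ubuntu:noble FORCE_DOCKER_BUILD=1 ./build-unit-test-docker
# The final image tag is printed on stdout; all build output goes to stderr.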

import json
import os
import re
import sys
import threading
import urllib.error
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
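#   Example of a hypothetical minimal entry; omitted fields fall back to the
#   PackageDef defaults above, and a missing 'rev' is resolved from the
#   remote HEAD at build time (see Package._update_rev):
#       "org/repo": PackageDef(rev="v1.0.0", build_type="cmake"),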
packages = {
    "boost": PackageDef(
        rev="1.84.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)
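# For illustration, these expand to single strings such as:
#   configure_flags == "--prefix=/usr/local"
#   meson_flags == "--wrap-mode=nodownload -Dprefix=/usr/local"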


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which may be modified concurrently.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """
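        # Example output (illustrative):
        #   "CLIUtils/CLI11:v2.3.2,LibVNC/libvncserver:LibVNCServer-0.9.14,..."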

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has {len(commits)} open commits under {gerrit_topic}; using latest upstream instead",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the remote HEAD when a static rev is missing."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
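        # e.g. "openbmc/phosphor-logging" -> "openbmc-phosphor-logging"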
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
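        #   e.g. "https://github.com/openbmc/sdbusplus/archive/<rev>.tar.gz"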
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack the source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

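        # e.g. a ".tar.gz" URL yields: curl -L <url> | tar -xz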
        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
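        # Tarballs unpack to a directory starting with the repo name, so we
        # can rely on a glob rather than computing the exact directory.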
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
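        # e.g. returns "2024-W23" during the 23rd ISO week of 2024.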
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
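        #   e.g. "openbmc/ubuntu-unit-test-boost:2024-W23-0123456789abcdef"
        #   (illustrative; the suffix is a truncated Dockerfile hash).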
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:noble")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
docker_reg = os.environ.get("DOCKER_REG", "public.ecr.aws/ubuntu")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_reg}/{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

# The default distro (ubuntu:noble) ships Python 3.12, so point PYTHONPATH
# at its site-packages for tools installed under /usr/local (e.g. sdbus++).
ENV PYTHONPATH "/usr/local/lib/python3.12/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" 18

# Install extra clang tools
RUN apt-get install -yy \
        clang-18 \
        clang-format-18 \
        clang-tidy-18

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-18 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-18 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-18 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-18 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-18

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly and force
# us to re-run `apt-get update` periodically (the timestamp below changes
# each ISO week).
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.3.0 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)
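
# Callers typically capture this tag (e.g. img=$(...) in shell) to run unit
# tests inside the image; everything else above was sent to stderr so that
# stdout stays pristine.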