#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if input branch not
#                     provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy        The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set
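#
# Example invocation (illustrative; the tag of the final image is printed
# to stdout on success):
#   DISTRO=ubuntu:noble BRANCH=master ./build-unit-test-docker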

import json
import os
import re
import sys
import threading
import urllib.request
from datetime import date
from hashlib import sha256

# typing.Dict is used for type-hints.
from typing import Any, Callable, Dict, Iterable, Optional  # noqa: F401

from sh import docker, git, nproc  # type: ignore

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except Exception:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs: Any) -> None:
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """Package Definition for packages dictionary."""

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.84.0",
        url=(
            lambda pkg, rev: f"https://github.com/boostorg/{pkg}/releases/download/{pkg}-{rev}/{pkg}-{rev}.tar.gz"
        ),
        build_type="custom",
        build_steps=[
            (
                "./bootstrap.sh"
                f" --prefix={prefix} --with-libraries=context,coroutine,url"
            ),
            "./b2",
            f"./b2 install --prefix={prefix} valgrind=on",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.2",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="2.12.1",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v2.3.2",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="10.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.2.4",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="v1.15.2",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    "nghttp2/nghttp2": PackageDef(
        rev="v1.61.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=[
            "-DENABLE_LIB_ONLY=ON",
            "-DENABLE_STATIC_LIB=ON",
        ],
    ),
    "nlohmann/json": PackageDef(
        rev="v3.11.2",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            (
                f"ln -s {prefix}/include/nlohmann/json.hpp"
                f" {prefix}/include/json.hpp"
            ),
        ],
    ),
    "json-c/json-c": PackageDef(
        rev="json-c-0.17-20230812",
        build_type="cmake",
    ),
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.14",
        build_type="cmake",
    ),
    "leethomason/tinyxml2": PackageDef(
        rev="9.0.0",
        build_type="cmake",
    ),
    "tristanpenman/valijson": PackageDef(
        rev="v1.0.1",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibonly=true",
            "-Dtests=disabled",
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/libpeci": PackageDef(
        build_type="meson",
        config_flags=[
            "-Draw-peci=disabled",
        ],
    ),
    "openbmc/libpldm": PackageDef(
        build_type="meson",
        config_flags=[
            "-Dabi=deprecated,stable",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "nlohmann/json",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=[
            "openbmc/stdplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=[
            "fmtlib/fmt",
            "google/googletest",
            "Naios/function2",
        ],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
            "-Dgtest=enabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar)"""
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        Package.lock.acquire()
        deps: Iterable[Package] = [
            Package.packages[deppkg]["__package"]
            for deppkg in self.pkg_def.get("depends", [])
        ]
        Package.lock.release()

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        Package.lock.acquire()
        tag = Docker.tagname(self._stagename(), dockerfile)
        self.pkg_def["__tag"] = tag
        Package.lock.release()

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        Package.lock.acquire()
        for t in pkg_threads:
            t.start()
        Package.lock.release()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
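        Illustrative contents (sorted; assuming the current package set):
        "CLIUtils/CLI11:v2.3.2,LibVNC/libvncserver:LibVNCServer-0.9.14,..."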
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _check_gerrit_topic(self) -> bool:
        if not gerrit_topic:
            return False
        if not self.package.startswith("openbmc/"):
            return False
        if gerrit_project == self.package and gerrit_rev:
            return False

        try:
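            # Gerrit prepends a ")]}'" XSSI-protection prefix to its REST
            # responses; splitlines()[-1] skips it and keeps the JSON payload.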
            commits = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/?q=status:open+project:{self.package}+topic:{gerrit_topic}"
                )
                .read()
                .splitlines()[-1]
            )

            if len(commits) == 0:
                return False
            if len(commits) > 1:
                print(
                    f"{self.package} has more than 1 commit ({len(commits)}) under {gerrit_topic}; using latest upstream",
                    file=sys.stderr,
                )
                return False

            change_id = commits[0]["id"]

            commit = json.loads(
                urllib.request.urlopen(
                    f"https://gerrit.openbmc.org/changes/{change_id}/revisions/current/commit"
                )
                .read()
                .splitlines()[-1]
            )["commit"]

            print(
                f"Using {commit} from {gerrit_topic} for {self.package}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = commit
            return True

        except urllib.error.HTTPError as e:
            print(
                f"Error loading topic {gerrit_topic} for {self.package}: ",
                e,
                file=sys.stderr,
            )
            return False

    def _update_rev(self) -> None:
        """Look up the HEAD revision for packages missing a static rev."""

        if "rev" in self.pkg_def:
            return

        if self._check_gerrit_topic():
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git(
            "ls-remote", "--heads", f"https://github.com/{self.package}"
        )
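        # Each output line has the form "<sha>\trefs/heads/<name>".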

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        Package.lock.acquire()
        for line in lookup.split("\n"):
            if re.fullmatch(f".*{branch}$", line.strip()):
                self.pkg_def["rev"] = line.split()[0]
                break
            elif (
                "refs/heads/master" in line or "refs/heads/main" in line
            ) and "rev" not in self.pkg_def:
                self.pkg_def["rev"] = line.split()[0]
        Package.lock.release()

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
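        # e.g. "openbmc/sdbusplus" -> "openbmc-sdbusplus"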
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command to download and unpack the source."""
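        # For the default GitHub archive URL this is roughly:
        #   curl -L https://github.com/<pkg>/archive/<rev>.tar.gz | tar -xz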

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
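        # e.g. an "openbmc/sdbusplus" archive extracts to sdbusplus-<rev>/,
        #   which "cd sdbusplus*" matches.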
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """
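        # Per package this emits, illustratively:
        #   COPY --from=<tag> /usr/local /usr/local
        #   RUN true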

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """
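        # The snippet is a single RUN command, illustratively:
        #   RUN <download> && <cd srcdir> && [post-dl &&] <build> [&& post-install]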

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
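        # Illustrative result (with empty env and options):
        #   meson setup builddir --wrap-mode=nodownload -Dprefix=/usr/local
        #   && ninja -C builddir && ninja -C builddir install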
        result = f"{env} meson setup builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week."""
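        # e.g. date(2024, 3, 5).isocalendar() is (2024, 10, 2) -> "2024-W10"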
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile."""
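        # Resulting shape (illustrative, with the default image name):
        #   openbmc/ubuntu-unit-test-<pkg>:<YYYY>-W<ww>-<16 hex sha256 chars>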
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image from the Dockerfile and tag it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(
                    f"Image {tag} already exists.  Skipping.", file=sys.stderr
                )
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
            _err_to_out=True,
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get(
    "DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test"
)
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:noble")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
gerrit_topic = os.environ.get("GERRIT_TOPIC")

# Ensure appropriate docker build output to see progress and identify
# any issues
os.environ["BUILDKIT_PROGRESS"] = "plain"

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME) \
        main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-updates \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-security \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-proposed \
            main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} \
        $(. /etc/os-release && echo $VERSION_CODENAME)-backports \
            main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
# Known issue with gpg to get keys via proxy -
# https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1788190, hence using
# curl to get keys.
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install -yy gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr curl && \
          curl -sSL \
          'https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xF2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622' \
          | apt-key add - ))

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' \
        /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    abi-compliance-checker \
    abi-dumper \
    autoconf \
    autoconf-archive \
    bison \
    cmake \
    curl \
    dbus \
    device-tree-compiler \
    flex \
    g++-13 \
    gcc-13 \
    git \
    glib-2.0 \
    gnupg \
    iproute2 \
    iputils-ping \
    libaudit-dev \
    libc6-dbg \
    libc6-dev \
    libcjson-dev \
    libconfig++-dev \
    libcryptsetup-dev \
    libdbus-1-dev \
    libevdev-dev \
    libgpiod-dev \
    libi2c-dev \
    libjpeg-dev \
    libjson-perl \
    libldap2-dev \
    libmimetic-dev \
    libnl-3-dev \
    libnl-genl-3-dev \
    libpam0g-dev \
    libpciaccess-dev \
    libperlio-gzip-perl \
    libpng-dev \
    libprotobuf-dev \
    libsnmp-dev \
    libssl-dev \
    libsystemd-dev \
    libtool \
    liburing-dev \
    libxml2-utils \
    libxml-simple-perl \
    lsb-release \
    ninja-build \
    npm \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-dev \
    python3-git \
    python3-mako \
    python3-pip \
    python3-protobuf \
    python3-setuptools \
    python3-socks \
    python3-yaml \
    rsync \
    shellcheck \
    socat \
    software-properties-common \
    sudo \
    systemd \
    valgrind \
    vim \
    wget \
    xxd

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-13 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13
RUN update-alternatives --remove cpp /usr/bin/cpp && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 13

# Set up LLVM apt repository.
RUN bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" 18

# Install extra clang tools
RUN apt-get install \
        clang-18 \
        clang-format-18 \
        clang-tidy-18

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-18 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-18 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-18 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py \
        /usr/bin/run-clang-tidy-18 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-18

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to regularly pollute the docker cache and force
# us to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += """
RUN pip3 install --break-system-packages \
        beautysh \
        black \
        codespell \
        flake8 \
        gcovr \
        gitlint \
        inflection \
        isort \
        jsonschema \
        meson==1.3.0 \
        requests

RUN npm install -g \
        eslint@v8.56.0 eslint-plugin-json@v3.1.0 \
        markdownlint-cli@latest \
        prettier@latest
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Ensure the group, user, and home directory are created (or rename them if
# they already exist).
RUN if grep -q ":{gid}:" /etc/group ; then \
        groupmod -n {username} $(awk -F : '{{ if ($3 == {gid}) {{ print $1 }} }}' /etc/group) ; \
    else \
        groupadd -f -g {gid} {username} ; \
    fi
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN if grep -q ":{uid}:" /etc/passwd ; then \
        usermod -l {username} -d {homedir} -m $(awk -F : '{{ if ($3 == {uid}) {{ print $1 }} }}' /etc/passwd) ; \
    else \
        useradd -d {homedir} -m -u {uid} -g {gid} {username} ; \
    fi
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

# Update library cache
RUN ldconfig

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)