1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
10#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
40
41
# Declare some variables used in package definitions.
prefix = "/usr/local"  # Install prefix shared by every package build/copy.
proc_count = nproc().strip()  # Host CPU count, used for 'make -j' parallelism.
45
46
class PackageDef(TypedDict, total=False):
    """Package Definition for the 'packages' dictionary.

    total=False makes every key optional to the type checker; by
    convention 'build_type' is supplied by every entry.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#   Keys are Github 'org/repo' names (or a bare name like 'boost' when the
#   source comes from a non-Github tarball via a custom 'url' lambda).
packages = {
    "boost": PackageDef(
        rev="1.78.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="8.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Release 2021-06-12
    "google/googletest": PackageDef(
        rev="9e712372214d75bb30ec2847a44bf124d48096f3",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
278
# Define common flags used for builds, per build_type.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = (
    "--wrap-mode=nodownload"
    f" -Dprefix={prefix}"
)
300
301
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each instance is a thread: it resolves the package revision, waits
    for its dependencies' stages to finish, then builds its own stage.
    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety of PackageDef reads/writes.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   ('with' guarantees release even if the lookup raises.)
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   The exception is re-raised by generate_all() on the main thread.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Shell fragment: bootstrap, configure, make, make install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Shell fragment: out-of-tree cmake configure, build, install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Shell fragment: the package's own 'build_steps', chained. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Shell fragment: plain make and make install. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Shell fragment: meson setup, ninja build, ninja install. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
577
578
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None (used for the final image, which gets no
        package suffix).
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
634
635
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); mere presence marks an automated build.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:jammy")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit-supplied project/revision, used to pin a package under review.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0
659
# Determine the architecture for Docker.
#   Maps the host machine type to the Docker Hub image-name prefix.
arch = uname("-m").strip()
_arch_prefixes = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
    "aarch64": "arm64v8/",
}
if arch not in _arch_prefixes:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = _arch_prefixes[arch]
674
# Special flags if setting up a deb mirror.
#   Points every Ubuntu pocket (release, -updates, -security, -proposed,
#   -backports) at the mirror; the codename is resolved inside the image.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
685
# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet: route git's http traffic via the proxy.
proxy_keyserver = ""  # Extra apt-key option to reach the keyserver via proxy.
proxy_args = []  # Extra 'docker build' args propagating the proxy env vars.
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
705
# Create base Dockerfile.
#   Installs the compiler toolchain and all distro packages needed to build
#   and test the OpenBMC repositories; package stages build FROM this image.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing2-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

# Apply autoconf-archive-v2022.02.11 file ax_cxx_compile_stdcxx for C++20.
RUN curl "http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_cxx_compile_stdcxx.m4;hb=3311b6bdeff883c6a13952594a9dcb60bce6ba80" \
  > /usr/share/aclocal/ax_cxx_compile_stdcxx.m4

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""
828
# Under CI, append a cache-busting echo so the apt-get lines above the
# append point are re-executed once per Docker.timestamp() period rather
# than being served stale from the Docker layer cache.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling installed via pip (pinned/newer than distro packages).
# Plain string (no placeholders), so no f-prefix needed.
dockerfile_base += """
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.61.3
RUN pip3 install protobuf
RUN pip3 install codespell
RUN pip3 install requests
"""
847
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; callers consume it, so keep it pristine.
print(docker_final_img_name)
886