1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value that will force all Docker
#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only catch the missing-name case; a bare 'except:' would also swallow
    # SystemExit/KeyboardInterrupt.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
40
41
# Declare some variables used in package definitions.
prefix = "/usr/local"  # Install prefix shared by every package stage.
# CPU count for 'make -j' parallelism; nproc comes from the 'sh' module.
proc_count = nproc().strip()
45
46
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    All keys are optional at the type level (total=False).  'build_type' is
    required by convention; a missing 'rev' is resolved from the git branch
    HEAD at build time.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#   Keys are either a plain name ("boost", which supplies a custom 'url'
#   lambda) or a GitHub "org/repo" path (downloaded as a GitHub archive).
#   Entries without a 'rev' are resolved to a branch HEAD at build time.
packages = {
    "boost": PackageDef(
        rev="1.79.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="8.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Release 2021-06-12
    "google/googletest": PackageDef(
        rev="9e712372214d75bb30ec2847a44bf124d48096f3",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    # No 'rev': tracks the branch HEAD resolved by Package._update_rev().
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
279
# Common flags handed to each build system's configure step.
#   Every build installs into {prefix} so stages can COPY it wholesale.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON "
    "-DCMAKE_BUILD_TYPE=RelWithDebInfo "
    f"-DCMAKE_INSTALL_PREFIX:PATH={prefix} "
    "-GNinja "
    "-DCMAKE_MAKE_PROGRAM=ninja"
)

# nodownload: never let meson fetch wrapped subprojects during image build.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
301
302
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each Package is a thread that generates the Dockerfile stage for one
    package, waits for its dependencies' stages to complete, and then runs
    the 'docker build' for its own stage.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary so the class owns its own mutable state.
    packages = packages.copy()

    # Lock used for thread-safety of PackageDef modifications.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super().__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition; also record ourselves in
        # it so dependent packages can find (and join) this thread.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   Using 'with' guarantees release even if a lookup raises.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.  A thread cannot propagate an
        # exception to its caller, so generate_all() re-raises it after join.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.

        Raises: the first exception recorded by any package thread.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += f"{pkg}:{cls.packages[pkg]['rev']},"
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Select the decompression flag from the archive extension.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Shell commands to configure/build/install an autoconf package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Shell commands to configure/build/install a cmake package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Shell commands for a 'custom' build: join the build_steps. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Shell commands to build/install a plain-Makefile package. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Shell commands to configure/build/install a meson package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
578
579
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week.

        Returns a string like "2022-W04"; it changes weekly so cached
        images naturally expire on a weekly cadence.
        """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Name of the package stage, or None for the final image
                  (the module-level build passes None).
        dockerfile - Dockerfile contents; hashed so any content change
                     produces a new tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not polute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
635
636
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# Any non-empty value forces rebuilds and passes --no-cache to docker.
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# NOTE: default is False (not ""), so this is falsy unless BUILD_URL is set;
# only its truthiness is used (Jenkins sets BUILD_URL).
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:jammy")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit/Jenkins may pin a specific revision for the repo under review.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
648
# Capture the invoking user's identity; the final image is configured to
# contain a matching user so bind-mounted workspaces keep their ownership.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid, uid = os.getgid(), os.getuid()

# For root, ignore the environment and use the well-known constants.
if username == "root":
    homedir, gid, uid = "/root", 0, 0
660
# Determine the architecture for Docker.
#   Map the machine architecture to the matching Docker Hub image prefix;
#   exit with an error for anything we do not publish images for.
arch = uname("-m").strip()
_arch_to_docker_base = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
    "aarch64": "arm64v8/",
}
if arch not in _arch_to_docker_base:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
docker_base = _arch_to_docker_base[arch]
675
# Special flags if setting up a deb mirror.
#   When UBUNTU_MIRROR is set for an Ubuntu distro, overwrite
#   /etc/apt/sources.list so every pocket points at the mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
686
# Special flags for proxying.
#   proxy_cmd: Dockerfile snippet routing git through the proxy.
#   proxy_keyserver: apt-key option used in the base Dockerfile.
#   proxy_args: extra 'docker build' arguments exporting the proxy.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
706
# Create base Dockerfile.
#   This stage installs the distro toolchain and all development packages;
#   every package stage and the final image are built FROM this image.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-14 \
    clang-format-14 \
    clang-tidy-14 \
    clang-tools-14 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing2-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

# Apply autoconf-archive-v2022.02.11 file ax_cxx_compile_stdcxx for C++20.
RUN curl "http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_cxx_compile_stdcxx.m4;hb=3311b6bdeff883c6a13952594a9dcb60bce6ba80" \
  > /usr/share/aclocal/ax_cxx_compile_stdcxx.m4

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-14 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-14 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-14 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-14 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-14 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-14

"""
829
# Under CI, deliberately bust the Docker cache periodically (the echoed
# value comes from Docker.timestamp()) so apt-get update/dist-upgrade is
# actually re-run instead of served entirely from cache.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling installed via pip on top of the distro packages.
# (Plain string: no placeholders, so no f-prefix needed.)
dockerfile_base += """
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.61.3
RUN pip3 install protobuf
RUN pip3 install codespell
RUN pip3 install requests
"""
848
# Build the base and stage docker images.
#   The base image tag is derived from its Dockerfile hash; each package
#   stage (built in parallel by generate_all) layers on top of it.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()
853
# Create the final Dockerfile.
#   Copies every package stage's /usr/local into the image and configures
#   the invoking user's account inside the container.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; CI consumers parse it as the tag.
print(docker_final_img_name)
887