1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:impish
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only an ImportError should trigger the fallback; a bare 'except' would
    # also hide unrelated failures (e.g. KeyboardInterrupt).

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # Common install prefix for every built package.
proc_count = nproc().strip()  # Host CPU count, used for 'make -j' parallelism.
46
47
class PackageDef(TypedDict, total=False):
    """Package Definition for the 'packages' dictionary.

    'total=False' makes every key optional to the type checker; the
    per-field comments below note which are required in practice.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are Github 'org/repo' names (or a bare name when a custom 'url'
#   lambda is given).  Each entry becomes its own Docker build stage.
packages = {
    "boost": PackageDef(
        rev="1.76.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="4f712ed69a04a344957d22efa5dc111b415b3aff",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev' given: HEAD of the default branch is looked up at build time.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
283
# Common flag strings shared by every package build type.
configure_flags = f"--prefix={prefix}"

# cmake: shared-lib RelWithDebInfo builds driven by ninja.
cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

# meson: never download subproject wraps; install under the common prefix.
meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
305
306
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().

    Each Package is a thread; a package 'join's the threads of its
    dependencies so Docker stages always build in dependency order.
    """

    # Class-level copy of the packages dictionary; all threads share and
    # mutate these PackageDefs (guarded by 'lock').
    packages = packages.copy()

    # Lock used for thread-safety of PackageDef reads/writes.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef; record a back-pointer so
        # dependents can locate this thread object via the shared dict.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   Use 'with' so the lock is released even if a lookup raises.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions so generate_all() can re-raise
        # them on the main thread.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package thread for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Holding the lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Formulate the shell commands for an autoconf-based build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Formulate the shell commands for a cmake-based build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Join the package-supplied custom 'build_steps'. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Formulate the shell commands for a plain Makefile build. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Formulate the shell commands for a meson-based build. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
582
583
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        # (year, ISO-week) pair such as "2021-W16"; changes once per week so
        # tags (and hence cached images) roll over weekly.
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Optional stage name appended to the image name; pass None
                  (or "") for the final image, which gets no suffix.
        dockerfile - Dockerfile contents; hashed so any content change
                  produces a distinct tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
639
640
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")  # Truthy => rebuild, no cache.
is_automated_ci_build = os.environ.get("BUILD_URL", False)  # Set by Jenkins CI.
distro = os.environ.get("DISTRO", "ubuntu:impish")
branch = os.environ.get("BRANCH", "master")  # Preferred branch for openbmc repos.
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")  # Optional apt mirror URL.
http_proxy = os.environ.get("http_proxy")

# Revision supplied by Jenkins/Gerrit for the project under review (if any).
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
658
# Determine the architecture prefix for Docker base images.
arch = uname("-m").strip()
try:
    # Map supported machine types to their Docker Hub namespace prefix;
    # unknown architectures fall through to the error path below.
    docker_base = {"ppc64le": "ppc64le/", "x86_64": ""}[arch]
except KeyError:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
671
# Special flags if setting up a deb mirror.
#   When a mirror is configured for an Ubuntu distro, rewrite the image's
#   /etc/apt/sources.list so every pocket points at the mirror instead of
#   the default archive servers.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
682
# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet configuring git inside the image.
proxy_keyserver = ""  # Extra apt-key options so the keyserver is reachable.
proxy_args = []  # Extra 'docker build' args propagating the proxy env.
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    # The same proxy is used for both http and https traffic.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
702
# Create base Dockerfile.
#   This single stage installs the compilers, linters, and library -dev
#   dependencies shared by all package stages and by the final image.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-13 \
    clang-format-13 \
    clang-tidy-13 \
    clang-tools-13 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-13 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-13 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-13 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-13 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-13 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-13

"""
820
# On CI, append a cache-busting echo (changes with Docker.timestamp, i.e.
# weekly) so the apt-get update/upgrade layers re-run instead of being
# served stale from the Docker layer cache.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
829
# Python tooling installed via pip (meson is pinned for reproducibility).
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.1
RUN pip3 install protobuf
RUN pip3 install codespell
"""
838
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
# Build one Docker stage per package, in parallel threads.
Package.generate_all()
843
# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
#   Passing None for pkgname means the tag carries no package suffix.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; callers capture it to use the image.
print(docker_final_img_name)
877