1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
11#   BUILD_URL:        <optional, used to detect running under CI context
12#                     (ex. Jenkins)>
13#   BRANCH:           <optional, branch to build from each of the openbmc/
14#                     repositories>
15#                     default is master, which will be used if input branch not
16#                     provided or not found
17#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
18#                     default ones in /etc/apt/sources.list>
19#                     default is empty, and no mirror is used.
20#   http_proxy        The HTTP address of the proxy server to connect to.
21#                     Default: "", proxy is not setup if this is not set
22
23import os
24import sys
25import threading
26from datetime import date
27from hashlib import sha256
28from sh import docker, git, nproc, uname  # type: ignore
29from typing import Any, Callable, Dict, Iterable, Optional
30
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Catch only ImportError; a bare 'except:' would also swallow
    # SystemExit/KeyboardInterrupt.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
# Declare some variables used in package definitions.
# Installation prefix shared by every package build inside the image.
prefix = "/usr/local"
# Host CPU count (via 'nproc'); used as the '-j' parallelism for builds.
proc_count = nproc().strip()
45
46
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    total=False makes every key optional to the type checker; the per-field
    comments below note which are required in practice.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
75
76
# Packages to include in image.
#   Keys are either a GitHub "org/repo" path (downloaded from github.com by
#   default) or a bare name resolved through the entry's own 'url' lambda.
#   Entries without a 'rev' are resolved to a branch HEAD at build time
#   (see Package._update_rev).
packages = {
    "boost": PackageDef(
        rev="1.79.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "danmar/cppcheck": PackageDef(
        rev="27578e9c4c1f90c62b6938867735a054082e178e",
        build_type="cmake",
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="8.1.1",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Release 2021-06-12
    "google/googletest": PackageDef(
        rev="9e712372214d75bb30ec2847a44bf124d48096f3",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    # NOTE(review): rev v3.10.4 appears newer than this date comment — confirm.
    "nlohmann/json": PackageDef(
        rev="v3.10.4",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    # NOTE(review): rev v1.15 may postdate this snapshot comment — confirm.
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="d740a92d38e3a8f4d5b2153f53b82f1c98e312ab",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devools/valijson/valijson_0.6.bb
    "tristanpenman/valijson": PackageDef(
        rev="v0.6",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/unifex/unifex_git.bb
    "facebookexperimental/libunifex": PackageDef(
        rev="9df21c58d34ce8a1cd3b15c3a7347495e29417a0",
        build_type="cmake",
        config_flags=[
            "-DBUILD_SHARED_LIBS=ON",
            "-DBUILD_TESTING=OFF",
            "-DCMAKE_CXX_STANDARD=20",
            "-DUNIFEX_BUILD_EXAMPLES=OFF",
        ],
    ),
    # No 'rev': resolved to branch HEAD at build time (Package._update_rev).
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=["-Dgenerate_md=false"],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyamldir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        depends=[
            "facebookexperimental/libunifex",
        ],
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "google/googletest"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
283
# Common command-line flag strings shared by the per-build_type helpers in
# Package (_cmd_build_autoconf / _cmd_build_cmake / _cmd_build_meson).
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON "
    "-DCMAKE_BUILD_TYPE=RelWithDebInfo "
    f"-DCMAKE_INSTALL_PREFIX:PATH={prefix} "
    "-GNinja "
    "-DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
305
306
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Each Package is a thread that generates one Docker build stage for a
    single 'packages' entry, first waiting for the stages of its
    dependencies to finish.  Generally, this class should not be
    instantiated directly but through Package.generate_all().
    """

    # Copy the packages dictionary so mutations (revs, tags,
    # back-references) stay on the class, not the module-level definition.
    packages = packages.copy()

    # Lock used for thread-safety of PackageDef reads/writes.  Always taken
    # via 'with' so it is released even if the guarded code raises.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """pkg - The name of this package (ex. foo/bar )"""
        super().__init__()

        self.package = pkg
        # Exception raised during the build, if any; re-raised by
        # generate_all() after the thread is joined.
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """Thread 'run' function.  Builds the Docker stage."""

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions for generate_all() to re-raise.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.  Re-raises the first build
        exception encountered, if any.
        """

        # Create a Package thread for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += f"{pkg}:{cls.packages[pkg]['rev']},"
        return depcache

    def _update_rev(self) -> None:
        """Look up the HEAD for missing a static rev."""

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """Create a name for the Docker stage associated with this pkg."""
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """Get the download URL for this package."""
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source.

        Raises NotImplementedError for URLs that are not .tar.gz / .tar.bz2.
        """

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """Formulate the command necessary to 'cd' into the source dir."""
        # Archives extract to '<repo>-<rev>'; the shell glob matches it.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """Formulate the dockerfile snippet necessary to COPY all depends."""

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """Autotools build: bootstrap, configure, make, make install."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """CMake build: out-of-tree configure, build, install."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """Custom build: run the package's 'build_steps' verbatim."""
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """Plain make build: build and install."""
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """Meson build: configure, ninja build, ninja install."""
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
582
583
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """Generate a timestamp for today using the ISO week (YYYY-Www)."""
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - stage name appended to the image name, or None/empty for
                  the final image (no suffix).
        dockerfile - Dockerfile contents; hashed so the tag changes whenever
                  the recipe changes.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        # ISO-week timestamp plus a content hash: cached images are reused
        # within a week but rebuilt when the Dockerfile changes.
        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
639
640
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# Any non-empty value forces rebuilds (skips the tag-exists check and sets
# --no-cache; see Docker.build).
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); used only as a truthy CI flag.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:jammy")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit-provided project/revision overrides (see Package._update_rev).
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Use well-known constants if user is root
if username == "root":
    homedir = "/root"
    gid = 0
    uid = 0
664
# Determine the Docker image-name prefix for this host architecture.
arch = uname("-m").strip()
docker_base = {
    "ppc64le": "ppc64le/",
    "x86_64": "",
    "aarch64": "arm64v8/",
}.get(arch)
if docker_base is None:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
679
# Special flags if setting up a deb mirror.
#   Rewrites /etc/apt/sources.list to point the release, -updates,
#   -security, -proposed and -backports pockets at UBUNTU_MIRROR.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""  # Dockerfile snippet: git http-proxy configuration
proxy_keyserver = ""  # extra apt-key option for the keyserver fetch
proxy_args = []  # extra 'docker build' arguments (build-args)
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
710
# Create base Dockerfile.
#   This stage installs the distro toolchain and all apt/npm prerequisites;
#   the per-package stages generated by Package are layered on top of it.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.10/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-14 \
    clang-format-14 \
    clang-tidy-14 \
    clang-tools-14 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing2-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync \
    libcryptsetup-dev

# Apply autoconf-archive-v2022.02.11 file ax_cxx_compile_stdcxx for C++20.
RUN curl "http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_cxx_compile_stdcxx.m4;hb=3311b6bdeff883c6a13952594a9dcb60bce6ba80" \
  > /usr/share/aclocal/ax_cxx_compile_stdcxx.m4

RUN npm install -g eslint@latest eslint-plugin-json@latest

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-14 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-14 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-14 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-14 \
  --slave /usr/bin/run-clang-tidy run-clang-tidy.py /usr/bin/run-clang-tidy-14 \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-14

"""
832
# Under CI, embed the current timestamp so the apt layers are invalidated
# (and re-run) whenever Docker.timestamp() changes.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling installed via pip into the base image.
dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.61.3
RUN pip3 install protobuf
RUN pip3 install codespell
RUN pip3 install requests
"""
851
# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -f -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build.
#   Passing None as the stage name gives the un-suffixed final tag.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output; build logs go to stderr (see
#   Docker.build) so callers can capture the tag cleanly.
print(docker_final_img_name)
890