1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:focal
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Only the missing-name case should fall back; a bare 'except' would
    # also hide unrelated errors (e.g. a corrupted typing module).

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
prefix = "/usr/local"  # install prefix shared by every package build stage
proc_count = nproc().strip()  # host CPU count, used for 'make -j' parallelism
46
47
class PackageDef(TypedDict, total=False):
    """Package definition for the 'packages' dictionary.

    Describes how one package is downloaded, configured, built, and
    installed into its own Docker stage.  All keys are optional at the
    type level (total=False); see the per-field comments below.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are GitHub "org/repo" paths (turned into archive URLs by
#   Package._url) unless a custom 'url' lambda is supplied.
packages = {
    # NOTE(review): dl.bintray.com was sunset in 2021 -- confirm this boost
    # download URL still resolves or move to boostorg's archive host.
    "boost": PackageDef(
        rev="1.75.0",
        url=(
            lambda pkg, rev: f"https://dl.bintray.com/boostorg/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="v1.3.0",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.12.2",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Snapshot from 2020-01-03
    "Naios/function2": PackageDef(
        rev="3a0746bf5f601dfed05330aefcb6854354fce07d",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.8.1",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev': Package._update_rev resolves it from the branch HEAD.
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="autoconf",
        config_flags=[
            "--enable-metadata-processing",
            f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
286
# Common flag strings shared by the per-build-type command generators in
# the Package class below.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
308
309
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.  Always taken via 'with' so it cannot be
    # leaked if the guarded code raises (a leaked lock would deadlock every
    # other package thread).
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry; also record ourself
        # there so dependents can find and join() this thread.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   Exceptions are re-raised on the main thread by generate_all().
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.

        Raises the first build exception encountered, on this thread.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        for t in pkg_threads:
            t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # The unpacked directory is '<repo>-<rev>'; glob over the suffix.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Build steps for 'autoconf' build_type packages. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Build steps for 'cmake' build_type packages (out-of-tree). """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Build steps for 'custom' build_type packages. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Build steps for 'make' build_type packages. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Build steps for 'meson' build_type packages. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
579
580
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname may be None (used for the final image below), in which case
        no stage suffix is appended to the image name.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        # Week-stamp plus the first 16 hex chars of the Dockerfile hash:
        # unique per Dockerfile content, refreshed weekly.
        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
636
637
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")  # truthy => rebuild, no cache
is_automated_ci_build = os.environ.get("BUILD_URL", False)  # set by Jenkins CI
distro = os.environ.get("DISTRO", "ubuntu:focal")
branch = os.environ.get("BRANCH", "master")  # preferred branch for openbmc repos
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")  # optional apt mirror URL
http_proxy = os.environ.get("http_proxy")

# Revision under review, injected by Jenkins/Gerrit (see Package._update_rev).
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")
649
# Set up some common variables.
#   These are used below to recreate the calling user inside the image.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Determine the architecture for Docker.
#   ppc64le images come from the 'ppc64le/' namespace; x86_64 uses the
#   default namespace.  Anything else is unsupported.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
668
# Special flags if setting up a deb mirror.
#   Replaces the default sources.list with the mirror for the release suite
#   and its -updates/-security/-proposed/-backports variants.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
679
# Special flags for proxying.
#   proxy_cmd configures git inside the image; proxy_args are passed to
#   'docker build' so downloads during the build also use the proxy.
#   NOTE: the single backslash-newline inside the (non-raw) f-string below
#   is a Python line continuation, so the RUN becomes one long line.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
696
# Create base Dockerfile.
#   This stage installs the toolchain and distro packages shared by every
#   package stage; each package stage below FROMs the image built from it.
#   NOTE(review): the sed expressions below rely on Python preserving
#   unrecognized escapes (e.g. \( ) in a non-raw string literal.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libevdev2-dbgsym \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-10 \
    clang-format-10 \
    clang-tidy-10 \
    clang-tools-10 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py

"""
802
# Under CI, inject a day-stamped command so the Docker layer cache is busted
# daily and 'apt-get update' actually re-runs at least once per day.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

# Python tooling needed by the unit-test scripts.
#   (Plain string: the previous f-prefix was unnecessary -- no placeholders.)
dockerfile_base += """
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.54.3
RUN pip3 install protobuf
"""
819
# Build the base and stage docker images.
#   The base image is built first; Package.generate_all() then builds one
#   stage per package (in dependency order) on top of it.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
#   COPYs every package stage's install prefix into one image and recreates
#   the calling user/group so the workspace is writable in CI.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only thing written to stdout; callers capture it.
print(docker_final_img_name)
854