1#!/usr/bin/env python3
2#
3# Build the required docker image to run package unit tests
4#
5# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
7#                     default is openbmc/ubuntu-unit-test
8#   DISTRO:           <optional, the distro to build a docker image against>
9#                     default is ubuntu:hirsute
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
11#                     images to be rebuilt rather than reusing caches.>
12#   BUILD_URL:        <optional, used to detect running under CI context
13#                     (ex. Jenkins)>
14#   BRANCH:           <optional, branch to build from each of the openbmc/
15#                     repositories>
16#                     default is master, which will be used if input branch not
17#                     provided or not found
18#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
19#                     default ones in /etc/apt/sources.list>
20#                     default is empty, and no mirror is used.
21#   http_proxy        The HTTP address of the proxy server to connect to.
22#                     Default: "", proxy is not setup if this is not set
23
24import os
25import sys
26import threading
27from datetime import date
28from hashlib import sha256
29from sh import docker, git, nproc, uname  # type: ignore
30from typing import Any, Callable, Dict, Iterable, Optional
31
try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Fallback shim for Python < 3.8: behaves like a plain dict but accepts
    # the class-level keyword arguments (ex. 'total') that TypedDict
    # subclasses pass.  Narrowed from a bare 'except:' so that unrelated
    # exceptions (SystemExit, KeyboardInterrupt, ...) are not swallowed.
    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()
41
42
# Declare some variables used in package definitions.
# Installation prefix shared by every package build below.
prefix = "/usr/local"
# CPU count used for parallel 'make -j'; nproc output carries a trailing
# newline, hence the strip().
proc_count = nproc().strip()
46
47
class PackageDef(TypedDict, total=False):
    """Package Definition for the 'packages' dictionary.

    All fields are optional (total=False) except where noted below;
    'build_type' is the only key every entry must provide.
    """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies (keys of 'packages').
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage;
    #   filled in by Package.run().
    __tag: str
    # __package [private]: Package object associated with this package;
    #   filled in by Package.__init__().
    __package: Any  # Type is Package, but not defined yet.
76
77
# Packages to include in image.
#   Keys are GitHub "org/repo" paths (the default download URL is the GitHub
#   archive tarball for 'rev') unless a 'url' lambda overrides the location.
packages = {
    "boost": PackageDef(
        rev="1.76.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    # Header-only; install is a plain copy of the include directory.
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    # Single-header library; copied into place by hand.
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    # Snapshot from 2021-05-13
    "google/googletest": PackageDef(
        rev="662fe38e44900c007eccb65a5d2ea19df7bd520e",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++20"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="4f712ed69a04a344957d22efa5dc111b415b3aff",
        build_type="custom",
        build_steps=[f"cp -a include/** {prefix}/include/"],
    ),
    # valijson v0.4, which contains the nlohmann/json.hpp include fix:
    # 66262bafb82c ("Include nlohmann/json.hpp instead of json.hpp")
    "tristanpenman/valijson": PackageDef(
        rev="v0.4",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    # No 'rev': the revision is resolved at runtime (see Package._update_rev).
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dtests=disabled",
        ],
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]
284
285# Define common flags used for builds
# Define common flags used for builds.
#   Each is a single space-separated string so it can be interpolated
#   directly into the generated shell commands.
configure_flags = f"--prefix={prefix}"

cmake_flags = (
    "-DBUILD_SHARED_LIBS=ON"
    " -DCMAKE_BUILD_TYPE=RelWithDebInfo"
    f" -DCMAKE_INSTALL_PREFIX:PATH={prefix}"
    " -GNinja"
    " -DCMAKE_MAKE_PROGRAM=ninja"
)

meson_flags = f"--wrap-mode=nodownload -Dprefix={prefix}"
306
307
class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Class-level copy of the packages dictionary; each thread reads and
    # annotates its own PackageDef entry in here.
    packages = packages.copy()

    # Lock used for thread-safety when touching PackageDef entries.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's PackageDef entry.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        #   'with' guarantees the lock is released even if this raises.
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        #   Exceptions are stashed (not raised) so generate_all() can
        #   re-raise them from the main thread after join().
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Holding the lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # Sorted for consistency so the generated file content is stable.
        return "".join(
            "%s:%s," % (pkg, cls.packages[pkg]["rev"])
            for pkg in sorted(cls.packages.keys())
        )

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        # Pick the tar decompression flag from the archive suffix.
        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        # Archives unpack to '<repo>-<rev>', hence the trailing glob.
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        """ Build steps for an autoconf-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        """ Build steps for a cmake-based package (out-of-tree, ninja). """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        """ Build steps for a 'custom' package: run build_steps verbatim. """
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        """ Build steps for a plain-Makefile package. """
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        """ Build steps for a meson-based package. """
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result
583
584
class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week ("YYYY-Www"). """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """Generate a tag name for a package using a hash of the Dockerfile.

        pkgname - Stage suffix, or None for the final (suffix-less) image.
        dockerfile - Dockerfile content; its sha256 is embedded in the tag so
            content changes produce a new tag.
        """
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #       --no-cache: Bypass the Docker cache if 'force_build'.
        #       --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )
640
641
# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
# Any non-empty value forces rebuilds (and passes --no-cache to docker build).
force_build = os.environ.get("FORCE_DOCKER_BUILD")
# BUILD_URL is set by CI (ex. Jenkins); only its presence matters here.
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:hirsute")
# Branch preferred when resolving package revisions (falls back to master).
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

# Gerrit/Jenkins-provided project and revision, used by Package._update_rev
# to pin the package under review to the patchset being tested.
gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
#   These mirror the invoking user so the container user matches the host.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()
659
# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    # Power hosts need the explicit "ppc64le/" image-name prefix.
    docker_base = "ppc64le/"
elif arch == "x86_64":
    # x86_64 uses the default (un-prefixed) image names.
    docker_base = ""
else:
    # Any other architecture is unsupported; fail early and loudly.
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)
672
# Special flags if setting up a deb mirror.
#   Rewrites /etc/apt/sources.list inside the image so every component points
#   at UBUNTU_MIRROR; the release codename is resolved in-container from
#   /etc/os-release.  Only applies to ubuntu-based distros.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""
683
# Special flags for proxying.
proxy_cmd = ""
proxy_keyserver = ""
proxy_args = []
if http_proxy:
    # Dockerfile snippet configuring git inside the image to use the proxy.
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    # Extra option for apt-key so key fetches also go through the proxy.
    proxy_keyserver = f"--keyserver-options http-proxy={http_proxy}"

    # Forward the proxy into 'docker build' as build args.
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )
703
# Create base Dockerfile.
#   NOTE: trailing single backslashes inside this f-string are Python line
#   continuations, so the affected RUN commands are emitted as single lines.
#   Regex metacharacters for sed are written as '\\(' etc. so Python does not
#   treat them as (invalid) escape sequences; the emitted text is unchanged.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      {proxy_keyserver} \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\\(backports\\|security\\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-12 \
    clang-format-12 \
    clang-tidy-12 \
    clang-tools-12 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    liburing-dev \
    liburing1-dbgsym \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-12 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-12 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-12 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-12 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-12.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-12

"""
819
# For automated CI builds (detected via BUILD_URL), append commands that
# invalidate the Docker layer cache whenever Docker.timestamp() changes
# (it is ISO-week based), forcing a fresh apt-get update/dist-upgrade.
if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""
828
# Python-based tooling installed into the image via pip3.
#   The empty leading/trailing entries preserve the blank-line framing of
#   the appended Dockerfile snippet.
dockerfile_base += "\n".join(
    [
        "",
        "RUN pip3 install inflection",
        "RUN pip3 install pycodestyle",
        "RUN pip3 install jsonschema",
        "RUN pip3 install meson==0.58.1",
        "RUN pip3 install protobuf",
        "RUN pip3 install codespell",
        "",
    ]
)
837
# Build the base and stage docker images.
#   The tag embeds a hash of the Dockerfile content (see Docker.tagname),
#   so an unchanged Dockerfile reuses the already-built image.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
# Build one Docker stage per entry in 'packages' (threaded; see Package).
Package.generate_all()
842
# Create the final Dockerfile.
#   Copies every package stage's install prefix into the final image and
#   configures a user matching the invoking user's uid/gid/username.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build.
#   tagname(None, ...) omits the per-package suffix for the final image tag.
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
#   This is the only stdout output of the script; all build progress went to
#   stderr so callers can capture the tag cleanly.
print(docker_final_img_name)
876