#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                      default is openbmc/ubuntu-unit-test
#   DISTRO:            <optional, the distro to build a docker image against>
#                      default is ubuntu:hirsute
#   FORCE_DOCKER_BUILD: <optional, a non-zero value with force all Docker
#                       images to be rebuilt rather than reusing caches.>
#   BUILD_URL:         <optional, used to detect running under CI context
#                      (ex. Jenkins)>
#   BRANCH:            <optional, branch to build from each of the openbmc/
#                      repositories>
#                      default is master, which will be used if input branch not
#                      provided or not found
#   UBUNTU_MIRROR:     <optional, the URL of a mirror of Ubuntu to override the
#                      default ones in /etc/apt/sources.list>
#                      default is empty, and no mirror is used.
#   http_proxy         The HTTP address of the proxy server to connect to.
#                      Default: "", proxy is not setup if this is not set

import os
import sys
import threading
from datetime import date
from hashlib import sha256
from sh import docker, git, nproc, uname  # type: ignore
from typing import Any, Callable, Dict, Iterable, Optional

try:
    # Python before 3.8 doesn't have TypedDict, so reroute to standard 'dict'.
    from typing import TypedDict
except ImportError:
    # Narrowed from a bare 'except' so unrelated errors are not masked.

    class TypedDict(dict):  # type: ignore
        # We need to do this to eat the 'total' argument.
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__()


# Declare some variables used in package definitions.
prefix = "/usr/local"
proc_count = nproc().strip()


class PackageDef(TypedDict, total=False):
    """ Package Definition for packages dictionary. """

    # rev [optional]: Revision of package to use.
    rev: str
    # url [optional]: lambda function to create URL: (package, rev) -> url.
    url: Callable[[str, str], str]
    # depends [optional]: List of package dependencies.
    depends: Iterable[str]
    # build_type [required]: Build type used for package.
    #   Currently supported: autoconf, cmake, custom, make, meson
    build_type: str
    # build_steps [optional]: Steps to run for 'custom' build_type.
    build_steps: Iterable[str]
    # config_flags [optional]: List of options to pass configuration tool.
    config_flags: Iterable[str]
    # config_env [optional]: List of environment variables to set for config.
    config_env: Iterable[str]
    # custom_post_dl [optional]: List of steps to run after download, but
    #   before config / build / install.
    custom_post_dl: Iterable[str]
    # custom_post_install [optional]: List of steps to run after install.
    custom_post_install: Iterable[str]

    # __tag [private]: Generated Docker tag name for package stage.
    __tag: str
    # __package [private]: Package object associated with this package.
    __package: Any  # Type is Package, but not defined yet.


# Packages to include in image.
packages = {
    "boost": PackageDef(
        rev="1.75.0",
        url=(
            lambda pkg, rev: f"https://downloads.yoctoproject.org/mirror/sources/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        build_type="custom",
        build_steps=[
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    ),
    "USCiLab/cereal": PackageDef(
        rev="3e4d1b84cab4891368d2179a61a7ba06a5693e7f",
        build_type="custom",
        build_steps=[f"cp -a include/cereal/ {prefix}/include/"],
    ),
    "catchorg/Catch2": PackageDef(
        rev="v2.13.6",
        build_type="cmake",
        config_flags=["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    ),
    "CLIUtils/CLI11": PackageDef(
        rev="v1.9.1",
        build_type="cmake",
        config_flags=[
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    ),
    "fmtlib/fmt": PackageDef(
        rev="7.1.3",
        build_type="cmake",
        config_flags=[
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    ),
    "Naios/function2": PackageDef(
        rev="4.1.0",
        build_type="custom",
        build_steps=[
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    ),
    "google/googletest": PackageDef(
        rev="release-1.10.0",
        build_type="cmake",
        config_env=["CXXFLAGS=-std=c++17"],
        config_flags=["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    ),
    # Release 2020-08-06
    "nlohmann/json": PackageDef(
        rev="v3.9.1",
        build_type="cmake",
        config_flags=["-DJSON_BuildTests=OFF"],
        custom_post_install=[
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    ),
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": PackageDef(
        rev="v1.15",
        build_type="make",
    ),
    # dev-5.8 2021-01-11
    "openbmc/linux": PackageDef(
        rev="3cc95ae40716e56f81b69615781f54c78079042d",
        build_type="custom",
        build_steps=[
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    ),
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": PackageDef(
        rev="LibVNCServer-0.9.13",
        build_type="cmake",
    ),
    "martinmoene/span-lite": PackageDef(
        rev="v0.9.2",
        build_type="cmake",
        config_flags=[
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": PackageDef(
        rev="8.0.0",
        build_type="cmake",
    ),
    # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": PackageDef(
        rev="a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        build_type="cmake",
        config_flags=[
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    ),
    # version from ./meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": PackageDef(
        rev="8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        build_type="cmake",
        config_flags=[
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    ),
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": PackageDef(
        rev="0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        build_type="custom",
        build_steps=[f"cp src/fifo_map.hpp {prefix}/include/"],
    ),
    "open-power/pdbg": PackageDef(build_type="autoconf"),
    "openbmc/gpioplus": PackageDef(
        depends=["openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/phosphor-dbus-interfaces": PackageDef(
        depends=["openbmc/sdbusplus"],
        build_type="meson",
        config_flags=[
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    ),
    "openbmc/phosphor-logging": PackageDef(
        depends=[
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            f"-Dyaml_dir={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    ),
    "openbmc/phosphor-objmgr": PackageDef(
        depends=[
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        build_type="autoconf",
    ),
    "openbmc/pldm": PackageDef(
        depends=[
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        build_type="meson",
        config_flags=[
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdbusplus": PackageDef(
        build_type="meson",
        custom_post_dl=[
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        config_flags=[
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/sdeventplus": PackageDef(
        depends=["Naios/function2", "openbmc/stdplus"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
    "openbmc/stdplus": PackageDef(
        depends=["fmtlib/fmt", "martinmoene/span-lite"],
        build_type="meson",
        config_flags=[
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    ),
}  # type: Dict[str, PackageDef]

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)


class Package(threading.Thread):
    """Class used to build the Docker stages for each package.

    Generally, this class should not be instantiated directly but through
    Package.generate_all().
    """

    # Copy the packages dictionary.
    packages = packages.copy()

    # Lock used for thread-safety.
    lock = threading.Lock()

    def __init__(self, pkg: str):
        """ pkg - The name of this package (ex. foo/bar ) """
        super(Package, self).__init__()

        self.package = pkg
        self.exception = None  # type: Optional[Exception]

        # Reference to this package's definition.
        self.pkg_def = Package.packages[pkg]
        self.pkg_def["__package"] = self

    def run(self) -> None:
        """ Thread 'run' function.  Builds the Docker stage. """

        # In case this package has no rev, fetch it from Github.
        self._update_rev()

        # Find all the Package objects that this package depends on.
        #   This section is locked because we are looking into another
        #   package's PackageDef dict, which could be being modified.
        # (Using 'with' guarantees the lock is released even on error.)
        with Package.lock:
            deps: Iterable[Package] = [
                Package.packages[deppkg]["__package"]
                for deppkg in self.pkg_def.get("depends", [])
            ]

        # Wait until all the depends finish building.  We need them complete
        # for the "COPY" commands.
        for deppkg in deps:
            deppkg.join()

        # Generate this package's Dockerfile.
        dockerfile = f"""
FROM {docker_base_img_name}
{self._df_copycmds()}
{self._df_build()}
"""

        # Generate the resulting tag name and save it to the PackageDef.
        #   This section is locked because we are modifying the PackageDef,
        #   which can be accessed by other threads.
        with Package.lock:
            tag = Docker.tagname(self._stagename(), dockerfile)
            self.pkg_def["__tag"] = tag

        # Do the build / save any exceptions.
        try:
            Docker.build(self.package, tag, dockerfile)
        except Exception as e:
            self.exception = e

    @classmethod
    def generate_all(cls) -> None:
        """Ensure a Docker stage is created for all defined packages.

        These are done in parallel but with appropriate blocking per
        package 'depends' specifications.
        """

        # Create a Package for each defined package.
        pkg_threads = [Package(p) for p in cls.packages.keys()]

        # Start building them all.
        #   This section is locked because threads depend on each other,
        #   based on the packages, and they cannot 'join' on a thread
        #   which is not yet started.  Adding a lock here allows all the
        #   threads to start before they 'join' their dependencies.
        with Package.lock:
            for t in pkg_threads:
                t.start()

        # Wait for completion.
        for t in pkg_threads:
            t.join()
            # Check if the thread saved off its own exception.
            if t.exception:
                print(f"Package {t.package} failed!", file=sys.stderr)
                raise t.exception

    @staticmethod
    def df_all_copycmds() -> str:
        """Formulate the Dockerfile snippet necessary to copy all packages
        into the final image.
        """
        return Package.df_copycmds_set(Package.packages.keys())

    @classmethod
    def depcache(cls) -> str:
        """Create the contents of the '/tmp/depcache'.
        This file is a comma-separated list of "<pkg>:<rev>".
        """

        # This needs to be sorted for consistency.
        depcache = ""
        for pkg in sorted(cls.packages.keys()):
            depcache += "%s:%s," % (pkg, cls.packages[pkg]["rev"])
        return depcache

    def _update_rev(self) -> None:
        """ Look up the HEAD for missing a static rev. """

        if "rev" in self.pkg_def:
            return

        # Check if Jenkins/Gerrit gave us a revision and use it.
        if gerrit_project == self.package and gerrit_rev:
            print(
                f"Found Gerrit revision for {self.package}: {gerrit_rev}",
                file=sys.stderr,
            )
            self.pkg_def["rev"] = gerrit_rev
            return

        # Ask Github for all the branches.
        lookup = git("ls-remote", "--heads", f"https://github.com/{self.package}")

        # Find the branch matching {branch} (or fallback to master).
        #   This section is locked because we are modifying the PackageDef.
        with Package.lock:
            for line in lookup.split("\n"):
                if f"refs/heads/{branch}" in line:
                    self.pkg_def["rev"] = line.split()[0]
                elif "refs/heads/master" in line and "rev" not in self.pkg_def:
                    self.pkg_def["rev"] = line.split()[0]

    def _stagename(self) -> str:
        """ Create a name for the Docker stage associated with this pkg. """
        return self.package.replace("/", "-").lower()

    def _url(self) -> str:
        """ Get the URL for this package. """
        rev = self.pkg_def["rev"]

        # If the lambda exists, call it.
        if "url" in self.pkg_def:
            return self.pkg_def["url"](self.package, rev)

        # Default to the github archive URL.
        return f"https://github.com/{self.package}/archive/{rev}.tar.gz"

    def _cmd_download(self) -> str:
        """Formulate the command necessary to download and unpack to source."""

        url = self._url()
        if ".tar." not in url:
            raise NotImplementedError(
                f"Unhandled download type for {self.package}: {url}"
            )

        cmd = f"curl -L {url} | tar -x"

        if url.endswith(".bz2"):
            cmd += "j"
        elif url.endswith(".gz"):
            cmd += "z"
        else:
            raise NotImplementedError(
                f"Unknown tar flags needed for {self.package}: {url}"
            )

        return cmd

    def _cmd_cd_srcdir(self) -> str:
        """ Formulate the command necessary to 'cd' into the source dir. """
        return f"cd {self.package.split('/')[-1]}*"

    def _df_copycmds(self) -> str:
        """ Formulate the dockerfile snippet necessary to COPY all depends. """

        if "depends" not in self.pkg_def:
            return ""
        return Package.df_copycmds_set(self.pkg_def["depends"])

    @staticmethod
    def df_copycmds_set(pkgs: Iterable[str]) -> str:
        """Formulate the Dockerfile snippet necessary to COPY a set of
        packages into a Docker stage.
        """

        copy_cmds = ""

        # Sort the packages for consistency.
        for p in sorted(pkgs):
            tag = Package.packages[p]["__tag"]
            copy_cmds += f"COPY --from={tag} {prefix} {prefix}\n"
            # Workaround for upstream docker bug and multiple COPY cmds
            # https://github.com/moby/moby/issues/37965
            copy_cmds += "RUN true\n"

        return copy_cmds

    def _df_build(self) -> str:
        """Formulate the Dockerfile snippet necessary to download, build, and
        install a package into a Docker stage.
        """

        # Download and extract source.
        result = f"RUN {self._cmd_download()} && {self._cmd_cd_srcdir()} && "

        # Handle 'custom_post_dl' commands.
        custom_post_dl = self.pkg_def.get("custom_post_dl")
        if custom_post_dl:
            result += " && ".join(custom_post_dl) + " && "

        # Build and install package based on 'build_type'.
        build_type = self.pkg_def["build_type"]
        if build_type == "autoconf":
            result += self._cmd_build_autoconf()
        elif build_type == "cmake":
            result += self._cmd_build_cmake()
        elif build_type == "custom":
            result += self._cmd_build_custom()
        elif build_type == "make":
            result += self._cmd_build_make()
        elif build_type == "meson":
            result += self._cmd_build_meson()
        else:
            raise NotImplementedError(
                f"Unhandled build type for {self.package}: {build_type}"
            )

        # Handle 'custom_post_install' commands.
        custom_post_install = self.pkg_def.get("custom_post_install")
        if custom_post_install:
            result += " && " + " && ".join(custom_post_install)

        return result

    def _cmd_build_autoconf(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "./bootstrap.sh && "
        result += f"{env} ./configure {configure_flags} {options} && "
        result += f"make -j{proc_count} && make install"
        return result

    def _cmd_build_cmake(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = "mkdir builddir && cd builddir && "
        result += f"{env} cmake {cmake_flags} {options} .. && "
        result += "cmake --build . --target all && "
        result += "cmake --build . --target install && "
        result += "cd .."
        return result

    def _cmd_build_custom(self) -> str:
        return " && ".join(self.pkg_def.get("build_steps", []))

    def _cmd_build_make(self) -> str:
        return f"make -j{proc_count} && make install"

    def _cmd_build_meson(self) -> str:
        options = " ".join(self.pkg_def.get("config_flags", []))
        env = " ".join(self.pkg_def.get("config_env", []))
        result = f"{env} meson builddir {meson_flags} {options} && "
        result += "ninja -C builddir && ninja -C builddir install"
        return result


class Docker:
    """Class to assist with Docker interactions.  All methods are static."""

    @staticmethod
    def timestamp() -> str:
        """ Generate a timestamp for today using the ISO week. """
        today = date.today().isocalendar()
        return f"{today[0]}-W{today[1]:02}"

    @staticmethod
    def tagname(pkgname: Optional[str], dockerfile: str) -> str:
        """ Generate a tag name for a package using a hash of the Dockerfile. """
        # pkgname is None for the final (unnamed) image stage.
        result = docker_image_name
        if pkgname:
            result += "-" + pkgname

        result += ":" + Docker.timestamp()
        result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]

        return result

    @staticmethod
    def build(pkg: str, tag: str, dockerfile: str) -> None:
        """Build a docker image using the Dockerfile and tagging it with 'tag'."""

        # If we're not forcing builds, check if it already exists and skip.
        if not force_build:
            if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
                print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
                return

        # Build it.
        #   Capture the output of the 'docker build' command and send it to
        #   stderr (prefixed with the package name).  This allows us to see
        #   progress but not pollute stdout.  Later on we output the final
        #   docker tag to stdout and we want to keep that pristine.
        #
        #   Other unusual flags:
        #     --no-cache: Bypass the Docker cache if 'force_build'.
        #     --force-rm: Clean up Docker processes if they fail.
        docker.build(
            proxy_args,
            "--network=host",
            "--force-rm",
            "--no-cache=true" if force_build else "--no-cache=false",
            "-t",
            tag,
            "-",
            _in=dockerfile,
            _out=(
                lambda line: print(
                    pkg + ":", line, end="", file=sys.stderr, flush=True
                )
            ),
        )


# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:hirsute")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")

gerrit_project = os.environ.get("GERRIT_PROJECT")
gerrit_rev = os.environ.get("GERRIT_PATCHSET_REVISION")

# Set up some common variables.
username = os.environ.get("USER", "root")
homedir = os.environ.get("HOME", "/root")
gid = os.getgid()
uid = os.getuid()

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
else:
    print(
        f"Unsupported system architecture({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create base Dockerfile.
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# Sometimes the ubuntu key expires and we need a way to force an execution
# of the apt-get commands for the dbgsym-keyring.  When this happens we see
# an error like: "Release: The following signatures were invalid:"
# Insert a bogus echo that we can change here when we get this error to force
# the update.
RUN echo "ubuntu keyserver rev as of 2021-04-21"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install gpgv ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-11 \
    g++-11 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev\
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-11 \
    clang-format-11 \
    clang-tidy-11 \
    clang-tools-11 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 11 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-11 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-11

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-11 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-11 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-11 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-11.py \
  --slave /usr/bin/scan-build scan-build /usr/bin/scan-build-11

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to polute the docker cache regularly force us
# to re-run `apt-get update` daily.
RUN echo {Docker.timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.58.0
RUN pip3 install protobuf
RUN pip3 install codespell
"""

# Build the base and stage docker images.
docker_base_img_name = Docker.tagname("base", dockerfile_base)
Docker.build("base", docker_base_img_name, dockerfile_base)
Package.generate_all()

# Create the final Dockerfile.
dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{Package.df_all_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{Package.depcache()}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

# Ensure user has ability to write to /usr/local for different tool
# and data installs
RUN chown -R {username}:{username} /usr/local/share

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = Docker.tagname(None, dockerfile)
Docker.build("final", docker_final_img_name, dockerfile)

# Print the tag of the final image.
print(docker_final_img_name)