1#!/usr/bin/env python3 2# 3# Build the required docker image to run package unit tests 4# 5# Script Variables: 6# DOCKER_IMG_NAME: <optional, the name of the docker image to generate> 7# default is openbmc/ubuntu-unit-test 8# DISTRO: <optional, the distro to build a docker image against> 9# default is ubuntu:focal 10# FORCE_DOCKER_BUILD: <optional, a non-zero value with force all Docker 11# images to be rebuilt rather than reusing caches.> 12# BUILD_URL: <optional, used to detect running under CI context 13# (ex. Jenkins)> 14# BRANCH: <optional, branch to build from each of the openbmc/ 15# repositories> 16# default is master, which will be used if input branch not 17# provided or not found 18# UBUNTU_MIRROR: <optional, the URL of a mirror of Ubuntu to override the 19# default ones in /etc/apt/sources.list> 20# default is empty, and no mirror is used. 21# http_proxy The HTTP address of the proxy server to connect to. 22# Default: "", proxy is not setup if this is not set 23 24import os 25import sys 26import threading 27from datetime import date 28from hashlib import sha256 29from sh import docker, git, nproc, uname 30 31# Read a bunch of environment variables. 32docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test") 33force_build = os.environ.get("FORCE_DOCKER_BUILD") 34is_automated_ci_build = os.environ.get("BUILD_URL", False) 35distro = os.environ.get("DISTRO", "ubuntu:focal") 36branch = os.environ.get("BRANCH", "master") 37ubuntu_mirror = os.environ.get("UBUNTU_MIRROR") 38http_proxy = os.environ.get("http_proxy") 39prefix = "/usr/local" 40 41# Set up some common variables. 42proc_count = nproc().strip() 43username = os.environ.get("USER") 44homedir = os.environ.get("HOME") 45gid = os.getgid() 46uid = os.getuid() 47 48# Determine the architecture for Docker. 49arch = uname("-m").strip() 50if arch == "ppc64le": 51 docker_base = "ppc64le/" 52elif arch == "x86_64": 53 docker_base = "" 54else: 55 print( 56 f"Unsupported system architecture({arch}) found for docker image", 57 file=sys.stderr, 58 ) 59 sys.exit(1) 60 61# Packages to include in image. 62packages = { 63 "boost": { 64 "rev": "1.74.0", 65 "url": ( 66 lambda pkg, rev: f"https://dl.bintray.com/boostorg/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2" 67 ), 68 "build_type": "custom", 69 "build_steps": [ 70 f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine", 71 "./b2", 72 f"./b2 install --prefix={prefix}", 73 ], 74 }, 75 "USCiLab/cereal": { 76 "rev": "v1.3.0", 77 "build_type": "custom", 78 "build_steps": [f"cp -a include/cereal/ {prefix}/include/"], 79 }, 80 "catchorg/Catch2": { 81 "rev": "v2.12.2", 82 "build_type": "cmake", 83 "config_flags": ["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"], 84 }, 85 "CLIUtils/CLI11": { 86 "rev": "v1.9.0", 87 "build_type": "cmake", 88 "config_flags": [ 89 "-DBUILD_TESTING=OFF", 90 "-DCLI11_BUILD_DOCS=OFF", 91 "-DCLI11_BUILD_EXAMPLES=OFF", 92 ], 93 }, 94 "fmtlib/fmt": { 95 "rev": "6.2.1", 96 "build_type": "cmake", 97 "config_flags": [ 98 "-DFMT_DOC=OFF", 99 "-DFMT_TEST=OFF", 100 ], 101 }, 102 # Snapshot from 2020-01-03 103 "Naios/function2": { 104 "rev": "3a0746bf5f601dfed05330aefcb6854354fce07d", 105 "build_type": "custom", 106 "build_steps": [ 107 f"mkdir {prefix}/include/function2", 108 f"cp include/function2/function2.hpp {prefix}/include/function2/", 109 ], 110 }, 111 # Snapshot from 2020-02-13 112 "google/googletest": { 113 "rev": "23b2a3b1cf803999fb38175f6e9e038a4495c8a5", 114 "build_type": "cmake", 115 "config_env": ["CXXFLAGS=-std=c++17"], 116 "config_flags": ["-DTHREADS_PREFER_PTHREAD_FLAG=ON"], 117 }, 118 # Release 2020-08-06 119 "nlohmann/json": { 120 "rev": "v3.9.1", 121 "build_type": "custom", 122 "build_steps": [ 123 f"mkdir {prefix}/include/nlohmann", 124 f"cp include/nlohmann/json.hpp {prefix}/include/nlohmann", 125 f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp", 126 ], 127 }, 128 # Snapshot from 2019-05-24 129 "linux-test-project/lcov": { 130 "rev": "75fbae1cfc5027f818a0bb865bf6f96fab3202da", 131 "build_type": "make", 132 }, 133 # dev-5.0 2019-05-03 134 "openbmc/linux": { 135 "rev": "8bf6567e77f7aa68975b7c9c6d044bba690bf327", 136 "build_type": "custom", 137 "build_steps": [ 138 f"make -j{proc_count} defconfig", 139 f"make INSTALL_HDR_PATH={prefix} headers_install", 140 ], 141 }, 142 # Snapshot from 2019-09-03 143 "LibVNC/libvncserver": { 144 "rev": "1354f7f1bb6962dab209eddb9d6aac1f03408110", 145 "build_type": "cmake", 146 }, 147 "martinmoene/span-lite": { 148 "rev": "v0.7.0", 149 "build_type": "cmake", 150 "config_flags": [ 151 "-DSPAN_LITE_OPT_BUILD_TESTS=OFF", 152 ], 153 }, 154 # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_5.0.1.bb 155 "leethomason/tinyxml2": { 156 "rev": "37bc3aca429f0164adf68c23444540b4a24b5778", 157 "build_type": "cmake", 158 }, 159 # version from /meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb 160 "CPPAlliance/url": { 161 "rev": "a56ae0df6d3078319755fbaa67822b4fa7fd352b", 162 "build_type": "cmake", 163 "config_flags": [ 164 "-DBOOST_URL_BUILD_EXAMPLES=OFF", 165 "-DBOOST_URL_BUILD_TESTS=OFF", 166 "-DBOOST_URL_STANDALONE=ON", 167 ], 168 }, 169 # version from meta-openembedded/meta-oe/recipes-devtools/valijson/valijson_git.bb 170 "tristanpenman/valijson": { 171 "rev": "c2f22fddf599d04dc33fcd7ed257c698a05345d9", 172 "build_type": "cmake", 173 "config_flags": [ 174 "-DBUILD_TESTS=0", 175 "-DINSTALL_HEADERS=1", 176 ], 177 }, 178 # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb 179 "nlohmann/fifo_map": { 180 "rev": "0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9", 181 "build_type": "custom", 182 "build_steps": [f"cp src/fifo_map.hpp {prefix}/include/"], 183 }, 184 "open-power/pdbg": {"build_type": "autoconf"}, 185 "openbmc/gpioplus": { 186 "depends": ["openbmc/stdplus"], 187 "build_type": "meson", 188 "config_flags": [ 189 "-Dexamples=false", 190 "-Dtests=disabled", 191 ], 192 }, 193 "openbmc/phosphor-dbus-interfaces": { 194 "depends": ["openbmc/sdbusplus"], 195 "build_type": "meson", 196 "config_flags": [ 197 "-Ddata_com_ibm=true", 198 "-Ddata_org_open_power=true", 199 ], 200 }, 201 "openbmc/phosphor-logging": { 202 "depends": [ 203 "USCiLab/cereal", 204 "nlohmann/fifo_map", 205 "openbmc/phosphor-dbus-interfaces", 206 "openbmc/sdbusplus", 207 "openbmc/sdeventplus", 208 ], 209 "build_type": "autoconf", 210 "config_flags": [ 211 "--enable-metadata-processing", 212 f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml", 213 ], 214 }, 215 "openbmc/phosphor-objmgr": { 216 "depends": [ 217 "boost", 218 "leethomason/tinyxml2", 219 "openbmc/phosphor-logging", 220 "openbmc/sdbusplus", 221 ], 222 "build_type": "autoconf", 223 }, 224 "openbmc/pldm": { 225 "depends": [ 226 "CLIUtils/CLI11", 227 "boost", 228 "nlohmann/json", 229 "openbmc/phosphor-dbus-interfaces", 230 "openbmc/phosphor-logging", 231 "openbmc/sdbusplus", 232 "openbmc/sdeventplus", 233 ], 234 "build_type": "meson", 235 "config_flags": [ 236 "-Dlibpldm-only=enabled", 237 "-Doem-ibm=enabled", 238 "-Dtests=disabled", 239 ], 240 }, 241 "openbmc/sdbusplus": { 242 "build_type": "meson", 243 "custom_post_dl": [ 244 "cd tools", 245 f"./setup.py install --root=/ --prefix={prefix}", 246 "cd ..", 247 ], 248 "config_flags": [ 249 "-Dexamples=disabled", 250 "-Dtests=disabled", 251 ], 252 }, 253 "openbmc/sdeventplus": { 254 "depends": ["Naios/function2", "openbmc/stdplus"], 255 "build_type": "meson", 256 "config_flags": [ 257 "-Dexamples=false", 258 "-Dtests=disabled", 259 ], 260 }, 261 "openbmc/stdplus": { 262 "depends": ["fmtlib/fmt", "martinmoene/span-lite"], 263 "build_type": "meson", 264 "config_flags": [ 265 "-Dexamples=false", 266 "-Dtests=disabled", 267 ], 268 }, 269} 270 271 272def pkg_rev(pkg): 273 return packages[pkg]["rev"] 274 275 276def pkg_stagename(pkg): 277 return pkg.replace("/", "-").lower() 278 279 280def pkg_url(pkg): 281 if "url" in packages[pkg]: 282 return packages[pkg]["url"](pkg, pkg_rev(pkg)) 283 return f"https://github.com/{pkg}/archive/{pkg_rev(pkg)}.tar.gz" 284 285 286def pkg_download(pkg): 287 url = pkg_url(pkg) 288 if ".tar." not in url: 289 raise NotImplementedError(f"Unhandled download type for {pkg}: {url}") 290 cmd = f"curl -L {url} | tar -x" 291 if url.endswith(".bz2"): 292 cmd += "j" 293 if url.endswith(".gz"): 294 cmd += "z" 295 return cmd 296 297 298def pkg_copycmds(pkg=None): 299 pkgs = [] 300 if pkg: 301 if "depends" not in packages[pkg]: 302 return "" 303 pkgs = sorted(packages[pkg]["depends"]) 304 else: 305 pkgs = sorted(packages.keys()) 306 307 copy_cmds = "" 308 for p in pkgs: 309 copy_cmds += f"COPY --from={packages[p]['__tag']} {prefix} {prefix}\n" 310 # Workaround for upstream docker bug and multiple COPY cmds 311 # https://github.com/moby/moby/issues/37965 312 copy_cmds += "RUN true\n" 313 return copy_cmds 314 315 316def pkg_cd_srcdir(pkg): 317 return f"cd {pkg.split('/')[-1]}* && " 318 319 320def pkg_build(pkg): 321 result = f"RUN {pkg_download(pkg)} && " 322 result += pkg_cd_srcdir(pkg) 323 324 if "custom_post_dl" in packages[pkg]: 325 result += " && ".join(packages[pkg]["custom_post_dl"]) + " && " 326 327 build_type = packages[pkg]["build_type"] 328 if build_type == "autoconf": 329 result += pkg_build_autoconf(pkg) 330 elif build_type == "cmake": 331 result += pkg_build_cmake(pkg) 332 elif build_type == "custom": 333 result += pkg_build_custom(pkg) 334 elif build_type == "make": 335 result += pkg_build_make(pkg) 336 elif build_type == "meson": 337 result += pkg_build_meson(pkg) 338 else: 339 raise NotImplementedError( 340 f"Unhandled build type for {pkg}: {packages[pkg]['build_type']}" 341 ) 342 343 return result 344 345 346def pkg_build_autoconf(pkg): 347 options = " ".join(packages[pkg].get("config_flags", [])) 348 env = " ".join(packages[pkg].get("config_env", [])) 349 result = "./bootstrap.sh && " 350 result += f"{env} ./configure {configure_flags} {options} && " 351 result += f"make -j{proc_count} && " 352 result += "make install " 353 return result 354 355 356def pkg_build_cmake(pkg): 357 options = " ".join(packages[pkg].get("config_flags", [])) 358 env = " ".join(packages[pkg].get("config_env", [])) 359 result = "mkdir builddir && cd builddir && " 360 result += f"{env} cmake {cmake_flags} {options} .. && " 361 result += "cmake --build . --target all && " 362 result += "cmake --build . --target install && " 363 result += "cd .. " 364 return result 365 366 367def pkg_build_custom(pkg): 368 return " && ".join(packages[pkg].get("build_steps", [])) 369 370 371def pkg_build_make(pkg): 372 result = f"make -j{proc_count} && " 373 result += "make install " 374 return result 375 376 377def pkg_build_meson(pkg): 378 options = " ".join(packages[pkg].get("config_flags", [])) 379 env = " ".join(packages[pkg].get("config_env", [])) 380 result = f"{env} meson builddir {meson_flags} {options} && " 381 result += "ninja -C builddir && ninja -C builddir install " 382 return result 383 384 385pkg_lock = threading.Lock() 386 387 388def pkg_generate(pkg): 389 class pkg_thread(threading.Thread): 390 def run(self): 391 pkg_lock.acquire() 392 deps = [ 393 packages[deppkg]["__thread"] 394 for deppkg in sorted(packages[pkg].get("depends", [])) 395 ] 396 pkg_lock.release() 397 for deppkg in deps: 398 deppkg.join() 399 400 dockerfile = f""" 401FROM {docker_base_img_name} 402{pkg_copycmds(pkg)} 403{pkg_build(pkg)} 404""" 405 406 pkg_lock.acquire() 407 tag = docker_img_tagname(pkg_stagename(pkg), dockerfile) 408 packages[pkg]["__tag"] = tag 409 pkg_lock.release() 410 411 try: 412 self.exception = None 413 docker_img_build(pkg, tag, dockerfile) 414 except Exception as e: 415 self.package = pkg 416 self.exception = e 417 418 packages[pkg]["__thread"] = pkg_thread() 419 420 421def pkg_generate_packages(): 422 for pkg in packages.keys(): 423 pkg_generate(pkg) 424 425 pkg_lock.acquire() 426 pkg_threads = [packages[p]["__thread"] for p in packages.keys()] 427 for t in pkg_threads: 428 t.start() 429 pkg_lock.release() 430 431 for t in pkg_threads: 432 t.join() 433 if t.exception: 434 print(f"Package {t.package} failed!", file=sys.stderr) 435 raise t.exception 436 437def timestamp(): 438 today = date.today().isocalendar() 439 return f"{today[0]}-W{today[1]:02}" 440 441def docker_img_tagname(pkgname, dockerfile): 442 result = docker_image_name 443 if pkgname: 444 result += "-" + pkgname 445 result += ":" + timestamp() 446 result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16] 447 return result 448 449 450def docker_img_build(pkg, tag, dockerfile): 451 if not force_build and pkg != "final": 452 # TODO: the 'final' is here because we do not tag the final image yet 453 # so we always need to rebuild it. This will be changed in a future 454 # commit so that we tag even the final image. 455 if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'): 456 print(f"Image {tag} already exists. Skipping.", file=sys.stderr) 457 return 458 459 docker.build( 460 proxy_args, 461 "--network=host", 462 "--force-rm", 463 "--no-cache=true" if force_build else "--no-cache=false", 464 "-t", 465 tag, 466 "-", 467 _in=dockerfile, 468 _out=( 469 lambda line: print(pkg + ":", line, end="", file=sys.stderr, flush=True) 470 ), 471 ) 472 473 474# Look up the HEAD for missing a static rev. 475pkg_lookups = {} 476for pkg in packages.keys(): 477 if "rev" in packages[pkg]: 478 continue 479 pkg_lookups[pkg] = git( 480 "ls-remote", "--heads", f"https://github.com/{pkg}", _bg=True 481 ) 482for pkg, result in pkg_lookups.items(): 483 for line in result.stdout.decode().split("\n"): 484 if f"refs/heads/{branch}" in line: 485 packages[pkg]["rev"] = line.strip().split()[0] 486 elif "refs/heads/master" in line and p not in packages: 487 packages[pkg]["rev"] = line.strip().split()[0] 488 489# Create the contents of the '/tmp/depcache'. 490# This needs to be sorted for consistency. 491depcache = "" 492for pkg in sorted(packages.keys()): 493 depcache += "%s:%s," % (pkg, pkg_rev(pkg)) 494 495# Define common flags used for builds 496configure_flags = " ".join( 497 [ 498 f"--prefix={prefix}", 499 ] 500) 501cmake_flags = " ".join( 502 [ 503 "-DBUILD_SHARED_LIBS=ON", 504 "-DCMAKE_BUILD_TYPE=RelWithDebInfo", 505 f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}", 506 "-GNinja", 507 "-DCMAKE_MAKE_PROGRAM=ninja", 508 ] 509) 510meson_flags = " ".join( 511 [ 512 "--wrap-mode=nodownload", 513 f"-Dprefix={prefix}", 514 ] 515) 516 517# Special flags if setting up a deb mirror. 518mirror = "" 519if "ubuntu" in distro and ubuntu_mirror: 520 mirror = f""" 521RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\ 522 echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\ 523 echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\ 524 echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\ 525 echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list 526""" 527 528# Special flags for proxying. 529proxy_cmd = "" 530proxy_args = [] 531if http_proxy: 532 proxy_cmd = f""" 533RUN echo "[http]" >> {homedir}/.gitconfig && \ 534 echo "proxy = {http_proxy}" >> {homedir}/.gitconfig 535""" 536 proxy_args.extend( 537 [ 538 "--build-arg", 539 f"http_proxy={http_proxy}", 540 "--build-arg", 541 "https_proxy={https_proxy}", 542 ] 543 ) 544 545# Create docker image that can run package unit tests 546dockerfile_base = f""" 547FROM {docker_base}{distro} 548 549{mirror} 550 551ENV DEBIAN_FRONTEND noninteractive 552 553ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/" 554 555# We need the keys to be imported for dbgsym repos 556# New releases have a package, older ones fall back to manual fetching 557# https://wiki.ubuntu.com/Debug%20Symbol%20Packages 558RUN apt-get update && apt-get dist-upgrade -yy && \ 559 ( apt-get install ubuntu-dbgsym-keyring || \ 560 ( apt-get install -yy dirmngr && \ 561 apt-key adv --keyserver keyserver.ubuntu.com \ 562 --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) ) 563 564# Parse the current repo list into a debug repo list 565RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list 566 567# Remove non-existent debug repos 568RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list 569 570RUN cat /etc/apt/sources.list.d/debug.list 571 572RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \ 573 gcc-10 \ 574 g++-10 \ 575 libc6-dbg \ 576 libc6-dev \ 577 libtool \ 578 bison \ 579 libdbus-1-dev \ 580 flex \ 581 cmake \ 582 python3 \ 583 python3-dev\ 584 python3-yaml \ 585 python3-mako \ 586 python3-pip \ 587 python3-setuptools \ 588 python3-git \ 589 python3-socks \ 590 pkg-config \ 591 autoconf \ 592 autoconf-archive \ 593 libsystemd-dev \ 594 systemd \ 595 libssl-dev \ 596 libevdev-dev \ 597 libevdev2-dbgsym \ 598 libjpeg-dev \ 599 libpng-dev \ 600 ninja-build \ 601 sudo \ 602 curl \ 603 git \ 604 dbus \ 605 iputils-ping \ 606 clang-10 \ 607 clang-format-10 \ 608 clang-tidy-10 \ 609 clang-tools-10 \ 610 shellcheck \ 611 npm \ 612 iproute2 \ 613 libnl-3-dev \ 614 libnl-genl-3-dev \ 615 libconfig++-dev \ 616 libsnmp-dev \ 617 valgrind \ 618 valgrind-dbg \ 619 libpam0g-dev \ 620 xxd \ 621 libi2c-dev \ 622 wget \ 623 libldap2-dev \ 624 libprotobuf-dev \ 625 libperlio-gzip-perl \ 626 libjson-perl \ 627 protobuf-compiler \ 628 libgpiod-dev \ 629 device-tree-compiler \ 630 cppcheck \ 631 libpciaccess-dev \ 632 libmimetic-dev \ 633 libxml2-utils \ 634 libxml-simple-perl 635 636RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \ 637 --slave /usr/bin/g++ g++ /usr/bin/g++-10 \ 638 --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \ 639 --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \ 640 --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10 641 642RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \ 643 --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \ 644 --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \ 645 --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \ 646 --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py 647 648""" 649 650if is_automated_ci_build: 651 dockerfile_base += f""" 652# Run an arbitrary command to polute the docker cache regularly force us 653# to re-run `apt-get update` daily. 654RUN echo {timestamp()} 655RUN apt-get update && apt-get dist-upgrade -yy 656 657""" 658 659dockerfile_base += f""" 660RUN pip3 install inflection 661RUN pip3 install pycodestyle 662RUN pip3 install jsonschema 663RUN pip3 install meson==0.54.3 664RUN pip3 install protobuf 665""" 666 667# Build the stage docker images. 668docker_base_img_name = docker_img_tagname("base", dockerfile_base) 669docker_img_build("base", docker_base_img_name, dockerfile_base) 670pkg_generate_packages() 671 672dockerfile = f""" 673# Build the final output image 674FROM {docker_base_img_name} 675{pkg_copycmds()} 676 677# Some of our infrastructure still relies on the presence of this file 678# even though it is no longer needed to rebuild the docker environment 679# NOTE: The file is sorted to ensure the ordering is stable. 680RUN echo '{depcache}' > /tmp/depcache 681 682# Final configuration for the workspace 683RUN grep -q {gid} /etc/group || groupadd -g {gid} {username} 684RUN mkdir -p "{os.path.dirname(homedir)}" 685RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username} 686RUN sed -i '1iDefaults umask=000' /etc/sudoers 687RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers 688 689{proxy_cmd} 690 691RUN /bin/bash 692""" 693 694# Do the final docker build 695docker_img_build("final", docker_image_name, dockerfile) 696