#!/usr/bin/env python3
#
# Build the required docker image to run package unit tests
#
# Script Variables:
#   DOCKER_IMAGE_NAME: <optional, the name of the docker image to generate>
#                     default is openbmc/ubuntu-unit-test
#   DISTRO:           <optional, the distro to build a docker image against>
#                     default is ubuntu:focal
#   FORCE_DOCKER_BUILD: <optional, a non-zero value will force all Docker
#                     images to be rebuilt rather than reusing caches.>
#   BUILD_URL:        <optional, used to detect running under CI context
#                     (ex. Jenkins)>
#   BRANCH:           <optional, branch to build from each of the openbmc/
#                     repositories>
#                     default is master, which will be used if the input branch
#                     is not provided or not found
#   UBUNTU_MIRROR:    <optional, the URL of a mirror of Ubuntu to override the
#                     default ones in /etc/apt/sources.list>
#                     default is empty, and no mirror is used.
#   http_proxy        The HTTP address of the proxy server to connect to.
#                     Default: "", proxy is not set up if this is not set.

import os
import sys
import threading
from datetime import date
from hashlib import sha256
from sh import docker, git, nproc, uname

# Read a bunch of environment variables.
docker_image_name = os.environ.get("DOCKER_IMAGE_NAME", "openbmc/ubuntu-unit-test")
force_build = os.environ.get("FORCE_DOCKER_BUILD")
is_automated_ci_build = os.environ.get("BUILD_URL", False)
distro = os.environ.get("DISTRO", "ubuntu:focal")
branch = os.environ.get("BRANCH", "master")
ubuntu_mirror = os.environ.get("UBUNTU_MIRROR")
http_proxy = os.environ.get("http_proxy")
prefix = "/usr/local"

# Set up some common variables.
proc_count = nproc().strip()
username = os.environ.get("USER")
homedir = os.environ.get("HOME")
gid = os.getgid()
uid = os.getuid()

# Determine the architecture for Docker.
arch = uname("-m").strip()
if arch == "ppc64le":
    docker_base = "ppc64le/"
elif arch == "x86_64":
    docker_base = ""
else:
    print(
        f"Unsupported system architecture ({arch}) found for docker image",
        file=sys.stderr,
    )
    sys.exit(1)

# Packages to include in image.
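# Each entry below is keyed by GitHub "org/repo" (or a bare name with an
# explicit "url") and supports the following keys, as interpreted by the
# pkg_* helpers later in this script:
#   rev:            tag or commit to download; if omitted, the HEAD of
#                   BRANCH (falling back to master) is looked up via
#                   `git ls-remote`.
#   url:            optional lambda producing a download URL; the default is
#                   the GitHub archive tarball for the given rev.
#   depends:        other entries whose installed trees are COPY'd in before
#                   building this one.
#   build_type:     one of autoconf, cmake, custom, make, meson.
#   config_flags:   extra arguments passed to configure/cmake/meson.
#   config_env:     environment assignments prefixed to the configure step.
#   build_steps:    explicit shell commands for the "custom" build type.
#   custom_post_dl: commands run after download, before the build.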
packages = {
    "boost": {
        "rev": "1.75.0",
        "url": (
            lambda pkg, rev: f"https://dl.bintray.com/boostorg/release/{rev}/source/{pkg}_{rev.replace('.', '_')}.tar.bz2"
        ),
        "build_type": "custom",
        "build_steps": [
            f"./bootstrap.sh --prefix={prefix} --with-libraries=context,coroutine",
            "./b2",
            f"./b2 install --prefix={prefix}",
        ],
    },
    "USCiLab/cereal": {
        "rev": "v1.3.0",
        "build_type": "custom",
        "build_steps": [f"cp -a include/cereal/ {prefix}/include/"],
    },
    "catchorg/Catch2": {
        "rev": "v2.12.2",
        "build_type": "cmake",
        "config_flags": ["-DBUILD_TESTING=OFF", "-DCATCH_INSTALL_DOCS=OFF"],
    },
    "CLIUtils/CLI11": {
        "rev": "v1.9.1",
        "build_type": "cmake",
        "config_flags": [
            "-DBUILD_TESTING=OFF",
            "-DCLI11_BUILD_DOCS=OFF",
            "-DCLI11_BUILD_EXAMPLES=OFF",
        ],
    },
    "fmtlib/fmt": {
        "rev": "7.1.3",
        "build_type": "cmake",
        "config_flags": [
            "-DFMT_DOC=OFF",
            "-DFMT_TEST=OFF",
        ],
    },
    # Snapshot from 2020-01-03
    "Naios/function2": {
        "rev": "3a0746bf5f601dfed05330aefcb6854354fce07d",
        "build_type": "custom",
        "build_steps": [
            f"mkdir {prefix}/include/function2",
            f"cp include/function2/function2.hpp {prefix}/include/function2/",
        ],
    },
    "google/googletest": {
        "rev": "release-1.10.0",
        "build_type": "cmake",
        "config_env": ["CXXFLAGS=-std=c++17"],
        "config_flags": ["-DTHREADS_PREFER_PTHREAD_FLAG=ON"],
    },
    # Release 2020-08-06
    "nlohmann/json": {
        "rev": "v3.9.1",
        "build_type": "custom",
        "build_steps": [
            f"mkdir {prefix}/include/nlohmann",
            f"cp single_include/nlohmann/json.hpp {prefix}/include/nlohmann",
            f"ln -s {prefix}/include/nlohmann/json.hpp {prefix}/include/json.hpp",
        ],
    },
    # Snapshot from 2019-05-24
    "linux-test-project/lcov": {
        "rev": "v1.15",
        "build_type": "make",
    },
    # dev-5.8 2021-01-11
    "openbmc/linux": {
        "rev": "3cc95ae40716e56f81b69615781f54c78079042d",
        "build_type": "custom",
        "build_steps": [
            f"make -j{proc_count} defconfig",
            f"make INSTALL_HDR_PATH={prefix} headers_install",
        ],
    },
    # Snapshot from 2020-06-13
    "LibVNC/libvncserver": {
        "rev": "LibVNCServer-0.9.13",
        "build_type": "cmake",
    },
    "martinmoene/span-lite": {
        "rev": "v0.8.1",
        "build_type": "cmake",
        "config_flags": [
            "-DSPAN_LITE_OPT_BUILD_TESTS=OFF",
        ],
    },
    # version from meta-openembedded/meta-oe/recipes-support/libtinyxml2/libtinyxml2_8.0.0.bb
    "leethomason/tinyxml2": {
        "rev": "8.0.0",
        "build_type": "cmake",
    },
    # version from meta-openembedded/meta-oe/recipes-devtools/boost-url/boost-url_git.bb
    "CPPAlliance/url": {
        "rev": "a56ae0df6d3078319755fbaa67822b4fa7fd352b",
        "build_type": "cmake",
        "config_flags": [
            "-DBOOST_URL_BUILD_EXAMPLES=OFF",
            "-DBOOST_URL_BUILD_TESTS=OFF",
            "-DBOOST_URL_STANDALONE=ON",
        ],
    },
    # version from meta-openembedded/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.3.bb
    # Snapshot from 2020-12-02 - fix for curlpp dependency
    "tristanpenman/valijson": {
        "rev": "8cc83c8be9c1c927f5da952b2333b30e5f0353be",
        "build_type": "cmake",
        "config_flags": [
            "-Dvalijson_BUILD_TESTS=0",
            "-Dvalijson_INSTALL_HEADERS=1",
        ],
    },
    # version from meta-openembedded/meta-oe/recipes-devtools/nlohmann-fifo/nlohmann-fifo_git.bb
    "nlohmann/fifo_map": {
        "rev": "0dfbf5dacbb15a32c43f912a7e66a54aae39d0f9",
        "build_type": "custom",
        "build_steps": [f"cp src/fifo_map.hpp {prefix}/include/"],
    },
    "open-power/pdbg": {"build_type": "autoconf"},
    "openbmc/gpioplus": {
        "depends": ["openbmc/stdplus"],
        "build_type": "meson",
        "config_flags": [
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    },
    "openbmc/phosphor-dbus-interfaces": {
        "depends": ["openbmc/sdbusplus"],
        "build_type": "meson",
        "config_flags": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
    },
    "openbmc/phosphor-logging": {
        "depends": [
            "USCiLab/cereal",
            "nlohmann/fifo_map",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        "build_type": "autoconf",
        "config_flags": [
            "--enable-metadata-processing",
            f"YAML_DIR={prefix}/share/phosphor-dbus-yaml/yaml",
        ],
    },
    "openbmc/phosphor-objmgr": {
        "depends": [
            "boost",
            "leethomason/tinyxml2",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
        ],
        "build_type": "autoconf",
    },
    "openbmc/pldm": {
        "depends": [
            "CLIUtils/CLI11",
            "boost",
            "nlohmann/json",
            "openbmc/phosphor-dbus-interfaces",
            "openbmc/phosphor-logging",
            "openbmc/sdbusplus",
            "openbmc/sdeventplus",
        ],
        "build_type": "meson",
        "config_flags": [
            "-Dlibpldm-only=enabled",
            "-Doem-ibm=enabled",
            "-Dtests=disabled",
        ],
    },
    "openbmc/sdbusplus": {
        "build_type": "meson",
        "custom_post_dl": [
            "cd tools",
            f"./setup.py install --root=/ --prefix={prefix}",
            "cd ..",
        ],
        "config_flags": [
            "-Dexamples=disabled",
            "-Dtests=disabled",
        ],
    },
    "openbmc/sdeventplus": {
        "depends": ["Naios/function2", "openbmc/stdplus"],
        "build_type": "meson",
        "config_flags": [
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    },
    "openbmc/stdplus": {
        "depends": ["fmtlib/fmt", "martinmoene/span-lite"],
        "build_type": "meson",
        "config_flags": [
            "-Dexamples=false",
            "-Dtests=disabled",
        ],
    },
}


def pkg_rev(pkg):
    return packages[pkg]["rev"]


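# Derive a tag-friendly stage name from the "org/repo" string: Docker
# repository names must be lowercase, and the "/" would otherwise add an
# extra path component to the generated tag.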
def pkg_stagename(pkg):
    return pkg.replace("/", "-").lower()


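# Default to the GitHub archive tarball for the package's rev unless the
# package supplies its own "url" lambda (e.g. boost above).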
def pkg_url(pkg):
    if "url" in packages[pkg]:
        return packages[pkg]["url"](pkg, pkg_rev(pkg))
    return f"https://github.com/{pkg}/archive/{pkg_rev(pkg)}.tar.gz"


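# Emit the shell snippet that downloads and unpacks a package inside the
# container: stream the tarball through tar, choosing -j or -z based on the
# URL's compression suffix.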
def pkg_download(pkg):
    url = pkg_url(pkg)
    if ".tar." not in url:
        raise NotImplementedError(f"Unhandled download type for {pkg}: {url}")
    cmd = f"curl -L {url} | tar -x"
    if url.endswith(".bz2"):
        cmd += "j"
    if url.endswith(".gz"):
        cmd += "z"
    return cmd


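# Generate Dockerfile COPY directives that pull the install trees of either
# one package's dependencies (when 'pkg' is given) or of every package (for
# the final image) out of their per-package stage images.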
def pkg_copycmds(pkg=None):
    pkgs = []
    if pkg:
        if "depends" not in packages[pkg]:
            return ""
        pkgs = sorted(packages[pkg]["depends"])
    else:
        pkgs = sorted(packages.keys())

    copy_cmds = ""
    for p in pkgs:
        copy_cmds += f"COPY --from={packages[p]['__tag']} {prefix} {prefix}\n"
        # Workaround for upstream docker bug and multiple COPY cmds
        # https://github.com/moby/moby/issues/37965
        copy_cmds += "RUN true\n"
    return copy_cmds


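# GitHub archive tarballs unpack into a "<repo>-<rev>" directory, so cd into
# it by globbing on the repository basename.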
def pkg_cd_srcdir(pkg):
    return f"cd {pkg.split('/')[-1]}* && "


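# Assemble the single Dockerfile RUN command that builds a package: download,
# cd into the source tree, any custom post-download commands, then the recipe
# selected by its build_type.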
def pkg_build(pkg):
    result = f"RUN {pkg_download(pkg)} && "
    result += pkg_cd_srcdir(pkg)

    if "custom_post_dl" in packages[pkg]:
        result += " && ".join(packages[pkg]["custom_post_dl"]) + " && "

    build_type = packages[pkg]["build_type"]
    if build_type == "autoconf":
        result += pkg_build_autoconf(pkg)
    elif build_type == "cmake":
        result += pkg_build_cmake(pkg)
    elif build_type == "custom":
        result += pkg_build_custom(pkg)
    elif build_type == "make":
        result += pkg_build_make(pkg)
    elif build_type == "meson":
        result += pkg_build_meson(pkg)
    else:
        raise NotImplementedError(
            f"Unhandled build type for {pkg}: {packages[pkg]['build_type']}"
        )

    return result


def pkg_build_autoconf(pkg):
    options = " ".join(packages[pkg].get("config_flags", []))
    env = " ".join(packages[pkg].get("config_env", []))
    result = "./bootstrap.sh && "
    result += f"{env} ./configure {configure_flags} {options} && "
    result += f"make -j{proc_count} && "
    result += "make install "
    return result


def pkg_build_cmake(pkg):
    options = " ".join(packages[pkg].get("config_flags", []))
    env = " ".join(packages[pkg].get("config_env", []))
    result = "mkdir builddir && cd builddir && "
    result += f"{env} cmake {cmake_flags} {options} .. && "
    result += "cmake --build . --target all && "
    result += "cmake --build . --target install && "
    result += "cd .. "
    return result


def pkg_build_custom(pkg):
    return " && ".join(packages[pkg].get("build_steps", []))


def pkg_build_make(pkg):
    result = f"make -j{proc_count} && "
    result += "make install "
    return result


def pkg_build_meson(pkg):
    options = " ".join(packages[pkg].get("config_flags", []))
    env = " ".join(packages[pkg].get("config_env", []))
    result = f"{env} meson builddir {meson_flags} {options} && "
    result += "ninja -C builddir && ninja -C builddir install "
    return result


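# Guards the shared 'packages' dict, which the per-package build threads both
# read (dependency lookups) and write ('__tag' entries).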
pkg_lock = threading.Lock()


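# Create (but do not yet start) one worker thread per package.  Each thread
# first joins the threads of its dependencies, then renders a one-package
# Dockerfile layered on the base image, derives a content-based tag, and
# builds it.  Failures are stashed on the thread for the caller to re-raise.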
def pkg_generate(pkg):
    class pkg_thread(threading.Thread):
        def run(self):
            pkg_lock.acquire()
            deps = [
                packages[deppkg]["__thread"]
                for deppkg in sorted(packages[pkg].get("depends", []))
            ]
            pkg_lock.release()
            for deppkg in deps:
                deppkg.join()

            dockerfile = f"""
FROM {docker_base_img_name}
{pkg_copycmds(pkg)}
{pkg_build(pkg)}
"""

            pkg_lock.acquire()
            tag = docker_img_tagname(pkg_stagename(pkg), dockerfile)
            packages[pkg]["__tag"] = tag
            pkg_lock.release()

            try:
                self.exception = None
                docker_img_build(pkg, tag, dockerfile)
            except Exception as e:
                self.package = pkg
                self.exception = e

    packages[pkg]["__thread"] = pkg_thread()


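# Start every package thread (under the lock so the '__thread' entries are all
# in place first) and wait for them, re-raising the first recorded failure.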
def pkg_generate_packages():
    for pkg in packages.keys():
        pkg_generate(pkg)

    pkg_lock.acquire()
    pkg_threads = [packages[p]["__thread"] for p in packages.keys()]
    for t in pkg_threads:
        t.start()
    pkg_lock.release()

    for t in pkg_threads:
        t.join()
        if t.exception:
            print(f"Package {t.package} failed!", file=sys.stderr)
            raise t.exception


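# ISO year/week stamp, e.g. "2021-W02" (illustrative).  Used in image tags so
# cached images naturally roll over each week.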
def timestamp():
    today = date.today().isocalendar()
    return f"{today[0]}-W{today[1]:02}"


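# Compose an image tag of the form
#   <docker_image_name>[-<stage>]:<timestamp>-<first 16 hex chars of the
#   sha256 of the Dockerfile contents>
# so any change to the generated Dockerfile produces a new tag.  Illustrative
# (not real) example: openbmc/ubuntu-unit-test-base:2021-W02-0123456789abcdef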
def docker_img_tagname(pkgname, dockerfile):
    result = docker_image_name
    if pkgname:
        result += "-" + pkgname
    result += ":" + timestamp()
    result += "-" + sha256(dockerfile.encode()).hexdigest()[0:16]
    return result


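# Build an image from an in-memory Dockerfile (fed via stdin).  If an image
# with this content-derived tag already exists it is reused, unless
# FORCE_DOCKER_BUILD was set.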
def docker_img_build(pkg, tag, dockerfile):
    if not force_build:
        if docker.image.ls(tag, "--format", '"{{.Repository}}:{{.Tag}}"'):
            print(f"Image {tag} already exists.  Skipping.", file=sys.stderr)
            return

    docker.build(
        proxy_args,
        "--network=host",
        "--force-rm",
        "--no-cache=true" if force_build else "--no-cache=false",
        "-t",
        tag,
        "-",
        _in=dockerfile,
        _out=(
            lambda line: print(pkg + ":", line, end="", file=sys.stderr, flush=True)
        ),
    )


# Look up the HEAD revision for any package that lacks a static rev.
pkg_lookups = {}
for pkg in packages.keys():
    if "rev" in packages[pkg]:
        continue
    pkg_lookups[pkg] = git(
        "ls-remote", "--heads", f"https://github.com/{pkg}", _bg=True
    )
for pkg, result in pkg_lookups.items():
    for line in result.stdout.decode().split("\n"):
        if f"refs/heads/{branch}" in line:
            packages[pkg]["rev"] = line.strip().split()[0]
        elif "refs/heads/master" in line and "rev" not in packages[pkg]:
            packages[pkg]["rev"] = line.strip().split()[0]

# Create the contents of the '/tmp/depcache'.
# This needs to be sorted for consistency.
depcache = ""
for pkg in sorted(packages.keys()):
    depcache += "%s:%s," % (pkg, pkg_rev(pkg))

# Define common flags used for builds
configure_flags = " ".join(
    [
        f"--prefix={prefix}",
    ]
)
cmake_flags = " ".join(
    [
        "-DBUILD_SHARED_LIBS=ON",
        "-DCMAKE_BUILD_TYPE=RelWithDebInfo",
        f"-DCMAKE_INSTALL_PREFIX:PATH={prefix}",
        "-GNinja",
        "-DCMAKE_MAKE_PROGRAM=ninja",
    ]
)
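# --wrap-mode=nodownload keeps meson from fetching subproject wraps at build
# time, so packages only use dependencies already installed in the image.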
meson_flags = " ".join(
    [
        "--wrap-mode=nodownload",
        f"-Dprefix={prefix}",
    ]
)

# Special flags if setting up a deb mirror.
mirror = ""
if "ubuntu" in distro and ubuntu_mirror:
    mirror = f"""
RUN echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME) main restricted universe multiverse" > /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-updates main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-security main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-proposed main restricted universe multiverse" >> /etc/apt/sources.list && \\
    echo "deb {ubuntu_mirror} $(. /etc/os-release && echo $VERSION_CODENAME)-backports main restricted universe multiverse" >> /etc/apt/sources.list
"""

# Special flags for proxying.
proxy_cmd = ""
proxy_args = []
if http_proxy:
    proxy_cmd = f"""
RUN echo "[http]" >> {homedir}/.gitconfig && \
    echo "proxy = {http_proxy}" >> {homedir}/.gitconfig
"""
    proxy_args.extend(
        [
            "--build-arg",
            f"http_proxy={http_proxy}",
            "--build-arg",
            f"https_proxy={http_proxy}",
        ]
    )

# Create docker image that can run package unit tests
dockerfile_base = f"""
FROM {docker_base}{distro}

{mirror}

ENV DEBIAN_FRONTEND noninteractive

ENV PYTHONPATH "/usr/local/lib/python3.8/site-packages/"

# We need the keys to be imported for dbgsym repos
# New releases have a package, older ones fall back to manual fetching
# https://wiki.ubuntu.com/Debug%20Symbol%20Packages
RUN apt-get update && apt-get dist-upgrade -yy && \
    ( apt-get install ubuntu-dbgsym-keyring || \
        ( apt-get install -yy dirmngr && \
          apt-key adv --keyserver keyserver.ubuntu.com \
                      --recv-keys F2EDC64DC5AEE1F6B9C621F0C8CAB6595FDFF622 ) )

# Parse the current repo list into a debug repo list
RUN sed -n '/^deb /s,^deb [^ ]* ,deb http://ddebs.ubuntu.com ,p' /etc/apt/sources.list >/etc/apt/sources.list.d/debug.list

# Remove non-existent debug repos
RUN sed -i '/-\(backports\|security\) /d' /etc/apt/sources.list.d/debug.list

RUN cat /etc/apt/sources.list.d/debug.list

RUN apt-get update && apt-get dist-upgrade -yy && apt-get install -yy \
    gcc-10 \
    g++-10 \
    libc6-dbg \
    libc6-dev \
    libtool \
    bison \
    libdbus-1-dev \
    flex \
    cmake \
    python3 \
    python3-dev \
    python3-yaml \
    python3-mako \
    python3-pip \
    python3-setuptools \
    python3-git \
    python3-socks \
    pkg-config \
    autoconf \
    autoconf-archive \
    libsystemd-dev \
    systemd \
    libssl-dev \
    libevdev-dev \
    libevdev2-dbgsym \
    libjpeg-dev \
    libpng-dev \
    ninja-build \
    sudo \
    curl \
    git \
    dbus \
    iputils-ping \
    clang-10 \
    clang-format-10 \
    clang-tidy-10 \
    clang-tools-10 \
    shellcheck \
    npm \
    iproute2 \
    libnl-3-dev \
    libnl-genl-3-dev \
    libconfig++-dev \
    libsnmp-dev \
    valgrind \
    valgrind-dbg \
    libpam0g-dev \
    xxd \
    libi2c-dev \
    wget \
    libldap2-dev \
    libprotobuf-dev \
    libperlio-gzip-perl \
    libjson-perl \
    protobuf-compiler \
    libgpiod-dev \
    device-tree-compiler \
    cppcheck \
    libpciaccess-dev \
    libmimetic-dev \
    libxml2-utils \
    libxml-simple-perl \
    rsync

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 1000 \
  --slave /usr/bin/g++ g++ /usr/bin/g++-10 \
  --slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-10 \
  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-10

RUN update-alternatives --install /usr/bin/clang clang /usr/bin/clang-10 1000 \
  --slave /usr/bin/clang++ clang++ /usr/bin/clang++-10 \
  --slave /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-10 \
  --slave /usr/bin/clang-format clang-format /usr/bin/clang-format-10 \
  --slave /usr/bin/run-clang-tidy.py run-clang-tidy.py /usr/bin/run-clang-tidy-10.py

"""

if is_automated_ci_build:
    dockerfile_base += f"""
# Run an arbitrary command to pollute the docker cache regularly, forcing us
# to re-run `apt-get update` daily.
RUN echo {timestamp()}
RUN apt-get update && apt-get dist-upgrade -yy

"""

dockerfile_base += f"""
RUN pip3 install inflection
RUN pip3 install pycodestyle
RUN pip3 install jsonschema
RUN pip3 install meson==0.54.3
RUN pip3 install protobuf
"""

# Build the stage docker images.
docker_base_img_name = docker_img_tagname("base", dockerfile_base)
docker_img_build("base", docker_base_img_name, dockerfile_base)
pkg_generate_packages()

dockerfile = f"""
# Build the final output image
FROM {docker_base_img_name}
{pkg_copycmds()}

# Some of our infrastructure still relies on the presence of this file
# even though it is no longer needed to rebuild the docker environment
# NOTE: The file is sorted to ensure the ordering is stable.
RUN echo '{depcache}' > /tmp/depcache

# Final configuration for the workspace
RUN grep -q {gid} /etc/group || groupadd -g {gid} {username}
RUN mkdir -p "{os.path.dirname(homedir)}"
RUN grep -q {uid} /etc/passwd || useradd -d {homedir} -m -u {uid} -g {gid} {username}
RUN sed -i '1iDefaults umask=000' /etc/sudoers
RUN echo "{username} ALL=(ALL) NOPASSWD: ALL" >>/etc/sudoers

{proxy_cmd}

RUN /bin/bash
"""

# Do the final docker build
docker_final_img_name = docker_img_tagname(docker_image_name, dockerfile)
docker_img_build("final", docker_final_img_name, dockerfile)
# Print the tag of the final image.
print(docker_final_img_name)