#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import re
import subprocess
from oe.package_manager import *
from oe.package_manager.common_deb_ipk import OpkgDpkgPM

class DpkgIndexer(Indexer):
    def _create_configs(self):
        bb.utils.mkdirhier(self.apt_conf_dir)
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))

        with open(os.path.join(self.apt_conf_dir, "preferences"),
                  "w") as prefs_file:
            pass
        with open(os.path.join(self.apt_conf_dir, "sources.list"),
                  "w+") as sources_file:
            pass

        with open(self.apt_conf_file, "w") as apt_conf:
            with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
                                   "apt", "apt.conf.sample")) as apt_conf_sample:
                for line in apt_conf_sample.read().split("\n"):
                    line = re.sub(r"#ROOTFS#", "/dev/null", line)
                    line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
                    apt_conf.write(line + "\n")

    def write_index(self):
        self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
                                         "apt-ftparchive")
        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
        self._create_configs()

        os.environ['APT_CONFIG'] = self.apt_conf_file

        pkg_archs = self.d.getVar('PACKAGE_ARCHS')
        if pkg_archs is not None:
            arch_list = pkg_archs.split()
        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
        if sdk_pkg_archs is not None:
            for a in sdk_pkg_archs.split():
                if a not in pkg_archs:
                    arch_list.append(a)

        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
        arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)

        apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
        gzip = bb.utils.which(os.getenv('PATH'), "gzip")

        index_cmds = []
        deb_dirs_found = False
        index_sign_files = set()
        for arch in arch_list:
            arch_dir = os.path.join(self.deploy_dir, arch)
            if not os.path.isdir(arch_dir):
                continue

            cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)

            cmd += "%s -fcn Packages > Packages.gz;" % gzip

            release_file = os.path.join(arch_dir, "Release")
            index_sign_files.add(release_file)

            with open(release_file, "w+") as release:
                release.write("Label: %s\n" % arch)

            cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive

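            # The assembled per-arch command generates Packages/Packages.gz
            # with apt-ftparchive and appends the checksum stanza from
            # 'apt-ftparchive release' to the Release file seeded above;
            # the commands for all architectures are run in parallel below.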
            index_cmds.append(cmd)

            deb_dirs_found = True

        if not deb_dirs_found:
            bb.note("There are no packages in %s" % self.deploy_dir)
            return

        oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
        else:
            signer = None
        if signer:
            for f in index_sign_files:
                signer.detach_sign(f,
                                   self.d.getVar('PACKAGE_FEED_GPG_NAME'),
                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
                                   output_suffix="gpg",
                                   use_sha256=True)

class PMPkgsList(PkgsList):

    def list_pkgs(self):
        cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
               "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
               "-W"]

        cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")

        try:
            cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot get the installed packages list. Command '%s' "
                     "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

        return opkg_query(cmd_output)


class DpkgPM(OpkgDpkgPM):
    def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
        super(DpkgPM, self).__init__(d, target_rootfs)
        self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), deb_repo_workdir)

        create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_DEB"), "package_write_deb", filterbydependencies)

        if apt_conf_dir is None:
            self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
        else:
            self.apt_conf_dir = apt_conf_dir
        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
        self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
        self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")

        self.apt_args = d.getVar("APT_ARGS")

        self.all_arch_list = archs.split()
        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
        self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)

        self._create_configs(archs, base_archs)

        self.indexer = DpkgIndexer(self.d, self.deploy_dir)

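    # mark_packages() rewrites dpkg status stanzas of the form
    #   Package: <name>
    #   ...
    #   Status: install ok unpacked
    # replacing the trailing "unpacked"/"installed" word with status_tag.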
    def mark_packages(self, status_tag, packages=None):
        """
        This function will change a package's status in the /var/lib/dpkg/status file.
        If 'packages' is None then the new status will be applied to all
        packages.
        """
        status_file = self.target_rootfs + "/var/lib/dpkg/status"

        with open(status_file, "r") as sf:
            with open(status_file + ".tmp", "w+") as tmp_sf:
                if packages is None:
                    tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
                                        r"Package: \1\n\2Status: \3%s" % status_tag,
                                        sf.read()))
                else:
                    if type(packages).__name__ != "list":
                        raise TypeError("'packages' should be a list object")

                    status = sf.read()
                    for pkg in packages:
                        status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
                                        r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
                                        status)

                    tmp_sf.write(status)

        bb.utils.rename(status_file + ".tmp", status_file)

    def run_pre_post_installs(self, package_name=None):
        """
        Run the pre/post installs for package "package_name". If package_name is
        None, then run all pre/post install scriptlets.
        """
        info_dir = self.target_rootfs + "/var/lib/dpkg/info"
        ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"])
        control_scripts = [
                ControlScript(".preinst", "Preinstall", "install"),
                ControlScript(".postinst", "Postinstall", "configure")]
        status_file = self.target_rootfs + "/var/lib/dpkg/status"
        installed_pkgs = []

        with open(status_file, "r") as status:
            for line in status.read().split('\n'):
                m = re.match(r"^Package: (.*)", line)
                if m is not None:
                    installed_pkgs.append(m.group(1))

        if package_name is not None and package_name not in installed_pkgs:
            return

        os.environ['D'] = self.target_rootfs
        os.environ['OFFLINE_ROOT'] = self.target_rootfs
        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['INTERCEPT_DIR'] = self.intercepts_dir
        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')

        for pkg_name in installed_pkgs:
            for control_script in control_scripts:
                p_full = os.path.join(info_dir, pkg_name + control_script.suffix)
                if os.path.exists(p_full):
                    try:
                        bb.note("Executing %s for package: %s ..." %
                                (control_script.name.lower(), pkg_name))
                        output = subprocess.check_output([p_full, control_script.argument],
                                                         stderr=subprocess.STDOUT).decode("utf-8")
                        bb.note(output)
                    except subprocess.CalledProcessError as e:
                        bb.warn("%s for package %s failed with %d:\n%s" %
                                (control_script.name, pkg_name, e.returncode,
                                 e.output.decode("utf-8")))
                        failed_postinsts_abort([pkg_name], self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))

    def update(self):
        os.environ['APT_CONFIG'] = self.apt_conf_file

        self.deploy_dir_lock()

        cmd = "%s update" % self.apt_get_cmd

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to update the package index files. Command '%s' "
                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))

        self.deploy_dir_unlock()

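    # attempt_only=True turns installation failures into warnings instead of
    # fatal errors; hard_depends_only=True passes --no-install-recommends so
    # only hard dependencies are pulled into the rootfs.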
    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
        if attempt_only and len(pkgs) == 0:
            return

        os.environ['APT_CONFIG'] = self.apt_conf_file

        extra_args = ""
        if hard_depends_only:
            extra_args = "--no-install-recommends"

        cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
              (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))

        try:
            bb.note("Installing the following packages: %s" % ' '.join(pkgs))
            output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
            bb.note(output.decode("utf-8"))
        except subprocess.CalledProcessError as e:
            (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. "
                                              "Command '%s' returned %d:\n%s" %
                                              (cmd, e.returncode, e.output.decode("utf-8")))

        # rename *.dpkg-new files/dirs
        for root, dirs, files in os.walk(self.target_rootfs):
            for dir in dirs:
                new_dir = re.sub(r"\.dpkg-new", "", dir)
                if dir != new_dir:
                    bb.utils.rename(os.path.join(root, dir),
                                    os.path.join(root, new_dir))

            for file in files:
                new_file = re.sub(r"\.dpkg-new", "", file)
                if file != new_file:
                    bb.utils.rename(os.path.join(root, file),
                                    os.path.join(root, new_file))


    def remove(self, pkgs, with_dependencies=True):
        if not pkgs:
            return

        os.environ['D'] = self.target_rootfs
        os.environ['OFFLINE_ROOT'] = self.target_rootfs
        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['INTERCEPT_DIR'] = self.intercepts_dir

        if with_dependencies:
            os.environ['APT_CONFIG'] = self.apt_conf_file
            cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
        else:
            cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
                  " -P --force-depends %s" % \
                  (bb.utils.which(os.getenv('PATH'), "dpkg"),
                   self.target_rootfs, self.target_rootfs, ' '.join(pkgs))

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to remove packages. Command '%s' "
                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))

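    # Index generation takes the deploy-dir lock so concurrent tasks do not
    # race while the feed's Packages/Release files are being rewritten.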
    def write_index(self):
        self.deploy_dir_lock()

        result = self.indexer.write_index()

        self.deploy_dir_unlock()

        if result is not None:
            bb.fatal(result)

    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
        if feed_uris == "":
            return


        sources_conf = os.path.join("%s/etc/apt/sources.list"
                                    % self.target_rootfs)
        if not os.path.exists(os.path.dirname(sources_conf)):
            return

        arch_list = []

        if feed_archs is None:
            for arch in self.all_arch_list:
                if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                    continue
                arch_list.append(arch)
        else:
            arch_list = feed_archs.split()

        feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())

        with open(sources_conf, "w+") as sources_file:
            for uri in feed_uris:
                if arch_list:
                    for arch in arch_list:
                        bb.note('Adding dpkg channel at (%s)' % uri)
                        sources_file.write("deb [trusted=yes] %s/%s ./\n" %
                                           (uri, arch))
                else:
                    bb.note('Adding dpkg channel at (%s)' % uri)
                    sources_file.write("deb [trusted=yes] %s ./\n" % uri)

    def _create_configs(self, archs, base_archs):
        base_archs = re.sub(r"_", r"-", base_archs)

        if os.path.exists(self.apt_conf_dir):
            bb.utils.remove(self.apt_conf_dir, True)

        bb.utils.mkdirhier(self.apt_conf_dir)
        bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
        bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
        bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/")

        arch_list = []
        for arch in self.all_arch_list:
            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                continue
            arch_list.append(arch)

        with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
            priority = 801
            for arch in arch_list:
                prefs_file.write(
                    "Package: *\n"
                    "Pin: release l=%s\n"
                    "Pin-Priority: %d\n\n" % (arch, priority))

                priority += 5

            pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
            for pkg in pkg_exclude.split():
                prefs_file.write(
                    "Package: %s\n"
                    "Pin: release *\n"
                    "Pin-Priority: -1\n\n" % pkg)

        arch_list.reverse()

        with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
            for arch in arch_list:
                sources_file.write("deb [trusted=yes] file:%s/ ./\n" %
                                   os.path.join(self.deploy_dir, arch))

        base_arch_list = base_archs.split()
        multilib_variants = self.d.getVar("MULTILIB_VARIANTS")
        for variant in multilib_variants.split():
            localdata = bb.data.createCopy(self.d)
            variant_tune = localdata.getVar("DEFAULTTUNE:virtclass-multilib-" + variant, False)
            orig_arch = localdata.getVar("DPKG_ARCH")
            localdata.setVar("DEFAULTTUNE", variant_tune)
            variant_arch = localdata.getVar("DPKG_ARCH")
            if variant_arch not in base_arch_list:
                base_arch_list.append(variant_arch)

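        # Generate apt.conf from the staged apt.conf.sample: the sample's
        # Architecture line is expanded into the multilib-aware Architectures
        # list built above, and the #ROOTFS#/#APTCONF# placeholders are
        # pointed at the target rootfs and this configuration directory.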
        with open(self.apt_conf_file, "w+") as apt_conf:
            with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
                for line in apt_conf_sample.read().split("\n"):
                    match_arch = re.match(r"  Architecture \".*\";$", line)
                    architectures = ""
                    if match_arch:
                        for base_arch in base_arch_list:
                            architectures += "\"%s\";" % base_arch
                        apt_conf.write("  Architectures {%s};\n" % architectures)
                        apt_conf.write("  Architecture \"%s\";\n" % base_archs)
                    else:
                        line = re.sub(r"#ROOTFS#", self.target_rootfs, line)
                        line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
                        apt_conf.write(line + "\n")

        target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))

        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))

        if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
            open(os.path.join(target_dpkg_dir, "status"), "w+").close()
        if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
            open(os.path.join(target_dpkg_dir, "available"), "w+").close()

    def remove_packaging_data(self):
        bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True)
        bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)

    def fix_broken_dependencies(self):
        os.environ['APT_CONFIG'] = self.apt_conf_file

        cmd = "%s %s --allow-unauthenticated -f install" % (self.apt_get_cmd, self.apt_args)

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot fix broken dependencies. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))

    def list_installed(self):
        return PMPkgsList(self.d, self.target_rootfs).list_pkgs()

    def package_info(self, pkg):
        """
        Returns a dictionary with the package info.
        """
        cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
        pkg_info = self._common_package_info(cmd)

        pkg_arch = pkg_info[pkg]["pkgarch"]
        pkg_filename = pkg_info[pkg]["filename"]
        pkg_info[pkg]["filepath"] = \
                os.path.join(self.deploy_dir, pkg_arch, pkg_filename)

        return pkg_info
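

# Illustrative usage sketch (not part of the OE-Core API and never called from
# this module). In a real build the rootfs code drives DpkgPM; the datastore
# 'd', the rootfs path and the architecture strings below are assumptions
# chosen only to show the typical call order.
def _example_dpkg_rootfs_flow(d, target_rootfs, archs, base_archs, packages):
    pm = DpkgPM(d, target_rootfs, archs, base_archs)
    pm.write_index()            # generate Packages/Release for the local feed
    pm.update()                 # apt-get update against the local feed
    pm.install(packages)        # install the requested packages into the rootfs
    pm.run_pre_post_installs()  # run preinst/postinst scriptlets offline
    return pm.list_installed()  # query what ended up in the rootfs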