From patchwork Wed Jan 4 15:47:09 2023
X-Patchwork-Submitter: Richard Purdie <richard.purdie@linuxfoundation.org>
X-Patchwork-Id: 17732
From: Richard Purdie <richard.purdie@linuxfoundation.org>
To: openembedded-core@lists.openembedded.org
Subject: [PATCH 05/10] package: Move package functions to function library
Date: Wed, 4 Jan 2023 15:47:09 +0000
Message-Id: <20230104154714.1168535-5-richard.purdie@linuxfoundation.org>
X-Mailer: git-send-email 2.37.2
In-Reply-To: <20230104154714.1168535-1-richard.purdie@linuxfoundation.org>
References: <20230104154714.1168535-1-richard.purdie@linuxfoundation.org>
X-Groupsio-URL: https://lists.openembedded.org/g/openembedded-core/message/175496

Move the bulk of
the remaining package "processing" functions over to the package function library for parsing efficiency. Signed-off-by: Richard Purdie --- meta/classes-global/package.bbclass | 1407 +------------------------- meta/lib/oe/package.py | 1408 ++++++++++++++++++++++++++- 2 files changed, 1416 insertions(+), 1399 deletions(-) diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass index 389c8578040..21a50bbb45e 100644 --- a/meta/classes-global/package.bbclass +++ b/meta/classes-global/package.bbclass @@ -69,21 +69,7 @@ PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native" PACKAGE_WRITE_DEPS ??= "" def legitimize_package_name(s): - """ - Make sure package names are legitimate strings - """ - import re - - def fixutf(m): - cp = m.group(1) - if cp: - return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') - - # Handle unicode codepoints encoded as , as in glibc locale files. - s = re.sub(r'', fixutf, s) - - # Remaining package name validity fixes - return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') + return oe.package.legitimize_package_name(s) def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): """ @@ -195,7 +181,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst mode = os.lstat(f).st_mode if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): continue - on = legitimize_package_name(m.group(1)) + on = oe.package.legitimize_package_name(m.group(1)) pkg = output_pattern % on split_packages.add(pkg) if not pkg in packages: @@ -266,306 +252,6 @@ def checkbuildpath(file, d): return False -def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): - debugfiles = {} - - for line in dwarfsrcfiles_output.splitlines(): - if line.startswith("\t"): - debugfiles[os.path.normpath(line.split()[0])] = "" - - return debugfiles.keys() - -def source_info(file, d, fatal=True): - import subprocess - - cmd = ["dwarfsrcfiles", file] - try: - output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) - retval = 0 - except subprocess.CalledProcessError as exc: - output = exc.output - retval = exc.returncode - - # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure - if retval != 0 and retval != 255: - msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") - if fatal: - bb.fatal(msg) - bb.note(msg) - - debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) - - return list(debugsources) - -def splitdebuginfo(file, dvar, dv, d): - # Function to split a single file into two components, one is the stripped - # target system binary, the other contains any debugging information. The - # two files are linked to reference each other. 
- # - # return a mapping of files:debugsources - - import stat - import subprocess - - src = file[len(dvar):] - dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] - debugfile = dvar + dest - sources = [] - - if file.endswith(".ko") and file.find("/lib/modules/") != -1: - if oe.package.is_kernel_module_signed(file): - bb.debug(1, "Skip strip on signed module %s" % file) - return (file, sources) - - # Split the file... - bb.utils.mkdirhier(os.path.dirname(debugfile)) - #bb.note("Split %s -> %s" % (file, debugfile)) - # Only store off the hard link reference if we successfully split! - - dvar = d.getVar('PKGD') - objcopy = d.getVar("OBJCOPY") - - newmode = None - if not os.access(file, os.W_OK) or os.access(file, os.R_OK): - origmode = os.stat(file)[stat.ST_MODE] - newmode = origmode | stat.S_IWRITE | stat.S_IREAD - os.chmod(file, newmode) - - # We need to extract the debug src information here... - if dv["srcdir"]: - sources = source_info(file, d) - - bb.utils.mkdirhier(os.path.dirname(debugfile)) - - subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) - - # Set the debuglink to have the view of the file path on the target - subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) - - if newmode: - os.chmod(file, origmode) - - return (file, sources) - -def splitstaticdebuginfo(file, dvar, dv, d): - # Unlike the function above, there is no way to split a static library - # two components. So to get similar results we will copy the unmodified - # static library (containing the debug symbols) into a new directory. - # We will then strip (preserving symbols) the static library in the - # typical location. - # - # return a mapping of files:debugsources - - import stat - - src = file[len(dvar):] - dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"] - debugfile = dvar + dest - sources = [] - - # Copy the file... - bb.utils.mkdirhier(os.path.dirname(debugfile)) - #bb.note("Copy %s -> %s" % (file, debugfile)) - - dvar = d.getVar('PKGD') - - newmode = None - if not os.access(file, os.W_OK) or os.access(file, os.R_OK): - origmode = os.stat(file)[stat.ST_MODE] - newmode = origmode | stat.S_IWRITE | stat.S_IREAD - os.chmod(file, newmode) - - # We need to extract the debug src information here... - if dv["srcdir"]: - sources = source_info(file, d) - - bb.utils.mkdirhier(os.path.dirname(debugfile)) - - # Copy the unmodified item to the debug directory - shutil.copy2(file, debugfile) - - if newmode: - os.chmod(file, origmode) - - return (file, sources) - -def inject_minidebuginfo(file, dvar, dv, d): - # Extract just the symbols from debuginfo into minidebuginfo, - # compress it with xz and inject it back into the binary in a .gnu_debugdata section. - # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html - - import subprocess - - readelf = d.getVar('READELF') - nm = d.getVar('NM') - objcopy = d.getVar('OBJCOPY') - - minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') - - src = file[len(dvar):] - dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] - debugfile = dvar + dest - minidebugfile = minidebuginfodir + src + '.minidebug' - bb.utils.mkdirhier(os.path.dirname(minidebugfile)) - - # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either - # so skip it. 
- if not os.path.exists(debugfile): - bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) - return - - # minidebuginfo does not make sense to apply to ELF objects other than - # executables and shared libraries, skip applying the minidebuginfo - # generation for objects like kernel modules. - for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines(): - if not line.strip().startswith("Type:"): - continue - elftype = line.split(":")[1].strip() - if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]): - bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file)) - return - break - - # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. - # We will exclude all of these from minidebuginfo to save space. - remove_section_names = [] - for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): - # strip the leading " [ 1]" section index to allow splitting on space - if ']' not in line: - continue - fields = line[line.index(']') + 1:].split() - if len(fields) < 7: - continue - name = fields[0] - type = fields[1] - flags = fields[6] - # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them - if name.startswith('.debug_'): - continue - if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: - remove_section_names.append(name) - - # List dynamic symbols in the binary. We can exclude these from minidebuginfo - # because they are always present in the binary. - dynsyms = set() - for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): - dynsyms.add(line.split()[0]) - - # Find all function symbols from debuginfo which aren't in the dynamic symbols table. - # These are the ones we want to keep in minidebuginfo. - keep_symbols_file = minidebugfile + '.symlist' - found_any_symbols = False - with open(keep_symbols_file, 'w') as f: - for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): - fields = line.split('|') - if len(fields) < 7: - continue - name = fields[0].strip() - type = fields[3].strip() - if type == 'FUNC' and name not in dynsyms: - f.write('{}\n'.format(name)) - found_any_symbols = True - - if not found_any_symbols: - bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) - return - - bb.utils.remove(minidebugfile) - bb.utils.remove(minidebugfile + '.xz') - - subprocess.check_call([objcopy, '-S'] + - ['--remove-section={}'.format(s) for s in remove_section_names] + - ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) - - subprocess.check_call(['xz', '--keep', minidebugfile]) - - subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) - -def copydebugsources(debugsrcdir, sources, d): - # The debug src information written out to sourcefile is further processed - # and copied to the destination here. - - import stat - import subprocess - - if debugsrcdir and sources: - sourcefile = d.expand("${WORKDIR}/debugsources.list") - bb.utils.remove(sourcefile) - - # filenames are null-separated - this is an artefact of the previous use - # of rpm's debugedit, which was writing them out that way, and the code elsewhere - # is still assuming that. 
- debuglistoutput = '\0'.join(sources) + '\0' - with open(sourcefile, 'a') as sf: - sf.write(debuglistoutput) - - dvar = d.getVar('PKGD') - strip = d.getVar("STRIP") - objcopy = d.getVar("OBJCOPY") - workdir = d.getVar("WORKDIR") - sdir = d.getVar("S") - cflags = d.expand("${CFLAGS}") - - prefixmap = {} - for flag in cflags.split(): - if not flag.startswith("-fdebug-prefix-map"): - continue - if "recipe-sysroot" in flag: - continue - flag = flag.split("=") - prefixmap[flag[1]] = flag[2] - - nosuchdir = [] - basepath = dvar - for p in debugsrcdir.split("/"): - basepath = basepath + "/" + p - if not cpath.exists(basepath): - nosuchdir.append(basepath) - bb.utils.mkdirhier(basepath) - cpath.updatecache(basepath) - - for pmap in prefixmap: - # Ignore files from the recipe sysroots (target and native) - cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((|)$|/.*recipe-sysroot.*/)' | " % sourcefile - # We need to ignore files that are not actually ours - # we do this by only paying attention to items from this package - cmd += "fgrep -zw '%s' | " % prefixmap[pmap] - # Remove prefix in the source paths - cmd += "sed 's#%s/##g' | " % (prefixmap[pmap]) - cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap]) - - try: - subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError: - # Can "fail" if internal headers/transient sources are attempted - pass - # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced. - # Work around this by manually finding and copying any symbolic links that made it through. - cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \ - (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap]) - subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) - - # debugsources.list may be polluted from the host if we used externalsrc, - # cpio uses copy-pass and may have just created a directory structure - # matching the one from the host, if thats the case move those files to - # debugsrcdir to avoid host contamination. - # Empty dir structure will be deleted in the next step. - - # Same check as above for externalsrc - if workdir not in sdir: - if os.path.exists(dvar + debugsrcdir + sdir): - cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir) - subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) - - # The copy by cpio may have resulted in some empty directories! 
Remove these - cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir) - subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) - - # Also remove debugsrcdir if its empty - for p in nosuchdir[::-1]: - if os.path.exists(p) and not os.listdir(p): - os.rmdir(p) - # # Used by do_packagedata (and possibly other routines post do_package) # @@ -656,58 +342,7 @@ python package_convert_pr_autoinc() { LOCALEBASEPN ??= "${PN}" python package_do_split_locales() { - if (d.getVar('PACKAGE_NO_LOCALE') == '1'): - bb.debug(1, "package requested not splitting locales") - return - - packages = (d.getVar('PACKAGES') or "").split() - - datadir = d.getVar('datadir') - if not datadir: - bb.note("datadir not defined") - return - - dvar = d.getVar('PKGD') - pn = d.getVar('LOCALEBASEPN') - - if pn + '-locale' in packages: - packages.remove(pn + '-locale') - - localedir = os.path.join(dvar + datadir, 'locale') - - if not cpath.isdir(localedir): - bb.debug(1, "No locale files in this package") - return - - locales = os.listdir(localedir) - - summary = d.getVar('SUMMARY') or pn - description = d.getVar('DESCRIPTION') or "" - locale_section = d.getVar('LOCALE_SECTION') - mlprefix = d.getVar('MLPREFIX') or "" - for l in sorted(locales): - ln = legitimize_package_name(l) - pkg = pn + '-locale-' + ln - packages.append(pkg) - d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) - d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) - d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) - d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) - d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) - if locale_section: - d.setVar('SECTION:' + pkg, locale_section) - - d.setVar('PACKAGES', ' '.join(packages)) - - # Disabled by RP 18/06/07 - # Wildcards aren't supported in debian - # They break with ipkg since glibc-locale* will mean that - # glibc-localedata-translit* won't install as a dependency - # for some other package which breaks meta-toolchain - # Probably breaks since virtual-locale- isn't provided anywhere - #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() - #rdep.append('%s-locale*' % pn) - #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) + oe.package.split_locales(d) } python perform_packagecopy () { @@ -734,488 +369,19 @@ python fixup_perms () { oe.package.fixup_perms(d) } -def package_debug_vars(d): - # We default to '.debug' style - if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': - # Single debug-file-directory style debug info - debug_vars = { - "append": ".debug", - "staticappend": "", - "dir": "", - "staticdir": "", - "libdir": "/usr/lib/debug", - "staticlibdir": "/usr/lib/debug-static", - "srcdir": "/usr/src/debug", - } - elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': - # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug - debug_vars = { - "append": "", - "staticappend": "", - "dir": "/.debug", - "staticdir": "/.debug-static", - "libdir": "", - "staticlibdir": "", - "srcdir": "", - } - elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': - debug_vars = { - "append": "", - "staticappend": "", - "dir": "/.debug", - "staticdir": "/.debug-static", - "libdir": "", - "staticlibdir": "", - "srcdir": "/usr/src/debug", - } - else: - # Original OE-core, a.k.a. 
".debug", style debug info - debug_vars = { - "append": "", - "staticappend": "", - "dir": "/.debug", - "staticdir": "/.debug-static", - "libdir": "", - "staticlibdir": "", - "srcdir": "/usr/src/debug", - } - - return debug_vars - python split_and_strip_files () { - import stat, errno - import subprocess - - dvar = d.getVar('PKGD') - pn = d.getVar('PN') - hostos = d.getVar('HOST_OS') - - oldcwd = os.getcwd() - os.chdir(dvar) - - dv = package_debug_vars(d) - - # - # First lets figure out all of the files we may have to process ... do this only once! - # - elffiles = {} - symlinks = {} - staticlibs = [] - inodes = {} - libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) - baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) - skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() - if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ - d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): - checkelf = {} - checkelflinks = {} - for root, dirs, files in cpath.walk(dvar): - for f in files: - file = os.path.join(root, f) - - # Skip debug files - if dv["append"] and file.endswith(dv["append"]): - continue - if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): - continue - - if file in skipfiles: - continue - - if oe.package.is_static_lib(file): - staticlibs.append(file) - continue - - try: - ltarget = cpath.realpath(file, dvar, False) - s = cpath.lstat(ltarget) - except OSError as e: - (err, strerror) = e.args - if err != errno.ENOENT: - raise - # Skip broken symlinks - continue - if not s: - continue - # Check its an executable - if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ - or (s[stat.ST_MODE] & stat.S_IXOTH) \ - or ((file.startswith(libdir) or file.startswith(baselibdir)) \ - and (".so" in f or ".node" in f)) \ - or (f.startswith('vmlinux') or ".ko" in f): - - if cpath.islink(file): - checkelflinks[file] = ltarget - continue - # Use a reference of device ID and inode number to identify files - file_reference = "%d_%d" % (s.st_dev, s.st_ino) - checkelf[file] = (file, file_reference) - - results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) - results_map = {} - for (ltarget, elf_file) in results: - results_map[ltarget] = elf_file - for file in checkelflinks: - ltarget = checkelflinks[file] - # If it's a symlink, and points to an ELF file, we capture the readlink target - if results_map[ltarget]: - target = os.readlink(file) - #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) - symlinks[file] = target - - results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) - - # Sort results by file path. This ensures that the files are always - # processed in the same order, which is important to make sure builds - # are reproducible when dealing with hardlinks - results.sort(key=lambda x: x[0]) - - for (file, elf_file) in results: - # It's a file (or hardlink), not a link - # ...but is it ELF, and is it already stripped? - if elf_file & 1: - if elf_file & 2: - if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): - bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) - else: - msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) - oe.qa.handle_error("already-stripped", msg, d) - continue - - # At this point we have an unstripped elf file. 
We need to: - # a) Make sure any file we strip is not hardlinked to anything else outside this tree - # b) Only strip any hardlinked file once (no races) - # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks - - # Use a reference of device ID and inode number to identify files - file_reference = checkelf[file][1] - if file_reference in inodes: - os.unlink(file) - os.link(inodes[file_reference][0], file) - inodes[file_reference].append(file) - else: - inodes[file_reference] = [file] - # break hardlink - bb.utils.break_hardlinks(file) - elffiles[file] = elf_file - # Modified the file so clear the cache - cpath.updatecache(file) - - def strip_pkgd_prefix(f): - nonlocal dvar - - if f.startswith(dvar): - return f[len(dvar):] - - return f - - # - # First lets process debug splitting - # - if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): - results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) - - if dv["srcdir"] and not hostos.startswith("mingw"): - if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): - results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) - else: - for file in staticlibs: - results.append( (file,source_info(file, d)) ) - - d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) - - sources = set() - for r in results: - sources.update(r[1]) - - # Hardlink our debug symbols to the other hardlink copies - for ref in inodes: - if len(inodes[ref]) == 1: - continue - - target = inodes[ref][0][len(dvar):] - for file in inodes[ref][1:]: - src = file[len(dvar):] - dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] - fpath = dvar + dest - ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] - bb.utils.mkdirhier(os.path.dirname(fpath)) - # Only one hardlink of separated debug info file in each directory - if not os.access(fpath, os.R_OK): - #bb.note("Link %s -> %s" % (fpath, ftarget)) - os.link(ftarget, fpath) - - # Create symlinks for all cases we were able to split symbols - for file in symlinks: - src = file[len(dvar):] - dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] - fpath = dvar + dest - # Skip it if the target doesn't exist - try: - s = os.stat(fpath) - except OSError as e: - (err, strerror) = e.args - if err != errno.ENOENT: - raise - continue - - ltarget = symlinks[file] - lpath = os.path.dirname(ltarget) - lbase = os.path.basename(ltarget) - ftarget = "" - if lpath and lpath != ".": - ftarget += lpath + dv["dir"] + "/" - ftarget += lbase + dv["append"] - if lpath.startswith(".."): - ftarget = os.path.join("..", ftarget) - bb.utils.mkdirhier(os.path.dirname(fpath)) - #bb.note("Symlink %s -> %s" % (fpath, ftarget)) - os.symlink(ftarget, fpath) - - # Process the dv["srcdir"] if requested... - # This copies and places the referenced sources for later debugging... 
- copydebugsources(dv["srcdir"], sources, d) - # - # End of debug splitting - # - - # - # Now lets go back over things and strip them - # - if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): - strip = d.getVar("STRIP") - sfiles = [] - for file in elffiles: - elf_file = int(elffiles[file]) - #bb.note("Strip %s" % file) - sfiles.append((file, elf_file, strip)) - if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): - for f in staticlibs: - sfiles.append((f, 16, strip)) - - oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) - - # Build "minidebuginfo" and reinject it back into the stripped binaries - if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': - oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, - extraargs=(dvar, dv, d)) - - # - # End of strip - # - os.chdir(oldcwd) + oe.package.process_split_and_strip_files(d) } python populate_packages () { - import glob, re - - workdir = d.getVar('WORKDIR') - outdir = d.getVar('DEPLOY_DIR') - dvar = d.getVar('PKGD') - packages = d.getVar('PACKAGES').split() - pn = d.getVar('PN') - - bb.utils.mkdirhier(outdir) - os.chdir(dvar) - - autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) - - split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') - - # If debug-with-srcpkg mode is enabled then add the source package if it - # doesn't exist and add the source file contents to the source package. - if split_source_package: - src_package_name = ('%s-src' % d.getVar('PN')) - if not src_package_name in packages: - packages.append(src_package_name) - d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') - - # Sanity check PACKAGES for duplicates - # Sanity should be moved to sanity.bbclass once we have the infrastructure - package_dict = {} - - for i, pkg in enumerate(packages): - if pkg in package_dict: - msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg - oe.qa.handle_error("packages-list", msg, d) - # Ensure the source package gets the chance to pick up the source files - # before the debug package by ordering it first in PACKAGES. Whether it - # actually picks up any source files is controlled by - # PACKAGE_DEBUG_SPLIT_STYLE. - elif pkg.endswith("-src"): - package_dict[pkg] = (10, i) - elif autodebug and pkg.endswith("-dbg"): - package_dict[pkg] = (30, i) - else: - package_dict[pkg] = (50, i) - packages = sorted(package_dict.keys(), key=package_dict.get) - d.setVar('PACKAGES', ' '.join(packages)) - pkgdest = d.getVar('PKGDEST') - - seen = [] - - # os.mkdir masks the permissions with umask so we have to unset it first - oldumask = os.umask(0) - - debug = [] - for root, dirs, files in cpath.walk(dvar): - dir = root[len(dvar):] - if not dir: - dir = os.sep - for f in (files + dirs): - path = "." + os.path.join(dir, f) - if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): - debug.append(path) - - for pkg in packages: - root = os.path.join(pkgdest, pkg) - bb.utils.mkdirhier(root) - - filesvar = d.getVar('FILES:%s' % pkg) or "" - if "//" in filesvar: - msg = "FILES variable for package %s contains '//' which is invalid. 
Attempting to fix this but you should correct the metadata.\n" % pkg - oe.qa.handle_error("files-invalid", msg, d) - filesvar.replace("//", "/") - - origfiles = filesvar.split() - files, symlink_paths = oe.package.files_from_filevars(origfiles) - - if autodebug and pkg.endswith("-dbg"): - files.extend(debug) - - for file in files: - if (not cpath.islink(file)) and (not cpath.exists(file)): - continue - if file in seen: - continue - seen.append(file) - - def mkdir(src, dest, p): - src = os.path.join(src, p) - dest = os.path.join(dest, p) - fstat = cpath.stat(src) - os.mkdir(dest) - os.chmod(dest, fstat.st_mode) - os.chown(dest, fstat.st_uid, fstat.st_gid) - if p not in seen: - seen.append(p) - cpath.updatecache(dest) - - def mkdir_recurse(src, dest, paths): - if cpath.exists(dest + '/' + paths): - return - while paths.startswith("./"): - paths = paths[2:] - p = "." - for c in paths.split("/"): - p = os.path.join(p, c) - if not cpath.exists(os.path.join(dest, p)): - mkdir(src, dest, p) - - if cpath.isdir(file) and not cpath.islink(file): - mkdir_recurse(dvar, root, file) - continue - - mkdir_recurse(dvar, root, os.path.dirname(file)) - fpath = os.path.join(root,file) - if not cpath.islink(file): - os.link(file, fpath) - continue - ret = bb.utils.copyfile(file, fpath) - if ret is False or ret == 0: - bb.fatal("File population failed") - - # Check if symlink paths exist - for file in symlink_paths: - if not os.path.exists(os.path.join(root,file)): - bb.fatal("File '%s' cannot be packaged into '%s' because its " - "parent directory structure does not exist. One of " - "its parent directories is a symlink whose target " - "directory is not included in the package." % - (file, pkg)) - - os.umask(oldumask) - os.chdir(workdir) - - # Handle excluding packages with incompatible licenses - package_list = [] - for pkg in packages: - licenses = d.getVar('_exclude_incompatible-' + pkg) - if licenses: - msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) - oe.qa.handle_error("incompatible-license", msg, d) - else: - package_list.append(pkg) - d.setVar('PACKAGES', ' '.join(package_list)) - - unshipped = [] - for root, dirs, files in cpath.walk(dvar): - dir = root[len(dvar):] - if not dir: - dir = os.sep - for f in (files + dirs): - path = os.path.join(dir, f) - if ('.' + path) not in seen: - unshipped.append(path) - - if unshipped != []: - msg = pn + ": Files/directories were installed but not shipped in any package:" - if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): - bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) - else: - for f in unshipped: - msg = msg + "\n " + f - msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" - msg = msg + "%s: %d installed and not shipped files." 
% (pn, len(unshipped)) - oe.qa.handle_error("installed-vs-shipped", msg, d) + oe.package.populate_packages(d) } populate_packages[dirs] = "${D}" python package_fixsymlinks () { - import errno - pkgdest = d.getVar('PKGDEST') - packages = d.getVar("PACKAGES", False).split() - - dangling_links = {} - pkg_files = {} - for pkg in packages: - dangling_links[pkg] = [] - pkg_files[pkg] = [] - inst_root = os.path.join(pkgdest, pkg) - for path in pkgfiles[pkg]: - rpath = path[len(inst_root):] - pkg_files[pkg].append(rpath) - rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) - if not cpath.lexists(rtarget): - dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) - - newrdepends = {} - for pkg in dangling_links: - for l in dangling_links[pkg]: - found = False - bb.debug(1, "%s contains dangling link %s" % (pkg, l)) - for p in packages: - if l in pkg_files[p]: - found = True - bb.debug(1, "target found in %s" % p) - if p == pkg: - break - if pkg not in newrdepends: - newrdepends[pkg] = [] - newrdepends[pkg].append(p) - break - if found == False: - bb.note("%s contains dangling symlink to %s" % (pkg, l)) - - for pkg in newrdepends: - rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") - for p in newrdepends[pkg]: - if p not in rdepends: - rdepends[p] = [] - d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) + oe.package.process_fixsymlinks(pkgfiles, d) } - python package_package_name_hook() { """ A package_name_hook function can be used to rewrite the package names by @@ -1245,456 +411,23 @@ fi RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" -# Collect perfile run-time dependency metadata -# Output: -# FILERPROVIDESFLIST:pkg - list of all files w/ deps -# FILERPROVIDES:filepath:pkg - per file dep -# -# FILERDEPENDSFLIST:pkg - list of all files w/ deps -# FILERDEPENDS:filepath:pkg - per file dep - python package_do_filedeps() { - if d.getVar('SKIP_FILEDEPS') == '1': - return - - pkgdest = d.getVar('PKGDEST') - packages = d.getVar('PACKAGES') - rpmdeps = d.getVar('RPMDEPS') - - def chunks(files, n): - return [files[i:i+n] for i in range(0, len(files), n)] - - pkglist = [] - for pkg in packages.split(): - if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': - continue - if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): - continue - for files in chunks(pkgfiles[pkg], 100): - pkglist.append((pkg, files, rpmdeps, pkgdest)) - - processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) - - provides_files = {} - requires_files = {} - - for result in processed: - (pkg, provides, requires) = result - - if pkg not in provides_files: - provides_files[pkg] = [] - if pkg not in requires_files: - requires_files[pkg] = [] - - for file in sorted(provides): - provides_files[pkg].append(file) - key = "FILERPROVIDES:" + file + ":" + pkg - d.appendVar(key, " " + " ".join(provides[file])) - - for file in sorted(requires): - requires_files[pkg].append(file) - key = "FILERDEPENDS:" + file + ":" + pkg - d.appendVar(key, " " + " ".join(requires[file])) - - for pkg in requires_files: - d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) - for pkg in provides_files: - d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) + oe.package.process_filedeps(pkgfiles, d) } 
SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" python package_do_shlibs() { - import itertools - import re, pipes - import subprocess - - exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) - if exclude_shlibs: - bb.note("not generating shlibs") - return - - lib_re = re.compile(r"^.*\.so") - libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) - - packages = d.getVar('PACKAGES') - - shlib_pkgs = [] - exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") - if exclusion_list: - for pkg in packages.split(): - if pkg not in exclusion_list.split(): - shlib_pkgs.append(pkg) - else: - bb.note("not generating shlibs for %s" % pkg) - else: - shlib_pkgs = packages.split() - - hostos = d.getVar('HOST_OS') - - workdir = d.getVar('WORKDIR') - - ver = d.getVar('PKGV') - if not ver: - msg = "PKGV not defined" - oe.qa.handle_error("pkgv-undefined", msg, d) - return - - pkgdest = d.getVar('PKGDEST') - - shlibswork_dir = d.getVar('SHLIBSWORKDIR') - - def linux_so(file, pkg, pkgver, d): - needs_ldconfig = False - needed = set() - sonames = set() - renames = [] - ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') - cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" - fd = os.popen(cmd) - lines = fd.readlines() - fd.close() - rpath = tuple() - for l in lines: - m = re.match(r"\s+RPATH\s+([^\s]*)", l) - if m: - rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") - rpath = tuple(map(os.path.normpath, rpaths)) - for l in lines: - m = re.match(r"\s+NEEDED\s+([^\s]*)", l) - if m: - dep = m.group(1) - if dep not in needed: - needed.add((dep, file, rpath)) - m = re.match(r"\s+SONAME\s+([^\s]*)", l) - if m: - this_soname = m.group(1) - prov = (this_soname, ldir, pkgver) - if not prov in sonames: - # if library is private (only used by package) then do not build shlib for it - import fnmatch - if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: - sonames.add(prov) - if libdir_re.match(os.path.dirname(file)): - needs_ldconfig = True - if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): - renames.append((file, os.path.join(os.path.dirname(file), this_soname))) - return (needs_ldconfig, needed, sonames, renames) - - def darwin_so(file, needed, sonames, renames, pkgver): - if not os.path.exists(file): - return - ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') - - def get_combinations(base): - # - # Given a base library name, find all combinations of this split by "." 
and "-" - # - combos = [] - options = base.split(".") - for i in range(1, len(options) + 1): - combos.append(".".join(options[0:i])) - options = base.split("-") - for i in range(1, len(options) + 1): - combos.append("-".join(options[0:i])) - return combos - - if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): - # Drop suffix - name = os.path.basename(file).rsplit(".",1)[0] - # Find all combinations - combos = get_combinations(name) - for combo in combos: - if not combo in sonames: - prov = (combo, ldir, pkgver) - sonames.add(prov) - if file.endswith('.dylib') or file.endswith('.so'): - rpath = [] - p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - # If returned successfully, process stdout for results - if p.returncode == 0: - for l in out.split("\n"): - l = l.strip() - if l.startswith('path '): - rpath.append(l.split()[1]) - - p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - # If returned successfully, process stdout for results - if p.returncode == 0: - for l in out.split("\n"): - l = l.strip() - if not l or l.endswith(":"): - continue - if "is not an object file" in l: - continue - name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] - if name and name not in needed[pkg]: - needed[pkg].add((name, file, tuple())) - - def mingw_dll(file, needed, sonames, renames, pkgver): - if not os.path.exists(file): - return - - if file.endswith(".dll"): - # assume all dlls are shared objects provided by the package - sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) - - if (file.endswith(".dll") or file.endswith(".exe")): - # use objdump to search for "DLL Name: .*\.dll" - p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - # process the output, grabbing all .dll names - if p.returncode == 0: - for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): - dllname = m.group(1) - if dllname: - needed[pkg].add((dllname, file, tuple())) - - if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": - snap_symlinks = True - else: - snap_symlinks = False - - needed = {} - - shlib_provider = oe.package.read_shlib_providers(d) - - for pkg in shlib_pkgs: - private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" - private_libs = private_libs.split() - needs_ldconfig = False - bb.debug(2, "calculating shlib provides for %s" % pkg) - - pkgver = d.getVar('PKGV:' + pkg) - if not pkgver: - pkgver = d.getVar('PV_' + pkg) - if not pkgver: - pkgver = ver - - needed[pkg] = set() - sonames = set() - renames = [] - linuxlist = [] - for file in pkgfiles[pkg]: - soname = None - if cpath.islink(file): - continue - if hostos == "darwin" or hostos == "darwin8": - darwin_so(file, needed, sonames, renames, pkgver) - elif hostos.startswith("mingw"): - mingw_dll(file, needed, sonames, renames, pkgver) - elif os.access(file, os.X_OK) or lib_re.match(file): - linuxlist.append(file) - - if linuxlist: - results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) - for r in results: - ldconfig = r[0] - needed[pkg] |= r[1] - sonames |= r[2] - renames.extend(r[3]) - needs_ldconfig = needs_ldconfig or ldconfig - - for (old, new) in 
renames: - bb.note("Renaming %s to %s" % (old, new)) - bb.utils.rename(old, new) - pkgfiles[pkg].remove(old) - - shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") - if len(sonames): - with open(shlibs_file, 'w') as fd: - for s in sorted(sonames): - if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: - (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] - if old_pkg != pkg: - bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) - bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) - fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') - if s[0] not in shlib_provider: - shlib_provider[s[0]] = {} - shlib_provider[s[0]][s[1]] = (pkg, pkgver) - if needs_ldconfig: - bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) - postinst = d.getVar('pkg_postinst:%s' % pkg) - if not postinst: - postinst = '#!/bin/sh\n' - postinst += d.getVar('ldconfig_postinst_fragment') - d.setVar('pkg_postinst:%s' % pkg, postinst) - bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) - - assumed_libs = d.getVar('ASSUME_SHLIBS') - if assumed_libs: - libdir = d.getVar("libdir") - for e in assumed_libs.split(): - l, dep_pkg = e.split(":") - lib_ver = None - dep_pkg = dep_pkg.rsplit("_", 1) - if len(dep_pkg) == 2: - lib_ver = dep_pkg[1] - dep_pkg = dep_pkg[0] - if l not in shlib_provider: - shlib_provider[l] = {} - shlib_provider[l][libdir] = (dep_pkg, lib_ver) - - libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] - - for pkg in shlib_pkgs: - bb.debug(2, "calculating shlib requirements for %s" % pkg) - - private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" - private_libs = private_libs.split() - - deps = list() - for n in needed[pkg]: - # if n is in private libraries, don't try to search provider for it - # this could cause problem in case some abc.bb provides private - # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1 - # but skipping it is still better alternative than providing own - # version and then adding runtime dependency for the same system library - import fnmatch - if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: - bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) - continue - if n[0] in shlib_provider.keys(): - shlib_provider_map = shlib_provider[n[0]] - matches = set() - for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): - if p in shlib_provider_map: - matches.add(p) - if len(matches) > 1: - matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) - bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) - elif len(matches) == 1: - (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] - - bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) - - if dep_pkg == pkg: - continue - - if ver_needed: - dep = "%s (>= %s)" % (dep_pkg, ver_needed) - else: - dep = dep_pkg - if not dep in deps: - deps.append(dep) - continue - bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) - - deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") - if os.path.exists(deps_file): - os.remove(deps_file) - if deps: - with open(deps_file, 'w') as fd: - for dep in sorted(deps): - fd.write(dep + '\n') + oe.package.process_shlibs(pkgfiles, d) } python 
package_do_pkgconfig () { - import re - - packages = d.getVar('PACKAGES') - workdir = d.getVar('WORKDIR') - pkgdest = d.getVar('PKGDEST') - - shlibs_dirs = d.getVar('SHLIBSDIRS').split() - shlibswork_dir = d.getVar('SHLIBSWORKDIR') - - pc_re = re.compile(r'(.*)\.pc$') - var_re = re.compile(r'(.*)=(.*)') - field_re = re.compile(r'(.*): (.*)') - - pkgconfig_provided = {} - pkgconfig_needed = {} - for pkg in packages.split(): - pkgconfig_provided[pkg] = [] - pkgconfig_needed[pkg] = [] - for file in sorted(pkgfiles[pkg]): - m = pc_re.match(file) - if m: - pd = bb.data.init() - name = m.group(1) - pkgconfig_provided[pkg].append(os.path.basename(name)) - if not os.access(file, os.R_OK): - continue - with open(file, 'r') as f: - lines = f.readlines() - for l in lines: - m = var_re.match(l) - if m: - name = m.group(1) - val = m.group(2) - pd.setVar(name, pd.expand(val)) - continue - m = field_re.match(l) - if m: - hdr = m.group(1) - exp = pd.expand(m.group(2)) - if hdr == 'Requires': - pkgconfig_needed[pkg] += exp.replace(',', ' ').split() - - for pkg in packages.split(): - pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") - if pkgconfig_provided[pkg] != []: - with open(pkgs_file, 'w') as f: - for p in sorted(pkgconfig_provided[pkg]): - f.write('%s\n' % p) - - # Go from least to most specific since the last one found wins - for dir in reversed(shlibs_dirs): - if not os.path.exists(dir): - continue - for file in sorted(os.listdir(dir)): - m = re.match(r'^(.*)\.pclist$', file) - if m: - pkg = m.group(1) - with open(os.path.join(dir, file)) as fd: - lines = fd.readlines() - pkgconfig_provided[pkg] = [] - for l in lines: - pkgconfig_provided[pkg].append(l.rstrip()) - - for pkg in packages.split(): - deps = [] - for n in pkgconfig_needed[pkg]: - found = False - for k in pkgconfig_provided.keys(): - if n in pkgconfig_provided[k]: - if k != pkg and not (k in deps): - deps.append(k) - found = True - if found == False: - bb.note("couldn't find pkgconfig module '%s' in any package" % n) - deps_file = os.path.join(pkgdest, pkg + ".pcdeps") - if len(deps): - with open(deps_file, 'w') as fd: - for dep in deps: - fd.write(dep + '\n') + oe.package.process_pkgconfig(pkgfiles, d) } -def read_libdep_files(d): - pkglibdeps = {} - packages = d.getVar('PACKAGES').split() - for pkg in packages: - pkglibdeps[pkg] = {} - for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": - depsfile = d.expand("${PKGDEST}/" + pkg + extension) - if os.access(depsfile, os.R_OK): - with open(depsfile) as fd: - lines = fd.readlines() - for l in lines: - l.rstrip() - deps = bb.utils.explode_dep_versions2(l) - for dep in deps: - if not dep in pkglibdeps[pkg]: - pkglibdeps[pkg][dep] = deps[dep] - return pkglibdeps - python read_shlibdeps () { - pkglibdeps = read_libdep_files(d) + pkglibdeps = oe.package.read_libdep_files(d) packages = d.getVar('PACKAGES').split() for pkg in packages: @@ -1710,125 +443,7 @@ python read_shlibdeps () { } python package_depchains() { - """ - For a given set of prefix and postfix modifiers, make those packages - RRECOMMENDS on the corresponding packages for its RDEPENDS. - - Example: If package A depends upon package B, and A's .bb emits an - A-dev package, this would make A-dev Recommends: B-dev. - - If only one of a given suffix is specified, it will take the RRECOMMENDS - based on the RDEPENDS of *all* other packages. If more than one of a given - suffix is specified, its will only use the RDEPENDS of the single parent - package. 
- """ - - packages = d.getVar('PACKAGES') - postfixes = (d.getVar('DEPCHAIN_POST') or '').split() - prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() - - def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): - - #bb.note('depends for %s is %s' % (base, depends)) - rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") - - for depend in sorted(depends): - if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): - #bb.note("Skipping %s" % depend) - continue - if depend.endswith('-dev'): - depend = depend[:-4] - if depend.endswith('-dbg'): - depend = depend[:-4] - pkgname = getname(depend, suffix) - #bb.note("Adding %s for %s" % (pkgname, depend)) - if pkgname not in rreclist and pkgname != pkg: - rreclist[pkgname] = [] - - #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) - d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) - - def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): - - #bb.note('rdepends for %s is %s' % (base, rdepends)) - rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") - - for depend in sorted(rdepends): - if depend.find('virtual-locale-') != -1: - #bb.note("Skipping %s" % depend) - continue - if depend.endswith('-dev'): - depend = depend[:-4] - if depend.endswith('-dbg'): - depend = depend[:-4] - pkgname = getname(depend, suffix) - #bb.note("Adding %s for %s" % (pkgname, depend)) - if pkgname not in rreclist and pkgname != pkg: - rreclist[pkgname] = [] - - #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) - d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) - - def add_dep(list, dep): - if dep not in list: - list.append(dep) - - depends = [] - for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): - add_dep(depends, dep) - - rdepends = [] - for pkg in packages.split(): - for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): - add_dep(rdepends, dep) - - #bb.note('rdepends is %s' % rdepends) - - def post_getname(name, suffix): - return '%s%s' % (name, suffix) - def pre_getname(name, suffix): - return '%s%s' % (suffix, name) - - pkgs = {} - for pkg in packages.split(): - for postfix in postfixes: - if pkg.endswith(postfix): - if not postfix in pkgs: - pkgs[postfix] = {} - pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) - - for prefix in prefixes: - if pkg.startswith(prefix): - if not prefix in pkgs: - pkgs[prefix] = {} - pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) - - if "-dbg" in pkgs: - pkglibdeps = read_libdep_files(d) - pkglibdeplist = [] - for pkg in pkglibdeps: - for k in pkglibdeps[pkg]: - add_dep(pkglibdeplist, k) - dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) - - for suffix in pkgs: - for pkg in pkgs[suffix]: - if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): - continue - (base, func) = pkgs[suffix][pkg] - if suffix == "-dev": - pkg_adddeprrecs(pkg, base, suffix, func, depends, d) - elif suffix == "-dbg": - if not dbgdefaultdeps: - pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) - continue - if len(pkgs[suffix]) == 1: - pkg_addrrecs(pkg, base, suffix, func, rdepends, d) - else: - rdeps = [] - for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): - add_dep(rdeps, dep) - pkg_addrrecs(pkg, base, suffix, func, rdeps, d) + oe.package.process_depchains(pkgfiles, d) } # Since bitbake can't determine which variables are accessed during package 
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index b4c8ab7222a..c9eb75d8523 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py @@ -4,12 +4,19 @@ # SPDX-License-Identifier: GPL-2.0-only # +import errno +import fnmatch +import itertools import os +import pipes +import re import glob import stat import mmap import subprocess +import oe.cachedpath + def runstrip(arg): # Function to strip a single file, called from split_and_strip_files below # A working 'file' (one which works on the target architecture) @@ -300,7 +307,6 @@ def read_shlib_providers(d): # the fs-perms.txt files def fixup_perms(d): import pwd, grp - import oe.cachedpath cpath = oe.cachedpath.CachedPath() dvar = d.getVar('PKGD') @@ -537,8 +543,6 @@ def fixup_perms(d): # Get a list of files from file vars by searching files under current working directory # The list contains symlinks, directories and normal files. def files_from_filevars(filevars): - import oe.cachedpath - cpath = oe.cachedpath.CachedPath() files = [] for f in filevars: @@ -611,3 +615,1401 @@ def get_conffiles(pkg, d): os.chdir(cwd) return conf_list +def legitimize_package_name(s): + """ + Make sure package names are legitimate strings + """ + + def fixutf(m): + cp = m.group(1) + if cp: + return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') + + # Handle unicode codepoints encoded as , as in glibc locale files. + s = re.sub(r'', fixutf, s) + + # Remaining package name validity fixes + return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') + +def split_locales(d): + cpath = oe.cachedpath.CachedPath() + if (d.getVar('PACKAGE_NO_LOCALE') == '1'): + bb.debug(1, "package requested not splitting locales") + return + + packages = (d.getVar('PACKAGES') or "").split() + + datadir = d.getVar('datadir') + if not datadir: + bb.note("datadir not defined") + return + + dvar = d.getVar('PKGD') + pn = d.getVar('LOCALEBASEPN') + + if pn + '-locale' in packages: + packages.remove(pn + '-locale') + + localedir = os.path.join(dvar + datadir, 'locale') + + if not cpath.isdir(localedir): + bb.debug(1, "No locale files in this package") + return + + locales = os.listdir(localedir) + + summary = d.getVar('SUMMARY') or pn + description = d.getVar('DESCRIPTION') or "" + locale_section = d.getVar('LOCALE_SECTION') + mlprefix = d.getVar('MLPREFIX') or "" + for l in sorted(locales): + ln = legitimize_package_name(l) + pkg = pn + '-locale-' + ln + packages.append(pkg) + d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) + d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) + d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) + d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) + d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' 
% (description, l)) + if locale_section: + d.setVar('SECTION:' + pkg, locale_section) + + d.setVar('PACKAGES', ' '.join(packages)) + + # Disabled by RP 18/06/07 + # Wildcards aren't supported in debian + # They break with ipkg since glibc-locale* will mean that + # glibc-localedata-translit* won't install as a dependency + # for some other package which breaks meta-toolchain + # Probably breaks since virtual-locale- isn't provided anywhere + #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() + #rdep.append('%s-locale*' % pn) + #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) + +def package_debug_vars(d): + # We default to '.debug' style + if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': + # Single debug-file-directory style debug info + debug_vars = { + "append": ".debug", + "staticappend": "", + "dir": "", + "staticdir": "", + "libdir": "/usr/lib/debug", + "staticlibdir": "/usr/lib/debug-static", + "srcdir": "/usr/src/debug", + } + elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': + # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug + debug_vars = { + "append": "", + "staticappend": "", + "dir": "/.debug", + "staticdir": "/.debug-static", + "libdir": "", + "staticlibdir": "", + "srcdir": "", + } + elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': + debug_vars = { + "append": "", + "staticappend": "", + "dir": "/.debug", + "staticdir": "/.debug-static", + "libdir": "", + "staticlibdir": "", + "srcdir": "/usr/src/debug", + } + else: + # Original OE-core, a.k.a. ".debug", style debug info + debug_vars = { + "append": "", + "staticappend": "", + "dir": "/.debug", + "staticdir": "/.debug-static", + "libdir": "", + "staticlibdir": "", + "srcdir": "/usr/src/debug", + } + + return debug_vars + + +def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): + debugfiles = {} + + for line in dwarfsrcfiles_output.splitlines(): + if line.startswith("\t"): + debugfiles[os.path.normpath(line.split()[0])] = "" + + return debugfiles.keys() + +def source_info(file, d, fatal=True): + cmd = ["dwarfsrcfiles", file] + try: + output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) + retval = 0 + except subprocess.CalledProcessError as exc: + output = exc.output + retval = exc.returncode + + # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure + if retval != 0 and retval != 255: + msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") + if fatal: + bb.fatal(msg) + bb.note(msg) + + debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) + + return list(debugsources) + +def splitdebuginfo(file, dvar, dv, d): + # Function to split a single file into two components, one is the stripped + # target system binary, the other contains any debugging information. The + # two files are linked to reference each other. + # + # return a mapping of files:debugsources + + src = file[len(dvar):] + dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] + debugfile = dvar + dest + sources = [] + + if file.endswith(".ko") and file.find("/lib/modules/") != -1: + if oe.package.is_kernel_module_signed(file): + bb.debug(1, "Skip strip on signed module %s" % file) + return (file, sources) + + # Split the file... 
+ bb.utils.mkdirhier(os.path.dirname(debugfile)) + #bb.note("Split %s -> %s" % (file, debugfile)) + # Only store off the hard link reference if we successfully split! + + dvar = d.getVar('PKGD') + objcopy = d.getVar("OBJCOPY") + + newmode = None + if not os.access(file, os.W_OK) or os.access(file, os.R_OK): + origmode = os.stat(file)[stat.ST_MODE] + newmode = origmode | stat.S_IWRITE | stat.S_IREAD + os.chmod(file, newmode) + + # We need to extract the debug src information here... + if dv["srcdir"]: + sources = source_info(file, d) + + bb.utils.mkdirhier(os.path.dirname(debugfile)) + + subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) + + # Set the debuglink to have the view of the file path on the target + subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) + + if newmode: + os.chmod(file, origmode) + + return (file, sources) + +def splitstaticdebuginfo(file, dvar, dv, d): + # Unlike the function above, there is no way to split a static library + # two components. So to get similar results we will copy the unmodified + # static library (containing the debug symbols) into a new directory. + # We will then strip (preserving symbols) the static library in the + # typical location. + # + # return a mapping of files:debugsources + + src = file[len(dvar):] + dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"] + debugfile = dvar + dest + sources = [] + + # Copy the file... + bb.utils.mkdirhier(os.path.dirname(debugfile)) + #bb.note("Copy %s -> %s" % (file, debugfile)) + + dvar = d.getVar('PKGD') + + newmode = None + if not os.access(file, os.W_OK) or os.access(file, os.R_OK): + origmode = os.stat(file)[stat.ST_MODE] + newmode = origmode | stat.S_IWRITE | stat.S_IREAD + os.chmod(file, newmode) + + # We need to extract the debug src information here... + if dv["srcdir"]: + sources = source_info(file, d) + + bb.utils.mkdirhier(os.path.dirname(debugfile)) + + # Copy the unmodified item to the debug directory + shutil.copy2(file, debugfile) + + if newmode: + os.chmod(file, origmode) + + return (file, sources) + +def inject_minidebuginfo(file, dvar, dv, d): + # Extract just the symbols from debuginfo into minidebuginfo, + # compress it with xz and inject it back into the binary in a .gnu_debugdata section. + # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html + + readelf = d.getVar('READELF') + nm = d.getVar('NM') + objcopy = d.getVar('OBJCOPY') + + minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') + + src = file[len(dvar):] + dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] + debugfile = dvar + dest + minidebugfile = minidebuginfodir + src + '.minidebug' + bb.utils.mkdirhier(os.path.dirname(minidebugfile)) + + # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either + # so skip it. + if not os.path.exists(debugfile): + bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) + return + + # minidebuginfo does not make sense to apply to ELF objects other than + # executables and shared libraries, skip applying the minidebuginfo + # generation for objects like kernel modules. 
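+    # "readelf -h" reports the object kind on a line such as
+    #   Type:                              DYN (Shared object file)
+    # (exact spacing varies between binutils versions); kernel modules are
+    # relocatable objects and show up as REL, so they are skipped below.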
+ for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines(): + if not line.strip().startswith("Type:"): + continue + elftype = line.split(":")[1].strip() + if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]): + bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file)) + return + break + + # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. + # We will exclude all of these from minidebuginfo to save space. + remove_section_names = [] + for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): + # strip the leading " [ 1]" section index to allow splitting on space + if ']' not in line: + continue + fields = line[line.index(']') + 1:].split() + if len(fields) < 7: + continue + name = fields[0] + type = fields[1] + flags = fields[6] + # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them + if name.startswith('.debug_'): + continue + if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: + remove_section_names.append(name) + + # List dynamic symbols in the binary. We can exclude these from minidebuginfo + # because they are always present in the binary. + dynsyms = set() + for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): + dynsyms.add(line.split()[0]) + + # Find all function symbols from debuginfo which aren't in the dynamic symbols table. + # These are the ones we want to keep in minidebuginfo. + keep_symbols_file = minidebugfile + '.symlist' + found_any_symbols = False + with open(keep_symbols_file, 'w') as f: + for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): + fields = line.split('|') + if len(fields) < 7: + continue + name = fields[0].strip() + type = fields[3].strip() + if type == 'FUNC' and name not in dynsyms: + f.write('{}\n'.format(name)) + found_any_symbols = True + + if not found_any_symbols: + bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) + return + + bb.utils.remove(minidebugfile) + bb.utils.remove(minidebugfile + '.xz') + + subprocess.check_call([objcopy, '-S'] + + ['--remove-section={}'.format(s) for s in remove_section_names] + + ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) + + subprocess.check_call(['xz', '--keep', minidebugfile]) + + subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) + +def copydebugsources(debugsrcdir, sources, d): + # The debug src information written out to sourcefile is further processed + # and copied to the destination here. + + cpath = oe.cachedpath.CachedPath() + + if debugsrcdir and sources: + sourcefile = d.expand("${WORKDIR}/debugsources.list") + bb.utils.remove(sourcefile) + + # filenames are null-separated - this is an artefact of the previous use + # of rpm's debugedit, which was writing them out that way, and the code elsewhere + # is still assuming that. 
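+        # e.g. for two entries the file contents are "src/foo.c\0src/bar.c\0";
+        # this null-separated form is what the "sort -z" and "cpio -pd0mlL"
+        # pipeline in the copy step below reads back.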
+        debuglistoutput = '\0'.join(sources) + '\0'
+        with open(sourcefile, 'a') as sf:
+            sf.write(debuglistoutput)
+
+        dvar = d.getVar('PKGD')
+        strip = d.getVar("STRIP")
+        objcopy = d.getVar("OBJCOPY")
+        workdir = d.getVar("WORKDIR")
+        sdir = d.getVar("S")
+        cflags = d.expand("${CFLAGS}")
+
+        prefixmap = {}
+        for flag in cflags.split():
+            if not flag.startswith("-fdebug-prefix-map"):
+                continue
+            if "recipe-sysroot" in flag:
+                continue
+            flag = flag.split("=")
+            prefixmap[flag[1]] = flag[2]
+
+        nosuchdir = []
+        basepath = dvar
+        for p in debugsrcdir.split("/"):
+            basepath = basepath + "/" + p
+            if not cpath.exists(basepath):
+                nosuchdir.append(basepath)
+        bb.utils.mkdirhier(basepath)
+        cpath.updatecache(basepath)
+
+        for pmap in prefixmap:
+            # Ignore files from the recipe sysroots (target and native)
+            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
+            # We need to ignore files that are not actually ours
+            # we do this by only paying attention to items from this package
+            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
+            # Remove prefix in the source paths
+            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
+            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
+
+            try:
+                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+            except subprocess.CalledProcessError:
+                # Can "fail" if internal headers/transient sources are attempted
+                pass
+            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
+            # Work around this by manually finding and copying any symbolic links that made it through.
+            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
+                (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
+            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+        # debugsources.list may be polluted from the host if we used externalsrc,
+        # cpio uses copy-pass and may have just created a directory structure
+        # matching the one from the host, if that's the case move those files to
+        # debugsrcdir to avoid host contamination.
+        # Empty dir structure will be deleted in the next step.
+
+        # Same check as above for externalsrc
+        if workdir not in sdir:
+            if os.path.exists(dvar + debugsrcdir + sdir):
+                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar, debugsrcdir)
+                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+        # The copy by cpio may have resulted in some empty directories! Remove these
+        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
+        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+        # Also remove debugsrcdir if it's empty
+        for p in nosuchdir[::-1]:
+            if os.path.exists(p) and not os.listdir(p):
+                os.rmdir(p)
+
+
+def process_split_and_strip_files(d):
+    cpath = oe.cachedpath.CachedPath()
+
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('PN')
+    hostos = d.getVar('HOST_OS')
+
+    oldcwd = os.getcwd()
+    os.chdir(dvar)
+
+    dv = package_debug_vars(d)
+
+    #
+    # First let's figure out all of the files we may have to process ... do this only once!
+ # + elffiles = {} + symlinks = {} + staticlibs = [] + inodes = {} + libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) + baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) + skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() + if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ + d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): + checkelf = {} + checkelflinks = {} + for root, dirs, files in cpath.walk(dvar): + for f in files: + file = os.path.join(root, f) + + # Skip debug files + if dv["append"] and file.endswith(dv["append"]): + continue + if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): + continue + + if file in skipfiles: + continue + + if oe.package.is_static_lib(file): + staticlibs.append(file) + continue + + try: + ltarget = cpath.realpath(file, dvar, False) + s = cpath.lstat(ltarget) + except OSError as e: + (err, strerror) = e.args + if err != errno.ENOENT: + raise + # Skip broken symlinks + continue + if not s: + continue + # Check its an executable + if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ + or (s[stat.ST_MODE] & stat.S_IXOTH) \ + or ((file.startswith(libdir) or file.startswith(baselibdir)) \ + and (".so" in f or ".node" in f)) \ + or (f.startswith('vmlinux') or ".ko" in f): + + if cpath.islink(file): + checkelflinks[file] = ltarget + continue + # Use a reference of device ID and inode number to identify files + file_reference = "%d_%d" % (s.st_dev, s.st_ino) + checkelf[file] = (file, file_reference) + + results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) + results_map = {} + for (ltarget, elf_file) in results: + results_map[ltarget] = elf_file + for file in checkelflinks: + ltarget = checkelflinks[file] + # If it's a symlink, and points to an ELF file, we capture the readlink target + if results_map[ltarget]: + target = os.readlink(file) + #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) + symlinks[file] = target + + results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) + + # Sort results by file path. This ensures that the files are always + # processed in the same order, which is important to make sure builds + # are reproducible when dealing with hardlinks + results.sort(key=lambda x: x[0]) + + for (file, elf_file) in results: + # It's a file (or hardlink), not a link + # ...but is it ELF, and is it already stripped? + if elf_file & 1: + if elf_file & 2: + if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): + bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) + else: + msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) + oe.qa.handle_error("already-stripped", msg, d) + continue + + # At this point we have an unstripped elf file. 
We need to: + # a) Make sure any file we strip is not hardlinked to anything else outside this tree + # b) Only strip any hardlinked file once (no races) + # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks + + # Use a reference of device ID and inode number to identify files + file_reference = checkelf[file][1] + if file_reference in inodes: + os.unlink(file) + os.link(inodes[file_reference][0], file) + inodes[file_reference].append(file) + else: + inodes[file_reference] = [file] + # break hardlink + bb.utils.break_hardlinks(file) + elffiles[file] = elf_file + # Modified the file so clear the cache + cpath.updatecache(file) + + def strip_pkgd_prefix(f): + nonlocal dvar + + if f.startswith(dvar): + return f[len(dvar):] + + return f + + # + # First lets process debug splitting + # + if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): + results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) + + if dv["srcdir"] and not hostos.startswith("mingw"): + if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): + results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) + else: + for file in staticlibs: + results.append( (file,source_info(file, d)) ) + + d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) + + sources = set() + for r in results: + sources.update(r[1]) + + # Hardlink our debug symbols to the other hardlink copies + for ref in inodes: + if len(inodes[ref]) == 1: + continue + + target = inodes[ref][0][len(dvar):] + for file in inodes[ref][1:]: + src = file[len(dvar):] + dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] + fpath = dvar + dest + ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] + bb.utils.mkdirhier(os.path.dirname(fpath)) + # Only one hardlink of separated debug info file in each directory + if not os.access(fpath, os.R_OK): + #bb.note("Link %s -> %s" % (fpath, ftarget)) + os.link(ftarget, fpath) + + # Create symlinks for all cases we were able to split symbols + for file in symlinks: + src = file[len(dvar):] + dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] + fpath = dvar + dest + # Skip it if the target doesn't exist + try: + s = os.stat(fpath) + except OSError as e: + (err, strerror) = e.args + if err != errno.ENOENT: + raise + continue + + ltarget = symlinks[file] + lpath = os.path.dirname(ltarget) + lbase = os.path.basename(ltarget) + ftarget = "" + if lpath and lpath != ".": + ftarget += lpath + dv["dir"] + "/" + ftarget += lbase + dv["append"] + if lpath.startswith(".."): + ftarget = os.path.join("..", ftarget) + bb.utils.mkdirhier(os.path.dirname(fpath)) + #bb.note("Symlink %s -> %s" % (fpath, ftarget)) + os.symlink(ftarget, fpath) + + # Process the dv["srcdir"] if requested... + # This copies and places the referenced sources for later debugging... 
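+    # Under the default split style dv["srcdir"] is /usr/src/debug, so the
+    # referenced sources land below ${PKGD}/usr/src/debug; the exact
+    # sub-layout is whatever the -fdebug-prefix-map entries in CFLAGS mapped
+    # the build paths to (typically something like /usr/src/debug/<PN>/<PV>/).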
+ copydebugsources(dv["srcdir"], sources, d) + # + # End of debug splitting + # + + # + # Now lets go back over things and strip them + # + if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): + strip = d.getVar("STRIP") + sfiles = [] + for file in elffiles: + elf_file = int(elffiles[file]) + #bb.note("Strip %s" % file) + sfiles.append((file, elf_file, strip)) + if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): + for f in staticlibs: + sfiles.append((f, 16, strip)) + + oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) + + # Build "minidebuginfo" and reinject it back into the stripped binaries + if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': + oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, + extraargs=(dvar, dv, d)) + + # + # End of strip + # + os.chdir(oldcwd) + + +def populate_packages(d): + cpath = oe.cachedpath.CachedPath() + + workdir = d.getVar('WORKDIR') + outdir = d.getVar('DEPLOY_DIR') + dvar = d.getVar('PKGD') + packages = d.getVar('PACKAGES').split() + pn = d.getVar('PN') + + bb.utils.mkdirhier(outdir) + os.chdir(dvar) + + autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) + + split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') + + # If debug-with-srcpkg mode is enabled then add the source package if it + # doesn't exist and add the source file contents to the source package. + if split_source_package: + src_package_name = ('%s-src' % d.getVar('PN')) + if not src_package_name in packages: + packages.append(src_package_name) + d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') + + # Sanity check PACKAGES for duplicates + # Sanity should be moved to sanity.bbclass once we have the infrastructure + package_dict = {} + + for i, pkg in enumerate(packages): + if pkg in package_dict: + msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg + oe.qa.handle_error("packages-list", msg, d) + # Ensure the source package gets the chance to pick up the source files + # before the debug package by ordering it first in PACKAGES. Whether it + # actually picks up any source files is controlled by + # PACKAGE_DEBUG_SPLIT_STYLE. + elif pkg.endswith("-src"): + package_dict[pkg] = (10, i) + elif autodebug and pkg.endswith("-dbg"): + package_dict[pkg] = (30, i) + else: + package_dict[pkg] = (50, i) + packages = sorted(package_dict.keys(), key=package_dict.get) + d.setVar('PACKAGES', ' '.join(packages)) + pkgdest = d.getVar('PKGDEST') + + seen = [] + + # os.mkdir masks the permissions with umask so we have to unset it first + oldumask = os.umask(0) + + debug = [] + for root, dirs, files in cpath.walk(dvar): + dir = root[len(dvar):] + if not dir: + dir = os.sep + for f in (files + dirs): + path = "." + os.path.join(dir, f) + if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): + debug.append(path) + + for pkg in packages: + root = os.path.join(pkgdest, pkg) + bb.utils.mkdirhier(root) + + filesvar = d.getVar('FILES:%s' % pkg) or "" + if "//" in filesvar: + msg = "FILES variable for package %s contains '//' which is invalid. 
Attempting to fix this but you should correct the metadata.\n" % pkg + oe.qa.handle_error("files-invalid", msg, d) + filesvar.replace("//", "/") + + origfiles = filesvar.split() + files, symlink_paths = oe.package.files_from_filevars(origfiles) + + if autodebug and pkg.endswith("-dbg"): + files.extend(debug) + + for file in files: + if (not cpath.islink(file)) and (not cpath.exists(file)): + continue + if file in seen: + continue + seen.append(file) + + def mkdir(src, dest, p): + src = os.path.join(src, p) + dest = os.path.join(dest, p) + fstat = cpath.stat(src) + os.mkdir(dest) + os.chmod(dest, fstat.st_mode) + os.chown(dest, fstat.st_uid, fstat.st_gid) + if p not in seen: + seen.append(p) + cpath.updatecache(dest) + + def mkdir_recurse(src, dest, paths): + if cpath.exists(dest + '/' + paths): + return + while paths.startswith("./"): + paths = paths[2:] + p = "." + for c in paths.split("/"): + p = os.path.join(p, c) + if not cpath.exists(os.path.join(dest, p)): + mkdir(src, dest, p) + + if cpath.isdir(file) and not cpath.islink(file): + mkdir_recurse(dvar, root, file) + continue + + mkdir_recurse(dvar, root, os.path.dirname(file)) + fpath = os.path.join(root,file) + if not cpath.islink(file): + os.link(file, fpath) + continue + ret = bb.utils.copyfile(file, fpath) + if ret is False or ret == 0: + bb.fatal("File population failed") + + # Check if symlink paths exist + for file in symlink_paths: + if not os.path.exists(os.path.join(root,file)): + bb.fatal("File '%s' cannot be packaged into '%s' because its " + "parent directory structure does not exist. One of " + "its parent directories is a symlink whose target " + "directory is not included in the package." % + (file, pkg)) + + os.umask(oldumask) + os.chdir(workdir) + + # Handle excluding packages with incompatible licenses + package_list = [] + for pkg in packages: + licenses = d.getVar('_exclude_incompatible-' + pkg) + if licenses: + msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) + oe.qa.handle_error("incompatible-license", msg, d) + else: + package_list.append(pkg) + d.setVar('PACKAGES', ' '.join(package_list)) + + unshipped = [] + for root, dirs, files in cpath.walk(dvar): + dir = root[len(dvar):] + if not dir: + dir = os.sep + for f in (files + dirs): + path = os.path.join(dir, f) + if ('.' + path) not in seen: + unshipped.append(path) + + if unshipped != []: + msg = pn + ": Files/directories were installed but not shipped in any package:" + if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): + bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) + else: + for f in unshipped: + msg = msg + "\n " + f + msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" + msg = msg + "%s: %d installed and not shipped files." 
% (pn, len(unshipped)) + oe.qa.handle_error("installed-vs-shipped", msg, d) + +def process_fixsymlinks(pkgfiles, d): + cpath = oe.cachedpath.CachedPath() + pkgdest = d.getVar('PKGDEST') + packages = d.getVar("PACKAGES", False).split() + + dangling_links = {} + pkg_files = {} + for pkg in packages: + dangling_links[pkg] = [] + pkg_files[pkg] = [] + inst_root = os.path.join(pkgdest, pkg) + for path in pkgfiles[pkg]: + rpath = path[len(inst_root):] + pkg_files[pkg].append(rpath) + rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) + if not cpath.lexists(rtarget): + dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) + + newrdepends = {} + for pkg in dangling_links: + for l in dangling_links[pkg]: + found = False + bb.debug(1, "%s contains dangling link %s" % (pkg, l)) + for p in packages: + if l in pkg_files[p]: + found = True + bb.debug(1, "target found in %s" % p) + if p == pkg: + break + if pkg not in newrdepends: + newrdepends[pkg] = [] + newrdepends[pkg].append(p) + break + if found == False: + bb.note("%s contains dangling symlink to %s" % (pkg, l)) + + for pkg in newrdepends: + rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") + for p in newrdepends[pkg]: + if p not in rdepends: + rdepends[p] = [] + d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) + +def process_filedeps(pkgfiles, d): + """ + Collect perfile run-time dependency metadata + Output: + FILERPROVIDESFLIST:pkg - list of all files w/ deps + FILERPROVIDES:filepath:pkg - per file dep + + FILERDEPENDSFLIST:pkg - list of all files w/ deps + FILERDEPENDS:filepath:pkg - per file dep + """ + if d.getVar('SKIP_FILEDEPS') == '1': + return + + pkgdest = d.getVar('PKGDEST') + packages = d.getVar('PACKAGES') + rpmdeps = d.getVar('RPMDEPS') + + def chunks(files, n): + return [files[i:i+n] for i in range(0, len(files), n)] + + pkglist = [] + for pkg in packages.split(): + if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': + continue + if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): + continue + for files in chunks(pkgfiles[pkg], 100): + pkglist.append((pkg, files, rpmdeps, pkgdest)) + + processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) + + provides_files = {} + requires_files = {} + + for result in processed: + (pkg, provides, requires) = result + + if pkg not in provides_files: + provides_files[pkg] = [] + if pkg not in requires_files: + requires_files[pkg] = [] + + for file in sorted(provides): + provides_files[pkg].append(file) + key = "FILERPROVIDES:" + file + ":" + pkg + d.appendVar(key, " " + " ".join(provides[file])) + + for file in sorted(requires): + requires_files[pkg].append(file) + key = "FILERDEPENDS:" + file + ":" + pkg + d.appendVar(key, " " + " ".join(requires[file])) + + for pkg in requires_files: + d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) + for pkg in provides_files: + d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) + +def process_shlibs(pkgfiles, d): + cpath = oe.cachedpath.CachedPath() + + exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) + if exclude_shlibs: + bb.note("not generating shlibs") + return + + lib_re = re.compile(r"^.*\.so") + libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) + + packages = d.getVar('PACKAGES') + + shlib_pkgs = [] + 
exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") + if exclusion_list: + for pkg in packages.split(): + if pkg not in exclusion_list.split(): + shlib_pkgs.append(pkg) + else: + bb.note("not generating shlibs for %s" % pkg) + else: + shlib_pkgs = packages.split() + + hostos = d.getVar('HOST_OS') + + workdir = d.getVar('WORKDIR') + + ver = d.getVar('PKGV') + if not ver: + msg = "PKGV not defined" + oe.qa.handle_error("pkgv-undefined", msg, d) + return + + pkgdest = d.getVar('PKGDEST') + + shlibswork_dir = d.getVar('SHLIBSWORKDIR') + + def linux_so(file, pkg, pkgver, d): + needs_ldconfig = False + needed = set() + sonames = set() + renames = [] + ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') + cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" + fd = os.popen(cmd) + lines = fd.readlines() + fd.close() + rpath = tuple() + for l in lines: + m = re.match(r"\s+RPATH\s+([^\s]*)", l) + if m: + rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") + rpath = tuple(map(os.path.normpath, rpaths)) + for l in lines: + m = re.match(r"\s+NEEDED\s+([^\s]*)", l) + if m: + dep = m.group(1) + if dep not in needed: + needed.add((dep, file, rpath)) + m = re.match(r"\s+SONAME\s+([^\s]*)", l) + if m: + this_soname = m.group(1) + prov = (this_soname, ldir, pkgver) + if not prov in sonames: + # if library is private (only used by package) then do not build shlib for it + if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: + sonames.add(prov) + if libdir_re.match(os.path.dirname(file)): + needs_ldconfig = True + if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): + renames.append((file, os.path.join(os.path.dirname(file), this_soname))) + return (needs_ldconfig, needed, sonames, renames) + + def darwin_so(file, needed, sonames, renames, pkgver): + if not os.path.exists(file): + return + ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') + + def get_combinations(base): + # + # Given a base library name, find all combinations of this split by "." 
and "-" + # + combos = [] + options = base.split(".") + for i in range(1, len(options) + 1): + combos.append(".".join(options[0:i])) + options = base.split("-") + for i in range(1, len(options) + 1): + combos.append("-".join(options[0:i])) + return combos + + if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): + # Drop suffix + name = os.path.basename(file).rsplit(".",1)[0] + # Find all combinations + combos = get_combinations(name) + for combo in combos: + if not combo in sonames: + prov = (combo, ldir, pkgver) + sonames.add(prov) + if file.endswith('.dylib') or file.endswith('.so'): + rpath = [] + p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + # If returned successfully, process stdout for results + if p.returncode == 0: + for l in out.split("\n"): + l = l.strip() + if l.startswith('path '): + rpath.append(l.split()[1]) + + p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + # If returned successfully, process stdout for results + if p.returncode == 0: + for l in out.split("\n"): + l = l.strip() + if not l or l.endswith(":"): + continue + if "is not an object file" in l: + continue + name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] + if name and name not in needed[pkg]: + needed[pkg].add((name, file, tuple())) + + def mingw_dll(file, needed, sonames, renames, pkgver): + if not os.path.exists(file): + return + + if file.endswith(".dll"): + # assume all dlls are shared objects provided by the package + sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) + + if (file.endswith(".dll") or file.endswith(".exe")): + # use objdump to search for "DLL Name: .*\.dll" + p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + # process the output, grabbing all .dll names + if p.returncode == 0: + for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): + dllname = m.group(1) + if dllname: + needed[pkg].add((dllname, file, tuple())) + + if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": + snap_symlinks = True + else: + snap_symlinks = False + + needed = {} + + shlib_provider = oe.package.read_shlib_providers(d) + + for pkg in shlib_pkgs: + private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" + private_libs = private_libs.split() + needs_ldconfig = False + bb.debug(2, "calculating shlib provides for %s" % pkg) + + pkgver = d.getVar('PKGV:' + pkg) + if not pkgver: + pkgver = d.getVar('PV_' + pkg) + if not pkgver: + pkgver = ver + + needed[pkg] = set() + sonames = set() + renames = [] + linuxlist = [] + for file in pkgfiles[pkg]: + soname = None + if cpath.islink(file): + continue + if hostos == "darwin" or hostos == "darwin8": + darwin_so(file, needed, sonames, renames, pkgver) + elif hostos.startswith("mingw"): + mingw_dll(file, needed, sonames, renames, pkgver) + elif os.access(file, os.X_OK) or lib_re.match(file): + linuxlist.append(file) + + if linuxlist: + results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) + for r in results: + ldconfig = r[0] + needed[pkg] |= r[1] + sonames |= r[2] + renames.extend(r[3]) + needs_ldconfig = needs_ldconfig or ldconfig + + for (old, new) in 
renames: + bb.note("Renaming %s to %s" % (old, new)) + bb.utils.rename(old, new) + pkgfiles[pkg].remove(old) + + shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") + if len(sonames): + with open(shlibs_file, 'w') as fd: + for s in sorted(sonames): + if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: + (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] + if old_pkg != pkg: + bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) + bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) + fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') + if s[0] not in shlib_provider: + shlib_provider[s[0]] = {} + shlib_provider[s[0]][s[1]] = (pkg, pkgver) + if needs_ldconfig: + bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) + postinst = d.getVar('pkg_postinst:%s' % pkg) + if not postinst: + postinst = '#!/bin/sh\n' + postinst += d.getVar('ldconfig_postinst_fragment') + d.setVar('pkg_postinst:%s' % pkg, postinst) + bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) + + assumed_libs = d.getVar('ASSUME_SHLIBS') + if assumed_libs: + libdir = d.getVar("libdir") + for e in assumed_libs.split(): + l, dep_pkg = e.split(":") + lib_ver = None + dep_pkg = dep_pkg.rsplit("_", 1) + if len(dep_pkg) == 2: + lib_ver = dep_pkg[1] + dep_pkg = dep_pkg[0] + if l not in shlib_provider: + shlib_provider[l] = {} + shlib_provider[l][libdir] = (dep_pkg, lib_ver) + + libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] + + for pkg in shlib_pkgs: + bb.debug(2, "calculating shlib requirements for %s" % pkg) + + private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" + private_libs = private_libs.split() + + deps = list() + for n in needed[pkg]: + # if n is in private libraries, don't try to search provider for it + # this could cause problem in case some abc.bb provides private + # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1 + # but skipping it is still better alternative than providing own + # version and then adding runtime dependency for the same system library + if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: + bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) + continue + if n[0] in shlib_provider.keys(): + shlib_provider_map = shlib_provider[n[0]] + matches = set() + for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): + if p in shlib_provider_map: + matches.add(p) + if len(matches) > 1: + matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) + bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) + elif len(matches) == 1: + (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] + + bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) + + if dep_pkg == pkg: + continue + + if ver_needed: + dep = "%s (>= %s)" % (dep_pkg, ver_needed) + else: + dep = dep_pkg + if not dep in deps: + deps.append(dep) + continue + bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) + + deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") + if os.path.exists(deps_file): + os.remove(deps_file) + if deps: + with open(deps_file, 'w') as fd: + for dep in sorted(deps): + fd.write(dep + '\n') + +def process_pkgconfig(pkgfiles, d): + packages = d.getVar('PACKAGES') + workdir = 
d.getVar('WORKDIR') + pkgdest = d.getVar('PKGDEST') + + shlibs_dirs = d.getVar('SHLIBSDIRS').split() + shlibswork_dir = d.getVar('SHLIBSWORKDIR') + + pc_re = re.compile(r'(.*)\.pc$') + var_re = re.compile(r'(.*)=(.*)') + field_re = re.compile(r'(.*): (.*)') + + pkgconfig_provided = {} + pkgconfig_needed = {} + for pkg in packages.split(): + pkgconfig_provided[pkg] = [] + pkgconfig_needed[pkg] = [] + for file in sorted(pkgfiles[pkg]): + m = pc_re.match(file) + if m: + pd = bb.data.init() + name = m.group(1) + pkgconfig_provided[pkg].append(os.path.basename(name)) + if not os.access(file, os.R_OK): + continue + with open(file, 'r') as f: + lines = f.readlines() + for l in lines: + m = var_re.match(l) + if m: + name = m.group(1) + val = m.group(2) + pd.setVar(name, pd.expand(val)) + continue + m = field_re.match(l) + if m: + hdr = m.group(1) + exp = pd.expand(m.group(2)) + if hdr == 'Requires': + pkgconfig_needed[pkg] += exp.replace(',', ' ').split() + + for pkg in packages.split(): + pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") + if pkgconfig_provided[pkg] != []: + with open(pkgs_file, 'w') as f: + for p in sorted(pkgconfig_provided[pkg]): + f.write('%s\n' % p) + + # Go from least to most specific since the last one found wins + for dir in reversed(shlibs_dirs): + if not os.path.exists(dir): + continue + for file in sorted(os.listdir(dir)): + m = re.match(r'^(.*)\.pclist$', file) + if m: + pkg = m.group(1) + with open(os.path.join(dir, file)) as fd: + lines = fd.readlines() + pkgconfig_provided[pkg] = [] + for l in lines: + pkgconfig_provided[pkg].append(l.rstrip()) + + for pkg in packages.split(): + deps = [] + for n in pkgconfig_needed[pkg]: + found = False + for k in pkgconfig_provided.keys(): + if n in pkgconfig_provided[k]: + if k != pkg and not (k in deps): + deps.append(k) + found = True + if found == False: + bb.note("couldn't find pkgconfig module '%s' in any package" % n) + deps_file = os.path.join(pkgdest, pkg + ".pcdeps") + if len(deps): + with open(deps_file, 'w') as fd: + for dep in deps: + fd.write(dep + '\n') + +def read_libdep_files(d): + pkglibdeps = {} + packages = d.getVar('PACKAGES').split() + for pkg in packages: + pkglibdeps[pkg] = {} + for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": + depsfile = d.expand("${PKGDEST}/" + pkg + extension) + if os.access(depsfile, os.R_OK): + with open(depsfile) as fd: + lines = fd.readlines() + for l in lines: + l.rstrip() + deps = bb.utils.explode_dep_versions2(l) + for dep in deps: + if not dep in pkglibdeps[pkg]: + pkglibdeps[pkg][dep] = deps[dep] + return pkglibdeps + +def process_depchains(pkgfiles, d): + """ + For a given set of prefix and postfix modifiers, make those packages + RRECOMMENDS on the corresponding packages for its RDEPENDS. + + Example: If package A depends upon package B, and A's .bb emits an + A-dev package, this would make A-dev Recommends: B-dev. + + If only one of a given suffix is specified, it will take the RRECOMMENDS + based on the RDEPENDS of *all* other packages. If more than one of a given + suffix is specified, its will only use the RDEPENDS of the single parent + package. 
+ """ + + packages = d.getVar('PACKAGES') + postfixes = (d.getVar('DEPCHAIN_POST') or '').split() + prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() + + def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): + + #bb.note('depends for %s is %s' % (base, depends)) + rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") + + for depend in sorted(depends): + if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): + #bb.note("Skipping %s" % depend) + continue + if depend.endswith('-dev'): + depend = depend[:-4] + if depend.endswith('-dbg'): + depend = depend[:-4] + pkgname = getname(depend, suffix) + #bb.note("Adding %s for %s" % (pkgname, depend)) + if pkgname not in rreclist and pkgname != pkg: + rreclist[pkgname] = [] + + #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) + d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) + + def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): + + #bb.note('rdepends for %s is %s' % (base, rdepends)) + rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") + + for depend in sorted(rdepends): + if depend.find('virtual-locale-') != -1: + #bb.note("Skipping %s" % depend) + continue + if depend.endswith('-dev'): + depend = depend[:-4] + if depend.endswith('-dbg'): + depend = depend[:-4] + pkgname = getname(depend, suffix) + #bb.note("Adding %s for %s" % (pkgname, depend)) + if pkgname not in rreclist and pkgname != pkg: + rreclist[pkgname] = [] + + #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) + d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) + + def add_dep(list, dep): + if dep not in list: + list.append(dep) + + depends = [] + for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): + add_dep(depends, dep) + + rdepends = [] + for pkg in packages.split(): + for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): + add_dep(rdepends, dep) + + #bb.note('rdepends is %s' % rdepends) + + def post_getname(name, suffix): + return '%s%s' % (name, suffix) + def pre_getname(name, suffix): + return '%s%s' % (suffix, name) + + pkgs = {} + for pkg in packages.split(): + for postfix in postfixes: + if pkg.endswith(postfix): + if not postfix in pkgs: + pkgs[postfix] = {} + pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) + + for prefix in prefixes: + if pkg.startswith(prefix): + if not prefix in pkgs: + pkgs[prefix] = {} + pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) + + if "-dbg" in pkgs: + pkglibdeps = read_libdep_files(d) + pkglibdeplist = [] + for pkg in pkglibdeps: + for k in pkglibdeps[pkg]: + add_dep(pkglibdeplist, k) + dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) + + for suffix in pkgs: + for pkg in pkgs[suffix]: + if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): + continue + (base, func) = pkgs[suffix][pkg] + if suffix == "-dev": + pkg_adddeprrecs(pkg, base, suffix, func, depends, d) + elif suffix == "-dbg": + if not dbgdefaultdeps: + pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) + continue + if len(pkgs[suffix]) == 1: + pkg_addrrecs(pkg, base, suffix, func, rdepends, d) + else: + rdeps = [] + for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): + add_dep(rdeps, dep) + pkg_addrrecs(pkg, base, suffix, func, rdeps, d) +