From 7c552996597faaee2fbee185b250c0ee30ea3b5f Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 14 Dec 2016 21:13:04 +0000 Subject: meta: remove True option to getVar calls getVar() now defaults to expanding by default, thus remove the True option from getVar() calls with a regex search and replace. Search made with the following regex: getVar ?\(( ?[^,()]*), True\) Signed-off-by: Joshua Lock Signed-off-by: Ross Burton --- meta/classes/allarch.bbclass | 4 +- meta/classes/archiver.bbclass | 74 ++++---- meta/classes/autotools.bbclass | 12 +- meta/classes/base.bbclass | 126 ++++++------- meta/classes/binconfig.bbclass | 2 +- meta/classes/blacklist.bbclass | 4 +- meta/classes/bugzilla.bbclass | 24 +-- meta/classes/buildhistory.bbclass | 62 +++---- meta/classes/buildstats-summary.bbclass | 2 +- meta/classes/buildstats.bbclass | 14 +- meta/classes/ccache.bbclass | 2 +- meta/classes/chrpath.bbclass | 4 +- meta/classes/cmake.bbclass | 16 +- meta/classes/cml1.bbclass | 4 +- meta/classes/compress_doc.bbclass | 26 +-- meta/classes/copyleft_compliance.bbclass | 10 +- meta/classes/copyleft_filter.bbclass | 10 +- meta/classes/cross-canadian.bbclass | 34 ++-- meta/classes/crosssdk.bbclass | 2 +- meta/classes/cve-check.bbclass | 46 ++--- meta/classes/debian.bbclass | 24 +-- meta/classes/devshell.bbclass | 6 +- meta/classes/distro_features_check.bbclass | 8 +- meta/classes/distrodata.bbclass | 132 +++++++------- meta/classes/distutils-base.bbclass | 2 +- meta/classes/distutils3-base.bbclass | 2 +- meta/classes/externalsrc.bbclass | 38 ++-- meta/classes/extrausers.bbclass | 2 +- meta/classes/fontcache.bbclass | 14 +- meta/classes/fs-uuid.bbclass | 2 +- meta/classes/gconf.bbclass | 12 +- meta/classes/gettext.bbclass | 8 +- meta/classes/gio-module-cache.bbclass | 10 +- meta/classes/grub-efi.bbclass | 22 +-- meta/classes/gsettings.bbclass | 10 +- meta/classes/gtk-icon-cache.bbclass | 14 +- meta/classes/gtk-immodules-cache.bbclass | 10 +- meta/classes/icecc.bbclass | 4 +- 
meta/classes/image-buildinfo.bbclass | 10 +- meta/classes/image-live.bbclass | 8 +- meta/classes/image-vm.bbclass | 12 +- meta/classes/image.bbclass | 96 +++++----- meta/classes/image_types.bbclass | 24 +-- meta/classes/insane.bbclass | 194 ++++++++++---------- meta/classes/kernel-arch.bbclass | 6 +- meta/classes/kernel-fitimage.bbclass | 14 +- meta/classes/kernel-grub.bbclass | 2 +- meta/classes/kernel-module-split.bbclass | 38 ++-- meta/classes/kernel-uimage.bbclass | 8 +- meta/classes/kernel-yocto.bbclass | 2 +- meta/classes/kernel.bbclass | 40 ++-- meta/classes/libc-common.bbclass | 10 +- meta/classes/libc-package.bbclass | 60 +++--- meta/classes/license.bbclass | 96 +++++----- meta/classes/live-vm-common.bbclass | 8 +- meta/classes/metadata_scm.bbclass | 2 +- meta/classes/migrate_localcount.bbclass | 12 +- meta/classes/mime.bbclass | 12 +- meta/classes/module.bbclass | 2 +- meta/classes/multilib.bbclass | 32 ++-- meta/classes/multilib_global.bbclass | 22 +-- meta/classes/native.bbclass | 10 +- meta/classes/nativesdk.bbclass | 8 +- meta/classes/npm.bbclass | 4 +- meta/classes/oelint.bbclass | 4 +- meta/classes/package.bbclass | 282 ++++++++++++++--------------- meta/classes/package_deb.bbclass | 54 +++--- meta/classes/package_ipk.bbclass | 60 +++--- meta/classes/package_rpm.bbclass | 136 +++++++------- meta/classes/package_tar.bbclass | 14 +- meta/classes/packagedata.bbclass | 10 +- meta/classes/packagefeed-stability.bbclass | 14 +- meta/classes/packagegroup.bbclass | 8 +- meta/classes/patch.bbclass | 20 +- meta/classes/pixbufcache.bbclass | 10 +- meta/classes/populate_sdk_base.bbclass | 36 ++-- meta/classes/populate_sdk_ext.bbclass | 108 +++++------ meta/classes/prexport.bbclass | 4 +- meta/classes/ptest.bbclass | 2 +- meta/classes/qemu.bbclass | 10 +- meta/classes/qemuboot.bbclass | 8 +- meta/classes/recipe_sanity.bbclass | 26 +-- meta/classes/report-error.bbclass | 22 +-- meta/classes/rm_work.bbclass | 6 +- meta/classes/rootfs-postcommands.bbclass | 6 +- 
meta/classes/rootfs_deb.bbclass | 4 +- meta/classes/rootfs_ipk.bbclass | 2 +- meta/classes/rootfs_rpm.bbclass | 2 +- meta/classes/sanity.bbclass | 108 +++++------ meta/classes/sign_ipk.bbclass | 12 +- meta/classes/sign_package_feed.bbclass | 4 +- meta/classes/sign_rpm.bbclass | 12 +- meta/classes/siteconfig.bbclass | 2 +- meta/classes/siteinfo.bbclass | 14 +- meta/classes/spdx.bbclass | 26 +-- meta/classes/sstate.bbclass | 114 ++++++------ meta/classes/staging.bbclass | 22 +-- meta/classes/syslinux.bbclass | 34 ++-- meta/classes/systemd-boot.bbclass | 14 +- meta/classes/systemd.bbclass | 38 ++-- meta/classes/terminal.bbclass | 8 +- meta/classes/testexport.bbclass | 40 ++-- meta/classes/testimage.bbclass | 12 +- meta/classes/testsdk.bbclass | 12 +- meta/classes/tinderclient.bbclass | 54 +++--- meta/classes/toaster.bbclass | 14 +- meta/classes/toolchain-scripts.bbclass | 4 +- meta/classes/uboot-config.bbclass | 14 +- meta/classes/uboot-extlinux-config.bbclass | 24 +-- meta/classes/uboot-sign.bbclass | 6 +- meta/classes/uninative.bbclass | 18 +- meta/classes/update-alternatives.bbclass | 52 +++--- meta/classes/update-rc.d.bbclass | 28 +-- meta/classes/useradd-staticids.bbclass | 48 ++--- meta/classes/useradd.bbclass | 22 +-- meta/classes/utility-tasks.bbclass | 4 +- meta/classes/utils.bbclass | 30 +-- meta/classes/waf.bbclass | 2 +- 118 files changed, 1585 insertions(+), 1585 deletions(-) (limited to 'meta/classes') diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass index ddc2a85050..9dce49879a 100644 --- a/meta/classes/allarch.bbclass +++ b/meta/classes/allarch.bbclass @@ -11,7 +11,7 @@ PACKAGE_ARCH = "all" python () { # Allow this class to be included but overridden - only set # the values if we're still "all" package arch. 
- if d.getVar("PACKAGE_ARCH", True) == "all": + if d.getVar("PACKAGE_ARCH") == "all": # No need for virtual/libc or a cross compiler d.setVar("INHIBIT_DEFAULT_DEPS","1") @@ -47,6 +47,6 @@ python () { d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS") d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS") elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d): - bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True)) + bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE")) } diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass index 3543ca9c58..52959776c3 100644 --- a/meta/classes/archiver.bbclass +++ b/meta/classes/archiver.bbclass @@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}" python () { - pn = d.getVar('PN', True) - assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split() + pn = d.getVar('PN') + assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() if pn in assume_provided: - for p in d.getVar("PROVIDES", True).split(): + for p in d.getVar("PROVIDES").split(): if p != pn: pn = p break @@ -68,7 +68,7 @@ python () { bb.debug(1, 'archiver: %s is included: %s' % (pn, reason)) # We just archive gcc-source for all the gcc related recipes - if d.getVar('BPN', True) in ['gcc', 'libgcc'] \ + if d.getVar('BPN') in ['gcc', 'libgcc'] \ and not pn.startswith('gcc-source'): bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn) return @@ -106,7 +106,7 @@ python () { # Output the srpm package ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True) if ar_srpm == "1": - if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm': + if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm': d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn) if ar_dumpdata == "1": 
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn) @@ -130,9 +130,9 @@ python do_ar_original() { if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original": return - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') bb.note('Archiving the original source...') - urls = d.getVar("SRC_URI", True).split() + urls = d.getVar("SRC_URI").split() # destsuffix (git fetcher) and subdir (everything else) are allowed to be # absolute paths (for example, destsuffix=${S}/foobar). # That messes with unpacking inside our tmpdir below, because the fetchers @@ -157,7 +157,7 @@ python do_ar_original() { if os.path.isfile(local): shutil.copy(local, ar_outdir) elif os.path.isdir(local): - tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True)) + tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR')) fetch.unpack(tmpdir, (url,)) # To handle recipes with more than one source, we add the "name" # URL parameter as suffix. We treat it as an error when @@ -195,24 +195,24 @@ python do_ar_patched() { return # Get the ARCHIVER_OUTDIR before we reset the WORKDIR - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) - ar_workdir = d.getVar('ARCHIVER_WORKDIR', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') + ar_workdir = d.getVar('ARCHIVER_WORKDIR') bb.note('Archiving the patched source...') d.setVar('WORKDIR', ar_workdir) - create_tarball(d, d.getVar('S', True), 'patched', ar_outdir) + create_tarball(d, d.getVar('S'), 'patched', ar_outdir) } python do_ar_configured() { import shutil - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured': bb.note('Archiving the configured source...') - pn = d.getVar('PN', True) + pn = d.getVar('PN') # "gcc-source-${PV}" recipes don't have "do_configure" # task, so we need to run "do_preconfigure" instead if pn.startswith("gcc-source-"): - d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True)) + 
d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) bb.build.exec_func('do_preconfigure', d) # The libtool-native's do_configure will remove the @@ -221,7 +221,7 @@ python do_ar_configured() { # instead of. elif pn != 'libtool-native': # Change the WORKDIR to make do_configure run in another dir. - d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True)) + d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) if bb.data.inherits_class('kernel-yocto', d): bb.build.exec_func('do_kernel_configme', d) if bb.data.inherits_class('cmake', d): @@ -235,12 +235,12 @@ python do_ar_configured() { for func in (postfuncs or '').split(): if func != "do_qa_configure": bb.build.exec_func(func, d) - srcdir = d.getVar('S', True) - builddir = d.getVar('B', True) + srcdir = d.getVar('S') + builddir = d.getVar('B') if srcdir != builddir: if os.path.exists(builddir): oe.path.copytree(builddir, os.path.join(srcdir, \ - 'build.%s.ar_configured' % d.getVar('PF', True))) + 'build.%s.ar_configured' % d.getVar('PF'))) create_tarball(d, srcdir, 'configured', ar_outdir) } @@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir): import tarfile # Make sure we are only creating a single tarball for gcc sources - if (d.getVar('SRC_URI', True) == ""): + if (d.getVar('SRC_URI') == ""): return bb.utils.mkdirhier(ar_outdir) if suffix: - filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix) + filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix) else: - filename = '%s.tar.gz' % d.getVar('PF', True) + filename = '%s.tar.gz' % d.getVar('PF') tarname = os.path.join(ar_outdir, filename) bb.note('Creating %s' % tarname) @@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir): dirname = os.path.dirname(src) basename = os.path.basename(src) os.chdir(dirname) - out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True)) + out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF')) diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file) 
subprocess.call(diff_cmd, shell=True) bb.utils.remove(src_patched, recurse=True) @@ -297,9 +297,9 @@ python do_unpack_and_patch() { [ 'patched', 'configured'] and \ d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1': return - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) - ar_workdir = d.getVar('ARCHIVER_WORKDIR', True) - pn = d.getVar('PN', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') + ar_workdir = d.getVar('ARCHIVER_WORKDIR') + pn = d.getVar('PN') # The kernel class functions require it to be on work-shared, so we dont change WORKDIR if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')): @@ -309,18 +309,18 @@ python do_unpack_and_patch() { # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the # possibly requiring of the following tasks (such as some recipes's # do_patch required 'B' existed). - bb.utils.mkdirhier(d.getVar('B', True)) + bb.utils.mkdirhier(d.getVar('B')) bb.build.exec_func('do_unpack', d) # Save the original source for creating the patches if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1': - src = d.getVar('S', True).rstrip('/') + src = d.getVar('S').rstrip('/') src_orig = '%s.orig' % src oe.path.copytree(src, src_orig) # Make sure gcc and kernel sources are patched only once - if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))): + if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))): bb.build.exec_func('do_patch', d) # Create the patches @@ -339,14 +339,14 @@ python do_ar_recipe () { require_re = re.compile( r"require\s+(.+)" ) include_re = re.compile( r"include\s+(.+)" ) - bbfile = d.getVar('FILE', True) - outdir = os.path.join(d.getVar('WORKDIR', True), \ - '%s-recipe' % d.getVar('PF', True)) + bbfile = d.getVar('FILE') + outdir = os.path.join(d.getVar('WORKDIR'), \ + '%s-recipe' % d.getVar('PF')) bb.utils.mkdirhier(outdir) shutil.copy(bbfile, outdir) - pn = 
d.getVar('PN', True) - bbappend_files = d.getVar('BBINCLUDED', True).split() + pn = d.getVar('PN') + bbappend_files = d.getVar('BBINCLUDED').split() # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded. bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn) @@ -356,7 +356,7 @@ python do_ar_recipe () { shutil.copy(file, outdir) dirname = os.path.dirname(bbfile) - bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True)) + bbpath = '%s:%s' % (dirname, d.getVar('BBPATH')) f = open(bbfile, 'r') for line in f.readlines(): incfile = None @@ -370,7 +370,7 @@ python do_ar_recipe () { if incfile: shutil.copy(incfile, outdir) - create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True)) + create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR')) bb.utils.remove(outdir, recurse=True) } @@ -379,8 +379,8 @@ python do_dumpdata () { dump environment data to ${PF}-showdata.dump """ - dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \ - '%s-showdata.dump' % d.getVar('PF', True)) + dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \ + '%s-showdata.dump' % d.getVar('PF')) bb.note('Dumping metadata into %s' % dumpfile) with open(dumpfile, "w") as f: # emit variables and shell functions diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass index c43ea9a7ef..c43531b050 100644 --- a/meta/classes/autotools.bbclass +++ b/meta/classes/autotools.bbclass @@ -1,8 +1,8 @@ def autotools_dep_prepend(d): - if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True): + if d.getVar('INHIBIT_AUTOTOOLS_DEPS'): return '' - pn = d.getVar('PN', True) + pn = d.getVar('PN') deps = '' if pn in ['autoconf-native', 'automake-native', 'help2man-native']: @@ -14,7 +14,7 @@ def autotools_dep_prepend(d): if not bb.data.inherits_class('native', d) \ and not bb.data.inherits_class('nativesdk', d) \ and not bb.data.inherits_class('cross', d) \ - and not d.getVar('INHIBIT_DEFAULT_DEPS', 
True): + and not d.getVar('INHIBIT_DEFAULT_DEPS'): deps += 'libtool-cross ' return deps + 'gnu-config-native ' @@ -139,15 +139,15 @@ ACLOCALDIR = "${WORKDIR}/aclocal-copy" python autotools_copy_aclocals () { import copy - s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True) + s = d.getVar("AUTOTOOLS_SCRIPT_PATH") if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"): if not d.getVar("AUTOTOOLS_COPYACLOCAL", False): return taskdepdata = d.getVar("BB_TASKDEPDATA", False) #bb.warn(str(taskdepdata)) - pn = d.getVar("PN", True) - aclocaldir = d.getVar("ACLOCALDIR", True) + pn = d.getVar("PN") + aclocaldir = d.getVar("ACLOCALDIR") oe.path.remove(aclocaldir) bb.utils.mkdirhier(aclocaldir) start = None diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass index 19673e6913..2765ebf61b 100644 --- a/meta/classes/base.bbclass +++ b/meta/classes/base.bbclass @@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list" def oe_import(d): import sys - bbpath = d.getVar("BBPATH", True).split(":") + bbpath = d.getVar("BBPATH").split(":") sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath] def inject(name, value): @@ -37,7 +37,7 @@ def oe_import(d): OE_IMPORTED := "${@oe_import(d)}" def lsb_distro_identifier(d): - adjust = d.getVar('LSB_DISTRO_ADJUST', True) + adjust = d.getVar('LSB_DISTRO_ADJUST') adjust_func = None if adjust: try: @@ -72,7 +72,7 @@ def base_dep_prepend(d): # we need that built is the responsibility of the patch function / class, not # the application. 
if not d.getVar('INHIBIT_DEFAULT_DEPS', False): - if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)): + if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')): deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " return deps @@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} " FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}" # THISDIR only works properly with imediate expansion as it has to run # in the context of the location its used (:=) -THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}" +THISDIR = "${@os.path.dirname(d.getVar('FILE'))}" def extra_path_elements(d): path = "" - elements = (d.getVar('EXTRANATIVEPATH', True) or "").split() + elements = (d.getVar('EXTRANATIVEPATH') or "").split() for e in elements: path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":" return path @@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}" def get_lic_checksum_file_list(d): filelist = [] - lic_files = d.getVar("LIC_FILES_CHKSUM", True) or '' - tmpdir = d.getVar("TMPDIR", True) - s = d.getVar("S", True) - b = d.getVar("B", True) - workdir = d.getVar("WORKDIR", True) + lic_files = d.getVar("LIC_FILES_CHKSUM") or '' + tmpdir = d.getVar("TMPDIR") + s = d.getVar("S") + b = d.getVar("B") + workdir = d.getVar("WORKDIR") urls = lic_files.split() for url in urls: @@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d): continue filelist.append(path + ":" + str(os.path.exists(path))) except bb.fetch.MalformedUrl: - bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url) + bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url) return " ".join(filelist) addtask fetch @@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}" do_fetch[vardeps] += "SRCREV" python base_do_fetch() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() 
if len(src_uri) == 0: return @@ -141,31 +141,31 @@ addtask unpack after do_fetch do_unpack[dirs] = "${WORKDIR}" python () { - if d.getVar('S', True) != d.getVar('WORKDIR', True): + if d.getVar('S') != d.getVar('WORKDIR'): d.setVarFlag('do_unpack', 'cleandirs', '${S}') else: d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches')) } python base_do_unpack() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return try: fetcher = bb.fetch2.Fetch(src_uri, d) - fetcher.unpack(d.getVar('WORKDIR', True)) + fetcher.unpack(d.getVar('WORKDIR')) except bb.fetch2.BBFetchException as e: bb.fatal(str(e)) } def pkgarch_mapping(d): # Compatibility mappings of TUNE_PKGARCH (opt in) - if d.getVar("PKGARCHCOMPAT_ARMV7A", True): - if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon": + if d.getVar("PKGARCHCOMPAT_ARMV7A"): + if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon": d.setVar("TUNE_PKGARCH", "armv7a") def get_layers_branch_rev(d): - layers = (d.getVar("BBLAYERS", True) or "").split() + layers = (d.getVar("BBLAYERS") or "").split() layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_revision(i, None)) \ @@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list" def buildcfg_vars(d): statusvars = oe.data.typed_value('BUILDCFG_VARS', d) for var in statusvars: - value = d.getVar(var, True) + value = d.getVar(var) if value is not None: yield '%-17s = "%s"' % (var, value) @@ -200,7 +200,7 @@ def buildcfg_neededvars(d): needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d) pesteruser = [] for v in needed_vars: - val = d.getVar(v, True) + val = d.getVar(v) if not val or val == 'INVALID': pesteruser.append(v) @@ -233,7 +233,7 @@ python base_eventhandler() { if flines: statuslines.extend(flines) - statusheader = e.data.getVar('BUILDCFG_HEADER', True) + statusheader = e.data.getVar('BUILDCFG_HEADER') if statusheader: 
bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) @@ -241,7 +241,7 @@ python base_eventhandler() { # target ones and we'd see dulpicate key names overwriting each other # for various PREFERRED_PROVIDERS if isinstance(e, bb.event.RecipePreFinalise): - if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True): + if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"): e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils") e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial") e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc") @@ -267,14 +267,14 @@ python base_eventhandler() { # sysroot since they're now "unreachable". This makes switching virtual/kernel work in # particular. # - pn = d.getVar('PN', True) + pn = d.getVar('PN') source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) if not source_mirror_fetch: - provs = (d.getVar("PROVIDES", True) or "").split() - multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() + provs = (d.getVar("PROVIDES") or "").split() + multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split() for p in provs: if p.startswith("virtual/") and p not in multiwhitelist: - profprov = d.getVar("PREFERRED_PROVIDER_" + p, True) + profprov = d.getVar("PREFERRED_PROVIDER_" + p) if profprov and pn != profprov: raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn)) } @@ -336,9 +336,9 @@ def set_packagetriplet(d): tos = [] tvs = [] - archs.append(d.getVar("PACKAGE_ARCHS", True).split()) - tos.append(d.getVar("TARGET_OS", True)) - tvs.append(d.getVar("TARGET_VENDOR", True)) + archs.append(d.getVar("PACKAGE_ARCHS").split()) + tos.append(d.getVar("TARGET_OS")) + tvs.append(d.getVar("TARGET_VENDOR")) def settriplet(d, varname, archs, tos, tvs): triplets = [] @@ -350,16 +350,16 @@ def set_packagetriplet(d): settriplet(d, "PKGTRIPLETS", archs, tos, tvs) - variants = d.getVar("MULTILIB_VARIANTS", True) 
or "" + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): localdata = bb.data.createCopy(d) overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item localdata.setVar("OVERRIDES", overrides) bb.data.update_data(localdata) - archs.append(localdata.getVar("PACKAGE_ARCHS", True).split()) - tos.append(localdata.getVar("TARGET_OS", True)) - tvs.append(localdata.getVar("TARGET_VENDOR", True)) + archs.append(localdata.getVar("PACKAGE_ARCHS").split()) + tos.append(localdata.getVar("TARGET_OS")) + tvs.append(localdata.getVar("TARGET_VENDOR")) settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs) @@ -374,10 +374,10 @@ python () { # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends" pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} if pkgconfigflags: - pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split() - pn = d.getVar("PN", True) + pkgconfig = (d.getVar('PACKAGECONFIG') or "").split() + pn = d.getVar("PN") - mlprefix = d.getVar("MLPREFIX", True) + mlprefix = d.getVar("MLPREFIX") def expandFilter(appends, extension, prefix): appends = bb.utils.explode_deps(d.expand(" ".join(appends))) @@ -419,7 +419,7 @@ python () { num = len(items) if num > 4: bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!" 
- % (d.getVar('PN', True), flag)) + % (d.getVar('PN'), flag)) if flag in pkgconfig: if num >= 3 and items[2]: @@ -434,8 +434,8 @@ python () { appendVar('RDEPENDS_${PN}', extrardeps) appendVar('PACKAGECONFIG_CONFARGS', extraconf) - pn = d.getVar('PN', True) - license = d.getVar('LICENSE', True) + pn = d.getVar('PN') + license = d.getVar('LICENSE') if license == "INVALID": bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) @@ -465,26 +465,26 @@ python () { d.setVarFlag('do_devshell', 'fakeroot', '1') d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') - need_machine = d.getVar('COMPATIBLE_MACHINE', True) + need_machine = d.getVar('COMPATIBLE_MACHINE') if need_machine: import re - compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":") + compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":") for m in compat_machines: if re.match(need_machine, m): break else: - raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True)) + raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE')) source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) if not source_mirror_fetch: - need_host = d.getVar('COMPATIBLE_HOST', True) + need_host = d.getVar('COMPATIBLE_HOST') if need_host: import re - this_host = d.getVar('HOST_SYS', True) + this_host = d.getVar('HOST_SYS') if not re.match(need_host, this_host): raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) - bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split() + bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() check_license = False if pn.startswith("nativesdk-") else True for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", @@ -503,21 +503,21 @@ python () { for lic in bad_licenses: spdx_license = return_spdx(d, lic) for w in ["LGPLv2_WHITELIST_", 
"WHITELIST_"]: - whitelist.extend((d.getVar(w + lic, True) or "").split()) + whitelist.extend((d.getVar(w + lic) or "").split()) if spdx_license: - whitelist.extend((d.getVar(w + spdx_license, True) or "").split()) + whitelist.extend((d.getVar(w + spdx_license) or "").split()) ''' We need to track what we are whitelisting and why. If pn is incompatible we need to be able to note that the image that is created may infact contain incompatible licenses despite INCOMPATIBLE_LICENSE being set. ''' - incompatwl.extend((d.getVar(w + lic, True) or "").split()) + incompatwl.extend((d.getVar(w + lic) or "").split()) if spdx_license: - incompatwl.extend((d.getVar(w + spdx_license, True) or "").split()) + incompatwl.extend((d.getVar(w + spdx_license) or "").split()) if not pn in whitelist: - pkgs = d.getVar('PACKAGES', True).split() + pkgs = d.getVar('PACKAGES').split() skipped_pkgs = [] unskipped_pkgs = [] for pkg in pkgs: @@ -529,7 +529,7 @@ python () { if unskipped_pkgs: for pkg in skipped_pkgs: bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license) - mlprefix = d.getVar('MLPREFIX', True) + mlprefix = d.getVar('MLPREFIX') d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1) for pkg in unskipped_pkgs: bb.debug(1, "INCLUDING the package " + pkg) @@ -545,8 +545,8 @@ python () { # matching of license expressions - just check that all license strings # in LICENSE_ are found in LICENSE. 
license_set = oe.license.list_licenses(license) - for pkg in d.getVar('PACKAGES', True).split(): - pkg_license = d.getVar('LICENSE_' + pkg, True) + for pkg in d.getVar('PACKAGES').split(): + pkg_license = d.getVar('LICENSE_' + pkg) if pkg_license: unlisted = oe.license.list_licenses(pkg_license) - license_set if unlisted: @@ -554,7 +554,7 @@ python () { "listed in LICENSE" % (pkg, ' '.join(unlisted))) needsrcrev = False - srcuri = d.getVar('SRC_URI', True) + srcuri = d.getVar('SRC_URI') for uri in srcuri.split(): (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3] @@ -614,8 +614,8 @@ python () { set_packagetriplet(d) # 'multimachine' handling - mach_arch = d.getVar('MACHINE_ARCH', True) - pkg_arch = d.getVar('PACKAGE_ARCH', True) + mach_arch = d.getVar('MACHINE_ARCH') + pkg_arch = d.getVar('PACKAGE_ARCH') if (pkg_arch == mach_arch): # Already machine specific - nothing further to do @@ -625,11 +625,11 @@ python () { # We always try to scan SRC_URI for urls with machine overrides # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 # - override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True) + override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH') if override != '0': paths = [] - fpaths = (d.getVar('FILESPATH', True) or '').split(':') - machine = d.getVar('MACHINE', True) + fpaths = (d.getVar('FILESPATH') or '').split(':') + machine = d.getVar('MACHINE') for p in fpaths: if os.path.basename(p) == machine and os.path.isdir(p): paths.append(p) @@ -646,16 +646,16 @@ python () { d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") return - packages = d.getVar('PACKAGES', True).split() + packages = d.getVar('PACKAGES').split() for pkg in packages: - pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True) + pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg) # We could look for != PACKAGE_ARCH here but how to choose # if multiple differences are present? # Look through PACKAGE_ARCHS for the priority order? 
if pkgarch and pkgarch == mach_arch: d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") - bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True)) + bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN")) } addtask cleansstate after do_clean @@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate do_cleansstate[nostamp] = "1" python do_cleanall() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass index 8591308aa7..5372294142 100644 --- a/meta/classes/binconfig.bbclass +++ b/meta/classes/binconfig.bbclass @@ -22,7 +22,7 @@ def get_binconfig_mangle(d): s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'" s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'" if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False): - s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE", True) + s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE") return s diff --git a/meta/classes/blacklist.bbclass b/meta/classes/blacklist.bbclass index a0141a82c0..c6f422398c 100644 --- a/meta/classes/blacklist.bbclass +++ b/meta/classes/blacklist.bbclass @@ -16,7 +16,7 @@ addhandler blacklist_multilib_eventhandler blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed" python blacklist_multilib_eventhandler() { - multilibs = e.data.getVar('MULTILIBS', True) + multilibs = e.data.getVar('MULTILIBS') if not multilibs: return @@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() { } python () { - blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), True) + blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True) if blacklist: raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % 
(blacklist)) diff --git a/meta/classes/bugzilla.bbclass b/meta/classes/bugzilla.bbclass index 3fc8956428..6d9a8211f0 100644 --- a/meta/classes/bugzilla.bbclass +++ b/meta/classes/bugzilla.bbclass @@ -110,12 +110,12 @@ python bugzilla_eventhandler() { return if name == "TaskFailed": - xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) - user = data.getVar("BUGZILLA_USER", True) - passw = data.getVar("BUGZILLA_PASS", True) - product = data.getVar("BUGZILLA_PRODUCT", True) - compon = data.getVar("BUGZILLA_COMPONENT", True) - version = data.getVar("BUGZILLA_VERSION", True) + xmlrpc = data.getVar("BUGZILLA_XMLRPC") + user = data.getVar("BUGZILLA_USER") + passw = data.getVar("BUGZILLA_PASS") + product = data.getVar("BUGZILLA_PRODUCT") + compon = data.getVar("BUGZILLA_COMPONENT") + version = data.getVar("BUGZILLA_VERSION") proxy = data.getVar('http_proxy', True ) if (proxy): @@ -133,14 +133,14 @@ python bugzilla_eventhandler() { 'component': compon} # evil hack to figure out what is going on - debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a") + debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a") file = None - bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True), - "pv" : data.getVar("PV", True), + bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"), + "pv" : data.getVar("PV"), } - log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task)) - text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) ) + log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task)) + text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) ) if len(log_file) != 0: print >> debug_file, "Adding log file %s" % log_file[0] file = open(log_file[0], 'r') @@ -168,7 +168,7 @@ python 
bugzilla_eventhandler() { if bug_number and log: print >> debug_file, "The bug is known as '%s'" % bug_number - desc = "Build log for machine %s" % (data.getVar('MACHINE', True)) + desc = "Build log for machine %s" % (data.getVar('MACHINE')) if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc): print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number else: diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass index d82e9bb55c..73cd88669a 100644 --- a/meta/classes/buildhistory.bbclass +++ b/meta/classes/buildhistory.bbclass @@ -64,18 +64,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded" # Write out metadata about this package for comparison when writing future packages # python buildhistory_emit_pkghistory() { - if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: + if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']: return 0 - if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): + if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split(): return 0 import re import json import errno - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) - oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') + oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE') class RecipeInfo: def __init__(self, name): @@ -182,12 +182,12 @@ python buildhistory_emit_pkghistory() { items.sort() return ' '.join(items) - pn = d.getVar('PN', True) - pe = d.getVar('PE', True) or "0" - pv = d.getVar('PV', True) - pr = d.getVar('PR', True) + pn = d.getVar('PN') + pe = d.getVar('PE') or "0" + pv = d.getVar('PV') + pr = d.getVar('PR') - pkgdata_dir = d.getVar('PKGDATA_DIR', True) + pkgdata_dir = d.getVar('PKGDATA_DIR') packages = "" try: with open(os.path.join(pkgdata_dir, pn)) as f: @@ -203,7 +203,7 @@ python buildhistory_emit_pkghistory() { raise 
packagelist = packages.split() - preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split() + preserve = d.getVar('BUILDHISTORY_PRESERVE').split() if not os.path.exists(pkghistdir): bb.utils.mkdirhier(pkghistdir) else: @@ -223,11 +223,11 @@ python buildhistory_emit_pkghistory() { rcpinfo.pe = pe rcpinfo.pv = pv rcpinfo.pr = pr - rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or "")) + rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or "")) rcpinfo.packages = packages write_recipehistory(rcpinfo, d) - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') for pkg in packagelist: pkgdata = {} with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: @@ -293,7 +293,7 @@ python buildhistory_emit_pkghistory() { def write_recipehistory(rcpinfo, d): bb.debug(2, "Writing recipe history") - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') infofile = os.path.join(pkghistdir, "latest") with open(infofile, "w") as f: @@ -308,7 +308,7 @@ def write_recipehistory(rcpinfo, d): def write_pkghistory(pkginfo, d): bb.debug(2, "Writing package history for package %s" % pkginfo.name) - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') pkgpath = os.path.join(pkghistdir, pkginfo.name) if not os.path.exists(pkgpath): @@ -369,7 +369,7 @@ def buildhistory_list_installed(d, rootfs_type="image"): pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target") for output_type, output_file in process_list: - output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file) + output_file_full = os.path.join(d.getVar('WORKDIR'), output_file) with open(output_file_full, 'w') as output: output.write(format_pkg_list(pkgs, output_type)) @@ -550,7 +550,7 @@ END python buildhistory_get_extra_sdkinfo() { import operator import math - if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext': + if 
d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext': tasksizes = {} filesizes = {} for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')): @@ -591,7 +591,7 @@ SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_e SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " def buildhistory_get_build_id(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" localdata = bb.data.createCopy(d) bb.data.update_data(localdata) @@ -605,12 +605,12 @@ def buildhistory_get_build_id(d): if flines: statuslines.extend(flines) - statusheader = d.getVar('BUILDCFG_HEADER', True) + statusheader = d.getVar('BUILDCFG_HEADER') return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) def buildhistory_get_metadata_revs(d): # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want - layers = (d.getVar("BBLAYERS", True) or "").split() + layers = (d.getVar("BBLAYERS") or "").split() medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \ base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_revision(i, None)) \ @@ -622,7 +622,7 @@ def outputvars(vars, listvars, d): listvars = listvars.split() ret = "" for var in vars: - value = d.getVar(var, True) or "" + value = d.getVar(var) or "" if var in listvars: # Squash out spaces value = oe.utils.squashspaces(value) @@ -630,17 +630,17 @@ def outputvars(vars, listvars, d): return ret.rstrip('\n') def buildhistory_get_imagevars(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND" listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL 
BAD_RECOMMENDATIONS PACKAGE_EXCLUDE" return outputvars(imagevars, listvars, d) def buildhistory_get_sdkvars(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE" - if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext': + if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext': # Extensible SDK uses some additional variables sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN" listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST" @@ -735,16 +735,16 @@ END } python buildhistory_eventhandler() { - if e.data.getVar('BUILDHISTORY_FEATURES', True).strip(): - reset = e.data.getVar("BUILDHISTORY_RESET", True) - olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True) + if e.data.getVar('BUILDHISTORY_FEATURES').strip(): + reset = e.data.getVar("BUILDHISTORY_RESET") + olddir = e.data.getVar("BUILDHISTORY_OLD_DIR") if isinstance(e, bb.event.BuildStarted): if reset: import shutil # Clean up after potentially interrupted build. 
if os.path.isdir(olddir): shutil.rmtree(olddir) - rootdir = e.data.getVar("BUILDHISTORY_DIR", True) + rootdir = e.data.getVar("BUILDHISTORY_DIR") entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ] bb.utils.mkdirhier(olddir) for entry in entries: @@ -754,7 +754,7 @@ python buildhistory_eventhandler() { if reset: import shutil shutil.rmtree(olddir) - if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1": + if e.data.getVar("BUILDHISTORY_COMMIT") == "1": bb.note("Writing buildhistory") localdata = bb.data.createCopy(e.data) localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures)) @@ -774,7 +774,7 @@ def _get_srcrev_values(d): """ scms = [] - fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d) + fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d) urldata = fetcher.ud for u in urldata: if urldata[u].method.supports_srcrev(): @@ -806,7 +806,7 @@ def _get_srcrev_values(d): do_fetch[postfuncs] += "write_srcrev" do_fetch[vardepsexclude] += "write_srcrev" python write_srcrev() { - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') srcrevfile = os.path.join(pkghistdir, 'latest_srcrev') srcrevs, tag_srcrevs = _get_srcrev_values(d) @@ -838,7 +838,7 @@ python write_srcrev() { for name, srcrev in tag_srcrevs.items(): f.write('# tag_%s = "%s"\n' % (name, srcrev)) if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev: - pkg = d.getVar('PN', True) + pkg = d.getVar('PN') bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev)) else: diff --git a/meta/classes/buildstats-summary.bbclass b/meta/classes/buildstats-summary.bbclass index b86abcc3f1..f9b241b6c5 100644 --- a/meta/classes/buildstats-summary.bbclass +++ b/meta/classes/buildstats-summary.bbclass @@ -7,7 +7,7 @@ python buildstats_summary () { if not os.path.exists(bsdir): return - sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split() 
+ sstatetasks = (e.data.getVar('SSTATETASKS') or '').split() built = collections.defaultdict(lambda: [set(), set()]) for pf in os.listdir(bsdir): taskdir = os.path.join(bsdir, pf) diff --git a/meta/classes/buildstats.bbclass b/meta/classes/buildstats.bbclass index c6b77e6a2a..8703cb2b33 100644 --- a/meta/classes/buildstats.bbclass +++ b/meta/classes/buildstats.bbclass @@ -75,8 +75,8 @@ def get_buildtimedata(var, d): return timediff, cpuperc def write_task_data(status, logfile, e, d): - bn = d.getVar('BUILDNAME', True) - bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) with open(os.path.join(logfile), "a") as f: elapsedtime = get_timedata("__timedata_task", d, e.time) if elapsedtime: @@ -106,9 +106,9 @@ python run_buildstats () { import bb.event import time, subprocess, platform - bn = d.getVar('BUILDNAME', True) - bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) - taskdir = os.path.join(bsdir, d.getVar('PF', True)) + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) + taskdir = os.path.join(bsdir, d.getVar('PF')) if isinstance(e, bb.event.BuildStarted): ######################################################################## @@ -162,7 +162,7 @@ python run_buildstats () { if e.task == "do_rootfs": bs = os.path.join(bsdir, "build_stats") with open(bs, "a") as f: - rootfs = d.getVar('IMAGE_ROOTFS', True) + rootfs = d.getVar('IMAGE_ROOTFS') if os.path.isdir(rootfs): try: rootfs_size = subprocess.check_output(["du", "-sh", rootfs], @@ -197,7 +197,7 @@ python runqueue_stats () { # are available that we need to find the output directory. # The persistent SystemStats is stored in the datastore and # closed when the build is done. 
- system_stats = d.getVar('_buildstats_system_stats', True) + system_stats = d.getVar('_buildstats_system_stats') if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)): system_stats = buildstats.SystemStats(d) d.setVar('_buildstats_system_stats', system_stats) diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass index 2e9837cf07..93fcacaf1a 100644 --- a/meta/classes/ccache.bbclass +++ b/meta/classes/ccache.bbclass @@ -1,4 +1,4 @@ -CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}" +CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}" export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" CCACHE_DISABLE[unexport] = "1" diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass index f183b4aeeb..ad3c3975a5 100644 --- a/meta/classes/chrpath.bbclass +++ b/meta/classes/chrpath.bbclass @@ -44,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) out, err = p.communicate() if p.returncode != 0: - bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err)) + bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err)) def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): import subprocess as sub @@ -72,7 +72,7 @@ def process_dir (rootdir, directory, d): cmd = d.expand('${CHRPATH_BIN}') tmpdir = os.path.normpath(d.getVar('TMPDIR', False)) baseprefix = os.path.normpath(d.expand('${base_prefix}')) - hostos = d.getVar("HOST_OS", True) + hostos = d.getVar("HOST_OS") #bb.debug("Checking %s for binaries to process" % directory) if not os.path.exists(directory): diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass index fad0baa519..9e74599296 100644 --- a/meta/classes/cmake.bbclass +++ b/meta/classes/cmake.bbclass 
@@ -46,7 +46,7 @@ cmake_do_generate_toolchain_file() { # CMake system name must be something like "Linux". # This is important for cross-compiling. set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` ) -set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH', True))} ) +set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH'))} ) set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} ) set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} ) set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} ) @@ -112,15 +112,15 @@ cmake_do_configure() { ${OECMAKE_SITEFILE} \ ${OECMAKE_SOURCEPATH} \ -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \ - -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir', True), d.getVar('prefix', True))} \ - -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir', True), d.getVar('prefix', True))} \ - -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir', True), d.getVar('prefix', True))} \ + -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))} \ + -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix'))} \ + -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix'))} \ -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \ - -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir', True), d. getVar('prefix', True))} \ + -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \ - -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir', True), d.getVar('prefix', True))} \ - -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir', True), d.getVar('prefix', True))} \ - -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir', True), d.getVar('prefix', True))} \ + -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix'))} \ + -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix'))} \ + -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \ -DCMAKE_INSTALL_SO_NO_EXE=0 \ -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \ -DCMAKE_VERBOSE_MAKEFILE=1 \ diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass index 5834806269..187d407d98 100644 --- a/meta/classes/cml1.bbclass +++ b/meta/classes/cml1.bbclass @@ -26,7 +26,7 @@ python do_menuconfig() { except OSError: mtime = 0 - oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND', True), + oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... 
'; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'), d.getVar('PN', True ) + ' Configuration', d) # FIXME this check can be removed when the minimum bitbake version has been bumped @@ -49,7 +49,7 @@ python do_diffconfig() { import shutil import subprocess - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') fragment = workdir + '/fragment.cfg' configorig = '.config.orig' config = '.config' diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass index 8073c173e5..069db1997b 100644 --- a/meta/classes/compress_doc.bbclass +++ b/meta/classes/compress_doc.bbclass @@ -31,25 +31,25 @@ DOC_DECOMPRESS_CMD[xz] ?= "unxz -v" PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives" python package_do_compress_doc() { - compress_mode = d.getVar('DOC_COMPRESS', True) - compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split() + compress_mode = d.getVar('DOC_COMPRESS') + compress_list = (d.getVar('DOC_COMPRESS_LIST') or '').split() if compress_mode not in compress_list: bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list)) - dvar = d.getVar('PKGD', True) + dvar = d.getVar('PKGD') compress_cmds = {} decompress_cmds = {} for mode in compress_list: compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True) decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True) - mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True)) + mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir")) if os.path.exists(mandir): # Decompress doc files which format is not compress_mode decompress_doc(mandir, compress_mode, decompress_cmds) compress_doc(mandir, compress_mode, compress_cmds) - infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True)) + infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir")) if os.path.exists(infodir): # Decompress doc files which format is not compress_mode decompress_doc(infodir, compress_mode, 
decompress_cmds) @@ -218,18 +218,18 @@ python compress_doc_updatealternatives () { if not bb.data.inherits_class('update-alternatives', d): return - mandir = d.getVar("mandir", True) - infodir = d.getVar("infodir", True) - compress_mode = d.getVar('DOC_COMPRESS', True) - for pkg in (d.getVar('PACKAGES', True) or "").split(): - old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split() + mandir = d.getVar("mandir") + infodir = d.getVar("infodir") + compress_mode = d.getVar('DOC_COMPRESS') + for pkg in (d.getVar('PACKAGES') or "").split(): + old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split() new_names = [] for old_name in old_names: old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True) old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \ d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \ - d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \ - d.getVar('ALTERNATIVE_TARGET', True) or \ + d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \ + d.getVar('ALTERNATIVE_TARGET') or \ old_link # Sometimes old_target is specified as relative to the link name. 
old_target = os.path.join(os.path.dirname(old_link), old_target) @@ -247,7 +247,7 @@ python compress_doc_updatealternatives () { elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True): d.delVarFlag('ALTERNATIVE_TARGET', old_name) d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target) - elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True): + elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg): d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target) elif d.getVar('ALTERNATIVE_TARGET', old_name, True): d.setVar('ALTERNATIVE_TARGET', new_target) diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass index 907c1836b3..eabf12ce7a 100644 --- a/meta/classes/copyleft_compliance.bbclass +++ b/meta/classes/copyleft_compliance.bbclass @@ -13,7 +13,7 @@ python do_prepare_copyleft_sources () { import os.path import shutil - p = d.getVar('P', True) + p = d.getVar('P') included, reason = copyleft_should_include(d) if not included: bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason)) @@ -21,13 +21,13 @@ python do_prepare_copyleft_sources () { else: bb.debug(1, 'copyleft: %s is included: %s' % (p, reason)) - sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True) - dl_dir = d.getVar('DL_DIR', True) - src_uri = d.getVar('SRC_URI', True).split() + sources_dir = d.getVar('COPYLEFT_SOURCES_DIR') + dl_dir = d.getVar('DL_DIR') + src_uri = d.getVar('SRC_URI').split() fetch = bb.fetch2.Fetch(src_uri, d) ud = fetch.ud - pf = d.getVar('PF', True) + pf = d.getVar('PF') dest = os.path.join(sources_dir, pf) shutil.rmtree(dest, ignore_errors=True) bb.utils.mkdirhier(dest) diff --git a/meta/classes/copyleft_filter.bbclass b/meta/classes/copyleft_filter.bbclass index 46be7f7d2f..426956f08f 100644 --- a/meta/classes/copyleft_filter.bbclass +++ b/meta/classes/copyleft_filter.bbclass @@ -49,7 +49,7 @@ def copyleft_should_include(d): included, motive = False, 'recipe did not match anything' - recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True) + recipe_type = 
d.getVar('COPYLEFT_RECIPE_TYPE') if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d): include, motive = False, 'recipe type "%s" is excluded' % recipe_type @@ -57,9 +57,9 @@ def copyleft_should_include(d): exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d) try: - is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude) + is_included, reason = oe.license.is_included(d.getVar('LICENSE'), include, exclude) except oe.license.LicenseError as exc: - bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) + bb.fatal('%s: %s' % (d.getVar('PF'), exc)) else: if is_included: if reason: @@ -69,10 +69,10 @@ def copyleft_should_include(d): else: included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason) - if any(fnmatch(d.getVar('PN', True), name) \ + if any(fnmatch(d.getVar('PN'), name) \ for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)): included, motive = True, 'recipe included by name' - if any(fnmatch(d.getVar('PN', True), name) \ + if any(fnmatch(d.getVar('PN'), name) \