Diffstat (limited to 'meta/classes')
-rw-r--r--   meta/classes/base.bbclass             |   4
-rw-r--r--   meta/classes/cross-canadian.bbclass   |   2
-rw-r--r--   meta/classes/distrodata.bbclass       |  26
-rw-r--r--   meta/classes/gconf.bbclass            |   2
-rw-r--r--   meta/classes/image-swab.bbclass       |   2
-rw-r--r--   meta/classes/kernel.bbclass           |   6
-rw-r--r--   meta/classes/libc-package.bbclass     |  76
-rw-r--r--   meta/classes/native.bbclass           |   4
-rw-r--r--   meta/classes/nativesdk.bbclass        |   6
-rw-r--r--   meta/classes/package.bbclass          |  52
-rw-r--r--   meta/classes/package_deb.bbclass      |   4
-rw-r--r--   meta/classes/package_ipk.bbclass      |   4
-rw-r--r--   meta/classes/package_rpm.bbclass      |   6
-rw-r--r--   meta/classes/package_tar.bbclass      |   4
-rw-r--r--   meta/classes/src_distribute.bbclass   |   6
-rw-r--r--   meta/classes/sstate.bbclass           |  14
-rw-r--r--   meta/classes/task.bbclass             |   2
-rw-r--r--   meta/classes/update-rc.d.bbclass      |   2
18 files changed, 111 insertions(+), 111 deletions(-)
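The changes below are mechanical: calls through the module-level bb.data helpers, which take the datastore as a trailing argument, are replaced by the equivalent methods on the datastore object itself, and numeric expansion flags are replaced by booleans. A minimal sketch of the pattern, using an illustrative variable name rather than lines taken from the patch:

    # Old style: bb.data module functions, datastore passed as the last argument
    bb.data.setVar('PACKAGES', ' '.join(packages), d)
    bb.data.setVarFlag('do_install', 'fakeroot', 1, d)
    val = bb.data.getVar('PACKAGES', d, True)

    # New style: the same operations as methods on the datastore object
    d.setVar('PACKAGES', ' '.join(packages))
    d.setVarFlag('do_install', 'fakeroot', 1)
    val = d.getVar('PACKAGES', True)

Where a copy of the datastore is in play (for example localdata = bb.data.createCopy(d)), the method is called on that copy, so bb.data.setVar('OVERRIDES', ..., localdata) becomes localdata.setVar('OVERRIDES', ...).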
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 72196d60a7..a95dfd9a08 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -360,12 +360,12 @@ python () {
         d.setVarFlag('do_compile', 'umask', 022)
         deps = (d.getVarFlag('do_install', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d)
+        d.setVarFlag('do_install', 'depends', " ".join(deps))
         d.setVarFlag('do_install', 'fakeroot', 1)
         d.setVarFlag('do_install', 'umask', 022)
         deps = (d.getVarFlag('do_package', 'depends') or "").split()
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d)
+        d.setVarFlag('do_package', 'depends', " ".join(deps))
         d.setVarFlag('do_package', 'fakeroot', 1)
         d.setVarFlag('do_package', 'umask', 022)
         d.setVarFlag('do_package_setscene', 'fakeroot', 1)
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 6f5bcd0ad4..499a1fb171 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -20,7 +20,7 @@ python () {
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-nativesdk')
-    bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d)
+    d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
 }
 
 MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}"
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index 687247a649..0c0b549fef 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -31,31 +31,31 @@ python do_distrodata_np() {
         if pn.find("-native") != -1:
             pnstripped = pn.split("-native")
             bb.note("Native Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-nativesdk") != -1:
             pnstripped = pn.split("-nativesdk")
             bb.note("Native Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-cross") != -1:
             pnstripped = pn.split("-cross")
             bb.note("cross Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-crosssdk") != -1:
             pnstripped = pn.split("-crosssdk")
             bb.note("cross Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-initial") != -1:
             pnstripped = pn.split("-initial")
             bb.note("initial Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         """generate package information from .bb file"""
@@ -130,19 +130,19 @@ python do_distrodata() {
         if pn.find("-native") != -1:
             pnstripped = pn.split("-native")
             bb.note("Native Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-cross") != -1:
             pnstripped = pn.split("-cross")
             bb.note("cross Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pn.find("-initial") != -1:
             pnstripped = pn.split("-initial")
             bb.note("initial Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         """generate package information from .bb file"""
@@ -308,8 +308,8 @@ python do_checkpkg() {
         which is designed for check purpose but we override check command for our own purpose
         """
         ld = bb.data.createCopy(d)
-        bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
-                % tmpf.name, d)
+        d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
+                % tmpf.name)
         bb.data.update_data(ld)
 
         try:
@@ -452,19 +452,19 @@ python do_checkpkg() {
         if pname.find("-native") != -1:
             pnstripped = pname.split("-native")
             bb.note("Native Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pname.find("-cross") != -1:
             pnstripped = pname.split("-cross")
             bb.note("cross Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         if pname.find("-initial") != -1:
             pnstripped = pname.split("-initial")
             bb.note("initial Split: %s" % pnstripped)
-            bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata)
+            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
             bb.data.update_data(localdata)
 
         pdesc = localdata.getVar('DESCRIPTION', True)
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index f164547782..c1dbbe30dc 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
@@ -45,7 +45,7 @@ python populate_packages_append () {
                 schemas.append(f)
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
-            bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
+            d.setVar('SCHEMA_FILES', " ".join(schemas))
             postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
             if not postinst:
                 postinst = '#!/bin/sh\n'
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass
index 23183b3ec3..5aace0f5c8 100644
--- a/meta/classes/image-swab.bbclass
+++ b/meta/classes/image-swab.bbclass
@@ -53,7 +53,7 @@ python() {
     if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
         deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
         deps.append('strace-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_setscene', 'depends', " ".join(deps))
         logdir = bb.data.expand("${TRACE_LOGDIR}", d)
         bb.utils.mkdirhier(logdir)
     else:
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index 12e0b83b94..d0cc279a66 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -437,7 +437,7 @@ python populate_packages_prepend () {
         else:
             rdepends = []
         rdepends.extend(get_dependencies(file, pattern, format))
-        bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d)
+        d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))
 
     module_deps = parse_depmod()
     module_regex = '^(.*)\.k?o$'
@@ -464,10 +464,10 @@ python populate_packages_prepend () {
         for pkg in packages[1:]:
             if not pkg in blacklist and not pkg in metapkg_rdepends:
                 metapkg_rdepends.append(pkg)
-        bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
+        d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
         d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
         packages.append(metapkg)
-        bb.data.setVar('PACKAGES', ' '.join(packages), d)
+        d.setVar('PACKAGES', ' '.join(packages))
 }
 
 # Support checking the kernel size since some kernels need to reside in partitions
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index fc1a5794d7..bbe06fd54b 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -10,7 +10,7 @@ GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
 
 python __anonymous () {
-    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1)
+    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)
 
     pn = d.getVar("PN", True)
     if pn.endswith("-initial"):
@@ -19,15 +19,15 @@ python __anonymous () {
     if enabled and int(enabled):
         import re
 
-        target_arch = d.getVar("TARGET_ARCH", 1)
-        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or ""
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or ""
+        target_arch = d.getVar("TARGET_ARCH", True)
+        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""
 
         for regexp in binary_arches.split(" "):
             r = re.compile(regexp)
 
             if r.match(target_arch):
-                depends = d.getVar("DEPENDS", 1)
+                depends = d.getVar("DEPENDS", True)
                 if use_cross_localedef == "1" :
                     depends = "%s cross-localedef-native" % depends
                 else:
@@ -109,19 +109,19 @@ inherit qemu
 
 python package_do_split_gconvs () {
     import os, re
-    if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'):
+    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
         bb.note("package requested not splitting gconvs")
         return
 
-    if not d.getVar('PACKAGES', 1):
+    if not d.getVar('PACKAGES', True):
         return
 
-    bpn = d.getVar('BPN', 1)
-    libdir = d.getVar('libdir', 1)
+    bpn = d.getVar('BPN', True)
+    libdir = d.getVar('libdir', True)
     if not libdir:
         bb.error("libdir not defined")
         return
-    datadir = d.getVar('datadir', 1)
+    datadir = d.getVar('datadir', True)
     if not datadir:
         bb.error("datadir not defined")
         return
@@ -144,9 +144,9 @@ python package_do_split_gconvs () {
                     deps.append(dp)
             f.close()
             if deps != []:
-                bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
             if bpn != 'glibc':
-                bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
     do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
         description='gconv module for character set %s', hook=calc_gconv_deps, \
@@ -165,9 +165,9 @@ python package_do_split_gconvs () {
                     deps.append(dp)
             f.close()
             if deps != []:
-                bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
             if bpn != 'glibc':
-                bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
     do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
         description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
@@ -185,23 +185,23 @@ python package_do_split_gconvs () {
                     deps.append(dp)
             f.close()
             if deps != []:
-                bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
+                d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
             if bpn != 'glibc':
-                bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
+                d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
     do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
         description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
-    bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d)
+    d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
 
-    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1)
+    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
 
     dot_re = re.compile("(.*)\.(.*)")
 
     #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales
     if use_bin != "precompiled":
-        supported = d.getVar('GLIBC_GENERATE_LOCALES', 1)
+        supported = d.getVar('GLIBC_GENERATE_LOCALES', True)
         if not supported or supported == "all":
-            f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r")
+            f = open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED"), "r")
             supported = f.readlines()
             f.close()
         else:
@@ -218,7 +218,7 @@ python package_do_split_gconvs () {
                 supported.append(dbase[0] + d2)
 
     # Collate the locales by base and encoding
-    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0)
+    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
     encodings = {}
     for l in supported:
         l = l[:-1]
@@ -233,12 +233,12 @@ python package_do_split_gconvs () {
             encodings[locale].append(charset)
 
     def output_locale_source(name, pkgname, locale, encoding):
-        bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
-        (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d)
-        bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \
-        % (locale, encoding, locale), d)
-        bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \
-        (locale, encoding, locale), d)
+        setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
+        (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)))
+        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
+        % (locale, encoding, locale))
+        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
+        (locale, encoding, locale))
 
     def output_locale_binary_rdepends(name, pkgname, locale, encoding):
         m = re.match("(.*)\.(.*)", name)
@@ -246,23 +246,23 @@ python package_do_split_gconvs () {
             libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
         else:
             libc_name = name
-        bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
-            % (bpn, libc_name)), d)
+        d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
+            % (bpn, libc_name)))
         rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split()
         rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
-        bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
+        d.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides))
 
     commands = {}
 
     def output_locale_binary(name, pkgname, locale, encoding):
-        treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree")
-        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1))
-        path = d.getVar("PATH", 1)
+        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
+        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
+        path = d.getVar("PATH", True)
         i18npath = base_path_join(treedir, datadir, "i18n")
         gconvpath = base_path_join(treedir, "iconvdata")
         outputpath = base_path_join(treedir, libdir, "locale")
 
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0"
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
         if use_cross_localedef == "1":
             target_arch = d.getVar('TARGET_ARCH', True)
             locale_arch_options = { \
@@ -292,9 +292,9 @@ python package_do_split_gconvs () {
                 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                 % (treedir, datadir, locale, encoding, name)
 
-            qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1)
+            qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
             if not qemu_options:
-                qemu_options = d.getVar('QEMU_OPTIONS', 1)
+                qemu_options = d.getVar('QEMU_OPTIONS', True)
 
             cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -307,7 +307,7 @@ python package_do_split_gconvs () {
     def output_locale(name, locale, encoding):
         pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
         d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
-        bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d)
+        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
         rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
         m = re.match("(.*)_(.*)", name)
         if m:
@@ -347,7 +347,7 @@ python package_do_split_gconvs () {
         bb.note(" " + " ".join(non_utf8))
 
     if use_bin == "compile":
-        makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile")
+        makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
         m = open(makefile, "w")
         m.write("all: %s\n\n" % " ".join(commands.keys()))
         for cmd in commands:
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 5f25bdd2c2..8f7cc1f2d3 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -121,7 +121,7 @@ python native_virtclass_handler () {
                 newdeps.append(dep + "-native")
             else:
                 newdeps.append(dep)
-        bb.data.setVar(varname, " ".join(newdeps), d)
+        d.setVar(varname, " ".join(newdeps))
 
     map_dependencies("DEPENDS", e.data)
     for pkg in (e.data.getVar("PACKAGES", True).split() + [""]):
@@ -139,7 +139,7 @@ python native_virtclass_handler () {
                 provides = provides.replace(prov, prov + "-native")
         e.data.setVar("PROVIDES", provides)
 
-    bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data)
+    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
 }
 
 addhandler native_virtclass_handler
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index ca24efaa7c..e6204c02ba 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -15,7 +15,7 @@ python () {
     sdkarchs = []
     for arch in archs:
         sdkarchs.append(arch + '-nativesdk')
-    bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d)
+    d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
 }
 
 STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}"
@@ -66,7 +66,7 @@ python nativesdk_virtclass_handler () {
     if not pn.endswith("-nativesdk"):
         return
 
-    bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data)
+    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
 }
 
 python () {
@@ -91,7 +91,7 @@ python () {
                 newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk")
             else:
                 newdeps.append(dep)
-        bb.data.setVar(varname, " ".join(newdeps), d)
+        d.setVar(varname, " ".join(newdeps))
 
     map_dependencies("DEPENDS", d)
     #for pkg in (d.getVar("PACKAGES", True).split() + [""]):
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 2a78a8f54b..256cdc15de 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -151,7 +151,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                         the_files.append(fp % m.group(1))
                 else:
                     the_files.append(aux_files_pattern_verbatim % m.group(1))
-            bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
+            d.setVar('FILES_' + pkg, " ".join(the_files))
             if extra_depends != '':
                 the_depends = d.getVar('RDEPENDS_' + pkg, True)
                 if the_depends:
@@ -165,11 +165,11 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             if postrm:
                 d.setVar('pkg_postrm_' + pkg, postrm)
         else:
-            bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
+            d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
         if callable(hook):
             hook(f, pkg, file_regex, output_pattern, m.group(1))
 
-    bb.data.setVar('PACKAGES', ' '.join(packages), d)
+    d.setVar('PACKAGES', ' '.join(packages))
 
 PACKAGE_DEPENDS += "file-native"
 
@@ -183,7 +183,7 @@ python () {
         deps = (d.getVarFlag('do_package', 'deptask') or "").split()
         # shlibs requires any DEPENDS to have already packaged for the *.list files
         deps.append("do_package")
-        bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
+        d.setVarFlag('do_package', 'deptask', " ".join(deps))
     elif not bb.data.inherits_class('image', d):
         d.setVar("PACKAGERDEPTASK", "")
 }
@@ -202,7 +202,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
     pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
     objcopy = d.getVar("OBJCOPY", True)
     debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
-    workdir = bb.data.expand("${WORKDIR}", d)
+    workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
     sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
 
@@ -245,7 +245,7 @@ def splitfile2(debugsrcdir, d):
     strip = d.getVar("STRIP", True)
     objcopy = d.getVar("OBJCOPY", True)
    debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
-    workdir = bb.data.expand("${WORKDIR}", d)
+    workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
     workbasedir = os.path.basename(workdir)
     sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
@@ -341,7 +341,7 @@ def runtime_mapping_rename (varname, d):
         else:
             new_depends.append(new_depend)
 
-    bb.data.setVar(varname, " ".join(new_depends) or None, d)
+    d.setVar(varname, " ".join(new_depends) or None)
 
     #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
@@ -399,15 +399,15 @@ python package_do_split_locales() {
         ln = legitimize_package_name(l)
         pkg = pn + '-locale-' + ln
         packages.append(pkg)
-        bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
-        bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d)
-        bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
-        bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d)
-        bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d)
+        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
+        d.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln))
+        d.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln))
+        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
+        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
         if locale_section:
             d.setVar('SECTION_' + pkg, locale_section)
 
-    bb.data.setVar('PACKAGES', ' '.join(packages), d)
+    d.setVar('PACKAGES', ' '.join(packages))
 
     # Disabled by RP 18/06/07
     # Wildcards aren't supported in debian
@@ -417,7 +417,7 @@ python package_do_split_locales() {
     # Probably breaks since virtual-locale- isn't provided anywhere
     #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split()
     #rdep.append('%s-locale*' % pn)
-    #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
+    #d.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep))
 }
 
 python perform_packagecopy () {
@@ -1018,7 +1018,7 @@ python populate_packages () {
                     break
             if found == False:
                 bb.note("%s contains dangling symlink to %s" % (pkg, l))
-        bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d)
+        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
 
 populate_packages[dirs] = "${D}"
@@ -1033,11 +1033,11 @@ python emit_pkgdata() {
             c = codecs.getencoder("string_escape")
             return c(str)[0]
 
-        val = bb.data.getVar('%s_%s' % (var, pkg), d, True)
+        val = d.getVar('%s_%s' % (var, pkg), True)
         if val:
             f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
             return
-        val = bb.data.getVar('%s' % (var), d, True)
+        val = d.getVar('%s' % (var), True)
         if val:
             f.write('%s: %s\n' % (var, encode(val)))
         return
@@ -1159,12 +1159,12 @@ python package_do_filedeps() {
         if len(provides) > 0:
             provides_files.append(file)
             key = "FILERPROVIDES_" + file + "_" + pkg
-            bb.data.setVar(key, " ".join(provides), d)
+            d.setVar(key, " ".join(provides))
 
         if len(requires) > 0:
             requires_files.append(file)
             key = "FILERDEPENDS_" + file + "_" + pkg
-            bb.data.setVar(key, " ".join(requires), d)
+            d.setVar(key, " ".join(requires))
 
     # Determine dependencies
     for pkg in packages.split():
@@ -1181,8 +1181,8 @@ python package_do_filedeps() {
 
             process_deps(dep_pipe, pkg, f, provides_files, requires_files)
 
-        bb.data.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files), d)
-        bb.data.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files), d)
+        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
+        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
 }
 
 SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
@@ -1461,7 +1461,7 @@ python package_do_pkgconfig () {
                 if m:
                     name = m.group(1)
                     val = m.group(2)
-                    bb.data.setVar(name, bb.data.expand(val, pd), pd)
+                    pd.setVar(name, bb.data.expand(val, pd))
                     continue
                 m = field_re.match(l)
                 if m:
@@ -1519,7 +1519,7 @@ python package_do_pkgconfig () {
 python read_shlibdeps () {
     packages = d.getVar('PACKAGES', True).split()
     for pkg in packages:
-        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "")
+        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
 
         for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
             depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
@@ -1529,7 +1529,7 @@ python read_shlibdeps () {
                 fd.close()
                 for l in lines:
                     rdepends[l.rstrip()] = ""
-        bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d)
+        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
 }
 
 python package_depchains() {
@@ -1569,7 +1569,7 @@ python package_depchains() {
                 rreclist[pkgname] = ""
 
        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-        bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d)
+        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
 
     def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
@@ -1590,7 +1590,7 @@ python package_depchains() {
                 rreclist[pkgname] = ""
 
        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
-        bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d)
+        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
 
     def add_dep(list, dep):
         dep = dep.split(' (')[0].strip()
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 71e46a8c8e..fc28ee1e2d 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -11,7 +11,7 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
 PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
 
 python package_deb_fn () {
-    bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+    d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 addtask package_deb_install
@@ -409,7 +409,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
         deps.append('dpkg-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_deb', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
         d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1")
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index df608fc0e3..1633affb08 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -11,7 +11,7 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
 OPKGBUILDCMD ??= "opkg-build"
 
 python package_ipk_fn () {
-    bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+    d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 python package_ipk_install () {
@@ -441,7 +441,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split()
         deps.append('opkg-utils-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
         d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1")
 }
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index 75e4f2d196..93a4c3123c 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
 PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
 
 python package_rpm_fn () {
-    bb.data.setVar('PKGFN', d.getVar('PKG'), d)
+    d.setVar('PKGFN', d.getVar('PKG'))
 }
 
 python package_rpm_install () {
@@ -467,7 +467,7 @@ python write_specfile () {
                     ver = ver.replace(pv, reppv)
                 newdeps_dict[dep] = ver
         depends = bb.utils.join_deps(newdeps_dict)
-        bb.data.setVar(varname, depends.strip(), d)
+        d.setVar(varname, depends.strip())
 
     # We need to change the style the dependency from BB to RPM
     # This needs to happen AFTER the mapping_rename_hook
@@ -969,7 +969,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
         deps.append('rpm-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
         d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
 }
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index f26a2c0008..201bd91657 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -69,7 +69,7 @@ python do_package_tar () {
         if not overrides:
             raise bb.build.FuncFailed('OVERRIDES not defined')
         overrides = bb.data.expand(overrides, localdata)
-        bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
+        localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg))
 
         bb.data.update_data(localdata)
 
@@ -95,7 +95,7 @@ python () {
         deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
         deps.append('tar-native:do_populate_sysroot')
         deps.append('virtual/fakeroot-native:do_populate_sysroot')
-        bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
+        d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
         d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
 }
diff --git a/meta/classes/src_distribute.bbclass b/meta/classes/src_distribute.bbclass
index 2069d652a3..651e492598 100644
--- a/meta/classes/src_distribute.bbclass
+++ b/meta/classes/src_distribute.bbclass
@@ -29,13 +29,13 @@ python do_distribute_sources () {
             if url.basename == '*':
                 import os.path
                 dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
-                bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d)
+                d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir))
             else:
-                bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d)
+                d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename))
         else:
             d.setVar('DEST', '')
 
-        bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d)
+        d.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license))
 
         bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
 }
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 951caa360f..504b09975d 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -20,18 +20,18 @@ SSTATEPOSTINSTFUNCS ?= ""
 
 python () {
     if bb.data.inherits_class('native', d):
-        bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d)
+        d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
     elif bb.data.inherits_class('cross', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d)
-        bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d))
+        d.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d))
     elif bb.data.inherits_class('crosssdk', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d))
     elif bb.data.inherits_class('nativesdk', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d))
     elif bb.data.inherits_class('cross-canadian', d):
-        bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d), d)
+        d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d))
     else:
-        bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d), d)
+        d.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d))
 
 # These classes encode staging paths into their scripts data so can only be
 # reused if we manipulate the paths
diff --git a/meta/classes/task.bbclass b/meta/classes/task.bbclass
index 516d1a1c20..7891207a64 100644
--- a/meta/classes/task.bbclass
+++ b/meta/classes/task.bbclass
@@ -22,6 +22,6 @@ python () {
     for pkg in packages:
         for postfix in ['-dbg', '-dev']:
             genpackages.append(pkg+postfix)
-    bb.data.setVar('PACKAGES', ' '.join(packages+genpackages), d)
+    d.setVar('PACKAGES', ' '.join(packages+genpackages))
 }
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 492c5fba2d..cba44d688f 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -45,7 +45,7 @@ python populate_packages_prepend () {
         bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", 1)
-        bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
+        localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
         bb.data.update_data(localdata)
 
         """