author | Joshua Lock <joshua.g.lock@intel.com> | 2016-12-14 21:13:04 +0000
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2016-12-16 08:30:03 +0000
commit | 7c552996597faaee2fbee185b250c0ee30ea3b5f (patch)
tree | bb74186da3e2d4b03c33875a71fbe340ba09a0d7 /meta/lib/oe
parent | 84ec50e587e7464b260b1b189659b93b6dab0ef6 (diff)
meta: remove True option to getVar calls
getVar() now expands values by default, so the True option can be removed
from getVar() calls with a regex search and replace (sketched below).
The search was made with the following regex: getVar ?\(( ?[^,()]*), True\)
Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
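For illustration only, the substitution described in the commit message can be sketched as a small Python script. The search pattern is the one quoted above; the replacement form getVar(\1), the script name and the file-handling loop are assumptions made for this sketch, not the exact tooling used for the commit.

```python
import re
import sys

# Search pattern quoted in the commit message: a getVar call whose second
# argument is the literal True, e.g. d.getVar('PN', True).
GETVAR_TRUE = re.compile(r'getVar ?\(( ?[^,()]*), True\)')

def strip_true_option(text):
    """Rewrite getVar(X, True) to getVar(X).

    getVar() now expands by default, so dropping the True argument keeps the
    behaviour unchanged; explicit getVar(X, False) calls are left untouched.
    """
    return GETVAR_TRUE.sub(r'getVar(\1)', text)

if __name__ == '__main__':
    # Hypothetical usage: python strip_getvar_true.py meta/lib/oe/*.py
    for path in sys.argv[1:]:
        with open(path) as f:
            original = f.read()
        updated = strip_true_option(original)
        if updated != original:
            with open(path, 'w') as f:
                f.write(updated)
```

Because each call site is rewritten as a one-line replacement, a substitution of this kind across meta/lib/oe produces the symmetric 306 insertions and 306 deletions shown in the diffstat below.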
Diffstat (limited to 'meta/lib/oe')
-rw-r--r-- | meta/lib/oe/classextend.py | 12
-rw-r--r-- | meta/lib/oe/copy_buildsystem.py | 12
-rw-r--r-- | meta/lib/oe/data.py | 2
-rw-r--r-- | meta/lib/oe/distro_check.py | 20
-rw-r--r-- | meta/lib/oe/gpg_sign.py | 4
-rw-r--r-- | meta/lib/oe/manifest.py | 26
-rw-r--r-- | meta/lib/oe/package.py | 2
-rw-r--r-- | meta/lib/oe/package_manager.py | 174
-rw-r--r-- | meta/lib/oe/packagedata.py | 2
-rw-r--r-- | meta/lib/oe/packagegroup.py | 6
-rw-r--r-- | meta/lib/oe/patch.py | 30
-rw-r--r-- | meta/lib/oe/path.py | 2
-rw-r--r-- | meta/lib/oe/prservice.py | 26
-rw-r--r-- | meta/lib/oe/qa.py | 4
-rw-r--r-- | meta/lib/oe/recipeutils.py | 38
-rw-r--r-- | meta/lib/oe/rootfs.py | 114
-rw-r--r-- | meta/lib/oe/sdk.py | 76
-rw-r--r-- | meta/lib/oe/sstatesig.py | 30
-rw-r--r-- | meta/lib/oe/terminal.py | 2
-rw-r--r-- | meta/lib/oe/utils.py | 30
20 files changed, 306 insertions, 306 deletions
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py index 4c8a00070c..d2eeaf0e5c 100644 --- a/meta/lib/oe/classextend.py +++ b/meta/lib/oe/classextend.py @@ -25,7 +25,7 @@ class ClassExtender(object): return name def map_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -38,7 +38,7 @@ class ClassExtender(object): return newdata def map_regexp_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -60,7 +60,7 @@ class ClassExtender(object): return dep else: # Do not extend for that already have multilib prefix - var = self.d.getVar("MULTILIB_VARIANTS", True) + var = self.d.getVar("MULTILIB_VARIANTS") if var: var = var.split() for v in var: @@ -74,7 +74,7 @@ class ClassExtender(object): varname = varname + "_" + suffix orig = self.d.getVar("EXTENDPKGV", False) self.d.setVar("EXTENDPKGV", "EXTENDPKGV") - deps = self.d.getVar(varname, True) + deps = self.d.getVar(varname) if not deps: self.d.setVar("EXTENDPKGV", orig) return @@ -87,7 +87,7 @@ class ClassExtender(object): self.d.setVar("EXTENDPKGV", orig) def map_packagevars(self): - for pkg in (self.d.getVar("PACKAGES", True).split() + [""]): + for pkg in (self.d.getVar("PACKAGES").split() + [""]): self.map_depends_variable("RDEPENDS", pkg) self.map_depends_variable("RRECOMMENDS", pkg) self.map_depends_variable("RSUGGESTS", pkg) @@ -97,7 +97,7 @@ class ClassExtender(object): self.map_depends_variable("PKG", pkg) def rename_packages(self): - for pkg in (self.d.getVar("PACKAGES", True) or "").split(): + for pkg in (self.d.getVar("PACKAGES") or "").split(): if pkg.startswith(self.extname): self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg]) continue diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py index 29ac6d418f..a372904183 100644 --- a/meta/lib/oe/copy_buildsystem.py +++ b/meta/lib/oe/copy_buildsystem.py @@ -21,8 +21,8 @@ class BuildSystem(object): def __init__(self, context, d): self.d = d self.context = context - self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS', True).split()] - self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE', True) or "").split() + self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()] + self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split() def copy_bitbake_and_layers(self, destdir, workspace_name=None): # Copy in all metadata layers + bitbake (as repositories) @@ -30,7 +30,7 @@ class BuildSystem(object): bb.utils.mkdirhier(destdir) layers = list(self.layerdirs) - corebase = os.path.abspath(self.d.getVar('COREBASE', True)) + corebase = os.path.abspath(self.d.getVar('COREBASE')) layers.append(corebase) # Exclude layers @@ -46,7 +46,7 @@ class BuildSystem(object): extranum += 1 workspace_newname = '%s-%d' % (workspace_name, extranum) - corebase_files = self.d.getVar('COREBASE_FILES', True).split() + corebase_files = self.d.getVar('COREBASE_FILES').split() corebase_files = [corebase + '/' +x for x in corebase_files] # Make sure bitbake goes in bitbake_dir = bb.__file__.rsplit('/', 3)[0] @@ -100,7 +100,7 @@ class BuildSystem(object): # Drop all bbappends except the one for the image the SDK is being built for # (because of externalsrc, the workspace bbappends will interfere with the # locked signatures if present, and we don't need them anyway) - image_bbappend = 
os.path.splitext(os.path.basename(self.d.getVar('FILE', True)))[0] + '.bbappend' + image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend' appenddir = os.path.join(layerdestpath, 'appends') if os.path.isdir(appenddir): for fn in os.listdir(appenddir): @@ -208,7 +208,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac import shutil bb.note('Generating sstate-cache...') - nativelsbstring = d.getVar('NATIVELSBSTRING', True) + nativelsbstring = d.getVar('NATIVELSBSTRING') bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) if fixedlsbstring and nativelsbstring != fixedlsbstring: nativedir = output_sstate_cache + '/' + nativelsbstring diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py index ee48950a82..032f68a847 100644 --- a/meta/lib/oe/data.py +++ b/meta/lib/oe/data.py @@ -12,6 +12,6 @@ def typed_value(key, d): flags = {} try: - return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags) + return oe.maketype.create(d.getVar(key) or '', var_type, **flags) except (TypeError, ValueError) as exc: bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py index c666ddc257..f54f4bb67d 100644 --- a/meta/lib/oe/distro_check.py +++ b/meta/lib/oe/distro_check.py @@ -224,37 +224,37 @@ def compare_in_distro_packages_list(distro_check_dir, d): localdata = bb.data.createCopy(d) pkglst_dir = os.path.join(distro_check_dir, "package_lists") matching_distros = [] - pn = recipe_name = d.getVar('PN', True) + pn = recipe_name = d.getVar('PN') bb.note("Checking: %s" % pn) if pn.find("-native") != -1: pnstripped = pn.split("-native") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] if pn.startswith("nativesdk-"): pnstripped = pn.split("nativesdk-") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[1] if pn.find("-cross") != -1: pnstripped = pn.split("-cross") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] if pn.find("-initial") != -1: pnstripped = pn.split("-initial") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) bb.data.update_data(localdata) recipe_name = pnstripped[0] bb.note("Recipe: %s" % recipe_name) distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) - tmp = localdata.getVar('DISTRO_PN_ALIAS', True) or "" + tmp = localdata.getVar('DISTRO_PN_ALIAS') or "" for str in tmp.split(): if str and str.find("=") == -1 and distro_exceptions[str]: matching_distros.append(str) @@ -286,10 +286,10 @@ def compare_in_distro_packages_list(distro_check_dir, d): return matching_distros def create_log_file(d, logname): - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') 
bb.utils.mkdirhier(logpath) logfn, logsuffix = os.path.splitext(logname) - logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix)) + logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix)) if not os.path.exists(logfile): slogfile = os.path.join(logpath, logname) if os.path.exists(slogfile): @@ -301,8 +301,8 @@ def create_log_file(d, logname): def save_distro_check_result(result, datetime, result_file, d): - pn = d.getVar('PN', True) - logdir = d.getVar('LOG_DIR', True) + pn = d.getVar('PN') + logdir = d.getVar('LOG_DIR') if not logdir: bb.error("LOG_DIR variable is not defined, can't write the distro_check results") return diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py index 38eb0cb137..dcd1990930 100644 --- a/meta/lib/oe/gpg_sign.py +++ b/meta/lib/oe/gpg_sign.py @@ -7,9 +7,9 @@ import oe.utils class LocalSigner(object): """Class for handling local (on the build host) signing""" def __init__(self, d): - self.gpg_bin = d.getVar('GPG_BIN', True) or \ + self.gpg_bin = d.getVar('GPG_BIN') or \ bb.utils.which(os.getenv('PATH'), 'gpg') - self.gpg_path = d.getVar('GPG_PATH', True) + self.gpg_path = d.getVar('GPG_PATH') self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpm") def export_pubkey(self, output_file, keyid, armor=True): diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py index 95f8eb2df3..6ec9b1af8b 100644 --- a/meta/lib/oe/manifest.py +++ b/meta/lib/oe/manifest.py @@ -59,9 +59,9 @@ class Manifest(object, metaclass=ABCMeta): if manifest_dir is None: if manifest_type != self.MANIFEST_TYPE_IMAGE: - self.manifest_dir = self.d.getVar('SDK_DIR', True) + self.manifest_dir = self.d.getVar('SDK_DIR') else: - self.manifest_dir = self.d.getVar('WORKDIR', True) + self.manifest_dir = self.d.getVar('WORKDIR') else: self.manifest_dir = manifest_dir @@ -82,7 +82,7 @@ class Manifest(object, metaclass=ABCMeta): This will be used for testing until the class is implemented properly! 
""" def _create_dummy_initial(self): - image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) + image_rootfs = self.d.getVar('IMAGE_ROOTFS') pkg_list = dict() if image_rootfs.find("core-image-sato-sdk") > 0: pkg_list[self.PKG_TYPE_MUST_INSTALL] = \ @@ -195,7 +195,7 @@ class RpmManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -216,13 +216,13 @@ class RpmManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -245,7 +245,7 @@ class OpkgManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -266,13 +266,13 @@ class OpkgManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -310,7 +310,7 @@ class DpkgManifest(Manifest): manifest.write(self.initial_manifest_file_header) for var in self.var_maps[self.manifest_type]: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is None: continue @@ -332,7 +332,7 @@ def create_manifest(d, final_manifest=False, manifest_dir=None, 'ipk': OpkgManifest, 'deb': DpkgManifest} - manifest = manifest_map[d.getVar('IMAGE_PKGTYPE', True)](d, manifest_dir, manifest_type) + manifest = manifest_map[d.getVar('IMAGE_PKGTYPE')](d, manifest_dir, manifest_type) if final_manifest: manifest.create_final() diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index ae60a5843e..795389517f 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py @@ -104,7 +104,7 @@ def read_shlib_providers(d): import re shlib_provider = {} - shlibs_dirs = d.getVar('SHLIBSDIRS', True).split() + shlibs_dirs = d.getVar('SHLIBSDIRS').split() list_re = re.compile('^(.*)\.list$') # Go from least to most specific since the last one found wins for dir in reversed(shlibs_dirs): diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py index e5e3c3b679..bb458691e3 100644 --- a/meta/lib/oe/package_manager.py +++ b/meta/lib/oe/package_manager.py @@ -107,16 +107,16 @@ class RpmIndexer(Indexer): target_os = collections.OrderedDict() if arch_var is not None and os_var is not None: - package_archs['default'] = 
self.d.getVar(arch_var, True).split() + package_archs['default'] = self.d.getVar(arch_var).split() package_archs['default'].reverse() - target_os['default'] = self.d.getVar(os_var, True).strip() + target_os['default'] = self.d.getVar(os_var).strip() else: - package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split() + package_archs['default'] = self.d.getVar("PACKAGE_ARCHS").split() # arch order is reversed. This ensures the -best- match is # listed first! package_archs['default'].reverse() - target_os['default'] = self.d.getVar("TARGET_OS", True).strip() - multilibs = self.d.getVar('MULTILIBS', True) or "" + target_os['default'] = self.d.getVar("TARGET_OS").strip() + multilibs = self.d.getVar('MULTILIBS') or "" for ext in multilibs.split(): eext = ext.split(':') if len(eext) > 1 and eext[0] == 'multilib': @@ -150,8 +150,8 @@ class RpmIndexer(Indexer): return (ml_prefix_list, target_os) def write_index(self): - sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split() - all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split() + sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS') or "").replace('-', '_').split() + all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").replace('-', '_').split() mlb_prefix_list = self.get_ml_prefix_and_os_list()[0] @@ -165,15 +165,15 @@ class RpmIndexer(Indexer): archs = archs.union(set(sdk_pkg_archs)) rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo") - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) else: signer = None index_cmds = [] repomd_files = [] rpm_dirs_found = False for arch in archs: - dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch) + dbpath = os.path.join(self.d.getVar('WORKDIR'), 'rpmdb', arch) if os.path.exists(dbpath): bb.utils.remove(dbpath, True) arch_dir = os.path.join(self.deploy_dir, arch) @@ -197,11 +197,11 @@ class RpmIndexer(Indexer): # Sign repomd if signer: for repomd in repomd_files: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) + feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') is_ascii_sig = (feed_sig_type.upper() != "BIN") signer.detach_sign(repomd, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), + self.d.getVar('PACKAGE_FEED_GPG_NAME'), + self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) @@ -212,8 +212,8 @@ class OpkgIndexer(Indexer): "MULTILIB_ARCHS"] opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) else: signer = None @@ -223,7 +223,7 @@ class OpkgIndexer(Indexer): index_cmds = set() index_sign_files = set() for arch_var in arch_vars: - archs = self.d.getVar(arch_var, True) + archs = self.d.getVar(arch_var) if archs is None: continue @@ -251,12 +251,12 @@ class OpkgIndexer(Indexer): bb.fatal('%s' % ('\n'.join(result))) if signer: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) + feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') is_ascii_sig = 
(feed_sig_type.upper() != "BIN") for f in index_sign_files: signer.detach_sign(f, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), + self.d.getVar('PACKAGE_FEED_GPG_NAME'), + self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) @@ -290,16 +290,16 @@ class DpkgIndexer(Indexer): os.environ['APT_CONFIG'] = self.apt_conf_file - pkg_archs = self.d.getVar('PACKAGE_ARCHS', True) + pkg_archs = self.d.getVar('PACKAGE_ARCHS') if pkg_archs is not None: arch_list = pkg_archs.split() - sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True) + sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS') if sdk_pkg_archs is not None: for a in sdk_pkg_archs.split(): if a not in pkg_archs: arch_list.append(a) - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list) apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive") @@ -332,7 +332,7 @@ class DpkgIndexer(Indexer): result = oe.utils.multiprocess_exec(index_cmds, create_index) if result: bb.fatal('%s' % ('\n'.join(result))) - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': raise NotImplementedError('Package feed signing not implementd for dpkg') @@ -386,7 +386,7 @@ class RpmPkgsList(PkgsList): # Workaround for bug 3565. Simply look to see if we # know of a package with that name, if not try again! - filename = os.path.join(self.d.getVar('PKGDATA_DIR', True), + filename = os.path.join(self.d.getVar('PKGDATA_DIR'), 'runtime-reverse', new_pkg) if os.path.exists(filename): @@ -464,7 +464,7 @@ class OpkgPkgsList(PkgsList): self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") def list_pkgs(self, format=None): cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args) @@ -512,9 +512,9 @@ class PackageManager(object, metaclass=ABCMeta): self.d = d self.deploy_dir = None self.deploy_lock = None - self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or "" - self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or "" - self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True) + self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS') or "" + self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS') or "" + self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS') """ Update the package manager package database. 
@@ -568,7 +568,7 @@ class PackageManager(object, metaclass=ABCMeta): def install_complementary(self, globs=None): # we need to write the list of installed packages to a file because the # oe-pkgdata-util reads it from a file - installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True), + installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR'), "installed_pkgs.txt") with open(installed_pkgs_file, "w+") as installed_pkgs: pkgs = self.list_installed() @@ -576,10 +576,10 @@ class PackageManager(object, metaclass=ABCMeta): installed_pkgs.write(output) if globs is None: - globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True) + globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') split_linguas = set() - for translation in self.d.getVar('IMAGE_LINGUAS', True).split(): + for translation in self.d.getVar('IMAGE_LINGUAS').split(): split_linguas.add(translation) split_linguas.add(translation.split('-')[0]) @@ -592,9 +592,9 @@ class PackageManager(object, metaclass=ABCMeta): return cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), - "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file, + "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file, globs] - exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True) + exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') if exclude: cmd.extend(['--exclude=' + '|'.join(exclude.split())]) try: @@ -659,7 +659,7 @@ class RpmPM(PackageManager): self.task_name = task_name self.providename = providename self.fullpkglist = list() - self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True) + self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM') self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm") self.install_dir_name = "oe_install" self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name) @@ -669,7 +669,7 @@ class RpmPM(PackageManager): # 1 = --log-level=info (includes information about executing scriptlets and their output) # 2 = --log-level=debug # 3 = --log-level=debug plus dumps of scriplet content and command invocation - self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0") + self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG') or "0") self.smart_opt = ["--log-level=%s" % ("warning" if self.debug_level == 0 else "info" if self.debug_level == 1 else @@ -684,7 +684,7 @@ class RpmPM(PackageManager): if not os.path.exists(self.d.expand('${T}/saved')): bb.utils.mkdirhier(self.d.expand('${T}/saved')) - packageindex_dir = os.path.join(self.d.getVar('WORKDIR', True), 'rpms') + packageindex_dir = os.path.join(self.d.getVar('WORKDIR'), 'rpms') self.indexer = RpmIndexer(self.d, packageindex_dir) self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var) @@ -702,7 +702,7 @@ class RpmPM(PackageManager): # List must be prefered to least preferred order default_platform_extra = list() platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" + bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or "" for mlib in self.ml_os_list: for arch in self.ml_prefix_list[mlib]: plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] @@ -750,7 +750,7 @@ class RpmPM(PackageManager): Create configs for rpm and smart, and multilib is supported ''' def create_configs(self): - target_arch = self.d.getVar('TARGET_ARCH', True) + target_arch = self.d.getVar('TARGET_ARCH') platform = '%s%s-%s' % (target_arch.replace('-', '_'), self.target_vendor, self.ml_os_list['default']) @@ -758,7 +758,7 @@ class RpmPM(PackageManager): # List must be prefered to 
least preferred order default_platform_extra = list() platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" + bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or "" for mlib in self.ml_os_list: for arch in self.ml_prefix_list[mlib]: plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] @@ -841,7 +841,7 @@ class RpmPM(PackageManager): if not new_pkg: # Failed to translate, package not found! err_msg = '%s not found in the %s feeds (%s) in %s.' % \ - (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) + (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM')) if not attempt_only: bb.error(err_msg) bb.fatal("This is often caused by an empty package declared " \ @@ -860,7 +860,7 @@ class RpmPM(PackageManager): new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs) if not new_pkg: err_msg = '%s not found in the feeds (%s) in %s.' % \ - (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) + (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM')) if not attempt_only: bb.error(err_msg) bb.fatal("This is often caused by an empty package declared " \ @@ -887,7 +887,7 @@ class RpmPM(PackageManager): channel_priority = 5 platform_dir = os.path.join(self.etcrpm_dir, "platform") - sdkos = self.d.getVar("SDK_OS", True) + sdkos = self.d.getVar("SDK_OS") with open(platform_dir, "w+") as platform_fd: platform_fd.write(platform + '\n') for pt in platform_extra: @@ -957,8 +957,8 @@ class RpmPM(PackageManager): bb.fatal("Create rpm database failed. Command '%s' " "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) # Import GPG key to RPM database of the target system - if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1': - pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True) + if self.d.getVar('RPM_SIGN_PACKAGES') == '1': + pubkey_path = self.d.getVar('RPM_GPG_PUBKEY') cmd = [self.rpm_cmd, '--root', self.target_rootfs, '--dbpath', '/var/lib/rpm', '--import', pubkey_path] try: subprocess.check_output(cmd, stderr=subprocess.STDOUT) @@ -974,10 +974,10 @@ class RpmPM(PackageManager): self._invoke_smart(['config', '--set', 'rpm-root=%s' % self.target_rootfs]) self._invoke_smart(['config', '--set', 'rpm-dbpath=/var/lib/rpm']) self._invoke_smart(['config', '--set', 'rpm-extra-macros._var=%s' % - self.d.getVar('localstatedir', True)]) + self.d.getVar('localstatedir')]) cmd = ["config", "--set", "rpm-extra-macros._tmppath=/%s/tmp" % self.install_dir_name] - prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True) + prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH') if prefer_color: if prefer_color not in ['0', '1', '2', '4']: bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n" @@ -985,7 +985,7 @@ class RpmPM(PackageManager): "\t2: ELF64 wins\n" "\t4: ELF64 N32 wins (mips64 or mips64el only)" % prefer_color) - if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \ + if prefer_color == "4" and self.d.getVar("TUNE_ARCH") not in \ ['mips64', 'mips64el']: bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el " "only.") @@ -998,17 +998,17 @@ class RpmPM(PackageManager): # Write common configuration for host and target usage self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) - check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True) + check_signature = self.d.getVar('RPM_CHECK_SIGNATURES') if check_signature and check_signature.strip() == "0": self._invoke_smart(['config', '--set 
rpm-check-signatures=false']) - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): + for i in self.d.getVar('BAD_RECOMMENDATIONS').split(): self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) # Do the following configurations here, to avoid them being # saved for field upgrade - if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1": + if self.d.getVar('NO_RECOMMENDATIONS').strip() == "1": self._invoke_smart(['config', '--set', 'ignore-all-recommends=1']) - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" + pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" for i in pkg_exclude.split(): self._invoke_smart(['flag', '--set', 'exclude-packages', i]) @@ -1019,13 +1019,13 @@ class RpmPM(PackageManager): ch_already_added = [] for canonical_arch in platform_extra: arch = canonical_arch.split('-')[0] - arch_channel = os.path.join(self.d.getVar('WORKDIR', True), 'rpms', arch) + arch_channel = os.path.join(self.d.getVar('WORKDIR'), 'rpms', arch) oe.path.remove(arch_channel) deploy_arch_dir = os.path.join(self.deploy_dir, arch) if not os.path.exists(deploy_arch_dir): continue - lockfilename = self.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" + lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock" lf = bb.utils.lockfile(lockfilename, False) oe.path.copyhardlinktree(deploy_arch_dir, arch_channel) bb.utils.unlockfile(lf) @@ -1096,7 +1096,7 @@ class RpmPM(PackageManager): "fi\n" intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts') - native_root = self.d.getVar('STAGING_DIR_NATIVE', True) + native_root = self.d.getVar('STAGING_DIR_NATIVE') scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'], self.target_rootfs, intercept_dir, @@ -1170,7 +1170,7 @@ class RpmPM(PackageManager): ml_pkgs = [] non_ml_pkgs = pkgs[:] for pkg in pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): + for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split(): if pkg.startswith(mlib + '-'): ml_pkgs.append(pkg) non_ml_pkgs.remove(pkg) @@ -1184,7 +1184,7 @@ class RpmPM(PackageManager): # correctly. 
pkgs_new = [] for pkg in non_ml_pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): + for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split(): mlib_pkg = mlib + "-" + pkg if mlib_pkg in ml_pkgs: pkgs_new.append(pkg) @@ -1401,7 +1401,7 @@ class RpmPM(PackageManager): self._invoke_smart(['config', '--set', 'rpm-nolinktos=1']) self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1']) - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): + for i in self.d.getVar('BAD_RECOMMENDATIONS').split(): self._invoke_smart(['flag', '--set', 'ignore-recommends', i]) self._invoke_smart(['channel', '--add', 'rpmsys', 'type=rpm-sys', '-y']) @@ -1575,13 +1575,13 @@ class OpkgPM(OpkgDpkgPM): self.pkg_archs = archs self.task_name = task_name - self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True) + self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK") self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock") self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") - opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True) + opkg_lib_dir = self.d.getVar('OPKGLIBDIR') if opkg_lib_dir[0] == "/": opkg_lib_dir = opkg_lib_dir[1:] @@ -1593,7 +1593,7 @@ class OpkgPM(OpkgDpkgPM): if not os.path.exists(self.d.expand('${T}/saved')): bb.utils.mkdirhier(self.d.expand('${T}/saved')) - self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1" + self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1" if self.from_feeds: self._create_custom_config() else: @@ -1638,7 +1638,7 @@ class OpkgPM(OpkgDpkgPM): config_file.write("arch %s %d\n" % (arch, priority)) priority += 5 - for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split(): + for line in (self.d.getVar('IPK_FEED_URIS') or "").split(): feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line) if feed_match is not None: @@ -1655,17 +1655,17 @@ class OpkgPM(OpkgDpkgPM): specified as compatible for the current machine. NOTE: Development-helper feature, NOT a full-fledged feed. 
""" - if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "": + if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": for arch in self.pkg_archs.split(): cfg_file_name = os.path.join(self.target_rootfs, - self.d.getVar("sysconfdir", True), + self.d.getVar("sysconfdir"), "opkg", "local-%s-feed.conf" % arch) with open(cfg_file_name, "w+") as cfg_file: cfg_file.write("src/gz local-%s %s/%s" % (arch, - self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True), + self.d.getVar('FEED_DEPLOYDIR_BASE_URI'), arch)) if self.opkg_dir != '/var/lib/opkg': @@ -1674,8 +1674,8 @@ class OpkgPM(OpkgDpkgPM): # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" - cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) def _create_config(self): @@ -1699,8 +1699,8 @@ class OpkgPM(OpkgDpkgPM): # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" - config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) def insert_feeds_uris(self): if self.feed_uris == "": @@ -1755,9 +1755,9 @@ class OpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') try: bb.note("Installing the following packages: %s" % ' '.join(pkgs)) @@ -1808,7 +1808,7 @@ class OpkgPM(OpkgDpkgPM): return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs() def handle_bad_recommendations(self): - bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or "" + bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or "" if bad_recommendations.strip() == "": return @@ -1859,7 +1859,7 @@ class OpkgPM(OpkgDpkgPM): bb.utils.mkdirhier(temp_opkg_dir) opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs) - opkg_args += self.d.getVar("OPKG_ARGS", True) + opkg_args += self.d.getVar("OPKG_ARGS") cmd = "%s %s update" % (self.opkg_cmd, opkg_args) try: @@ -1935,7 +1935,7 @@ class DpkgPM(OpkgDpkgPM): def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None): super(DpkgPM, self).__init__(d) self.target_rootfs = target_rootfs - self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True) + self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB') if 
apt_conf_dir is None: self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt") else: @@ -1944,10 +1944,10 @@ class DpkgPM(OpkgDpkgPM): self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get") self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache") - self.apt_args = d.getVar("APT_ARGS", True) + self.apt_args = d.getVar("APT_ARGS") self.all_arch_list = archs.split() - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) self._create_configs(archs, base_archs) @@ -2008,9 +2008,9 @@ class DpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') failed_pkgs = [] for pkg_name in installed_pkgs: @@ -2161,7 +2161,7 @@ class DpkgPM(OpkgDpkgPM): priority += 5 - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" + pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" for pkg in pkg_exclude.split(): prefs_file.write( "Package: %s\n" @@ -2176,14 +2176,14 @@ class DpkgPM(OpkgDpkgPM): os.path.join(self.deploy_dir, arch)) base_arch_list = base_archs.split() - multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True); + multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); for variant in multilib_variants.split(): localdata = bb.data.createCopy(self.d) variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) - orig_arch = localdata.getVar("DPKG_ARCH", True) + orig_arch = localdata.getVar("DPKG_ARCH") localdata.setVar("DEFAULTTUNE", variant_tune) bb.data.update_data(localdata) - variant_arch = localdata.getVar("DPKG_ARCH", True) + variant_arch = localdata.getVar("DPKG_ARCH") if variant_arch not in base_arch_list: base_arch_list.append(variant_arch) @@ -2214,7 +2214,7 @@ class DpkgPM(OpkgDpkgPM): def remove_packaging_data(self): bb.utils.remove(os.path.join(self.target_rootfs, - self.d.getVar('opkglibdir', True)), True) + self.d.getVar('opkglibdir')), True) bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) def fix_broken_dependencies(self): @@ -2262,12 +2262,12 @@ class DpkgPM(OpkgDpkgPM): return tmp_dir def generate_index_files(d): - classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split() + classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split() indexer_map = { - "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)), - "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)), - "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True)) + "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM')), + "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')), + "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB')) } result = None diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index 21d4de914f..32e5c82a94 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py @@ -57,7 +57,7 @@ def read_subpkgdata_dict(pkg, d): def _pkgmap(d): """Return a dictionary mapping package to recipe name.""" - pkgdatadir = d.getVar("PKGDATA_DIR", True) + pkgdatadir = d.getVar("PKGDATA_DIR") 
pkgmap = {} try: diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py index 97819279b7..d68e5d322b 100644 --- a/meta/lib/oe/packagegroup.py +++ b/meta/lib/oe/packagegroup.py @@ -1,7 +1,7 @@ import itertools def is_optional(feature, d): - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if packages: return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True)) else: @@ -9,9 +9,9 @@ def is_optional(feature, d): def packages(features, d): for feature in features: - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if not packages: - packages = d.getVar("PACKAGE_GROUP_%s" % feature, True) + packages = d.getVar("PACKAGE_GROUP_%s" % feature) for pkg in (packages or "").split(): yield pkg diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 456ee70f7d..95674b3706 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py @@ -281,8 +281,8 @@ class GitApplyTree(PatchTree): def __init__(self, dir, d): PatchTree.__init__(self, dir, d) - self.commituser = d.getVar('PATCH_GIT_USER_NAME', True) - self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + self.commituser = d.getVar('PATCH_GIT_USER_NAME') + self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') @staticmethod def extractPatchHeader(patchfile): @@ -371,8 +371,8 @@ class GitApplyTree(PatchTree): @staticmethod def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): if d: - commituser = d.getVar('PATCH_GIT_USER_NAME', True) - commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + commituser = d.getVar('PATCH_GIT_USER_NAME') + commitemail = d.getVar('PATCH_GIT_USER_EMAIL') if commituser: cmd += ['-c', 'user.name="%s"' % commituser] if commitemail: @@ -551,7 +551,7 @@ class GitApplyTree(PatchTree): class QuiltTree(PatchSet): def _runcmd(self, args, run = True): - quiltrc = self.d.getVar('QUILTRCFILE', True) + quiltrc = self.d.getVar('QUILTRCFILE') if not run: return ["quilt"] + ["--quiltrc"] + [quiltrc] + args runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) @@ -727,7 +727,7 @@ class UserResolver(Resolver): # Patch application failed patchcmd = self.patchset.Push(True, False, False) - t = self.patchset.d.getVar('T', True) + t = self.patchset.d.getVar('T') if not t: bb.msg.fatal("Build", "T not set") bb.utils.mkdirhier(t) @@ -792,7 +792,7 @@ def patch_path(url, fetch, workdir, expand=True): return local def src_patches(d, all=False, expand=True): - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') fetch = bb.fetch2.Fetch([], d) patches = [] sources = [] @@ -839,13 +839,13 @@ def src_patches(d, all=False, expand=True): def should_apply(parm, d): if "mindate" in parm or "maxdate" in parm: - pn = d.getVar('PN', True) - srcdate = d.getVar('SRCDATE_%s' % pn, True) + pn = d.getVar('PN') + srcdate = d.getVar('SRCDATE_%s' % pn) if not srcdate: - srcdate = d.getVar('SRCDATE', True) + srcdate = d.getVar('SRCDATE') if srcdate == "now": - srcdate = d.getVar('DATE', True) + srcdate = d.getVar('DATE') if "maxdate" in parm and parm["maxdate"] < srcdate: return False, 'is outdated' @@ -855,22 +855,22 @@ def should_apply(parm, d): if "minrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and srcrev < parm["minrev"]: return False, 'applies to later revisions' if "maxrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and srcrev > parm["maxrev"]: return False, 'applies 
to earlier revisions' if "rev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and parm["rev"] not in srcrev: return False, "doesn't apply to revision" if "notrev" in parm: - srcrev = d.getVar('SRCREV', True) + srcrev = d.getVar('SRCREV') if srcrev and parm["notrev"] in srcrev: return False, "doesn't apply to revision" diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index f73fd4ac07..804ecd5fea 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py @@ -52,7 +52,7 @@ def make_relative_symlink(path): def format_display(path, metadata): """ Prepare a path for display to the user. """ - rel = relative(metadata.getVar("TOPDIR", True), path) + rel = relative(metadata.getVar("TOPDIR"), path) if len(rel) > len(path): return path else: diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py index 0054f954cc..32dfc15e88 100644 --- a/meta/lib/oe/prservice.py +++ b/meta/lib/oe/prservice.py @@ -1,7 +1,7 @@ def prserv_make_conn(d, check = False): import prserv.serv - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: conn = None conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) @@ -15,11 +15,11 @@ def prserv_make_conn(d, check = False): return conn def prserv_dump_db(d): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -27,18 +27,18 @@ def prserv_dump_db(d): return None #dump db - opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True) - opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True) - opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True) - opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True)) + opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') + opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') + opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') + opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -58,7 +58,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu (filter_checksum and filter_checksum != checksum): continue try: - value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True)) + value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum)) except BaseException as exc: bb.debug("Not valid value of %s:%s" % (v,str(exc))) continue @@ -72,8 +72,8 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): import bb.utils #initilize the output file - bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True)) - df = d.getVar('PRSERV_DUMPFILE', True) + bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) + df = d.getVar('PRSERV_DUMPFILE') #write data lf = bb.utils.lockfile("%s.lock" % df) f = open(df, "a") @@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, 
nomax=False): bb.utils.unlockfile(lf) def prserv_check_avail(d): - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: if len(host_params) != 2: raise TypeError diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index 22d76dcbcd..3231e60cea 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py @@ -129,11 +129,11 @@ class ELFFile: if cmd in self.objdump_output: return self.objdump_output[cmd] - objdump = d.getVar('OBJDUMP', True) + objdump = d.getVar('OBJDUMP') env = os.environ.copy() env["LC_ALL"] = "C" - env["PATH"] = d.getVar('PATH', True) + env["PATH"] = d.getVar('PATH') try: bb.note("%s %s %s" % (objdump, cmd, self.name)) diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index 26c926f214..a7fdd36e40 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py @@ -328,16 +328,16 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): # FIXME need a warning if the unexpanded SRC_URI value contains variable references - uris = (d.getVar('SRC_URI', True) or "").split() + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) if download: fetch.download() # Copy local files to target directory and gather any remote files - bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep + bb_dir = os.path.dirname(d.getVar('FILE')) + os.sep remotes = [] copied = [] - includes = [path for path in d.getVar('BBINCLUDED', True).split() if + includes = [path for path in d.getVar('BBINCLUDED').split() if path.startswith(bb_dir) and os.path.exists(path)] for path in fetch.localpaths() + includes: # Only import files that are under the meta directory @@ -361,7 +361,7 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): def get_recipe_local_files(d, patches=False, archives=False): """Get a list of local files in SRC_URI within a recipe.""" import oe.patch - uris = (d.getVar('SRC_URI', True) or "").split() + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) # FIXME this list should be factored out somewhere else (such as the # fetcher) though note that this only encompasses actual container formats @@ -421,7 +421,7 @@ def get_recipe_patched_files(d): for patch in patches: _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) striplevel = int(parm['striplevel']) - patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S', True), parm.get('patchdir', ''))) + patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', ''))) return patchedfiles @@ -459,9 +459,9 @@ def get_bbfile_path(d, destdir, extrapathhint=None): confdata.setVar('LAYERDIR', destlayerdir) destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) - pn = d.getVar('PN', True) + pn = d.getVar('PN') - bbfilespecs = (confdata.getVar('BBFILES', True) or '').split() + bbfilespecs = (confdata.getVar('BBFILES') or '').split() if destdir == destlayerdir: for bbfilespec in bbfilespecs: if not bbfilespec.endswith('.bbappend'): @@ -474,8 +474,8 @@ def get_bbfile_path(d, destdir, extrapathhint=None): # Try to make up a path that matches BBFILES # this is a little crude, but better than nothing - bpn = d.getVar('BPN', True) - recipefn = os.path.basename(d.getVar('FILE', True)) + bpn = d.getVar('BPN') + recipefn = 
os.path.basename(d.getVar('FILE')) pathoptions = [destdir] if extrapathhint: pathoptions.append(os.path.join(destdir, extrapathhint)) @@ -499,7 +499,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): import bb.cookerdata destlayerdir = os.path.abspath(destlayerdir) - recipefile = d.getVar('FILE', True) + recipefile = d.getVar('FILE') recipefn = os.path.splitext(os.path.basename(recipefile))[0] if wildcardver and '_' in recipefn: recipefn = recipefn.split('_', 1)[0] + '_%' @@ -519,7 +519,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) closepath = '' pathok = True - for bbfilespec in confdata.getVar('BBFILES', True).split(): + for bbfilespec in confdata.getVar('BBFILES').split(): if fnmatch.fnmatchcase(appendpath, bbfilespec): # Our append path works, we're done break @@ -592,7 +592,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, # FIXME check if the bbappend doesn't get overridden by a higher priority layer? - layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()] + layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] if not os.path.abspath(destlayerdir) in layerdirs: bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') @@ -628,7 +628,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, else: bbappendlines.append((varname, op, value)) - destsubdir = rd.getVar('PN', True) + destsubdir = rd.getVar('PN') if srcfiles: bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) @@ -647,7 +647,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, srcurientry = 'file://%s' % srcfile # Double-check it's not there already # FIXME do we care if the entry is added by another bbappend that might go away? - if not srcurientry in rd.getVar('SRC_URI', True).split(): + if not srcurientry in rd.getVar('SRC_URI').split(): if machine: appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) else: @@ -796,7 +796,7 @@ def replace_dir_vars(path, d): # Sort by length so we get the variables we're interested in first for var in sorted(list(d.keys()), key=len): if var.endswith('dir') and var.lower() == var: - value = d.getVar(var, True) + value = d.getVar(var) if value.startswith('/') and not '\n' in value and value not in dirvars: dirvars[value] = var for dirpath in sorted(list(dirvars.keys()), reverse=True): @@ -850,12 +850,12 @@ def get_recipe_upstream_version(rd): ru['type'] = 'U' ru['datetime'] = '' - pv = rd.getVar('PV', True) + pv = rd.getVar('PV') # XXX: If don't have SRC_URI means that don't have upstream sources so # returns the current recipe version, so that upstream version check # declares a match. - src_uris = rd.getVar('SRC_URI', True) + src_uris = rd.getVar('SRC_URI') if not src_uris: ru['version'] = pv ru['type'] = 'M' @@ -866,13 +866,13 @@ def get_recipe_upstream_version(rd): src_uri = src_uris.split()[0] uri_type, _, _, _, _, _ = decodeurl(src_uri) - manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True) + manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") if manual_upstream_version: # manual tracking of upstream version. 
ru['version'] = manual_upstream_version ru['type'] = 'M' - manual_upstream_date = rd.getVar("CHECK_DATE", True) + manual_upstream_date = rd.getVar("CHECK_DATE") if manual_upstream_date: date = datetime.strptime(manual_upstream_date, "%b %d, %Y") else: diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 74fc3bd256..d9a473006a 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py @@ -18,8 +18,8 @@ class Rootfs(object, metaclass=ABCMeta): def __init__(self, d, progress_reporter=None, logcatcher=None): self.d = d self.pm = None - self.image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) - self.deploydir = self.d.getVar('IMGDEPLOYDIR', True) + self.image_rootfs = self.d.getVar('IMAGE_ROOTFS') + self.deploydir = self.d.getVar('IMGDEPLOYDIR') self.progress_reporter = progress_reporter self.logcatcher = logcatcher @@ -72,7 +72,7 @@ class Rootfs(object, metaclass=ABCMeta): else: msg = '%d %s messages' % (len(messages), type) msg = '[log_check] %s: found %s in the logfile:\n%s' % \ - (self.d.getVar('PN', True), msg, ''.join(messages)) + (self.d.getVar('PN'), msg, ''.join(messages)) if type == 'error': bb.fatal(msg) else: @@ -103,7 +103,7 @@ class Rootfs(object, metaclass=ABCMeta): pass def _setup_dbg_rootfs(self, dirs): - gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS', True) or '0' + gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' if gen_debugfs != '1': return @@ -156,7 +156,7 @@ class Rootfs(object, metaclass=ABCMeta): os.rename(self.image_rootfs + '-orig', self.image_rootfs) def _exec_shell_cmd(self, cmd): - fakerootcmd = self.d.getVar('FAKEROOT', True) + fakerootcmd = self.d.getVar('FAKEROOT') if fakerootcmd is not None: exec_cmd = [fakerootcmd, cmd] else: @@ -171,14 +171,14 @@ class Rootfs(object, metaclass=ABCMeta): def create(self): bb.note("###### Generate rootfs #######") - pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND", True) - post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND", True) - rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND', True) + pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND") + post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") + rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') - postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR", True) + postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR") if not postinst_intercepts_dir: postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts") - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.utils.remove(intercepts_dir, True) @@ -201,10 +201,10 @@ class Rootfs(object, metaclass=ABCMeta): # call the package manager dependent create method self._create() - sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir', True) + sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir') bb.utils.mkdirhier(sysconfdir) with open(sysconfdir + "/version", "w+") as ver: - ver.write(self.d.getVar('BUILDNAME', True) + "\n") + ver.write(self.d.getVar('BUILDNAME') + "\n") execute_pre_post_process(self.d, rootfs_post_install_cmds) @@ -223,7 +223,7 @@ class Rootfs(object, metaclass=ABCMeta): "offline and rootfs is read-only: %s" % delayed_postinsts) - if self.d.getVar('USE_DEVFS', True) != "1": + if self.d.getVar('USE_DEVFS') != "1": self._create_devfs() self._uninstall_unneeded() @@ -235,7 +235,7 @@ class Rootfs(object, metaclass=ABCMeta): self._run_ldconfig() - if 
self.d.getVar('USE_DEPMOD', True) != "0": + if self.d.getVar('USE_DEPMOD') != "0": self._generate_kernel_module_deps() self._cleanup() @@ -251,16 +251,16 @@ class Rootfs(object, metaclass=ABCMeta): if delayed_postinsts is None: if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): self._exec_shell_cmd(["update-rc.d", "-f", "-r", - self.d.getVar('IMAGE_ROOTFS', True), + self.d.getVar('IMAGE_ROOTFS'), "run-postinsts", "remove"]) image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", True, False, self.d) - image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE', True) + image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') if image_rorfs or image_rorfs_force == "1": # Remove components that we don't need if it's a read-only rootfs - unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED", True).split() + unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split() pkgs_installed = image_list_installed_packages(self.d) pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs] @@ -273,7 +273,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.warn("There are post install scripts " "in a read-only rootfs") - post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND", True) + post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND") execute_pre_post_process(self.d, post_uninstall_cmds) runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", @@ -283,12 +283,12 @@ class Rootfs(object, metaclass=ABCMeta): self.pm.remove_packaging_data() def _run_intercepts(self): - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.note("Running intercept scripts:") os.environ['D'] = self.image_rootfs - os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE') for script in os.listdir(intercepts_dir): script_full = os.path.join(intercepts_dir, script) @@ -320,7 +320,7 @@ class Rootfs(object, metaclass=ABCMeta): self._handle_intercept_failure(registered_pkgs) def _run_ldconfig(self): - if self.d.getVar('LDCONFIGDEPEND', True): + if self.d.getVar('LDCONFIGDEPEND'): bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v") self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 'new', '-v']) @@ -340,7 +340,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.note("No Kernel Modules found, not running depmod") return - kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR', True), "kernel-depmod", + kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 'kernel-abiversion') if not os.path.exists(kernel_abi_ver_file): bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) @@ -362,15 +362,15 @@ class Rootfs(object, metaclass=ABCMeta): """ def _create_devfs(self): devtable_list = [] - devtable = self.d.getVar('IMAGE_DEVICE_TABLE', True) + devtable = self.d.getVar('IMAGE_DEVICE_TABLE') if devtable is not None: devtable_list.append(devtable) else: - devtables = self.d.getVar('IMAGE_DEVICE_TABLES', True) + devtables = self.d.getVar('IMAGE_DEVICE_TABLES') if devtables is None: devtables = 'files/device_table-minimal.txt' for devtable in devtables.split(): - devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH', True), devtable)) + devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable)) for devtable in devtable_list: 
self._exec_shell_cmd(["makedevs", "-r", @@ -386,16 +386,16 @@ class RpmRootfs(Rootfs): self.manifest = RpmManifest(d, manifest_dir) self.pm = RpmPM(d, - d.getVar('IMAGE_ROOTFS', True), - self.d.getVar('TARGET_VENDOR', True) + d.getVar('IMAGE_ROOTFS'), + self.d.getVar('TARGET_VENDOR') ) - self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN', True) + self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN') if self.inc_rpm_image_gen != "1": bb.utils.remove(self.image_rootfs, True) else: self.pm.recovery_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.pm.create_configs() @@ -429,8 +429,8 @@ class RpmRootfs(Rootfs): def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS', True) - rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS', True) + rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS') + rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS') # update PM index files self.pm.write_index() @@ -601,7 +601,7 @@ class DpkgOpkgRootfs(Rootfs): pkg_list = [] pkgs = None - if not self.d.getVar('PACKAGE_INSTALL', True).strip(): + if not self.d.getVar('PACKAGE_INSTALL').strip(): bb.note("Building empty image") else: pkgs = self._get_pkgs_postinsts(status_file) @@ -637,17 +637,17 @@ class DpkgRootfs(DpkgOpkgRootfs): ] bb.utils.remove(self.image_rootfs, True) - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.manifest = DpkgManifest(d, manifest_dir) - self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS', True), - d.getVar('PACKAGE_ARCHS', True), - d.getVar('DPKG_ARCH', True)) + self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'), + d.getVar('PACKAGE_ARCHS'), + d.getVar('DPKG_ARCH')) def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS', True) - deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS', True) + deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS') + deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS') alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") bb.utils.mkdirhier(alt_dir) @@ -725,10 +725,10 @@ class OpkgRootfs(DpkgOpkgRootfs): self.log_check_regex = '(exit 1|Collected errors)' self.manifest = OpkgManifest(d, manifest_dir) - self.opkg_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True) + self.opkg_conf = self.d.getVar("IPKGCONF_TARGET") + self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS") - self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN', True) or "" + self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or "" if self._remove_old_rootfs(): bb.utils.remove(self.image_rootfs, True) self.pm = OpkgPM(d, @@ -742,7 +742,7 @@ class OpkgRootfs(DpkgOpkgRootfs): self.pkg_archs) self.pm.recover_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) def _prelink_file(self, root_dir, filename): bb.note('prelink %s in %s' % (filename, root_dir)) @@ -797,7 +797,7 @@ class OpkgRootfs(DpkgOpkgRootfs): """ def _multilib_sanity_test(self, dirs): - allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP", True) + allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") if allow_replace is 
None: allow_replace = "" @@ -829,12 +829,12 @@ class OpkgRootfs(DpkgOpkgRootfs): files[key] = item def _multilib_test_install(self, pkgs): - ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS", True) + ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS") bb.utils.mkdirhier(ml_temp) dirs = [self.image_rootfs] - for variant in self.d.getVar("MULTILIB_VARIANTS", True).split(): + for variant in self.d.getVar("MULTILIB_VARIANTS").split(): ml_target_rootfs = os.path.join(ml_temp, variant) bb.utils.remove(ml_target_rootfs, True) @@ -894,9 +894,9 @@ class OpkgRootfs(DpkgOpkgRootfs): old_vars_list = open(vars_list_file, 'r+').read() new_vars_list = '%s:%s:%s\n' % \ - ((self.d.getVar('BAD_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('NO_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('PACKAGE_EXCLUDE', True) or '').strip()) + ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('PACKAGE_EXCLUDE') or '').strip()) open(vars_list_file, 'w+').write(new_vars_list) if old_vars_list != new_vars_list: @@ -906,11 +906,11 @@ class OpkgRootfs(DpkgOpkgRootfs): def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS', True) - opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS', True) + opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS') + opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS') # update PM index files, unless users provide their own feeds - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": self.pm.write_index() execute_pre_post_process(self.d, opkg_pre_process_cmds) @@ -968,7 +968,7 @@ class OpkgRootfs(DpkgOpkgRootfs): def _get_delayed_postinsts(self): status_file = os.path.join(self.image_rootfs, - self.d.getVar('OPKGLIBDIR', True).strip('/'), + self.d.getVar('OPKGLIBDIR').strip('/'), "opkg", "status") return self._get_delayed_postinsts_common(status_file) @@ -993,14 +993,14 @@ def get_class_for_type(imgtype): "deb": DpkgRootfs}[imgtype] def variable_depends(d, manifest_dir=None): - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') cls = get_class_for_type(img_type) return cls._depends_list() def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() elif img_type == "ipk": @@ -1014,13 +1014,13 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None) def image_list_installed_packages(d, rootfs_dir=None): if not rootfs_dir: - rootfs_dir = d.getVar('IMAGE_ROOTFS', True) + rootfs_dir = d.getVar('IMAGE_ROOTFS') - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": return RpmPkgsList(d, rootfs_dir).list_pkgs() elif img_type == "ipk": - return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET", True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET")).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index c74525f929..fef02d0777 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py @@ -11,16 +11,16 @@ import traceback class Sdk(object, metaclass=ABCMeta): def 
__init__(self, d, manifest_dir): self.d = d - self.sdk_output = self.d.getVar('SDK_OUTPUT', True) - self.sdk_native_path = self.d.getVar('SDKPATHNATIVE', True).strip('/') - self.target_path = self.d.getVar('SDKTARGETSYSROOT', True).strip('/') - self.sysconfdir = self.d.getVar('sysconfdir', True).strip('/') + self.sdk_output = self.d.getVar('SDK_OUTPUT') + self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/') + self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/') + self.sysconfdir = self.d.getVar('sysconfdir').strip('/') self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) self.sdk_host_sysroot = self.sdk_output if manifest_dir is None: - self.manifest_dir = self.d.getVar("SDK_DIR", True) + self.manifest_dir = self.d.getVar("SDK_DIR") else: self.manifest_dir = manifest_dir @@ -40,12 +40,12 @@ class Sdk(object, metaclass=ABCMeta): # Don't ship any libGL in the SDK self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "libGL*")) # Fix or remove broken .la files self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "*.la")) # Link the ld.so.cache file into the hosts filesystem @@ -54,7 +54,7 @@ class Sdk(object, metaclass=ABCMeta): self.mkdirhier(os.path.dirname(link_name)) os.symlink("/etc/ld.so.cache", link_name) - execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True)) + execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND')) def movefile(self, sourcefile, destdir): try: @@ -102,7 +102,7 @@ class RpmSdk(Sdk): self.target_pm = RpmPM(d, self.sdk_target_sysroot, - self.d.getVar('TARGET_VENDOR', True), + self.d.getVar('TARGET_VENDOR'), 'target', target_providename ) @@ -118,7 +118,7 @@ class RpmSdk(Sdk): self.host_pm = RpmPM(d, self.sdk_host_sysroot, - self.d.getVar('SDK_VENDOR', True), + self.d.getVar('SDK_VENDOR'), 'host', sdk_providename, "SDK_PACKAGE_ARCHS", @@ -149,9 +149,9 @@ class RpmSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -159,7 +159,7 @@ class RpmSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -167,7 +167,7 @@ class RpmSdk(Sdk): # Move host RPM library data native_rpm_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "rpm" ) @@ -197,8 +197,8 @@ class OpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(OpkgSdk, self).__init__(d, 
manifest_dir) - self.target_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.host_conf = self.d.getVar("IPKGCONF_SDK", True) + self.target_conf = self.d.getVar("IPKGCONF_TARGET") + self.host_conf = self.d.getVar("IPKGCONF_SDK") self.target_manifest = OpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -206,15 +206,15 @@ class OpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, - self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) + self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, - self.d.getVar("SDK_PACKAGE_ARCHS", True)) + self.d.getVar("SDK_PACKAGE_ARCHS")) def _populate_sysroot(self, pm, manifest): pkgs_to_install = manifest.parse_initial_manifest() - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": pm.write_index() pm.update() @@ -228,9 +228,9 @@ class OpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -238,7 +238,7 @@ class OpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -257,7 +257,7 @@ class OpkgSdk(Sdk): os.path.basename(self.host_conf)), 0o644) native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "opkg") self.mkdirhier(native_opkg_state_dir) for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): @@ -270,8 +270,8 @@ class DpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(DpkgSdk, self).__init__(d, manifest_dir) - self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt") - self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt-sdk") + self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt") + self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk") self.target_manifest = DpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -279,17 +279,17 @@ class DpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = DpkgPM(d, self.sdk_target_sysroot, - self.d.getVar("PACKAGE_ARCHS", True), - self.d.getVar("DPKG_ARCH", True), + self.d.getVar("PACKAGE_ARCHS"), + self.d.getVar("DPKG_ARCH"), self.target_conf_dir) self.host_pm = DpkgPM(d, self.sdk_host_sysroot, - self.d.getVar("SDK_PACKAGE_ARCHS", True), - self.d.getVar("DEB_SDK_ARCH", True), + self.d.getVar("SDK_PACKAGE_ARCHS"), + self.d.getVar("DEB_SDK_ARCH"), self.host_conf_dir) def _copy_apt_dir_to(self, dst_dir): - 
staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True) + staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE") self.remove(dst_dir, True) @@ -310,9 +310,9 @@ class DpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) @@ -322,7 +322,7 @@ class DpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, "etc", "apt")) @@ -341,26 +341,26 @@ class DpkgSdk(Sdk): def sdk_list_installed_packages(d, target, rootfs_dir=None): if rootfs_dir is None: - sdk_output = d.getVar('SDK_OUTPUT', True) - target_path = d.getVar('SDKTARGETSYSROOT', True).strip('/') + sdk_output = d.getVar('SDK_OUTPUT') + target_path = d.getVar('SDKTARGETSYSROOT').strip('/') rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": arch_var = ["SDK_PACKAGE_ARCHS", None][target is True] os_var = ["SDK_OS", None][target is True] return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs() elif img_type == "ipk": conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True] - return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var)).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() def populate_sdk(d, manifest_dir=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": RpmSdk(d, manifest_dir).populate() elif img_type == "ipk": diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index 8224e3a12e..e053c37e96 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py @@ -63,10 +63,10 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache): def sstate_lockedsigs(d): sigs = {} - types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split() + types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split() for t in types: siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t - lockedsigs = (d.getVar(siggen_lockedsigs_var, True) or "").split() + lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split() for ls in lockedsigs: pn, task, h = ls.split(":", 2) if pn not in sigs: @@ -77,8 +77,8 @@ def sstate_lockedsigs(d): class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): name = "OEBasic" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = 
(data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() pass def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None): return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache) @@ -86,15 +86,15 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): name = "OEBasicHash" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() self.lockedsigs = sstate_lockedsigs(data) self.lockedhashes = {} self.lockedpnmap = {} self.lockedhashfn = {} - self.machine = data.getVar("MACHINE", True) + self.machine = data.getVar("MACHINE") self.mismatch_msgs = [] - self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES", True) or + self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split() self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } pass @@ -224,13 +224,13 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" % (pn, sq_task[task], sq_hash[task])) - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") if checklevel == 'warn': warn_msgs += self.mismatch_msgs elif checklevel == 'error': error_msgs += self.mismatch_msgs - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK") if checklevel == 'warn': warn_msgs += sstate_missing_msgs elif checklevel == 'error': @@ -274,7 +274,7 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('EXTENDPE', '') - stamp = localdata.getVar('STAMP', True) + stamp = localdata.getVar('STAMP') if pn.startswith("gcc-source"): # gcc-source shared workdir is a special case :( stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") @@ -309,18 +309,18 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('BB_TASKHASH', hashval) - swspec = localdata.getVar('SSTATE_SWSPEC', True) + swspec = localdata.getVar('SSTATE_SWSPEC') if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") sstatename = taskname[3:] - filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename) + filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename) if hashval != '*': - sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2]) + sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR'), hashval[:2]) else: - sstatedir = d.getVar('SSTATE_DIR', True) + sstatedir = d.getVar('SSTATE_DIR') for root, dirs, files in os.walk(sstatedir): for fn in files: diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py index a89fa45691..0426e15834 100644 --- a/meta/lib/oe/terminal.py +++ b/meta/lib/oe/terminal.py @@ -196,7 +196,7 @@ class 
Custom(Terminal): priority = 3 def __init__(self, sh_cmd, title=None, env=None, d=None): - self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True) + self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD') if self.command: if not '{command}' in self.command: self.command += ' {command}' diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py index 2b095f1f0a..bb3f0e5d75 100644 --- a/meta/lib/oe/utils.py +++ b/meta/lib/oe/utils.py @@ -23,13 +23,13 @@ def ifelse(condition, iftrue = True, iffalse = False): return iffalse def conditional(variable, checkvalue, truevalue, falsevalue, d): - if d.getVar(variable, True) == checkvalue: + if d.getVar(variable) == checkvalue: return truevalue else: return falsevalue def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): - if float(d.getVar(variable, True)) <= float(checkvalue): + if float(d.getVar(variable)) <= float(checkvalue): return truevalue else: return falsevalue @@ -42,8 +42,8 @@ def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): return falsevalue def both_contain(variable1, variable2, checkvalue, d): - val1 = d.getVar(variable1, True) - val2 = d.getVar(variable2, True) + val1 = d.getVar(variable1) + val2 = d.getVar(variable2) val1 = set(val1.split()) val2 = set(val2.split()) if isinstance(checkvalue, str): @@ -66,8 +66,8 @@ def set_intersect(variable1, variable2, d): s3 = set_intersect(s1, s2) => s3 = "b c" """ - val1 = set(d.getVar(variable1, True).split()) - val2 = set(d.getVar(variable2, True).split()) + val1 = set(d.getVar(variable1).split()) + val2 = set(d.getVar(variable2).split()) return " ".join(val1 & val2) def prune_suffix(var, suffixes, d): @@ -77,7 +77,7 @@ def prune_suffix(var, suffixes, d): if var.endswith(suffix): var = var.replace(suffix, "") - prefix = d.getVar("MLPREFIX", True) + prefix = d.getVar("MLPREFIX") if prefix and var.startswith(prefix): var = var.replace(prefix, "") @@ -115,9 +115,9 @@ def features_backfill(var,d): # disturbing distributions that have already set DISTRO_FEATURES. # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED - features = (d.getVar(var, True) or "").split() - backfill = (d.getVar(var+"_BACKFILL", True) or "").split() - considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split() + features = (d.getVar(var) or "").split() + backfill = (d.getVar(var+"_BACKFILL") or "").split() + considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split() addfeatures = [] for feature in backfill: @@ -133,12 +133,12 @@ def packages_filter_out_system(d): Return a list of packages from PACKAGES with the "system" packages such as PN-dbg PN-doc PN-locale-eb-gb removed. 
""" - pn = d.getVar('PN', True) + pn = d.getVar('PN') blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] localepkg = pn + "-locale-" pkgs = [] - for pkg in d.getVar('PACKAGES', True).split(): + for pkg in d.getVar('PACKAGES').split(): if pkg not in blacklist and localepkg not in pkg: pkgs.append(pkg) return pkgs @@ -231,7 +231,7 @@ def format_pkg_list(pkg_dict, ret_format=None): return '\n'.join(output) def host_gcc_version(d): - compiler = d.getVar("BUILD_CC", True) + compiler = d.getVar("BUILD_CC") retval, output = getstatusoutput("%s --version" % compiler) if retval: bb.fatal("Error running %s --version: %s" % (compiler, output)) @@ -316,8 +316,8 @@ def write_ld_so_conf(d): bb.utils.remove(ldsoconf) bb.utils.mkdirhier(os.path.dirname(ldsoconf)) with open(ldsoconf, "w") as f: - f.write(d.getVar("base_libdir", True) + '\n') - f.write(d.getVar("libdir", True) + '\n') + f.write(d.getVar("base_libdir") + '\n') + f.write(d.getVar("libdir") + '\n') class ImageQAFailed(bb.build.FuncFailed): def __init__(self, description, name=None, logfile=None): |