Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r--  bitbake/lib/bb/cache.py  664
1 file changed, 371 insertions(+), 293 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index c6f3794d5e..ff42a37b44 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -29,27 +29,153 @@
 import os
+import logging
+from collections import defaultdict, namedtuple
 import bb.data
 import bb.utils

+logger = logging.getLogger("BitBake.Cache")
+
 try:
     import cPickle as pickle
 except ImportError:
     import pickle
-    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-
-__cache_version__ = "132"
-
-class Cache:
+    logger.info("Importing cPickle failed. "
+                "Falling back to a very slow implementation.")
+
+__cache_version__ = "136"
+
+recipe_fields = (
+    'pn',
+    'pv',
+    'pr',
+    'pe',
+    'defaultpref',
+    'depends',
+    'provides',
+    'task_deps',
+    'stamp',
+    'stamp_extrainfo',
+    'broken',
+    'not_world',
+    'skipped',
+    'timestamp',
+    'packages',
+    'packages_dynamic',
+    'rdepends',
+    'rdepends_pkg',
+    'rprovides',
+    'rprovides_pkg',
+    'rrecommends',
+    'rrecommends_pkg',
+    'nocache',
+    'variants',
+    'file_depends',
+    'tasks',
+    'basetaskhashes',
+    'hashfilename',
+)
+
+
+class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
+    __slots__ = ()
+
+    @classmethod
+    def listvar(cls, var, metadata):
+        return cls.getvar(var, metadata).split()
+
+    @classmethod
+    def intvar(cls, var, metadata):
+        return int(cls.getvar(var, metadata) or 0)
+
+    @classmethod
+    def depvar(cls, var, metadata):
+        return bb.utils.explode_deps(cls.getvar(var, metadata))
+
+    @classmethod
+    def pkgvar(cls, var, packages, metadata):
+        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+                    for pkg in packages)
+
+    @classmethod
+    def taskvar(cls, var, tasks, metadata):
+        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+                    for task in tasks)
+
+    @classmethod
+    def flaglist(cls, flag, varlist, metadata):
+        return dict((var, metadata.getVarFlag(var, flag, True))
+                    for var in varlist)
+
+    @classmethod
+    def getvar(cls, var, metadata):
+        return metadata.getVar(var, True) or ''
+
+    @classmethod
+    def make_optional(cls, default=None, **kwargs):
+        """Construct the namedtuple from the specified keyword arguments,
+        with every value considered optional, using the default value if
+        it was not specified."""
+        for field in cls._fields:
+            kwargs[field] = kwargs.get(field, default)
+        return cls(**kwargs)
+
+    @classmethod
+    def from_metadata(cls, filename, metadata):
+        if cls.getvar('__SKIPPED', metadata):
+            return cls.make_optional(skipped=True)
+
+        tasks = metadata.getVar('__BBTASKS', False)
+
+        pn = cls.getvar('PN', metadata)
+        packages = cls.listvar('PACKAGES', metadata)
+        if not pn in packages:
+            packages.append(pn)
+
+        return RecipeInfo(
+            tasks = tasks,
+            basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
+            hashfilename = cls.getvar('BB_HASHFILENAME', metadata),
+
+            file_depends = metadata.getVar('__depends', False),
+            task_deps = metadata.getVar('_task_deps', False) or
+                        {'tasks': [], 'parents': {}},
+            variants = cls.listvar('__VARIANTS', metadata) + [''],
+
+            skipped = False,
+            timestamp = bb.parse.cached_mtime(filename),
+            packages = cls.listvar('PACKAGES', metadata),
+            pn = pn,
+            pe = cls.getvar('PE', metadata),
+            pv = cls.getvar('PV', metadata),
+            pr = cls.getvar('PR', metadata),
+            nocache = cls.getvar('__BB_DONT_CACHE', metadata),
+            defaultpref = cls.intvar('DEFAULT_PREFERENCE', metadata),
+            broken = cls.getvar('BROKEN', metadata),
+            not_world = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
+            stamp = cls.getvar('STAMP', metadata),
+            stamp_extrainfo = cls.flaglist('stamp-extra-info', tasks, metadata),
+            packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
+            depends = cls.depvar('DEPENDS', metadata),
+            provides = cls.depvar('PROVIDES', metadata),
+            rdepends = cls.depvar('RDEPENDS', metadata),
+            rprovides = cls.depvar('RPROVIDES', metadata),
+            rrecommends = cls.depvar('RRECOMMENDS', metadata),
+            rprovides_pkg = cls.pkgvar('RPROVIDES', packages, metadata),
+            rdepends_pkg = cls.pkgvar('RDEPENDS', packages, metadata),
+            rrecommends_pkg = cls.pkgvar('RRECOMMENDS', packages, metadata),
+        )
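The RecipeInfo namedtuple above replaces the old per-variable dict cache with a fixed set of pre-computed fields. A minimal standalone sketch of the same pattern (MiniRecipeInfo, its three fields, and the values are invented for illustration, not part of BitBake):

    # Immutable record with no per-instance __dict__, so it is compact
    # and pickles cheaply -- the properties RecipeInfo relies on.
    from collections import namedtuple

    fields = ('pn', 'pv', 'skipped')

    class MiniRecipeInfo(namedtuple('MiniRecipeInfo', fields)):
        __slots__ = ()

        @classmethod
        def make_optional(cls, default=None, **kwargs):
            # Fill every unspecified field with the default value,
            # mirroring RecipeInfo.make_optional() above.
            for field in cls._fields:
                kwargs[field] = kwargs.get(field, default)
            return cls(**kwargs)

    skipped = MiniRecipeInfo.make_optional(skipped=True)
    print(skipped.pn)      # None: unset fields take the default
    info = MiniRecipeInfo(pn='zlib', pv='1.2.5', skipped=False)
    print(info.pv)         # 1.2.5

Skipped recipes carry no useful metadata, which is why from_metadata() can bail out early with make_optional(skipped=True).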
+
+
+class Cache(object):
     """
     BitBake Cache implementation
     """
-    def __init__(self, data):
-
+    def __init__(self, data):
         self.cachedir = bb.data.getVar("CACHE", data, True)
-        self.clean = {}
-        self.checked = {}
+        self.clean = set()
+        self.checked = set()
         self.depends_cache = {}
         self.data = None
         self.data_fn = None
@@ -57,92 +183,74 @@ class Cache:

         if self.cachedir in [None, '']:
             self.has_cache = False
-            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
+            logger.info("Not using a cache. "
+                        "Set CACHE = <directory> to enable.")
             return

         self.has_cache = True
         self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")

-        bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
+        logger.debug(1, "Using cache in '%s'", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)

         # If any of configuration.data's dependencies are newer than the
         # cache there isn't even any point in loading it...
         newest_mtime = 0
-        deps = bb.data.getVar("__depends", data)
+        deps = bb.data.getVar("__base_depends", data)

-        old_mtimes = [old_mtime for f, old_mtime in deps]
+        old_mtimes = [old_mtime for _, old_mtime in deps]
         old_mtimes.append(newest_mtime)
         newest_mtime = max(old_mtimes)

         if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
-            try:
-                p = pickle.Unpickler(file(self.cachefile, "rb"))
-                self.depends_cache, version_data = p.load()
-                if version_data['CACHE_VER'] != __cache_version__:
-                    raise ValueError('Cache Version Mismatch')
-                if version_data['BITBAKE_VER'] != bb.__version__:
-                    raise ValueError('Bitbake Version Mismatch')
-            except EOFError:
-                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
-                self.depends_cache = {}
-            except:
-                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
-                self.depends_cache = {}
-        else:
-            if os.path.isfile(self.cachefile):
-                bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
-
-    def getVar(self, var, fn, exp = 0):
-        """
-        Gets the value of a variable
-        (similar to getVar in the data class)
-
-        There are two scenarios:
-          1. We have cached data - serve from depends_cache[fn]
-          2. We're learning what data to cache - serve from data
-             backend but add a copy of the data to the cache.
-        """
-        if fn in self.clean:
-            return self.depends_cache[fn][var]
-
-        self.depends_cache.setdefault(fn, {})
-
-        if fn != self.data_fn:
-            # We're trying to access data in the cache which doesn't exist
-            # yet setData hasn't been called to setup the right access. Very bad.
-            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
-
-        self.cacheclean = False
-        result = bb.data.getVar(var, self.data, exp)
-        self.depends_cache[fn][var] = result
-        return result
-
-    def setData(self, virtualfn, fn, data):
-        """
-        Called to prime bb_cache ready to learn which variables to cache.
-        Will be followed by calls to self.getVar which aren't cached
-        but can be fulfilled from self.data.
-        """
-        self.data_fn = virtualfn
-        self.data = data
-
-        # Make sure __depends makes the depends_cache
-        # If we're a virtual class we need to make sure all our depends are appended
-        # to the depends of fn.
-        depends = self.getVar("__depends", virtualfn) or set()
-        self.depends_cache.setdefault(fn, {})
-        if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
-            self.depends_cache[fn]["__depends"] = depends
-        else:
-            self.depends_cache[fn]["__depends"].update(depends)
-
-        # Make sure the variants always make it into the cache too
-        self.getVar('__VARIANTS', virtualfn, True)
+            self.load_cachefile()
+        elif os.path.isfile(self.cachefile):
+            logger.info("Out of date cache found, rebuilding...")

-        self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
-
-    def virtualfn2realfn(self, virtualfn):
+    def load_cachefile(self):
+        with open(self.cachefile, "rb") as cachefile:
+            pickled = pickle.Unpickler(cachefile)
+            try:
+                cache_ver = pickled.load()
+                bitbake_ver = pickled.load()
+            except Exception:
+                logger.info('Invalid cache, rebuilding...')
+                return
+
+            if cache_ver != __cache_version__:
+                logger.info('Cache version mismatch, rebuilding...')
+                return
+            elif bitbake_ver != bb.__version__:
+                logger.info('Bitbake version mismatch, rebuilding...')
+                return
+
+            cachesize = os.fstat(cachefile.fileno()).st_size
+            bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+
+            previous_percent = 0
+            while cachefile:
+                try:
+                    key = pickled.load()
+                    value = pickled.load()
+                except Exception:
+                    break
+
+                self.depends_cache[key] = value
+
+                # only fire events on even percentage boundaries
+                current_progress = cachefile.tell()
+                current_percent = 100 * current_progress / cachesize
+                if current_percent > previous_percent:
+                    previous_percent = current_percent
+                    bb.event.fire(bb.event.CacheLoadProgress(current_progress),
+                                  self.data)
+
+            bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
+                                                      len(self.depends_cache)),
+                          self.data)
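load_cachefile() above reads the cache as a stream of individual pickle records (two version headers, then alternating key/value dumps) rather than one monolithic dict, which is what makes the CacheLoadProgress events possible. A standalone sketch of that on-disk layout, with plain dicts standing in for RecipeInfo and no bb.event machinery (the version strings here are made up):

    import os
    import pickle
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "bb_cache.dat")

    # Writer: mirrors sync() -- version headers, then key/value pairs.
    with open(path, "wb") as f:
        pickler = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
        pickler.dump("136")        # cache version header
        pickler.dump("1.11.0")     # bitbake version header
        for key, value in {"a.bb": 1, "b.bb": 2}.items():
            pickler.dump(key)
            pickler.dump(value)

    # Reader: mirrors load_cachefile() -- load records until EOF.
    cache = {}
    with open(path, "rb") as f:
        unpickler = pickle.Unpickler(f)
        cache_ver = unpickler.load()
        bitbake_ver = unpickler.load()
        while True:
            try:
                key = unpickler.load()
                value = unpickler.load()
            except EOFError:
                break
            cache[key] = value

    print(cache)   # {'a.bb': 1, 'b.bb': 2}

Because each record is loaded separately, cachefile.tell() gives a cheap progress measure against the file size obtained from os.fstat().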
- """ - self.data_fn = virtualfn - self.data = data - - # Make sure __depends makes the depends_cache - # If we're a virtual class we need to make sure all our depends are appended - # to the depends of fn. - depends = self.getVar("__depends", virtualfn) or set() - self.depends_cache.setdefault(fn, {}) - if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]: - self.depends_cache[fn]["__depends"] = depends - else: - self.depends_cache[fn]["__depends"].update(depends) - - # Make sure the variants always make it into the cache too - self.getVar('__VARIANTS', virtualfn, True) + self.load_cachefile() + elif os.path.isfile(self.cachefile): + logger.info("Out of date cache found, rebuilding...") - self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn) - - def virtualfn2realfn(self, virtualfn): + def load_cachefile(self): + with open(self.cachefile, "rb") as cachefile: + pickled = pickle.Unpickler(cachefile) + try: + cache_ver = pickled.load() + bitbake_ver = pickled.load() + except Exception: + logger.info('Invalid cache, rebuilding...') + return + + if cache_ver != __cache_version__: + logger.info('Cache version mismatch, rebuilding...') + return + elif bitbake_ver != bb.__version__: + logger.info('Bitbake version mismatch, rebuilding...') + return + + cachesize = os.fstat(cachefile.fileno()).st_size + bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data) + + previous_percent = 0 + while cachefile: + try: + key = pickled.load() + value = pickled.load() + except Exception: + break + + self.depends_cache[key] = value + + # only fire events on even percentage boundaries + current_progress = cachefile.tell() + current_percent = 100 * current_progress / cachesize + if current_percent > previous_percent: + previous_percent = current_percent + bb.event.fire(bb.event.CacheLoadProgress(current_progress), + self.data) + + bb.event.fire(bb.event.CacheLoadCompleted(cachesize, + len(self.depends_cache)), + self.data) + + @staticmethod + def virtualfn2realfn(virtualfn): """ Convert a virtual file name to a real one + the associated subclass keyword """ @@ -152,79 +260,94 @@ class Cache: if virtualfn.startswith('virtual:'): cls = virtualfn.split(':', 2)[1] fn = virtualfn.replace('virtual:' + cls + ':', '') - #bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls)) return (fn, cls) - def realfn2virtual(self, realfn, cls): + @staticmethod + def realfn2virtual(realfn, cls): """ Convert a real filename + the associated subclass keyword to a virtual filename """ if cls == "": - #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn)) return realfn - #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn)) return "virtual:" + cls + ":" + realfn - def loadDataFull(self, virtualfn, appends, cfgData): + @classmethod + def loadDataFull(cls, virtualfn, appends, cfgData): """ Return a complete set of data for fn. To do this, we need to parse the file. 
""" - (fn, cls) = self.virtualfn2realfn(virtualfn) - - bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn) + (fn, virtual) = cls.virtualfn2realfn(virtualfn) + + logger.debug(1, "Parsing %s (full)", fn) + + bb_data = cls.load_bbfile(fn, appends, cfgData) + return bb_data[virtual] + + @classmethod + def parse(cls, filename, appends, configdata): + """Parse the specified filename, returning the recipe information""" + infos = [] + datastores = cls.load_bbfile(filename, appends, configdata) + depends = set() + for variant, data in sorted(datastores.iteritems(), + key=lambda i: i[0], + reverse=True): + virtualfn = cls.realfn2virtual(filename, variant) + depends |= (data.getVar("__depends", False) or set()) + if depends and not variant: + data.setVar("__depends", depends) + info = RecipeInfo.from_metadata(filename, data) + infos.append((virtualfn, info)) + return infos + + def load(self, filename, appends, configdata): + """Obtain the recipe information for the specified filename, + using cached values if available, otherwise parsing. + + Note that if it does parse to obtain the info, it will not + automatically add the information to the cache or to your + CacheData. Use the add or add_info method to do so after + running this, or use loadData instead.""" + cached = self.cacheValid(filename) + if cached: + infos = [] + info = self.depends_cache[filename] + for variant in info.variants: + virtualfn = self.realfn2virtual(filename, variant) + infos.append((virtualfn, self.depends_cache[virtualfn])) + else: + logger.debug(1, "Parsing %s", filename) + return self.parse(filename, appends, configdata) - bb_data = self.load_bbfile(fn, appends, cfgData) - return bb_data[cls] + return cached, infos def loadData(self, fn, appends, cfgData, cacheData): - """ - Load a subset of data for fn. - If the cached data is valid we do nothing, - To do this, we need to parse the file and set the system - to record the variables accessed. - Return the cache status and whether the file was skipped when parsed - """ - skipped = 0 - virtuals = 0 - - if fn not in self.checked: - self.cacheValidUpdate(fn) - - if self.cacheValid(fn): - multi = self.getVar('__VARIANTS', fn, True) - for cls in (multi or "").split() + [""]: - virtualfn = self.realfn2virtual(fn, cls) - if self.depends_cache[virtualfn]["__SKIPPED"]: - skipped += 1 - bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn) - continue - self.handle_data(virtualfn, cacheData) - virtuals += 1 - return True, skipped, virtuals - - bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn) - - bb_data = self.load_bbfile(fn, appends, cfgData) - - for data in bb_data: - virtualfn = self.realfn2virtual(fn, data) - self.setData(virtualfn, fn, bb_data[data]) - if self.getVar("__SKIPPED", virtualfn): + """Load the recipe info for the specified filename, + parsing and adding to the cache if necessary, and adding + the recipe information to the supplied CacheData instance.""" + skipped, virtuals = 0, 0 + + cached, infos = self.load(fn, appends, cfgData) + for virtualfn, info in infos: + if info.skipped: + logger.debug(1, "Skipping %s", virtualfn) skipped += 1 - bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn) else: - self.handle_data(virtualfn, cacheData) + self.add_info(virtualfn, info, cacheData, not cached) virtuals += 1 - return False, skipped, virtuals + return cached, skipped, virtuals def cacheValid(self, fn): """ Is the cache valid for fn? Fast version, no timestamps checked. 
""" + if fn not in self.checked: + self.cacheValidUpdate(fn) + # Is cache enabled? if not self.has_cache: return False @@ -241,70 +364,67 @@ class Cache: if not self.has_cache: return False - self.checked[fn] = "" - - # Pretend we're clean so getVar works - self.clean[fn] = "" + self.checked.add(fn) # File isn't in depends_cache if not fn in self.depends_cache: - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn) - self.remove(fn) + logger.debug(2, "Cache: %s is not cached", fn) return False mtime = bb.parse.cached_mtime_noerror(fn) # Check file still exists if mtime == 0: - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn) + logger.debug(2, "Cache: %s no longer exists", fn) self.remove(fn) return False + info = self.depends_cache[fn] # Check the file's timestamp - if mtime != self.getVar("CACHETIMESTAMP", fn, True): - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn) + if mtime != info.timestamp: + logger.debug(2, "Cache: %s changed", fn) self.remove(fn) return False # Check dependencies are still valid - depends = self.getVar("__depends", fn, True) + depends = info.file_depends if depends: for f, old_mtime in depends: fmtime = bb.parse.cached_mtime_noerror(f) # Check if file still exists if old_mtime != 0 and fmtime == 0: + logger.debug(2, "Cache: %s's dependency %s was removed", + fn, f) self.remove(fn) return False if (fmtime != old_mtime): - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f)) + logger.debug(2, "Cache: %s's dependency %s changed", + fn, f) self.remove(fn) return False - #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn) - if not fn in self.clean: - self.clean[fn] = "" - invalid = False - # Mark extended class data as clean too - multi = self.getVar('__VARIANTS', fn, True) - for cls in (multi or "").split(): + for cls in info.variants: virtualfn = self.realfn2virtual(fn, cls) - self.clean[virtualfn] = "" - if not virtualfn in self.depends_cache: - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % virtualfn) + self.clean.add(virtualfn) + if virtualfn not in self.depends_cache: + logger.debug(2, "Cache: %s is not cached", virtualfn) invalid = True - # If any one of the varients is not present, mark cache as invalid for all + # If any one of the variants is not present, mark as invalid for all if invalid: - for cls in (multi or "").split(): + for cls in info.variants: virtualfn = self.realfn2virtual(fn, cls) - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % virtualfn) - del self.clean[virtualfn] - bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % fn) - del self.clean[fn] + if virtualfn in self.clean: + logger.debug(2, "Cache: Removing %s from cache", virtualfn) + self.clean.remove(virtualfn) + if fn in self.clean: + logger.debug(2, "Cache: Marking %s as not clean", fn) + self.clean.remove(fn) return False + self.clean.add(fn) return True def remove(self, fn): @@ -312,154 +432,61 @@ class Cache: Remove a fn from the cache Called from the parser in error cases """ - bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn) if fn in self.depends_cache: + logger.debug(1, "Removing %s from cache", fn) del self.depends_cache[fn] if fn in self.clean: - del self.clean[fn] + logger.debug(1, "Marking %s as unclean", fn) + self.clean.remove(fn) def sync(self): """ Save the cache Called from the parser when complete (or exiting) """ - import copy if not self.has_cache: return if self.cacheclean: - bb.msg.note(1, 

     def sync(self):
         """
         Save the cache
         Called from the parser when complete (or exiting)
         """
-        import copy

         if not self.has_cache:
             return

         if self.cacheclean:
-            bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
+            logger.debug(2, "Cache is clean, not saving.")
             return

-        version_data = {}
-        version_data['CACHE_VER'] = __cache_version__
-        version_data['BITBAKE_VER'] = bb.__version__
-
-        cache_data = copy.copy(self.depends_cache)
-        for fn in self.depends_cache:
-            if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
-                bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
-                del cache_data[fn]
-            elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
-                bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
-                del cache_data[fn]
+        with open(self.cachefile, "wb") as cachefile:
+            pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+            pickler.dump(__cache_version__)
+            pickler.dump(bb.__version__)
+            for key, value in self.depends_cache.iteritems():
+                pickler.dump(key)
+                pickler.dump(value)

-        p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
-        p.dump([cache_data, version_data])
+        del self.depends_cache

-    def mtime(self, cachefile):
+    @staticmethod
+    def mtime(cachefile):
         return bb.parse.cached_mtime_noerror(cachefile)

-    def handle_data(self, file_name, cacheData):
+    def add_info(self, filename, info, cacheData, parsed=None):
+        cacheData.add_from_recipeinfo(filename, info)
+        if not self.has_cache:
+            return
+
+        if 'SRCREVINACTION' not in info.pv and not info.nocache:
+            if parsed:
+                self.cacheclean = False
+            self.depends_cache[filename] = info
+
+    def add(self, file_name, data, cacheData, parsed=None):
         """
         Save data we need into the cache
         """
-        pn = self.getVar('PN', file_name, True)
-        pe = self.getVar('PE', file_name, True) or "0"
-        pv = self.getVar('PV', file_name, True)
-        if 'SRCREVINACTION' in pv:
-            bb.msg.note(1, bb.msg.domain.Cache, "Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
-        pr = self.getVar('PR', file_name, True)
-        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
-        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
-        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
-        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
-        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
-
-        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name)
-
-        # build PackageName to FileName lookup table
-        if pn not in cacheData.pkg_pn:
-            cacheData.pkg_pn[pn] = []
-        cacheData.pkg_pn[pn].append(file_name)
-
-        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
-
-        cacheData.tasks[file_name] = self.getVar('__BBTASKS', file_name, True)
-        for t in cacheData.tasks[file_name]:
-            cacheData.basetaskhash[file_name + "." + t] = self.getVar("BB_BASEHASH_task-%s" % t, file_name, True)
-
-        # build FileName to PackageName lookup table
-        cacheData.pkg_fn[file_name] = pn
-        cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
-        cacheData.pkg_dp[file_name] = dp
-
-        provides = [pn]
-        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
-            if provide not in provides:
-                provides.append(provide)
-
-        # Build forward and reverse provider hashes
-        # Forward: virtual -> [filenames]
-        # Reverse: PN -> [virtuals]
-        if pn not in cacheData.pn_provides:
-            cacheData.pn_provides[pn] = []
-
-        cacheData.fn_provides[file_name] = provides
-        for provide in provides:
-            if provide not in cacheData.providers:
-                cacheData.providers[provide] = []
-            cacheData.providers[provide].append(file_name)
-            if not provide in cacheData.pn_provides[pn]:
-                cacheData.pn_provides[pn].append(provide)
-
-        cacheData.deps[file_name] = []
-        for dep in depends:
-            if not dep in cacheData.deps[file_name]:
-                cacheData.deps[file_name].append(dep)
-            if not dep in cacheData.all_depends:
-                cacheData.all_depends.append(dep)
-
-        # Build reverse hash for PACKAGES, so runtime dependencies
-        # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
-        for package in packages:
-            if not package in cacheData.packages:
-                cacheData.packages[package] = []
-            cacheData.packages[package].append(file_name)
-            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
-
-        for package in packages_dynamic:
-            if not package in cacheData.packages_dynamic:
-                cacheData.packages_dynamic[package] = []
-            cacheData.packages_dynamic[package].append(file_name)
-
-        for rprovide in rprovides:
-            if not rprovide in cacheData.rproviders:
-                cacheData.rproviders[rprovide] = []
-            cacheData.rproviders[rprovide].append(file_name)
-
-        # Build hash of runtime depends and rececommends
-
-        if not file_name in cacheData.rundeps:
-            cacheData.rundeps[file_name] = {}
-        if not file_name in cacheData.runrecs:
-            cacheData.runrecs[file_name] = {}
-
-        rdepends = self.getVar('RDEPENDS', file_name, True) or ""
-        rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
-        for package in packages + [pn]:
-            if not package in cacheData.rundeps[file_name]:
-                cacheData.rundeps[file_name][package] = []
-            if not package in cacheData.runrecs[file_name]:
-                cacheData.runrecs[file_name][package] = []
-
-            cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
-            cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")
-
-        # Collect files we may need for possible world-dep
-        # calculations
-        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
-            cacheData.possible_world.append(file_name)
+        realfn = self.virtualfn2realfn(file_name)[0]
+        info = RecipeInfo.from_metadata(realfn, data)
+        self.add_info(file_name, info, cacheData, parsed)

-        cacheData.hashfn[file_name] = self.getVar('BB_HASHFILENAME', file_name, True)
-
-        # Touch this to make sure its in the cache
-        self.getVar('__BB_DONT_CACHE', file_name, True)
-        self.getVar('__VARIANTS', file_name, True)
-
-    def load_bbfile(self, bbfile, appends, config):
+    @staticmethod
+    def load_bbfile(bbfile, appends, config):
         """
         Load and parse one .bb build file
         Return the data and whether parsing resulted in the file being skipped
@@ -485,13 +512,16 @@ class Cache:
         try:
             if appends:
                 data.setVar('__BBAPPEND', " ".join(appends), bb_data)
-            bb_data = parse.handle(bbfile, bb_data) # read .bb data
-            if chdir_back: os.chdir(oldpath)
+            bb_data = parse.handle(bbfile, bb_data)
+            if chdir_back:
+                os.chdir(oldpath)
             return bb_data
         except:
-            if chdir_back: os.chdir(oldpath)
+            if chdir_back:
+                os.chdir(oldpath)
             raise

+
 def init(cooker):
     """
     The Objective: Cache the minimum amount of data possible yet get to the
@@ -512,11 +542,7 @@
     return Cache(cooker.configuration.data)

-
-#============================================================================#
-# CacheData
-#============================================================================#
-class CacheData:
+class CacheData(object):
     """
     The data structures we compile from the cached data
     """
@@ -524,26 +550,26 @@ class CacheData:
     def __init__(self):
         """
         Direct cache variables
-        (from Cache.handle_data)
         """
-        self.providers = {}
-        self.rproviders = {}
-        self.packages = {}
-        self.packages_dynamic = {}
+        self.providers = defaultdict(list)
+        self.rproviders = defaultdict(list)
+        self.packages = defaultdict(list)
+        self.packages_dynamic = defaultdict(list)
         self.possible_world = []
-        self.pkg_pn = {}
+        self.pkg_pn = defaultdict(list)
         self.pkg_fn = {}
         self.pkg_pepvpr = {}
         self.pkg_dp = {}
-        self.pn_provides = {}
+        self.pn_provides = defaultdict(list)
         self.fn_provides = {}
         self.all_depends = []
-        self.deps = {}
-        self.rundeps = {}
-        self.runrecs = {}
+        self.deps = defaultdict(list)
+        self.rundeps = defaultdict(lambda: defaultdict(list))
+        self.runrecs = defaultdict(lambda: defaultdict(list))
         self.task_queues = {}
         self.task_deps = {}
         self.stamp = {}
+        self.stamp_extrainfo = {}
         self.preferred = {}
         self.tasks = {}
         self.basetaskhash = {}
@@ -557,3 +583,55 @@ class CacheData:
         self.world_target = set()
         self.bbfile_priority = {}
         self.bbfile_config_priorities = []
+
+    def add_from_recipeinfo(self, fn, info):
+        self.task_deps[fn] = info.task_deps
+        self.pkg_fn[fn] = info.pn
+        self.pkg_pn[info.pn].append(fn)
+        self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
+        self.pkg_dp[fn] = info.defaultpref
+        self.stamp[fn] = info.stamp
+        self.stamp_extrainfo[fn] = info.stamp_extrainfo
+
+        provides = [info.pn]
+        for provide in info.provides:
+            if provide not in provides:
+                provides.append(provide)
+        self.fn_provides[fn] = provides
+
+        for provide in provides:
+            self.providers[provide].append(fn)
+            if provide not in self.pn_provides[info.pn]:
+                self.pn_provides[info.pn].append(provide)
+
+        for dep in info.depends:
+            if dep not in self.deps[fn]:
+                self.deps[fn].append(dep)
+            if dep not in self.all_depends:
+                self.all_depends.append(dep)
+
+        rprovides = info.rprovides
+        for package in info.packages:
+            self.packages[package].append(fn)
+            rprovides += info.rprovides_pkg[package]
+
+        for rprovide in rprovides:
+            self.rproviders[rprovide].append(fn)
+
+        for package in info.packages_dynamic:
+            self.packages_dynamic[package].append(fn)
+
+        # Build hash of runtime depends and recommends
+        for package in info.packages + [info.pn]:
+            self.rundeps[fn][package] = list(info.rdepends) + info.rdepends_pkg[package]
+            self.runrecs[fn][package] = list(info.rrecommends) + info.rrecommends_pkg[package]
+
+        # Collect files we may need for possible world-dep
+        # calculations
+        if not info.broken and not info.not_world:
+            self.possible_world.append(fn)
+
+        self.hashfn[fn] = info.hashfilename
+        for task, taskhash in info.basetaskhashes.iteritems():
+            identifier = '%s.%s' % (fn, task)
+            self.basetaskhash[identifier] = taskhash
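The switch to defaultdict in CacheData.__init__ is what lets add_from_recipeinfo() drop all of the old "if key not in d: d[key] = []" boilerplate from handle_data(). A short illustration (the recipe and package names are invented):

    from collections import defaultdict

    providers = defaultdict(list)
    providers['virtual/libc'].append('glibc_2.10.bb')   # no existence check

    # rundeps/runrecs are two levels deep. The outer default factory must
    # be a zero-argument callable that returns a *fresh* inner
    # defaultdict(list) each time, hence the lambda.
    rundeps = defaultdict(lambda: defaultdict(list))
    rundeps['glibc_2.10.bb']['glibc-dev'].append('glibc')
    print(dict(rundeps['glibc_2.10.bb']))   # {'glibc-dev': ['glibc']}

One caveat of the lambda factory is that such a defaultdict cannot itself be pickled, which appears harmless here: sync() pickles the RecipeInfo entries in depends_cache, not the CacheData instance.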