author | Richard Purdie <richard@openedhand.com> | 2008-03-14 11:44:34 +0000
---|---|---
committer | Richard Purdie <richard@openedhand.com> | 2008-03-14 11:44:34 +0000
commit | 853280f161ed24f9b976a6f53739c08ac2362ddb (patch) |
tree | ae67e05e66c92dd3fc668b1976084a628bc99882 /bitbake/lib/bb |
parent | 097076da2349e638342e69e465c85683adb6ccc7 (diff) |
bitbake: Update with changes from bitbake 1.8 branch
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@4006 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r-- | bitbake/lib/bb/cache.py | 78
-rw-r--r-- | bitbake/lib/bb/cooker.py | 7
-rw-r--r-- | bitbake/lib/bb/runqueue.py | 130
-rw-r--r-- | bitbake/lib/bb/taskdata.py | 14
4 files changed, 147 insertions, 82 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 60e863d52d..9acd301f52 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -80,7 +80,7 @@ class Cache:
             if old_mtime > newest_mtime:
                 newest_mtime = old_mtime

-        if self.mtime(self.cachefile) >= newest_mtime:
+        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
             try:
                 p = pickle.Unpickler(file(self.cachefile, "rb"))
                 self.depends_cache, version_data = p.load()
@@ -91,7 +91,7 @@ class Cache:
             except EOFError:
                 bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                 self.depends_cache = {}
-            except (ValueError, KeyError):
+            except:
                 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                 self.depends_cache = {}
         else:
@@ -199,31 +199,34 @@ class Cache:
             self.remove(fn)
             return False

+        mtime = bb.parse.cached_mtime_noerror(fn)
+
         # Check file still exists
-        if self.mtime(fn) == 0:
+        if mtime == 0:
             bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
             self.remove(fn)
             return False

         # Check the file's timestamp
-        if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
+        if mtime > self.getVar("CACHETIMESTAMP", fn, True):
             bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
             self.remove(fn)
             return False

         # Check dependencies are still valid
         depends = self.getVar("__depends", fn, True)
-        for f,old_mtime in depends:
-            # Check if file still exists
-            if self.mtime(f) == 0:
-                self.remove(fn)
-                return False
-
-            new_mtime = bb.parse.cached_mtime(f)
-            if (new_mtime > old_mtime):
-                bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
-                self.remove(fn)
-                return False
+        if depends:
+            for f,old_mtime in depends:
+                fmtime = bb.parse.cached_mtime_noerror(f)
+                # Check if file still exists
+                if fmtime == 0:
+                    self.remove(fn)
+                    return False
+
+                if (fmtime > old_mtime):
+                    bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
+                    self.remove(fn)
+                    return False

         #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
         if not fn in self.clean:
@@ -284,7 +287,6 @@ class Cache:
         pv = self.getVar('PV', file_name, True)
         pr = self.getVar('PR', file_name, True)
         dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
-        provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
         depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
         packages = (self.getVar('PACKAGES', file_name, True) or "").split()
         packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
@@ -304,24 +306,31 @@ class Cache:
         cacheData.pkg_pepvpr[file_name] = (pe,pv,pr)
         cacheData.pkg_dp[file_name] = dp

+        provides = [pn]
+        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
+            if provide not in provides:
+                provides.append(provide)
+
         # Build forward and reverse provider hashes
         # Forward: virtual -> [filenames]
         # Reverse: PN -> [virtuals]
         if pn not in cacheData.pn_provides:
-            cacheData.pn_provides[pn] = Set()
-        cacheData.pn_provides[pn] |= provides
+            cacheData.pn_provides[pn] = []

-        cacheData.fn_provides[file_name] = Set()
+        cacheData.fn_provides[file_name] = provides
         for provide in provides:
             if provide not in cacheData.providers:
                 cacheData.providers[provide] = []
             cacheData.providers[provide].append(file_name)
-            cacheData.fn_provides[file_name].add(provide)
+            if not provide in cacheData.pn_provides[pn]:
+                cacheData.pn_provides[pn].append(provide)

-        cacheData.deps[file_name] = Set()
+        cacheData.deps[file_name] = []
         for dep in depends:
-            cacheData.all_depends.add(dep)
-            cacheData.deps[file_name].add(dep)
+            if not dep in cacheData.deps[file_name]:
+                cacheData.deps[file_name].append(dep)
+            if not dep in cacheData.all_depends:
+                cacheData.all_depends.append(dep)

         # Build reverse hash for PACKAGES, so runtime dependencies
         # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
@@ -343,28 +352,21 @@ class Cache:

         # Build hash of runtime depends and rececommends

-        def add_dep(deplist, deps):
-            for dep in deps:
-                if not dep in deplist:
-                    deplist[dep] = ""
-
         if not file_name in cacheData.rundeps:
             cacheData.rundeps[file_name] = {}
         if not file_name in cacheData.runrecs:
             cacheData.runrecs[file_name] = {}

-        rdepends = bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")
-        rrecommends = bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")
+        rdepends = self.getVar('RDEPENDS', file_name, True) or ""
+        rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
         for package in packages + [pn]:
             if not package in cacheData.rundeps[file_name]:
-                cacheData.rundeps[file_name][package] = {}
+                cacheData.rundeps[file_name][package] = []
             if not package in cacheData.runrecs[file_name]:
-                cacheData.runrecs[file_name][package] = {}
+                cacheData.runrecs[file_name][package] = []

-            add_dep(cacheData.rundeps[file_name][package], rdepends)
-            add_dep(cacheData.runrecs[file_name][package], rrecommends)
-            add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
-            add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
+            cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
+            cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")

         # Collect files we may need for possible world-dep
         # calculations
@@ -385,7 +387,7 @@ class Cache:
         data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
         bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
         oldpath = os.path.abspath(os.getcwd())
-        if self.mtime(bbfile_loc):
+        if bb.parse.cached_mtime_noerror(bbfile_loc):
             os.chdir(bbfile_loc)
         bb_data = data.init_db(config)
         try:
@@ -444,7 +446,7 @@ class CacheData:
         self.pkg_dp = {}
         self.pn_provides = {}
         self.fn_provides = {}
-        self.all_depends = Set()
+        self.all_depends = []
         self.deps = {}
         self.rundeps = {}
         self.runrecs = {}
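The cache.py change above switches every stat to `bb.parse.cached_mtime_noerror()` and only walks the recorded `__depends` list when one exists. The validity test it implements reduces to: a cached entry is stale if the recipe file, or any file recorded with it, is missing or newer than the timestamp stored when the entry was written. A minimal standalone sketch of that pattern, using plain `os.stat` instead of BitBake's cached helpers (the function names and the `(path, old_mtime)` layout here are illustrative, not the real API):

```python
import os

def mtime_noerror(path):
    # Return a file's mtime, or 0 if it cannot be stat'd (e.g. file removed).
    try:
        return os.stat(path).st_mtime
    except OSError:
        return 0

def entry_is_clean(recipe_path, recorded_deps, cache_timestamp):
    # The entry is stale if the recipe itself is gone or newer than the
    # cache, or if any recorded dependency is gone or newer than the mtime
    # noted when the entry was written. recorded_deps mirrors __depends:
    # a list of (path, old_mtime) pairs, possibly empty or None.
    mtime = mtime_noerror(recipe_path)
    if mtime == 0 or mtime > cache_timestamp:
        return False
    for path, old_mtime in recorded_deps or []:
        dep_mtime = mtime_noerror(path)
        if dep_mtime == 0 or dep_mtime > old_mtime:
            return False
    return True
```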
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 38a8209760..619791f174 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -473,11 +473,11 @@ class BBCooker:

         # Load data into the cache for fn
         self.bb_cache = bb.cache.init(self)
-        self.bb_cache.loadData(fn, self.configuration.data)
+        self.bb_cache.loadData(fn, self.configuration.data)

         # Parse the loaded cache data
         self.status = bb.cache.CacheData()
-        self.bb_cache.handle_data(fn, self.status)
+        self.bb_cache.handle_data(fn, self.status)

         # Tweak some variables
         item = self.bb_cache.getVar('PN', fn, True)
@@ -493,7 +493,7 @@ class BBCooker:
         # Remove stamp for target if force mode active
         if self.configuration.force:
             bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (self.configuration.cmd, fn))
-            bb.build.del_stamp('do_%s' % self.configuration.cmd, bbfile_data)
+            bb.build.del_stamp('do_%s' % self.configuration.cmd, self.configuration.data)

         # Setup taskdata structure
         taskdata = bb.taskdata.TaskData(self.configuration.abort)
@@ -573,6 +573,7 @@ class BBCooker:
         bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
         (filelist, masked) = self.collect_bbfiles()
+        bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
         self.parse_bbfiles(filelist, masked, self.myProgressCallback)
         bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")

diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 2765343a3e..7b3defd343 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -317,6 +317,7 @@ class RunQueue:

         depends = []
         runq_build = []
+        recursive_tdepends = {}

         taskData = self.taskData

@@ -382,14 +383,45 @@ class RunQueue:
             # e.g. do_sometask[depends] = "targetname:do_someothertask"
             # (makes sure sometask runs after targetname's someothertask)
             idepends = taskData.tasks_idepends[task]
-            for idepend in idepends:
-                depid = int(idepend.split(":")[0])
+            for (depid, idependtask) in idepends:
                 if depid in taskData.build_targets:
                     # Won't be in build_targets if ASSUME_PROVIDED
                     depdata = taskData.build_targets[depid][0]
                     if depdata is not None:
                         dep = taskData.fn_index[depdata]
-                        depends.append(taskData.gettask_id(dep, idepend.split(":")[1]))
+                        depends.append(taskData.gettask_id(dep, idependtask))
+
+        # Create a list of recursive dependent tasks (from tdepends) and cache
+        def get_recursive_tdepends(task):
+            if not task:
+                return []
+            if task in recursive_tdepends:
+                return recursive_tdepends[task]
+            rectdepends = [task]
+            nextdeps = [task]
+            while len(nextdeps) != 0:
+                newdeps = []
+                for nextdep in nextdeps:
+                    for tdepend in taskData.tasks_tdepends[nextdep]:
+                        if tdepend not in rectdepends:
+                            rectdepends.append(tdepend)
+                            newdeps.append(tdepend)
+                nextdeps = newdeps
+            recursive_tdepends[task] = rectdepends
+            return rectdepends
+
+        # Using the list of tdepends for this task create a list of
+        # the recursive idepends we have
+        def get_recursive_idepends(task):
+            if not task:
+                return []
+            rectdepends = get_recursive_tdepends(task)
+
+            recidepends = []
+            for tdepend in rectdepends:
+                for idepend in taskData.tasks_idepends[tdepend]:
+                    recidepends.append(idepend)
+            return recidepends

         def add_recursive_build(depid, depfnid):
             """
@@ -404,13 +436,11 @@ class RunQueue:
                 depdata = taskData.build_targets[depid][0]
                 if depdata is not None:
                     dep = taskData.fn_index[depdata]
-                    idepends = []
                     # Need to avoid creating new tasks here
                     taskid = taskData.gettask_id(dep, taskname, False)
                     if taskid is not None:
                         depends.append(taskid)
                         fnid = taskData.tasks_fnid[taskid]
-                        idepends = taskData.tasks_idepends[taskid]
                         #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
                     else:
                         fnid = taskData.getfn_id(dep)
@@ -420,10 +450,9 @@ class RunQueue:
                 for nextdepid in taskData.rdepids[fnid]:
                     if nextdepid not in rdep_seen:
                         add_recursive_run(nextdepid, fnid)
-                for idepend in idepends:
-                    nextdepid = int(idepend.split(":")[0])
-                    if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid, fnid)
+                for (idependid, idependtask) in get_recursive_idepends(taskid):
+                    if idependid not in dep_seen:
+                        add_recursive_build(idependid, fnid)

         def add_recursive_run(rdepid, depfnid):
             """
@@ -438,13 +467,11 @@ class RunQueue:
                 depdata = taskData.run_targets[rdepid][0]
                 if depdata is not None:
                     dep = taskData.fn_index[depdata]
-                    idepends = []
                     # Need to avoid creating new tasks here
                     taskid = taskData.gettask_id(dep, taskname, False)
                     if taskid is not None:
                         depends.append(taskid)
                         fnid = taskData.tasks_fnid[taskid]
-                        idepends = taskData.tasks_idepends[taskid]
                         #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
                     else:
                         fnid = taskData.getfn_id(dep)
@@ -454,10 +481,9 @@ class RunQueue:
                 for nextdepid in taskData.rdepids[fnid]:
                     if nextdepid not in rdep_seen:
                         add_recursive_run(nextdepid, fnid)
-                for idepend in idepends:
-                    nextdepid = int(idepend.split(":")[0])
-                    if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid, fnid)
+                for (idependid, idependtask) in get_recursive_idepends(taskid):
+                    if idependid not in dep_seen:
+                        add_recursive_build(idependid, fnid)

         # Resolve recursive 'recrdeptask' dependencies
         #
@@ -472,9 +498,9 @@ class RunQueue:
                     add_recursive_build(depid, fnid)
                 for rdepid in taskData.rdepids[fnid]:
                     add_recursive_run(rdepid, fnid)
-                for idepend in idepends:
-                    depid = int(idepend.split(":")[0])
-                    add_recursive_build(depid, fnid)
+                deptaskid = taskData.gettask_id(fn, taskname, False)
+                for (idependid, idependtask) in get_recursive_idepends(deptaskid):
+                    add_recursive_build(idependid, fnid)

         # Rmove all self references
         if task in depends:
@@ -659,6 +685,16 @@ class RunQueue:
             if len(self.runq_depends[task]) == 0:
                 buildable.append(task)

+        def check_buildable(self, task, buildable):
+            for revdep in self.runq_revdeps[task]:
+                alldeps = 1
+                for dep in self.runq_depends[revdep]:
+                    if dep in unchecked:
+                        alldeps = 0
+                if alldeps == 1:
+                    if revdep in unchecked:
+                        buildable.append(revdep)
+
         for task in range(len(self.runq_fnid)):
             if task not in unchecked:
                 continue
@@ -669,12 +705,14 @@ class RunQueue:
             if not os.access(stampfile, os.F_OK):
                 del unchecked[task]
                 notcurrent.append(task)
+                check_buildable(self, task, buildable)
                 continue
             # If its a 'nostamp' task, it's not current
             taskdep = self.dataCache.task_deps[fn]
             if 'nostamp' in taskdep and task in taskdep['nostamp']:
                 del unchecked[task]
                 notcurrent.append(task)
+                check_buildable(self, task, buildable)
                 continue

         while (len(buildable) > 0):
@@ -705,14 +743,7 @@ class RunQueue:
                 else:
                     notcurrent.append(task)

-                for revdep in self.runq_revdeps[task]:
-                    alldeps = 1
-                    for dep in self.runq_depends[revdep]:
-                        if dep in unchecked:
-                            alldeps = 0
-                    if alldeps == 1:
-                        if revdep in unchecked:
-                            nextbuildable.append(revdep)
+                check_buildable(self, task, nextbuildable)

             buildable = nextbuildable

@@ -729,6 +760,40 @@ class RunQueue:
                     bb.fatal("check_stamps fatal internal error")
         return current

+    def check_stamp(self, task):
+
+        if self.stamppolicy == "perfile":
+            fulldeptree = False
+        else:
+            fulldeptree = True
+
+        fn = self.taskData.fn_index[self.runq_fnid[task]]
+        taskname = self.runq_task[task]
+        stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
+        # If the stamp is missing its not current
+        if not os.access(stampfile, os.F_OK):
+            return False
+        # If its a 'nostamp' task, it's not current
+        taskdep = self.dataCache.task_deps[fn]
+        if 'nostamp' in taskdep and task in taskdep['nostamp']:
+            return False
+
+        iscurrent = True
+        t1 = os.stat(stampfile)[stat.ST_MTIME]
+        for dep in self.runq_depends[task]:
+            if iscurrent:
+                fn2 = self.taskData.fn_index[self.runq_fnid[dep]]
+                taskname2 = self.runq_task[dep]
+                stampfile2 = "%s.%s" % (self.dataCache.stamp[fn2], taskname2)
+                if fulldeptree or fn == fn2:
+                    try:
+                        t2 = os.stat(stampfile2)[stat.ST_MTIME]
+                        if t1 < t2:
+                            iscurrent = False
+                    except:
+                        iscurrent = False
+
+        return iscurrent

     def execute_runqueue(self):
         """
@@ -817,25 +882,18 @@ class RunQueue:

         event.fire(bb.event.StampUpdate(self.target_pairs, self.dataCache.stamp, self.cfgdata))

-        # Find out which tasks have current stamps which we can skip when the
-        # time comes
-        currentstamps = self.check_stamps()
-        self.stats.taskSkipped(len(currentstamps))
-        self.stats.taskCompleted(len(currentstamps))
-
         while True:
             task = self.sched.next()
             if task is not None:
                 fn = self.taskData.fn_index[self.runq_fnid[task]]
                 taskname = self.runq_task[task]

-                if task in currentstamps:
-                    #if bb.build.stamp_is_current(taskname, self.dataCache, fn):
+                if self.check_stamp(task):
                     bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task)))
                     self.runq_running[task] = 1
                     self.task_complete(task)
-                    #self.stats.taskCompleted()
-                    #self.stats.taskSkipped()
+                    self.stats.taskCompleted()
+                    self.stats.taskSkipped()
                     continue

                 bb.msg.note(1, bb.msg.domain.RunQueue, "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.active_builds + 1, len(self.runq_fnid), task, self.get_user_idstring(task)))
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 4a79e7a56d..0fb34ad748 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -150,7 +150,7 @@ class TaskData:
                 ids = []
                 for dep in task_deps['depends'][task].split():
                     if dep:
-                        ids.append(str(self.getbuild_id(dep.split(":")[0])) + ":" + dep.split(":")[1])
+                        ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
                 self.tasks_idepends[taskid].extend(ids)

         # Work out build dependencies
@@ -167,11 +167,11 @@ class TaskData:
             rdepends = dataCache.rundeps[fn]
             rrecs = dataCache.runrecs[fn]
             for package in rdepends:
-                for rdepend in rdepends[package]:
+                for rdepend in bb.utils.explode_deps(rdepends[package]):
                     bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn))
                     rdependids[self.getrun_id(rdepend)] = None
             for package in rrecs:
-                for rdepend in rrecs[package]:
+                for rdepend in bb.utils.explode_deps(rrecs[package]):
                     bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn))
                     rdependids[self.getrun_id(rdepend)] = None
             self.rdepids[fnid] = rdependids.keys()
@@ -458,8 +458,6 @@ class TaskData:
        """
         if fnid in self.failed_fnids:
             return
-        if not missing_list:
-            missing_list = [fnid]
         bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid])
         self.failed_fnids.append(fnid)
         for target in self.build_targets:
@@ -487,6 +485,12 @@ class TaskData:
             dependees = self.get_dependees(targetid)
             for fnid in dependees:
                 self.fail_fnid(fnid, missing_list)
+            for taskid in range(len(self.tasks_idepends)):
+                idepends = self.tasks_idepends[taskid]
+                for (idependid, idependtask) in idepends:
+                    if idependid == targetid:
+                        self.fail_fnid(self.tasks_fnid[taskid], missing_list)
+
         if self.abort and targetid in self.external_targets:
             bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
             raise bb.providers.NoProvider
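On the taskdata.py side, inter-task dependencies are now carried as `(build_target_id, taskname)` tuples rather than `"id:taskname"` strings, and the RDEPENDS/RRECOMMENDS values stored by the cache (see the cache.py hunks above) are split with `bb.utils.explode_deps()` only when runtime dependency ids are computed. A rough illustration of the tuple form, with `getbuild_id` passed in as a stand-in for the real lookup (a hypothetical helper, not the TaskData API):

```python
def parse_task_idepends(depends_value, getbuild_id):
    # Turn a do_sometask[depends] value such as "targetname:do_someothertask"
    # into the (build_target_id, taskname) tuples that tasks_idepends now stores.
    ids = []
    for dep in depends_value.split():
        if dep:
            target, taskname = dep.split(":")[0], dep.split(":")[1]
            ids.append((getbuild_id(target), taskname))
    return ids
```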