author    Richard Purdie <richard@openedhand.com>  2007-08-20 07:48:43 +0000
committer Richard Purdie <richard@openedhand.com>  2007-08-20 07:48:43 +0000
commit    d8bfa5c6eff1cff34895304a33be671fb141084e (patch)
tree      8f63f2cad401f42f5dd30930b0f042aa9c5bdaf8 /bitbake/lib/bb
parent    e68823a20c6e3b629c947bc7e329e5ea71a9860c (diff)
bitbake: Sync with 1.8.8 release
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2513 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/__init__.py                  |  16
-rw-r--r--  bitbake/lib/bb/build.py                     |  52
-rw-r--r--  bitbake/lib/bb/cooker.py                    |  13
-rw-r--r--  bitbake/lib/bb/data.py                      |   7
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py            |  39
-rw-r--r--  bitbake/lib/bb/fetch/svn.py                 |   9
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py  |  18
-rw-r--r--  bitbake/lib/bb/runqueue.py                  |  30
-rw-r--r--  bitbake/lib/bb/taskdata.py                  |  41
9 files changed, 146 insertions(+), 79 deletions(-)
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 585eec8875..77b1255c77 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-__version__ = "1.8.7"
+__version__ = "1.8.9"
__all__ = [
@@ -1124,7 +1124,12 @@ class digraph:
def allnodes(self):
"returns all nodes in the dictionary"
- return self.dict.keys()
+ keys = self.dict.keys()
+ ret = []
+ for key in keys:
+ ret.append(key)
+ ret.sort()
+ return ret
def firstzero(self):
"returns first node with zero references, or NULL if no such node exists"
@@ -1168,7 +1173,12 @@ class digraph:
def getparents(self, item):
if not self.hasnode(item):
return []
- return self.dict[item][1]
+ parents = self.dict[item][1]
+ ret = []
+ for parent in parents:
+ ret.append(parent)
+ ret.sort()
+ return ret
def getchildren(self, item):
if not self.hasnode(item):
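
The two digraph hunks above return sorted copies of the node and parent lists instead of live dict views, which makes iteration order deterministic between runs. A minimal sketch of the same idea, using a plain adjacency dict rather than bb.digraph (the task names are illustrative):

    # Sketch only: 'graph' is a hypothetical {node: [parents]} mapping.
    graph = {"do_compile": ["do_configure"], "do_configure": ["do_patch"]}

    def allnodes(graph):
        # return a sorted copy instead of the live keys view
        return sorted(graph.keys())

    def getparents(graph, item):
        if item not in graph:
            return []
        return sorted(graph[item])

    print(allnodes(graph))                  # ['do_compile', 'do_configure']
    print(getparents(graph, "do_compile"))  # ['do_configure']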
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index bcbc55eea5..e9a6fc8c61 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -397,35 +397,41 @@ def del_stamp(task, d, file_name = None):
"""
stamp_internal(task, d, file_name)
-def add_task(task, deps, d):
+def add_tasks(tasklist, d):
task_graph = data.getVar('_task_graph', d)
+ task_deps = data.getVar('_task_deps', d)
if not task_graph:
task_graph = bb.digraph()
- data.setVarFlag(task, 'task', 1, d)
- task_graph.addnode(task, None)
- for dep in deps:
- if not task_graph.hasnode(dep):
- task_graph.addnode(dep, None)
- task_graph.addnode(task, dep)
- # don't assume holding a reference
- data.setVar('_task_graph', task_graph, d)
-
- task_deps = data.getVar('_task_deps', d)
if not task_deps:
task_deps = {}
- def getTask(name):
- deptask = data.getVarFlag(task, name, d)
- if deptask:
- deptask = data.expand(deptask, d)
- if not name in task_deps:
- task_deps[name] = {}
- task_deps[name][task] = deptask
- getTask('depends')
- getTask('deptask')
- getTask('rdeptask')
- getTask('recrdeptask')
- getTask('nostamp')
+ for task in tasklist:
+ deps = tasklist[task]
+ task = data.expand(task, d)
+
+ data.setVarFlag(task, 'task', 1, d)
+ task_graph.addnode(task, None)
+ for dep in deps:
+ dep = data.expand(dep, d)
+ if not task_graph.hasnode(dep):
+ task_graph.addnode(dep, None)
+ task_graph.addnode(task, dep)
+
+ flags = data.getVarFlags(task, d)
+ def getTask(name):
+ if name in flags:
+ deptask = data.expand(flags[name], d)
+ if not name in task_deps:
+ task_deps[name] = {}
+ task_deps[name][task] = deptask
+ getTask('depends')
+ getTask('deptask')
+ getTask('rdeptask')
+ getTask('recrdeptask')
+ getTask('nostamp')
+
+ # don't assume holding a reference
+ data.setVar('_task_graph', task_graph, d)
data.setVar('_task_deps', task_deps, d)
def remove_task(task, kill, d):
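
The build.py change replaces per-task add_task() calls with a single add_tasks() that takes a whole {task: [deps]} mapping and builds the task graph and dependency flags in one pass. A rough standalone sketch of the batched graph construction, with plain dicts standing in for bitbake's datastore and digraph:

    # Sketch only: 'tasklist' mirrors the mapping passed to bb.build.add_tasks().
    def add_tasks(tasklist):
        graph = {}                        # task -> list of dependency tasks
        for task, deps in tasklist.items():
            graph.setdefault(task, [])
            for dep in deps:
                graph.setdefault(dep, [])
                graph[task].append(dep)
        return graph

    graph = add_tasks({"do_compile": ["do_configure"],
                       "do_configure": ["do_patch"]})
    print(graph["do_compile"])            # ['do_configure']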
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 955fbb434c..0eda9eed99 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -73,6 +73,19 @@ class BBCooker:
self.configuration.event_data = bb.data.createCopy(self.configuration.data)
bb.data.update_data(self.configuration.event_data)
+ #
+ # TOSTOP must not be set or our children will hang when they output
+ #
+ fd = sys.stdout.fileno()
+ if os.isatty(fd):
+ import termios
+ tcattr = termios.tcgetattr(fd)
+ if tcattr[3] & termios.TOSTOP:
+ bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
+ tcattr[3] = tcattr[3] & ~termios.TOSTOP
+ termios.tcsetattr(fd, termios.TCSANOW, tcattr)
+
+
def tryBuildPackage(self, fn, item, task, the_data, build_depends):
"""
Build one task of a package, optionally build following task depends
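
The cooker.py hunk clears the terminal's TOSTOP flag so that child processes writing to the controlling terminal are not stopped with SIGTTOU. The check can be tried on its own; this sketch only reports the flag rather than clearing it:

    # Sketch only: reports whether TOSTOP is set on stdout's terminal.
    import os
    import sys
    import termios

    fd = sys.stdout.fileno()
    if os.isatty(fd):
        tcattr = termios.tcgetattr(fd)
        # tcattr[3] holds the local-mode (lflag) bits
        if tcattr[3] & termios.TOSTOP:
            print("TOSTOP is set; background writers would be stopped")
        else:
            print("TOSTOP is clear")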
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 21cdde04a8..7ad1acad1c 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -282,6 +282,7 @@ def expandKeys(alterdata, readdata = None):
if readdata == None:
readdata = alterdata
+ todolist = {}
for key in keys(alterdata):
if not '${' in key:
continue
@@ -289,7 +290,13 @@ def expandKeys(alterdata, readdata = None):
ekey = expand(key, readdata)
if key == ekey:
continue
+ todolist[key] = ekey
+ # These two for loops are split for performance to maximise the
+ # usefulness of the expand cache
+
+ for key in todolist:
+ ekey = todolist[key]
renameVar(key, ekey, alterdata)
def expandData(alterdata, readdata = None):
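
The data.py change collects all key renames first and applies them afterwards, so renames done partway through cannot invalidate the expand cache built while scanning. A generic sketch of the same collect-then-apply pattern over a plain dict, where expand() is a trivial stand-in for bb.data.expand():

    # Sketch only: 'store' is a plain dict standing in for the datastore.
    store = {"PN": "busybox", "FILES_${PN}": "/bin/busybox"}

    def expand(key, store):
        for name, value in store.items():
            key = key.replace("${%s}" % name, str(value))
        return key

    # Pass 1: collect the renames without touching the keys being scanned.
    todolist = {}
    for key in list(store.keys()):
        if "${" not in key:
            continue
        ekey = expand(key, store)
        if ekey != key:
            todolist[key] = ekey

    # Pass 2: apply the renames.
    for key, ekey in todolist.items():
        store[ekey] = store.pop(key)

    print(store)   # {'PN': 'busybox', 'FILES_busybox': '/bin/busybox'}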
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index bbff516ffc..c34405738b 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -135,26 +135,27 @@ def go(d):
for u in urldata:
ud = urldata[u]
m = ud.method
- if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
- # File already present along with md5 stamp file
- # Touch md5 file to show activity
- os.utime(ud.md5, None)
- continue
- lf = open(ud.lockfile, "a+")
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
- if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
- # If someone else fetched this before we got the lock,
- # notice and don't try again
- os.utime(ud.md5, None)
+ if ud.localfile:
+ if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
+ # File already present along with md5 stamp file
+ # Touch md5 file to show activity
+ os.utime(ud.md5, None)
+ continue
+ lf = open(ud.lockfile, "a+")
+ fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
+ if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
+ # If someone else fetched this before we got the lock,
+ # notice and don't try again
+ os.utime(ud.md5, None)
+ fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
+ lf.close
+ continue
+ m.go(u, ud, d)
+ if ud.localfile:
+ if not m.forcefetch(u, ud, d):
+ Fetch.write_md5sum(u, ud, d)
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
lf.close
- continue
- m.go(u, ud, d)
- if ud.localfile and not m.forcefetch(u, ud, d):
- Fetch.write_md5sum(u, ud, d)
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close
-
def localpaths(d):
"""
@@ -339,7 +340,7 @@ class Fetch(object):
pn = data.getVar("PN", d, 1)
if pn:
- return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)
+ return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
getSRCDate = staticmethod(getSRCDate)
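
The fetch/__init__.py rewrite restructures go() into a check / lock / re-check sequence: a cheap unlocked test for the md5 stamp, an exclusive flock on the per-download lockfile, then the same test again in case another fetcher finished while we waited. A minimal sketch of that lock-then-recheck pattern on an arbitrary stamp file (the paths are hypothetical, not bitbake's layout):

    # Sketch only: generic check / lock / re-check, not bitbake's go().
    import fcntl
    import os

    stampfile = "/tmp/example.md5"        # hypothetical stamp path
    lockfile = stampfile + ".lock"

    def fetch():
        print("fetching...")
        open(stampfile, "w").close()

    if os.path.exists(stampfile):
        os.utime(stampfile, None)         # already fetched: just touch the stamp
    else:
        with open(lockfile, "a+") as lf:
            fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
            try:
                if os.path.exists(stampfile):
                    # someone else fetched it while we waited for the lock
                    os.utime(stampfile, None)
                else:
                    fetch()
            finally:
                fcntl.flock(lf.fileno(), fcntl.LOCK_UN)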
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index ca12efe158..af8543ab34 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -74,11 +74,14 @@ class Svn(Fetch):
ud.revision = ""
else:
rev = data.getVar("SRCREV", d, 0)
- if "get_srcrev" in rev:
+ if rev and "get_srcrev" in rev:
ud.revision = self.latest_revision(url, ud, d)
- else:
+ ud.date = ""
+ elif rev:
ud.revision = rev
- ud.date = ""
+ ud.date = ""
+ else:
+ ud.revision = ""
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
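
The svn.py hunk guards against SRCREV being unset: the old code tested for "get_srcrev" inside a value that may be None. The new branch order, reduced to its shape with stubbed inputs:

    # Sketch only: mirrors the branch structure, inputs are illustrative.
    def pick_revision(rev, date, latest_revision):
        if rev and "get_srcrev" in rev:
            return latest_revision(), ""  # resolve via the fetcher, ignore the date
        elif rev:
            return rev, ""                # explicit revision, ignore the date
        else:
            return "", date               # no SRCREV: fall back to date-based fetch

    print(pick_revision(None, "20070820", lambda: "1234"))    # ('', '20070820')
    print(pick_revision("1200", "20070820", lambda: "1234"))  # ('1200', '')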
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index aaa262d3e2..0f19f9a5d5 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -176,15 +176,23 @@ def handle(fn, d, include = 0):
if bb.event.register(var,handler) == bb.event.Registered:
all_handlers[var] = handler
+ tasklist = {}
for var in data.getVar('__BBTASKS', d) or []:
+ if var not in tasklist:
+ tasklist[var] = []
deps = data.getVarFlag(var, 'deps', d) or []
+ for p in deps:
+ if p not in tasklist[var]:
+ tasklist[var].append(p)
+
postdeps = data.getVarFlag(var, 'postdeps', d) or []
- bb.build.add_task(var, deps, d)
for p in postdeps:
- pdeps = data.getVarFlag(p, 'deps', d) or []
- pdeps.append(var)
- data.setVarFlag(p, 'deps', pdeps, d)
- bb.build.add_task(p, pdeps, d)
+ if p not in tasklist:
+ tasklist[p] = []
+ if var not in tasklist[p]:
+ tasklist[p].append(var)
+
+ bb.build.add_tasks(tasklist, d)
# now add the handlers
if not len(all_handlers) == 0:
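
The BBHandler change gathers each task's deps, plus the reverse edges implied by its postdeps, into one tasklist dict and then makes a single bb.build.add_tasks() call. The dependency-inversion step in isolation, with the flag lookups stubbed as plain dicts and hypothetical task names:

    # Sketch only: a postdep "p" of var means p must run after var,
    # i.e. p depends on var.
    deps = {"do_compile": ["do_configure"], "do_populate_staging": []}
    postdeps = {"do_compile": ["do_populate_staging"]}

    tasklist = {}
    for var in deps:
        tasklist.setdefault(var, [])
        for p in deps[var]:
            if p not in tasklist[var]:
                tasklist[var].append(p)
        for p in postdeps.get(var, []):
            tasklist.setdefault(p, [])
            if var not in tasklist[p]:
                tasklist[p].append(var)

    print(tasklist)
    # {'do_compile': ['do_configure'], 'do_populate_staging': ['do_compile']}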
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index c55a58da2b..3dfae219d2 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -137,7 +137,7 @@ class RunQueue:
dep = taskData.fn_index[depdata]
depends.append(taskData.gettask_id(dep, idepend.split(":")[1]))
- def add_recursive_build(depid):
+ def add_recursive_build(depid, depfnid):
"""
Add build depends of depid to depends
(if we've not see it before)
@@ -150,26 +150,28 @@ class RunQueue:
depdata = taskData.build_targets[depid][0]
if depdata is not None:
dep = taskData.fn_index[depdata]
+ idepends = []
# Need to avoid creating new tasks here
taskid = taskData.gettask_id(dep, taskname, False)
if taskid is not None:
depends.append(taskid)
fnid = taskData.tasks_fnid[taskid]
+ idepends = taskData.tasks_idepends[taskid]
+ #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
else:
fnid = taskData.getfn_id(dep)
for nextdepid in taskData.depids[fnid]:
if nextdepid not in dep_seen:
- add_recursive_build(nextdepid)
+ add_recursive_build(nextdepid, fnid)
for nextdepid in taskData.rdepids[fnid]:
if nextdepid not in rdep_seen:
- add_recursive_run(nextdepid)
- idepends = taskData.tasks_idepends[depid]
+ add_recursive_run(nextdepid, fnid)
for idepend in idepends:
nextdepid = int(idepend.split(":")[0])
if nextdepid not in dep_seen:
- add_recursive_build(nextdepid)
+ add_recursive_build(nextdepid, fnid)
- def add_recursive_run(rdepid):
+ def add_recursive_run(rdepid, depfnid):
"""
Add runtime depends of rdepid to depends
(if we've not see it before)
@@ -182,24 +184,26 @@ class RunQueue:
depdata = taskData.run_targets[rdepid][0]
if depdata is not None:
dep = taskData.fn_index[depdata]
+ idepends = []
# Need to avoid creating new tasks here
taskid = taskData.gettask_id(dep, taskname, False)
if taskid is not None:
depends.append(taskid)
fnid = taskData.tasks_fnid[taskid]
+ idepends = taskData.tasks_idepends[taskid]
+ #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
else:
fnid = taskData.getfn_id(dep)
for nextdepid in taskData.depids[fnid]:
if nextdepid not in dep_seen:
- add_recursive_build(nextdepid)
+ add_recursive_build(nextdepid, fnid)
for nextdepid in taskData.rdepids[fnid]:
if nextdepid not in rdep_seen:
- add_recursive_run(nextdepid)
- idepends = taskData.tasks_idepends[rdepid]
+ add_recursive_run(nextdepid, fnid)
for idepend in idepends:
nextdepid = int(idepend.split(":")[0])
if nextdepid not in dep_seen:
- add_recursive_build(nextdepid)
+ add_recursive_build(nextdepid, fnid)
# Resolve Recursive Runtime Depends
@@ -210,12 +214,12 @@ class RunQueue:
rdep_seen = []
idep_seen = []
for depid in taskData.depids[fnid]:
- add_recursive_build(depid)
+ add_recursive_build(depid, fnid)
for rdepid in taskData.rdepids[fnid]:
- add_recursive_run(rdepid)
+ add_recursive_run(rdepid, fnid)
for idepend in idepends:
depid = int(idepend.split(":")[0])
- add_recursive_build(depid)
+ add_recursive_build(depid, fnid)
#Prune self references
if task in depends:
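
The runqueue hunks thread the originating file id (depfnid) through the recursive walk so the commented-out debug line can report which recipe pulled each task in; termination still comes from the dep_seen/rdep_seen lists. A reduced sketch of that seen-set recursion over a hypothetical dependency table:

    # Sketch only: 'depids' is a hypothetical {id: [dependency ids]} table.
    depids = {1: [2, 3], 2: [3], 3: []}

    dep_seen = []
    depends = []

    def add_recursive_build(depid, parent):
        if depid in dep_seen:
            return
        dep_seen.append(depid)
        depends.append(depid)
        # print("Added %s due to %s" % (depid, parent))  # mirrors the debug line
        for nextdepid in depids[depid]:
            add_recursive_build(nextdepid, depid)

    add_recursive_build(1, None)
    print(depends)   # [1, 2, 3]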
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index f448b5b666..902cc140ef 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -450,10 +450,12 @@ class TaskData:
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)
- def fail_fnid(self, fnid):
+ def fail_fnid(self, fnid, missing_list = []):
"""
Mark a file as failed (unbuildable)
Remove any references from build and runtime provider lists
+
+ missing_list, A list of missing requirements for this target
"""
if fnid in self.failed_fnids:
return
@@ -463,14 +465,14 @@ class TaskData:
if fnid in self.build_targets[target]:
self.build_targets[target].remove(fnid)
if len(self.build_targets[target]) == 0:
- self.remove_buildtarget(target)
+ self.remove_buildtarget(target, missing_list)
for target in self.run_targets:
if fnid in self.run_targets[target]:
self.run_targets[target].remove(fnid)
if len(self.run_targets[target]) == 0:
- self.remove_runtarget(target)
+ self.remove_runtarget(target, missing_list)
- def remove_buildtarget(self, targetid):
+ def remove_buildtarget(self, targetid, missing_list = []):
"""
Mark a build target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
@@ -479,21 +481,21 @@ class TaskData:
self.failed_deps.append(targetid)
dependees = self.get_dependees(targetid)
for fnid in dependees:
- self.fail_fnid(fnid)
+ self.fail_fnid(fnid, [self.build_names_index[targetid]]+missing_list)
if self.abort and targetid in self.external_targets:
- bb.msg.error(bb.msg.domain.Provider, "No buildable providers available for required build target %s" % self.build_names_index[targetid])
+ bb.msg.error(bb.msg.domain.Provider, "No buildable providers available for required build target %s ('%s')" % (self.build_names_index[targetid], missing_list))
raise bb.providers.NoProvider
- def remove_runtarget(self, targetid):
+ def remove_runtarget(self, targetid, missing_list = []):
"""
Mark a run target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
"""
- bb.msg.note(1, bb.msg.domain.Provider, "Removing failed runtime build target %s" % self.run_names_index[targetid])
+ bb.msg.note(1, bb.msg.domain.Provider, "Removing failed runtime build target %s ('%s')" % (self.run_names_index[targetid], missing_list))
self.failed_rdeps.append(targetid)
dependees = self.get_rdependees(targetid)
for fnid in dependees:
- self.fail_fnid(fnid)
+ self.fail_fnid(fnid, [self.run_names_index[targetid]]+missing_list)
def add_unresolved(self, cfgData, dataCache):
"""
@@ -529,14 +531,26 @@ class TaskData:
"""
bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
+
bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
+
bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
- for target in self.build_targets.keys():
- bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.build_names_index[target], self.build_targets[target]))
+ for buildid in range(len(self.build_names_index)):
+ target = self.build_names_index[buildid]
+ targets = "None"
+ if buildid in self.build_targets:
+ targets = self.build_targets[buildid]
+ bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
+
bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
- for target in self.run_targets.keys():
- bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.run_names_index[target], self.run_targets[target]))
+ for runid in range(len(self.run_names_index)):
+ target = self.run_names_index[runid]
+ targets = "None"
+ if runid in self.run_targets:
+ targets = self.run_targets[runid]
+ bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
+
bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
for task in range(len(self.tasks_name)):
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
@@ -544,6 +558,7 @@ class TaskData:
self.fn_index[self.tasks_fnid[task]],
self.tasks_name[task],
self.tasks_tdepends[task]))
+
bb.msg.debug(3, bb.msg.domain.TaskData, "runtime ids (per fn):")
for fnid in self.rdepids:
bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
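
The taskdata.py changes thread a missing_list down through fail_fnid(), remove_buildtarget() and remove_runtarget(), so the final "No buildable providers" error names the chain of missing dependencies rather than only the top-level target. The accumulation pattern in isolation (the dependee table here is hypothetical):

    # Sketch only: shows how the missing chain grows as failure propagates.
    dependees = {"libfoo": ["foo"], "foo": ["foo-image"], "foo-image": []}

    def remove_buildtarget(target, missing_list=None):
        missing_list = missing_list or []
        print("Removing %s (missing: %s)" % (target, missing_list))
        for dependee in dependees[target]:
            # each level prepends the target that just failed
            remove_buildtarget(dependee, [target] + missing_list)

    remove_buildtarget("libfoo")
    # Removing libfoo (missing: [])
    # Removing foo (missing: ['libfoo'])
    # Removing foo-image (missing: ['foo', 'libfoo'])

The sketch defaults missing_list to None rather than the mutable [] default used in the diff; the diff's version is safe because the lists are only read and rebuilt, never mutated in place, but None avoids the usual mutable-default pitfall.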