83 files changed, 3021 insertions, 278 deletions
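The first hunk below wires fetch2 in as a drop-in replacement for the legacy bb.fetch module: once bb.fetch2 is imported, the old module name is pointed at it in sys.modules, and the "if True or ..." guard makes the BBFETCH2 environment switch effectively unconditional. A minimal standalone sketch of the aliasing idiom, using the names from the hunk:

    import sys
    from bb import fetch2 as fetch
    # Point the legacy name at the new implementation; from here on,
    # "import bb.fetch" anywhere in the codebase resolves to bb.fetch2.
    sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
    import bb.fetch
    assert sys.modules['bb.fetch'] is fetch   # the alias took effect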
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index f8577c6794..e1ddbd53ff 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -35,6 +35,11 @@ class NullHandler(logging.Handler):
     def emit(self, record):
         pass
 
+class BBLogRecord(logging.LogRecord):
+    def __init__(self, name, level, fn, lno, msg, args, exc_info, func, extra):
+        self.taskpid = bb.event.worker_pid
+        logging.LogRecord.__init__(self, name, level, fn, lno, msg, args, exc_info, func)
+
 Logger = logging.getLoggerClass()
 class BBLogger(Logger):
     def __init__(self, name):
@@ -42,6 +47,9 @@ class BBLogger(Logger):
         self.debug = self.bbdebug
         Logger.__init__(self, name)
 
+    def makeRecord(self, name, lvl, fn, lno, msg, args, exc_info, func=None, extra=None):
+        return BBLogRecord(name, lvl, fn, lno, msg, args, exc_info, func, extra)
+
     def bbdebug(self, level, msg, *args, **kwargs):
         return self.log(logging.DEBUG - level - 1, msg, *args, **kwargs)
 
@@ -70,6 +78,9 @@ if "BBDEBUG" in os.environ:
     if level:
         bb.msg.set_debug_level(level)
 
+if True or os.environ.get("BBFETCH2"):
+    from bb import fetch2 as fetch
+    sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
 
 # Messaging convenience functions
 def plain(*args):
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 968e2ea562..f127796c07 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -416,7 +416,7 @@ def del_stamp(task, d, file_name = None):
     Removes a stamp for a given task
     (d can be a data dict or dataCache)
     """
-    stamp_internal(task, d, file_name)
+    stamp = stamp_internal(task, d, file_name)
     if os.access(stamp, os.F_OK):
         os.remove(stamp)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index c477501d68..9a2e2d5298 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -106,7 +106,19 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
         return metadata.getVar(var, True) or ''
 
     @classmethod
+    def make_optional(cls, default=None, **kwargs):
+        """Construct the namedtuple from the specified keyword arguments,
+        with every value considered optional, using the default value if
+        it was not specified."""
+        for field in cls._fields:
+            kwargs[field] = kwargs.get(field, default)
+        return cls(**kwargs)
+
+    @classmethod
     def from_metadata(cls, filename, metadata):
+        if cls.getvar('__SKIPPED', metadata):
+            return cls.make_optional(skipped=True)
+
         tasks = metadata.getVar('__BBTASKS', False)
 
         pn = cls.getvar('PN', metadata)
@@ -124,7 +136,7 @@
                            {'tasks': [], 'parents': {}},
             variants = cls.listvar('__VARIANTS', metadata) + [''],
-            skipped = cls.getvar('__SKIPPED', metadata),
+            skipped = False,
             timestamp = bb.parse.cached_mtime(filename),
             packages = cls.listvar('PACKAGES', metadata),
             pn = pn,
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 1d3557cd6d..06409319e1 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -4,7 +4,8 @@
 import logging
 import os.path
 import bb.utils, bb.data
 from itertools import chain
-from bb.pysh import pyshyacc, pyshlex
+from pysh import pyshyacc, pyshlex
+
 logger = logging.getLogger('BitBake.CodeParser')
 
 PARSERCACHE_VERSION = 2
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 23fd72f432..e524db7498 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -893,6 +893,11 @@ class BBCooker:
     def post_serve(self):
         bb.event.fire(CookerExit(), self.configuration.event_data)
 
+    def shutdown(self):
+        self.state = state.shutdown
+
+
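The two cooker.py hunks (the addition above and the matching removal just below) move shutdown() and stop() from module level, where they sat after server_main() with a self parameter that could never be bound, into the BBCooker class. Assuming the usual semantics of the two states (a sketch, not taken from this diff):

    # hypothetical caller, e.g. a UI front-end holding a cooker handle
    cooker.shutdown()   # request a graceful wind-down of current work
    cooker.stop()       # request an immediate stop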
def stop(self): + self.state = state.stop def server_main(cooker, func, *args): cooker.pre_serve() @@ -935,12 +940,6 @@ def server_main(cooker, func, *args): return ret - def shutdown(self): - self.state = state.shutdown - - def stop(self): - self.state = state.stop - class CookerExit(bb.event.Event): """ Notify clients of the Cooker shutdown diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py index 6ec522aa48..198b628fad 100644 --- a/bitbake/lib/bb/data.py +++ b/bitbake/lib/bb/data.py @@ -161,7 +161,7 @@ def expandKeys(alterdata, readdata = None): def inheritFromOS(d): """Inherit variables from the environment.""" - exportlist = bb.utils.preserved_envvars_export_list() + exportlist = bb.utils.preserved_envvars_exported() for s in os.environ.keys(): try: setVar(s, os.environ[s], d) @@ -192,7 +192,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): return 0 if all: - o.write('# %s=%s\n' % (var, oval)) + commentVal = re.sub('\n', '\n#', str(oval)) + o.write('# %s=%s\n' % (var, commentVal)) if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: return 0 @@ -219,6 +220,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): # if we're going to output this within doublequotes, # to a shell, we need to escape the quotes in the var alter = re.sub('"', '\\"', val.strip()) + alter = re.sub('\n', ' \\\n', alter) o.write('%s="%s"\n' % (varExpanded, alter)) return 0 diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py index 8b45501c00..450d913633 100644 --- a/bitbake/lib/bb/event.py +++ b/bitbake/lib/bb/event.py @@ -129,13 +129,11 @@ def fire(event, d): def worker_fire(event, d): data = "<event>" + pickle.dumps(event) + "</event>" worker_pipe.write(data) - worker_pipe.flush() def fire_from_worker(event, d): if not event.startswith("<event>") or not event.endswith("</event>"): print("Error, not an event %s" % event) return - #print "Got event %s" % event event = pickle.loads(event[7:-8]) fire_ui_handlers(event, d) diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py index 67e5addfe0..07eb77dbfc 100644 --- a/bitbake/lib/bb/fetch/__init__.py +++ b/bitbake/lib/bb/fetch/__init__.py @@ -243,17 +243,20 @@ def verify_checksum(u, ud, d): sha256data = bb.utils.sha256_file(ud.localpath) if (ud.md5_expected == None or ud.sha256_expected == None): - bb.warn("Missing SRC_URI checksum for %s, consider to add\n" \ - "SRC_URI[%s] = \"%s\"\nSRC_URI[%s] = \"%s\"" \ - % (ud.localpath, ud.md5_name, md5data, ud.sha256_name, sha256data)) + logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', + ud.localpath, ud.md5_name, md5data, + ud.sha256_name, sha256data) if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": raise FetchError("No checksum specified for %s." % u) return if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): - bb.error("The checksums for '%s' did not match." % ud.localpath) - bb.error("Expected MD5: '%s' and Got: '%s'" % (ud.md5_expected, md5data)) - bb.error("Expected SHA256: '%s' and Got: '%s'" % (ud.sha256_expected, sha256data)) + logger.error('The checksums for "%s" did not match.\n' + ' MD5: expected "%s", got "%s"\n' + ' SHA256: expected "%s", got "%s"\n', + ud.localpath, ud.md5_expected, md5data, + ud.sha256_expected, sha256data) raise FetchError("%s checksum mismatch." 
% u) def go(d, urls = None): @@ -326,7 +329,7 @@ def checkstatus(d, urls = None): for u in urls: ud = urldata[u] m = ud.method - logger.debug(1, "Testing URL %s" % u) + logger.debug(1, "Testing URL %s", u) # First try checking uri, u, from PREMIRRORS mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) ret = try_mirrors(d, u, mirrors, True) @@ -357,6 +360,9 @@ def localpaths(d): srcrev_internal_call = False +def get_autorev(d): + return get_srcrev(d) + def get_srcrev(d): """ Return the version string for the current package @@ -482,7 +488,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False): """ fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) if not check and os.access(fpath, os.R_OK) and not force: - logger.debug(1, "%s already exists, skipping checkout." % fpath) + logger.debug(1, "%s already exists, skipping checkout.", fpath) return fpath ld = d.createCopy() @@ -510,7 +516,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False): bb.fetch.MD5SumError): import sys (type, value, traceback) = sys.exc_info() - logger.debug(2, "Mirror fetch failure: %s" % value) + logger.debug(2, "Mirror fetch failure: %s", value) removefile(ud.localpath) continue return None @@ -694,7 +700,7 @@ class Fetch(object): if not rev: rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) if not rev: - rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) + rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) if not rev: rev = data.getVar("SRCREV", d, 1) if rev == "INVALID": diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py index de415ec309..b37a09743e 100644 --- a/bitbake/lib/bb/fetch/git.py +++ b/bitbake/lib/bb/fetch/git.py @@ -22,6 +22,7 @@ BitBake 'Fetch' git implementation import os import bb +import bb.persist_data from bb import data from bb.fetch import Fetch from bb.fetch import runfetchcmd @@ -117,6 +118,7 @@ class Git(Fetch): repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) + coname = '%s' % (ud.tag) codir = os.path.join(ud.clonedir, coname) @@ -206,11 +208,19 @@ class Git(Fetch): output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) return output.split()[0] != "0" - def _revision_key(self, url, ud, d): + def _revision_key(self, url, ud, d, branch=False): """ Return a unique key for the url """ - return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch + key = 'git:' + ud.host + ud.path.replace('/', '.') + if branch: + return key + ud.branch + else: + return key + + def generate_revision_key(self, url, ud, d, branch=False): + key = self._revision_key(url, ud, d, branch) + return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") def _latest_revision(self, url, ud, d): """ @@ -228,6 +238,74 @@ class Git(Fetch): raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd)) return output.split()[0] + def latest_revision(self, url, ud, d): + """ + Look in the cache for the latest revision, if not present ask the SCM. 
+ """ + persisted = bb.persist_data.persist(d) + revs = persisted['BB_URI_HEADREVS'] + + key = self.generate_revision_key(url, ud, d, branch=True) + rev = revs[key] + if rev is None: + # Compatibility with old key format, no branch included + oldkey = self.generate_revision_key(url, ud, d, branch=False) + rev = revs[oldkey] + if rev is not None: + del revs[oldkey] + else: + rev = self._latest_revision(url, ud, d) + revs[key] = rev + + return str(rev) + + def sortable_revision(self, url, ud, d): + """ + + """ + pd = bb.persist_data.persist(d) + localcounts = pd['BB_URI_LOCALCOUNT'] + key = self.generate_revision_key(url, ud, d, branch=True) + oldkey = self.generate_revision_key(url, ud, d, branch=False) + + latest_rev = self._build_revision(url, ud, d) + last_rev = localcounts[key + '_rev'] + if last_rev is None: + last_rev = localcounts[oldkey + '_rev'] + if last_rev is not None: + del localcounts[oldkey + '_rev'] + localcounts[key + '_rev'] = last_rev + + uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False + count = None + if uselocalcount: + count = Fetch.localcount_internal_helper(ud, d) + if count is None: + count = localcounts[key + '_count'] + if count is None: + count = localcounts[oldkey + '_count'] + if count is not None: + del localcounts[oldkey + '_count'] + localcounts[key + '_count'] = count + + if last_rev == latest_rev: + return str(count + "+" + latest_rev) + + buildindex_provided = hasattr(self, "_sortable_buildindex") + if buildindex_provided: + count = self._sortable_buildindex(url, ud, d, latest_rev) + if count is None: + count = "0" + elif uselocalcount or buildindex_provided: + count = str(count) + else: + count = str(int(count) + 1) + + localcounts[key + '_rev'] = latest_rev + localcounts[key + '_count'] = count + + return str(count + "+" + latest_rev) + def _build_revision(self, url, ud, d): return ud.tag diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py new file mode 100644 index 0000000000..751e514121 --- /dev/null +++ b/bitbake/lib/bb/fetch2/__init__.py @@ -0,0 +1,824 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +from __future__ import absolute_import +from __future__ import print_function +import os, re +import logging +import bb +from bb import data +from bb import persist_data + +logger = logging.getLogger("BitBake.Fetch") + +class MalformedUrl(Exception): + """Exception raised when encountering an invalid url""" + +class FetchError(Exception): + """Exception raised when a download fails""" + +class NoMethodError(Exception): + """Exception raised when there is no method to obtain a supplied url or set of urls""" + +class MissingParameterError(Exception): + """Exception raised when a fetch method is missing a critical parameter in the url""" + +class ParameterError(Exception): + """Exception raised when a url cannot be proccessed due to invalid parameters.""" + +class MD5SumError(Exception): + """Exception raised when a MD5SUM of a file does not match the expected one""" + +class InvalidSRCREV(Exception): + """Exception raised when an invalid SRCREV is encountered""" + +def decodeurl(url): + """Decodes an URL into the tokens (scheme, network location, path, + user, password, parameters). + """ + + m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) + if not m: + raise MalformedUrl(url) + + type = m.group('type') + location = m.group('location') + if not location: + raise MalformedUrl(url) + user = m.group('user') + parm = m.group('parm') + + locidx = location.find('/') + if locidx != -1 and type.lower() != 'file': + host = location[:locidx] + path = location[locidx:] + else: + host = "" + path = location + if user: + m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) + if m: + user = m.group('user') + pswd = m.group('pswd') + else: + user = '' + pswd = '' + + p = {} + if parm: + for s in parm.split(';'): + s1, s2 = s.split('=') + p[s1] = s2 + + return (type, host, path, user, pswd, p) + +def encodeurl(decoded): + """Encodes a URL from tokens (scheme, network location, path, + user, password, parameters). + """ + + (type, host, path, user, pswd, p) = decoded + + if not type or not path: + raise MissingParameterError("Type or path url components missing when encoding %s" % decoded) + url = '%s://' % type + if user: + url += "%s" % user + if pswd: + url += ":%s" % pswd + url += "@" + if host: + url += "%s" % host + url += "%s" % path + if p: + for parm in p: + url += ";%s=%s" % (parm, p[parm]) + + return url + +def uri_replace(uri, uri_find, uri_replace, d): + if not uri or not uri_find or not uri_replace: + logger.debug(1, "uri_replace: passed an undefined value, not replacing") + uri_decoded = list(decodeurl(uri)) + uri_find_decoded = list(decodeurl(uri_find)) + uri_replace_decoded = list(decodeurl(uri_replace)) + result_decoded = ['', '', '', '', '', {}] + for i in uri_find_decoded: + loc = uri_find_decoded.index(i) + result_decoded[loc] = uri_decoded[loc] + if isinstance(i, basestring): + if (re.match(i, uri_decoded[loc])): + result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) + if uri_find_decoded.index(i) == 2: + if d: + localfn = bb.fetch2.localpath(uri, d) + if localfn: + result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch2.localpath(uri, d))) + else: + return uri + return encodeurl(result_decoded) + +methods = [] +urldata_cache = {} +saved_headrevs = {} + +def fetcher_init(d): + """ + Called to initialize the fetchers once the configuration data is known. 
+ Calls before this must not hit the cache. + """ + pd = persist_data.persist(d) + # When to drop SCM head revisions controlled by user policy + srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" + if srcrev_policy == "cache": + logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) + elif srcrev_policy == "clear": + logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) + try: + bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items() + except: + pass + del pd['BB_URI_HEADREVS'] + else: + raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) + + for m in methods: + if hasattr(m, "init"): + m.init(d) + +def fetcher_compare_revisions(d): + """ + Compare the revisions in the persistant cache with current values and + return true/false on whether they've changed. + """ + + pd = persist_data.persist(d) + data = pd['BB_URI_HEADREVS'].items() + data2 = bb.fetch2.saved_headrevs + + changed = False + for key in data: + if key not in data2 or data2[key] != data[key]: + logger.debug(1, "%s changed", key) + changed = True + return True + else: + logger.debug(2, "%s did not change", key) + return False + +# Function call order is usually: +# 1. init +# 2. go +# 3. localpaths +# localpath can be called at any time + +def init(urls, d, setup = True): + urldata = {} + + fn = bb.data.getVar('FILE', d, 1) + if fn in urldata_cache: + urldata = urldata_cache[fn] + + for url in urls: + if url not in urldata: + urldata[url] = FetchData(url, d) + + if setup: + for url in urldata: + if not urldata[url].setup: + urldata[url].setup_localpath(d) + + urldata_cache[fn] = urldata + return urldata + +def mirror_from_string(data): + return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] + +def removefile(f): + try: + os.remove(f) + except: + pass + +def verify_checksum(u, ud, d): + """ + verify the MD5 and SHA256 checksum for downloaded src + + return value: + - True: checksum matched + - False: checksum unmatched + + if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value. + if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as + matched + """ + + if not ud.type in ["http", "https", "ftp", "ftps"]: + return + + md5data = bb.utils.md5_file(ud.localpath) + sha256data = bb.utils.sha256_file(ud.localpath) + + if (ud.md5_expected == None or ud.sha256_expected == None): + logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', + ud.localpath, ud.md5_name, md5data, + ud.sha256_name, sha256data) + if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": + raise FetchError("No checksum specified for %s." % u) + return + + if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): + logger.error('The checksums for "%s" did not match.\n' + ' MD5: expected "%s", got "%s"\n' + ' SHA256: expected "%s", got "%s"\n', + ud.localpath, ud.md5_expected, md5data, + ud.sha256_expected, sha256data) + raise FetchError("%s checksum mismatch." 
% u) + +def go(d, urls = None): + """ + Fetch all urls + init must have previously been called + """ + if not urls: + urls = d.getVar("SRC_URI", 1).split() + urldata = init(urls, d, True) + + for u in urls: + ud = urldata[u] + m = ud.method + localpath = "" + + if not ud.localfile: + continue + + lf = bb.utils.lockfile(ud.lockfile) + + if m.try_premirror(u, ud, d): + # First try fetching uri, u, from PREMIRRORS + mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) + localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d)) + elif os.path.exists(ud.localfile): + localpath = ud.localfile + + # Need to re-test forcefetch() which will return true if our copy is too old + if m.forcefetch(u, ud, d) or not localpath: + # Next try fetching from the original uri, u + try: + m.go(u, ud, d) + localpath = ud.localpath + except FetchError: + # Remove any incomplete file + removefile(ud.localpath) + # Finally, try fetching uri, u, from MIRRORS + mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True)) + localpath = try_mirrors (d, u, mirrors) + if not localpath or not os.path.exists(localpath): + raise FetchError("Unable to fetch URL %s from any source." % u) + + ud.localpath = localpath + + if os.path.exists(ud.md5): + # Touch the md5 file to show active use of the download + try: + os.utime(ud.md5, None) + except: + # Errors aren't fatal here + pass + else: + # Only check the checksums if we've not seen this item before + verify_checksum(u, ud, d) + Fetch.write_md5sum(u, ud, d) + + bb.utils.unlockfile(lf) + +def checkstatus(d, urls = None): + """ + Check all urls exist upstream + init must have previously been called + """ + urldata = init([], d, True) + + if not urls: + urls = urldata + + for u in urls: + ud = urldata[u] + m = ud.method + logger.debug(1, "Testing URL %s", u) + # First try checking uri, u, from PREMIRRORS + mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) + ret = try_mirrors(d, u, mirrors, True) + if not ret: + # Next try checking from the original uri, u + try: + ret = m.checkstatus(u, ud, d) + except: + # Finally, try checking uri, u, from MIRRORS + mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True)) + ret = try_mirrors (d, u, mirrors, True) + + if not ret: + raise FetchError("URL %s doesn't work" % u) + +def localpaths(d): + """ + Return a list of the local filenames, assuming successful fetch + """ + local = [] + urldata = init([], d, True) + + for u in urldata: + ud = urldata[u] + local.append(ud.localpath) + + return local + +def get_autorev(d): + # only not cache src rev in autorev case + if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": + bb.data.setVar('__BB_DONT_CACHE', '1', d) + return "AUTOINC" + +def get_srcrev(d): + """ + Return the version string for the current package + (usually to be used as PV) + Most packages usually only have one SCM so we just pass on the call. + In the multi SCM case, we build a value based on SRCREV_FORMAT which must + have been set. 
+ """ + + scms = [] + + # Only call setup_localpath on URIs which supports_srcrev() + urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False) + for u in urldata: + ud = urldata[u] + if ud.method.supports_srcrev(): + if not ud.setup: + ud.setup_localpath(d) + scms.append(u) + + if len(scms) == 0: + logger.error("SRCREV was used yet no valid SCM was found in SRC_URI") + raise ParameterError + + if len(scms) == 1: + return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d) + + # + # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT + # + format = bb.data.getVar('SRCREV_FORMAT', d, 1) + if not format: + logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") + raise ParameterError + + for scm in scms: + if 'name' in urldata[scm].parm: + name = urldata[scm].parm["name"] + rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d) + format = format.replace(name, rev) + + return format + +def localpath(url, d, cache = True): + """ + Called from the parser with cache=False since the cache isn't ready + at this point. Also called from classed in OE e.g. patch.bbclass + """ + ud = init([url], d) + if ud[url].method: + return ud[url].localpath + return url + +def runfetchcmd(cmd, d, quiet = False): + """ + Run cmd returning the command output + Raise an error if interrupted or cmd fails + Optionally echo command output to stdout + """ + + # Need to export PATH as binary could be in metadata paths + # rather than host provided + # Also include some other variables. + # FIXME: Should really include all export varaiables? + exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', + 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', + 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy', + 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME'] + + for var in exportvars: + val = data.getVar(var, d, True) + if val: + cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) + + logger.debug(1, "Running %s", cmd) + + # redirect stderr to stdout + stdout_handle = os.popen(cmd + " 2>&1", "r") + output = "" + + while True: + line = stdout_handle.readline() + if not line: + break + if not quiet: + print(line, end=' ') + output += line + + status = stdout_handle.close() or 0 + signal = status >> 8 + exitstatus = status & 0xff + + if signal: + raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) + elif status != 0: + raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) + + return output + +def try_mirrors(d, uri, mirrors, check = False, force = False): + """ + Try to use a mirrored version of the sources. + This method will be automatically called before the fetchers go. 
+ + d Is a bb.data instance + uri is the original uri we're trying to download + mirrors is the list of mirrors we're going to try + """ + fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) + if not check and os.access(fpath, os.R_OK) and not force: + logger.debug(1, "%s already exists, skipping checkout.", fpath) + return fpath + + ld = d.createCopy() + for (find, replace) in mirrors: + newuri = uri_replace(uri, find, replace, ld) + if newuri != uri: + try: + ud = FetchData(newuri, ld) + except bb.fetch2.NoMethodError: + logger.debug(1, "No method for %s", uri) + continue + + ud.setup_localpath(ld) + + try: + if check: + found = ud.method.checkstatus(newuri, ud, ld) + if found: + return found + else: + ud.method.go(newuri, ud, ld) + return ud.localpath + except (bb.fetch2.MissingParameterError, + bb.fetch2.FetchError, + bb.fetch2.MD5SumError): + import sys + (type, value, traceback) = sys.exc_info() + logger.debug(2, "Mirror fetch failure: %s", value) + removefile(ud.localpath) + continue + return None + + +class FetchData(object): + """ + A class which represents the fetcher state for a given URI. + """ + def __init__(self, url, d): + self.localfile = "" + (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) + self.date = Fetch.getSRCDate(self, d) + self.url = url + if not self.user and "user" in self.parm: + self.user = self.parm["user"] + if not self.pswd and "pswd" in self.parm: + self.pswd = self.parm["pswd"] + self.setup = False + + if "name" in self.parm: + self.md5_name = "%s.md5sum" % self.parm["name"] + self.sha256_name = "%s.sha256sum" % self.parm["name"] + else: + self.md5_name = "md5sum" + self.sha256_name = "sha256sum" + self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d) + self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d) + + for m in methods: + if m.supports(url, self, d): + self.method = m + if hasattr(m,"urldata_init"): + m.urldata_init(self, d) + if m.supports_srcrev(): + self.revision = Fetch.srcrev_internal_helper(self, d); + return + raise NoMethodError("Missing implementation for url %s" % url) + + def setup_localpath(self, d): + self.setup = True + if "localpath" in self.parm: + # if user sets localpath for file, use it instead. + self.localpath = self.parm["localpath"] + self.basename = os.path.basename(self.localpath) + else: + premirrors = bb.data.getVar('PREMIRRORS', d, True) + local = "" + if premirrors and self.url: + aurl = self.url.split(";")[0] + mirrors = mirror_from_string(premirrors) + for (find, replace) in mirrors: + if replace.startswith("file://"): + path = aurl.split("://")[1] + path = path.split(";")[0] + local = replace.split("://")[1] + os.path.basename(path) + if local == aurl or not os.path.exists(local) or os.path.isdir(local): + local = "" + self.localpath = local + if not local: + self.localpath = self.method.localpath(self.url, self, d) + # We have to clear data's internal caches since the cached value of SRCREV is now wrong. + # Horrible... + bb.data.delVar("ISHOULDNEVEREXIST", d) + + if self.localpath is not None: + # Note: These files should always be in DL_DIR whereas localpath may not be. 
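FetchData.__init__() above maps an optional name= URL parameter onto per-entry SRC_URI varflags when looking up expected checksums. For a recipe along these lines (the URL and sum values are hypothetical):

    # SRC_URI = "http://example.com/foo-1.0.tar.gz;name=foo"
    # SRC_URI[foo.md5sum] = "8e7cdf4dfd0bbd2fc3fe16f0b2a16d1c"
    md5_name = "%s.md5sum" % "foo"                        # "foo.md5sum"
    md5_expected = bb.data.getVarFlag("SRC_URI", md5_name, d)

Without a name= parameter the flags fall back to the plain "md5sum" and "sha256sum" names, and verify_checksum() warns, or errors when BB_STRICT_CHECKSUM is set, accordingly.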
+ basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d) + self.md5 = basepath + '.md5' + self.lockfile = basepath + '.lock' + + +class Fetch(object): + """Base class for 'fetch'ing data""" + + def __init__(self, urls = []): + self.urls = [] + + def supports(self, url, urldata, d): + """ + Check to see if this fetch class supports a given url. + """ + return 0 + + def localpath(self, url, urldata, d): + """ + Return the local filename of a given url assuming a successful fetch. + Can also setup variables in urldata for use in go (saving code duplication + and duplicate code execution) + """ + return url + def _strip_leading_slashes(self, relpath): + """ + Remove leading slash as os.path.join can't cope + """ + while os.path.isabs(relpath): + relpath = relpath[1:] + return relpath + + def setUrls(self, urls): + self.__urls = urls + + def getUrls(self): + return self.__urls + + urls = property(getUrls, setUrls, None, "Urls property") + + def forcefetch(self, url, urldata, d): + """ + Force a fetch, even if localpath exists? + """ + return False + + def supports_srcrev(self): + """ + The fetcher supports auto source revisions (SRCREV) + """ + return False + + def go(self, url, urldata, d): + """ + Fetch urls + Assumes localpath was called first + """ + raise NoMethodError("Missing implementation for url") + + def try_premirror(self, url, urldata, d): + """ + Should premirrors be used? + """ + if urldata.method.forcefetch(url, urldata, d): + return True + elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile): + return False + else: + return True + + def checkstatus(self, url, urldata, d): + """ + Check the status of a URL + Assumes localpath was called first + """ + logger.info("URL %s could not be checked for status since no method exists.", url) + return True + + def getSRCDate(urldata, d): + """ + Return the SRC Date for the component + + d the bb.data module + """ + if "srcdate" in urldata.parm: + return urldata.parm['srcdate'] + + pn = data.getVar("PN", d, 1) + + if pn: + return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) + + return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) + getSRCDate = staticmethod(getSRCDate) + + def srcrev_internal_helper(ud, d): + """ + Return: + a) a source revision if specified + b) latest revision if SREREV="AUTOINC" + c) None if not specified + """ + + if 'rev' in ud.parm: + return ud.parm['rev'] + + if 'tag' in ud.parm: + return ud.parm['tag'] + + rev = None + if 'name' in ud.parm: + pn = data.getVar("PN", d, 1) + rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1) + if not rev: + rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) + if not rev: + rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) + if not rev: + rev = data.getVar("SRCREV", d, 1) + if rev == "INVALID": + raise InvalidSRCREV("Please set SRCREV to a valid value") + if rev == "AUTOINC": + rev = ud.method.latest_revision(ud.url, ud, d) + + return rev + + srcrev_internal_helper = staticmethod(srcrev_internal_helper) + + def localcount_internal_helper(ud, d): + """ + Return: + a) a locked localcount if specified + b) None otherwise + """ + + localcount = None + if 'name' in ud.parm: + pn = data.getVar("PN", d, 1) + localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1) + if not localcount: + localcount = data.getVar("LOCALCOUNT", d, 1) + return 
localcount + + localcount_internal_helper = staticmethod(localcount_internal_helper) + + def verify_md5sum(ud, got_sum): + """ + Verify the md5sum we wanted with the one we got + """ + wanted_sum = ud.parm.get('md5sum') + if not wanted_sum: + return True + + return wanted_sum == got_sum + verify_md5sum = staticmethod(verify_md5sum) + + def write_md5sum(url, ud, d): + md5data = bb.utils.md5_file(ud.localpath) + # verify the md5sum + if not Fetch.verify_md5sum(ud, md5data): + raise MD5SumError(url) + + md5out = file(ud.md5, 'w') + md5out.write(md5data) + md5out.close() + write_md5sum = staticmethod(write_md5sum) + + def latest_revision(self, url, ud, d): + """ + Look in the cache for the latest revision, if not present ask the SCM. + """ + if not hasattr(self, "_latest_revision"): + raise ParameterError + + pd = persist_data.persist(d) + revs = pd['BB_URI_HEADREVS'] + key = self.generate_revision_key(url, ud, d) + rev = revs[key] + if rev != None: + return str(rev) + + revs[key] = rev = self._latest_revision(url, ud, d) + return rev + + def sortable_revision(self, url, ud, d): + """ + + """ + if hasattr(self, "_sortable_revision"): + return self._sortable_revision(url, ud, d) + + pd = persist_data.persist(d) + localcounts = pd['BB_URI_LOCALCOUNT'] + key = self.generate_revision_key(url, ud, d) + + latest_rev = self._build_revision(url, ud, d) + last_rev = localcounts[key + '_rev'] + uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False + count = None + if uselocalcount: + count = Fetch.localcount_internal_helper(ud, d) + if count is None: + count = localcounts[key + '_count'] + + if last_rev == latest_rev: + return str(count + "+" + latest_rev) + + buildindex_provided = hasattr(self, "_sortable_buildindex") + if buildindex_provided: + count = self._sortable_buildindex(url, ud, d, latest_rev) + + if count is None: + count = "0" + elif uselocalcount or buildindex_provided: + count = str(count) + else: + count = str(int(count) + 1) + + localcounts[key + '_rev'] = latest_rev + localcounts[key + '_count'] = count + + return str(count + "+" + latest_rev) + + def generate_revision_key(self, url, ud, d): + key = self._revision_key(url, ud, d) + return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") + +from . import cvs +from . import git +from . import local +from . import svn +from . import wget +from . import svk +from . import ssh +from . import perforce +from . import bzr +from . import hg +from . import osc +from . import repo + +methods.append(local.Local()) +methods.append(wget.Wget()) +methods.append(svn.Svn()) +methods.append(git.Git()) +methods.append(cvs.Cvs()) +methods.append(svk.Svk()) +methods.append(ssh.SSH()) +methods.append(perforce.Perforce()) +methods.append(bzr.Bzr()) +methods.append(hg.Hg()) +methods.append(osc.Osc()) +methods.append(repo.Repo()) diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py new file mode 100644 index 0000000000..97b042b2a5 --- /dev/null +++ b/bitbake/lib/bb/fetch2/bzr.py @@ -0,0 +1,145 @@ +""" +BitBake 'Fetch' implementation for bzr. + +""" + +# Copyright (C) 2007 Ross Burton +# Copyright (C) 2007 Richard Purdie +# +# Classes for obtaining upstream sources for the +# BitBake build tools. +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. 
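Fetch.sortable_revision() above exists because raw SCM revisions (git hashes in particular) do not sort; it prefixes them with a local counter persisted in BB_URI_LOCALCOUNT, bumping the counter whenever the remote revision changes so the resulting strings order correctly over time. The core of the bump logic, as a defensive standalone sketch (a dict stands in for the persisted store, key and revision values are illustrative):

    localcounts = {}
    key = 'git:example-mypn'
    latest_rev = 'deadbeef'                       # from _build_revision()
    last_rev = localcounts.get(key + '_rev')
    count = localcounts.get(key + '_count')
    if last_rev == latest_rev:
        version = count + "+" + latest_rev        # unchanged: reuse the count
    else:
        count = str(int(count) + 1) if count else "0"
        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count
        version = count + "+" + latest_rev        # e.g. "0+deadbeef" on first use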
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import Fetch, FetchError, runfetchcmd, logger + +class Bzr(Fetch): + def supports(self, url, ud, d): + return ud.type in ['bzr'] + + def urldata_init(self, ud, d): + """ + init bzr specific variable within url data + """ + # Create paths to bzr checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) + + def localpath (self, url, ud, d): + if not ud.revision: + ud.revision = self.latest_revision(url, ud, d) + + ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def _buildbzrcommand(self, ud, d, command): + """ + Build up an bzr commandline based on ud + command is "fetch", "update", "revno" + """ + + basecmd = data.expand('${FETCHCMD_bzr}', d) + + proto = ud.parm.get('proto', 'http') + + bzrroot = ud.host + ud.path + + options = [] + + if command is "revno": + bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + else: + if ud.revision: + options.append("-r %s" % ud.revision) + + if command is "fetch": + bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + elif command is "update": + bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) + else: + raise FetchError("Invalid bzr command %s" % command) + + return bzrcmd + + def go(self, loc, ud, d): + """Fetch url""" + + if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): + bzrcmd = self._buildbzrcommand(ud, d, "update") + logger.debug(1, "BZR Update %s", loc) + os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) + runfetchcmd(bzrcmd, d) + else: + bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) + bzrcmd = self._buildbzrcommand(ud, d, "fetch") + logger.debug(1, "BZR Checkout %s", loc) + bb.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", bzrcmd) + runfetchcmd(bzrcmd, d) + + os.chdir(ud.pkgdir) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" + + # tar them up to a defined filename + try: + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) + except: + t, v, tb = sys.exc_info() + try: + os.unlink(ud.localpath) + except OSError: + pass + raise t, v, tb + + def supports_srcrev(self): + return True + + def _revision_key(self, url, ud, d): + """ + Return a unique key for the url + """ + return "bzr:" + ud.pkgdir + + def _latest_revision(self, url, ud, d): + """ + Return the latest upstream revision number + """ + logger.debug(2, "BZR fetcher hitting network for %s", url) + + output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) + + return output.strip() + + def _sortable_revision(self, url, ud, d): + """ + Return a sortable revision number which in our case is the revision number + """ + + return 
self._build_revision(url, ud, d) + + def _build_revision(self, url, ud, d): + return ud.revision diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py new file mode 100644 index 0000000000..1570cab9c3 --- /dev/null +++ b/bitbake/lib/bb/fetch2/cvs.py @@ -0,0 +1,172 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +#Based on functions from the base bb module, Copyright 2003 Holger Schurig +# + +import os +import logging +import bb +from bb import data +from bb.fetch2 import Fetch, FetchError, MissingParameterError, logger + +class Cvs(Fetch): + """ + Class to fetch a module or modules from cvs repositories + """ + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with cvs. + """ + return ud.type in ['cvs'] + + def localpath(self, url, ud, d): + if not "module" in ud.parm: + raise MissingParameterError("cvs method needs a 'module' parameter") + ud.module = ud.parm["module"] + + ud.tag = ud.parm.get('tag', "") + + # Override the default date in certain cases + if 'date' in ud.parm: + ud.date = ud.parm['date'] + elif ud.tag: + ud.date = "" + + norecurse = '' + if 'norecurse' in ud.parm: + norecurse = '_norecurse' + + fullpath = '' + if 'fullpath' in ud.parm: + fullpath = '_fullpath' + + ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def forcefetch(self, url, ud, d): + if (ud.date == "now"): + return True + return False + + def go(self, loc, ud, d): + + method = ud.parm.get('method', 'pserver') + localdir = ud.parm.get('localdir', ud.module) + cvs_port = ud.parm.get('port', '') + + cvs_rsh = None + if method == "ext": + if "rsh" in ud.parm: + cvs_rsh = ud.parm["rsh"] + + if method == "dir": + cvsroot = ud.path + else: + cvsroot = ":" + method + cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) + if cvsproxyhost: + cvsroot += ";proxy=" + cvsproxyhost + cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) + if cvsproxyport: + cvsroot += ";proxyport=" + cvsproxyport + cvsroot += ":" + ud.user + if ud.pswd: + cvsroot += ":" + ud.pswd + cvsroot += "@" + ud.host + ":" + cvs_port + ud.path + + options = [] + if 'norecurse' in ud.parm: + options.append("-l") + if ud.date: + # treat YYYYMMDDHHMM specially for CVS + if len(ud.date) == 12: + options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) + else: + options.append("-D \"%s UTC\"" % ud.date) + if ud.tag: + options.append("-r %s" % ud.tag) + + localdata = data.createCopy(d) + data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) + 
data.update_data(localdata) + + data.setVar('CVSROOT', cvsroot, localdata) + data.setVar('CVSCOOPTS', " ".join(options), localdata) + data.setVar('CVSMODULE', ud.module, localdata) + cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) + cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) + + if cvs_rsh: + cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) + cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) + + # create module directory + logger.debug(2, "Fetch: checking for module directory") + pkg = data.expand('${PN}', d) + pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) + moddir = os.path.join(pkgdir, localdir) + if os.access(os.path.join(moddir, 'CVS'), os.R_OK): + logger.info("Update " + loc) + # update sources there + os.chdir(moddir) + myret = os.system(cvsupdatecmd) + else: + logger.info("Fetch " + loc) + # check out sources there + bb.mkdirhier(pkgdir) + os.chdir(pkgdir) + logger.debug(1, "Running %s", cvscmd) + myret = os.system(cvscmd) + + if myret != 0 or not os.access(moddir, os.R_OK): + try: + os.rmdir(moddir) + except OSError: + pass + raise FetchError(ud.module) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude 'CVS'" + + # tar them up to a defined filename + if 'fullpath' in ud.parm: + os.chdir(pkgdir) + myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)) + else: + os.chdir(moddir) + os.chdir('..') + myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))) + + if myret != 0: + try: + os.unlink(ud.localpath) + except OSError: + pass + raise FetchError(ud.module) diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py new file mode 100644 index 0000000000..c62145770f --- /dev/null +++ b/bitbake/lib/bb/fetch2/git.py @@ -0,0 +1,260 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' git implementation + +""" + +#Copyright (C) 2005 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import bb +from bb import data +from bb.fetch2 import Fetch +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Git(Fetch): + """Class to fetch a module or modules from git repositories""" + def init(self, d): + # + # Only enable _sortable revision if the key is set + # + if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True): + self._sortable_buildindex = self._sortable_buildindex_disabled + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with git. 
+ """ + return ud.type in ['git'] + + def urldata_init(self, ud, d): + """ + init git specific variable within url data + so that the git method like latest_revision() can work + """ + if 'protocol' in ud.parm: + ud.proto = ud.parm['protocol'] + elif not ud.host: + ud.proto = 'file' + else: + ud.proto = "rsync" + + ud.branch = ud.parm.get("branch", "master") + + gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) + ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname) + ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) + + ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git" + + def localpath(self, url, ud, d): + ud.tag = ud.revision + if not ud.tag or ud.tag == "master": + ud.tag = self.latest_revision(url, ud, d) + + subdir = ud.parm.get("subpath", "") + if subdir != "": + if subdir.endswith("/"): + subdir = subdir[:-1] + subdirpath = os.path.join(ud.path, subdir); + else: + subdirpath = ud.path; + + if 'fullclone' in ud.parm: + ud.localfile = ud.mirrortarball + else: + ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d) + + if 'noclone' in ud.parm: + ud.localfile = None + return None + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def forcefetch(self, url, ud, d): + if 'fullclone' in ud.parm: + return True + if 'noclone' in ud.parm: + return False + if os.path.exists(ud.localpath): + return False + if not self._contains_ref(ud.tag, d): + return True + return False + + def try_premirror(self, u, ud, d): + if 'noclone' in ud.parm: + return False + if os.path.exists(ud.clonedir): + return False + if os.path.exists(ud.localpath): + return False + + return True + + def go(self, loc, ud, d): + """Fetch url""" + + if ud.user: + username = ud.user + '@' + else: + username = "" + + repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) + + coname = '%s' % (ud.tag) + codir = os.path.join(ud.clonedir, coname) + + # If we have no existing clone and no mirror tarball, try and obtain one + if not os.path.exists(ud.clonedir) and not os.path.exists(repofile): + try: + Fetch.try_mirrors(ud.mirrortarball) + except: + pass + + # If the checkout doesn't exist and the mirror tarball does, extract it + if not os.path.exists(ud.clonedir) and os.path.exists(repofile): + bb.mkdirhier(ud.clonedir) + os.chdir(ud.clonedir) + runfetchcmd("tar -xzf %s" % (repofile), d) + + # If the repo still doesn't exist, fallback to cloning it + if not os.path.exists(ud.clonedir): + runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d) + + os.chdir(ud.clonedir) + # Update the checkout if needed + if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm: + # Remove all but the .git directory + runfetchcmd("rm * -Rf", d) + if 'fullclone' in ud.parm: + runfetchcmd("%s fetch --all" % (ud.basecmd), d) + else: + runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d) + runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d) + runfetchcmd("%s prune-packed" % ud.basecmd, d) + runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) + + # Generate a mirror tarball if needed + os.chdir(ud.clonedir) + mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) + if mirror_tarballs != "0" or 'fullclone' in ud.parm: + logger.info("Creating tarball of git repository") + runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) + + 
if 'fullclone' in ud.parm: + return + + if os.path.exists(codir): + bb.utils.prunedir(codir) + + subdir = ud.parm.get("subpath", "") + if subdir != "": + if subdir.endswith("/"): + subdirbase = os.path.basename(subdir[:-1]) + else: + subdirbase = os.path.basename(subdir) + else: + subdirbase = "" + + if subdir != "": + readpathspec = ":%s" % (subdir) + codir = os.path.join(codir, "git") + coprefix = os.path.join(codir, subdirbase, "") + else: + readpathspec = "" + coprefix = os.path.join(codir, "git", "") + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d) + os.chdir(coprefix) + runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d) + else: + bb.mkdirhier(codir) + os.chdir(ud.clonedir) + runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d) + runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d) + + os.chdir(codir) + logger.info("Creating tarball of git checkout") + runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d) + + os.chdir(ud.clonedir) + bb.utils.prunedir(codir) + + def supports_srcrev(self): + return True + + def _contains_ref(self, tag, d): + basecmd = data.getVar("FETCHCMD_git", d, True) or "git" + output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) + return output.split()[0] != "0" + + def _revision_key(self, url, ud, d): + """ + Return a unique key for the url + """ + return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch + + def _latest_revision(self, url, ud, d): + """ + Compute the HEAD revision for the url + """ + if ud.user: + username = ud.user + '@' + else: + username = "" + + basecmd = data.getVar("FETCHCMD_git", d, True) or "git" + cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch) + output = runfetchcmd(cmd, d, True) + if not output: + raise bb.fetch2.FetchError("Fetch command %s gave empty output\n" % (cmd)) + return output.split()[0] + + def _build_revision(self, url, ud, d): + return ud.tag + + def _sortable_buildindex_disabled(self, url, ud, d, rev): + """ + Return a suitable buildindex for the revision specified. This is done by counting revisions + using "git rev-list" which may or may not work in different circumstances. + """ + + cwd = os.getcwd() + + # Check if we have the rev already + + if not os.path.exists(ud.clonedir): + print("no repo") + self.go(None, ud, d) + if not os.path.exists(ud.clonedir): + logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir) + return None + + + os.chdir(ud.clonedir) + if not self._contains_ref(rev, d): + self.go(None, ud, d) + + output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True) + os.chdir(cwd) + + buildindex = "%s" % output.split()[0] + logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev) + return buildindex diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py new file mode 100644 index 0000000000..0ba84330a5 --- /dev/null +++ b/bitbake/lib/bb/fetch2/hg.py @@ -0,0 +1,178 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementation for mercurial DRCS (hg). 
+ +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2004 Marcin Juszkiewicz +# Copyright (C) 2007 Robert Schuster +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import Fetch +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Hg(Fetch): + """Class to fetch from mercurial repositories""" + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with mercurial. + """ + return ud.type in ['hg'] + + def urldata_init(self, ud, d): + """ + init hg specific variable within url data + """ + if not "module" in ud.parm: + raise MissingParameterError("hg method needs a 'module' parameter") + + ud.module = ud.parm["module"] + + # Create paths to mercurial checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) + ud.moddir = os.path.join(ud.pkgdir, ud.module) + + def forcefetch(self, url, ud, d): + revTag = ud.parm.get('rev', 'tip') + return revTag == "tip" + + def localpath(self, url, ud, d): + if 'rev' in ud.parm: + ud.revision = ud.parm['rev'] + elif not ud.revision: + ud.revision = self.latest_revision(url, ud, d) + + ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def _buildhgcommand(self, ud, d, command): + """ + Build up an hg commandline based on ud + command is "fetch", "update", "info" + """ + + basecmd = data.expand('${FETCHCMD_hg}', d) + + proto = ud.parm.get('proto', 'http') + + host = ud.host + if proto == "file": + host = "/" + ud.host = "localhost" + + if not ud.user: + hgroot = host + ud.path + else: + hgroot = ud.user + "@" + host + ud.path + + if command is "info": + return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) + + options = []; + if ud.revision: + options.append("-r %s" % ud.revision) + + if command is "fetch": + cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) + elif command is "pull": + # do not pass options list; limiting pull to rev causes the local + # repo not to contain it and immediately following "update" command + # will crash + cmd = "%s pull" % (basecmd) + elif command is "update": + cmd = "%s update -C %s" % (basecmd, " ".join(options)) + else: + raise FetchError("Invalid hg command %s" % command) + + return cmd + + def go(self, loc, ud, d): + """Fetch url""" + + logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") + + if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): + updatecmd = self._buildhgcommand(ud, d, "pull") + 
logger.info("Update " + loc) + # update sources there + os.chdir(ud.moddir) + logger.debug(1, "Running %s", updatecmd) + runfetchcmd(updatecmd, d) + + else: + fetchcmd = self._buildhgcommand(ud, d, "fetch") + logger.info("Fetch " + loc) + # check out sources there + bb.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", fetchcmd) + runfetchcmd(fetchcmd, d) + + # Even when we clone (fetch), we still need to update as hg's clone + # won't checkout the specified revision if its on a branch + updatecmd = self._buildhgcommand(ud, d, "update") + os.chdir(ud.moddir) + logger.debug(1, "Running %s", updatecmd) + runfetchcmd(updatecmd, d) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.hg' --exclude '.hgrags'" + + os.chdir(ud.pkgdir) + try: + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) + except: + t, v, tb = sys.exc_info() + try: + os.unlink(ud.localpath) + except OSError: + pass + raise t, v, tb + + def supports_srcrev(self): + return True + + def _latest_revision(self, url, ud, d): + """ + Compute tip revision for the url + """ + output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) + return output.strip() + + def _build_revision(self, url, ud, d): + return ud.revision + + def _revision_key(self, url, ud, d): + """ + Return a unique key for the url + """ + return "hg:" + ud.moddir diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py new file mode 100644 index 0000000000..bcb30dfc95 --- /dev/null +++ b/bitbake/lib/bb/fetch2/local.py @@ -0,0 +1,73 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import bb +import bb.utils +from bb import data +from bb.fetch2 import Fetch + +class Local(Fetch): + def supports(self, url, urldata, d): + """ + Check to see if a given url represents a local fetch. + """ + return urldata.type in ['file'] + + def localpath(self, url, urldata, d): + """ + Return the local filename of a given url assuming a successful fetch. + """ + path = url.split("://")[1] + path = path.split(";")[0] + newpath = path + if path[0] != "/": + filespath = data.getVar('FILESPATH', d, 1) + if filespath: + newpath = bb.utils.which(filespath, path) + if not newpath: + filesdir = data.getVar('FILESDIR', d, 1) + if filesdir: + newpath = os.path.join(filesdir, path) + # We don't set localfile as for this fetcher the file is already local! + return newpath + + def go(self, url, urldata, d): + """Fetch urls (no-op for Local method)""" + # no need to fetch local files, we'll deal with them in place. 
+        return 1
+
+    def checkstatus(self, url, urldata, d):
+        """
+        Check the status of the url
+        """
+        if urldata.localpath.find("*") != -1:
+            logger.info("URL %s looks like a glob and was therefore not checked.", url)
+            return True
+        if os.path.exists(urldata.localpath):
+            return True
+        return False
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..06ac5a9ce3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,144 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+Bitbake "Fetch" implementation for osc (openSUSE Build Service client).
+Based on the svn "Fetch" implementation.
+
+"""
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import Fetch
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Osc(Fetch):
+    """Class to fetch a module or modules from openSUSE Build Service
+    repositories."""
+
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with osc.
+        """
+        return ud.type in ['osc']
+
+    def localpath(self, url, ud, d):
+        if "module" not in ud.parm:
+            raise MissingParameterError("osc method needs a 'module' parameter.")
+
+        ud.module = ud.parm["module"]
+
+        # Create paths to osc checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
+        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
+
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+        else:
+            pv = data.getVar("PV", d, 0)
+            rev = Fetch.srcrev_internal_helper(ud, d)
+            if rev and rev != True:
+                ud.revision = rev
+            else:
+                ud.revision = ""
+
+        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def _buildosccommand(self, ud, d, command):
+        """
+        Build up an osc commandline based on ud
+        command is "fetch" or "update"
+        """
+
+        basecmd = data.expand('${FETCHCMD_osc}', d)
+
+        proto = ud.parm.get('proto', 'osc')
+
+        options = []
+
+        config = "-c %s" % self.generate_config(ud, d)
+
+        if ud.revision:
+            options.append("-r %s" % ud.revision)
+
+        coroot = self._strip_leading_slashes(ud.path)
+
+        if command == "fetch":
+            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+        elif command == "update":
+            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+        else:
+            raise FetchError("Invalid osc command %s" % command)
+
+        return osccmd
+
+    def go(self, loc, ud, d):
+        """
+        Fetch url
+        """
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
+            oscupdatecmd = self._buildosccommand(ud, d, "update")
+            logger.info("Update " + loc)
+            # update sources there
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", oscupdatecmd)
+            runfetchcmd(oscupdatecmd, d)
+        else:
+            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
+            logger.info("Fetch " + loc)
+            # check out sources there
+            bb.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", oscfetchcmd)
+            runfetchcmd(oscfetchcmd, d)
+
+        os.chdir(os.path.join(ud.pkgdir + ud.path))
+        # tar them up to a defined filename
+        try:
+            runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
+        except:
+            t, v, tb = sys.exc_info()
+            try:
+                os.unlink(ud.localpath)
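+                # (Editorial note) The partially written tarball is removed
+                # above so that a failed "tar" run cannot leave a stale
+                # archive behind for a later build to mistake for a good
+                # download; the original exception is re-raised below.
+            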
except OSError: + pass + raise t, v, tb + + def supports_srcrev(self): + return False + + def generate_config(self, ud, d): + """ + Generate a .oscrc to be used for this run. + """ + + config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") + if (os.path.exists(config_path)): + os.remove(config_path) + + f = open(config_path, 'w') + f.write("[general]\n") + f.write("apisrv = %s\n" % ud.host) + f.write("scheme = http\n") + f.write("su-wrapper = su -c\n") + f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) + f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") + f.write("extra-pkgs = gzip\n") + f.write("\n") + f.write("[%s]\n" % ud.host) + f.write("user = %s\n" % ud.parm["user"]) + f.write("pass = %s\n" % ud.parm["pswd"]) + f.close() + + return config_path diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py new file mode 100644 index 0000000000..18b27812e0 --- /dev/null +++ b/bitbake/lib/bb/fetch2/perforce.py @@ -0,0 +1,206 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
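+#
+# Editorial note (illustrative, not part of the original change): the
+# doparse() helper below understands p4 URLs of the general form
+#   p4://user:passwd@p4server.example.com:1666/depot/project/...;label=foo
+# where the credential block, the port and the trailing key=value parameter
+# list are all optional; the host and depot path shown are hypothetical.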
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from future_builtins import zip
+import os
+import logging
+import bb
+from bb import data
+from bb.fetch2 import Fetch
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+
+class Perforce(Fetch):
+    def supports(self, url, ud, d):
+        return ud.type in ['p4']
+
+    def doparse(url, d):
+        parm = {}
+        path = url.split("://")[1]
+        delim = path.find("@")
+        if delim != -1:
+            (user, pswd, host, port) = path.split('@')[0].split(":")
+            path = path.split('@')[1]
+        else:
+            (host, port) = data.getVar('P4PORT', d).split(':')
+            user = ""
+            pswd = ""
+
+        if path.find(";") != -1:
+            keys = []
+            values = []
+            plist = path.split(';')
+            for item in plist:
+                if item.count('='):
+                    (key, value) = item.split('=')
+                    keys.append(key)
+                    values.append(value)
+
+            parm = dict(zip(keys, values))
+        path = "//" + path.split(';')[0]
+        host += ":%s" % (port)
+        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        return host, path, user, pswd, parm
+    doparse = staticmethod(doparse)
+
+    def getcset(d, depot, host, user, pswd, parm):
+        p4opt = ""
+        if "cset" in parm:
+            return parm["cset"]
+        if user:
+            p4opt += " -u %s" % (user)
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4date = data.getVar("P4DATE", d, 1)
+        if "revision" in parm:
+            depot += "#%s" % (parm["revision"])
+        elif "label" in parm:
+            depot += "@%s" % (parm["label"])
+        elif p4date:
+            depot += "@%s" % (p4date)
+
+        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
+        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
+        cset = p4file.readline().strip()
+        logger.debug(1, "READ %s", cset)
+        if not cset:
+            return -1
+
+        return cset.split(' ')[1]
+    getcset = staticmethod(getcset)
+
+    def localpath(self, url, ud, d):
+
+        (host, path, user, pswd, parm) = Perforce.doparse(url, d)
+
+        # If a label is specified, we use that as our filename
+
+        if "label" in parm:
+            ud.localfile = "%s.tar.gz" % (parm["label"])
+            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+        base = path
+        which = path.find('/...')
+        if which != -1:
+            base = path[:which]
+
+        base = self._strip_leading_slashes(base)
+
+        cset = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+    def go(self, loc, ud, d):
+        """
+        Fetch urls
+        """
+
+        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
+
+        if depot.find('/...') != -1:
+            path = depot[:depot.find('/...')]
+        else:
+            path = depot
+
+        module = parm.get('module', os.path.basename(path))
+
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+
+        # Get the p4 command
+        p4opt = ""
+        if user:
+            p4opt += " -u %s" % (user)
+
+        if pswd:
+            p4opt += " -P %s" % (pswd)
+
+        if host:
+            p4opt += " -p %s" % (host)
+
+        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+
+        # create temp directory
+        logger.debug(2, "Fetch: creating temporary directory")
+        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
+        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+        tmpfile = tmppipe.readline().strip()
+        if not tmpfile:
+            logger.error("Fetch: unable to create temporary directory. Make sure 'mktemp' is in the PATH.")
+            raise FetchError(module)
+
+        if "label" in parm:
+            depot = "%s@%s" % (depot, parm["label"])
+        else:
+            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+            depot = "%s@%s" % (depot, cset)
+
+        os.chdir(tmpfile)
+        logger.info("Fetch " + loc)
+        logger.info("%s%s files %s", p4cmd, p4opt, depot)
+        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
+
+        if not p4file:
+            logger.error("Fetch: unable to get the P4 files from %s", depot)
+            raise FetchError(module)
+
+        count = 0
+
+        for file in p4file:
+            list = file.split()
+
+            if list[2] == "delete":
+                continue
+
+            dest = list[0][len(path)+1:]
+            where = dest.find("#")
+
+            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
+            count = count + 1
+
+        if count == 0:
+            logger.error("Fetch: No files gathered from the P4 fetch")
+            raise FetchError(module)
+
+        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
+        if myret != 0:
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise FetchError(module)
+        # cleanup
+        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..3330957ce3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,99 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
+#
+# Based on git.py which is:
+# Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb import data
+from bb.fetch2 import Fetch
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Repo(Fetch):
+    """Class to fetch a module or modules from repo (git) repositories"""
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with repo.
+        """
+        return ud.type in ["repo"]
+
+    def localpath(self, url, ud, d):
+        """
+        We don't care about the git rev of the manifests repository, but
+        we do care about the manifest to use. The default is "default".
+        We also care about the branch or tag to be used. The default is
+        "master".
+        """
+
+        ud.proto = ud.parm.get('protocol', 'git')
+        ud.branch = ud.parm.get('branch', 'master')
+        ud.manifest = ud.parm.get('manifest', 'default.xml')
+        if not ud.manifest.endswith('.xml'):
+            ud.manifest += '.xml'
+
+        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def go(self, loc, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+            logger.debug(1, "%s already exists (or was stashed).
Skipping repo init / sync.", ud.localpath) + return + + gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) + repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo") + codir = os.path.join(repodir, gitsrcname, ud.manifest) + + if ud.user: + username = ud.user + "@" + else: + username = "" + + bb.mkdirhier(os.path.join(codir, "repo")) + os.chdir(os.path.join(codir, "repo")) + if not os.path.exists(os.path.join(codir, "repo", ".repo")): + runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d) + + runfetchcmd("repo sync", d) + os.chdir(codir) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.repo' --exclude '.git'" + + # Create a cache + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d) + + def supports_srcrev(self): + return False + + def _build_revision(self, url, ud, d): + return ud.manifest + + def _want_sortable_revision(self, url, ud, d): + return False diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py new file mode 100644 index 0000000000..8b283222bf --- /dev/null +++ b/bitbake/lib/bb/fetch2/ssh.py @@ -0,0 +1,118 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +''' +BitBake 'Fetch' implementations + +This implementation is for Secure Shell (SSH), and attempts to comply with the +IETF secsh internet draft: + http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ + + Currently does not support the sftp parameters, as this uses scp + Also does not support the 'fingerprint' connection parameter. + +''' + +# Copyright (C) 2006 OpenedHand Ltd. +# +# +# Based in part on svk.py: +# Copyright (C) 2006 Holger Hans Peter Freyther +# Based on svn.py: +# Copyright (C) 2003, 2004 Chris Larson +# Based on functions from the base bb module: +# Copyright 2003 Holger Schurig +# +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import re, os +from bb import data +from bb.fetch2 import Fetch +from bb.fetch2 import FetchError + + +__pattern__ = re.compile(r''' + \s* # Skip leading whitespace + ssh:// # scheme + ( # Optional username/password block + (?P<user>\S+) # username + (:(?P<pass>\S+))? # colon followed by the password (optional) + )? + (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) + @ + (?P<host>\S+?) # non-greedy match of the host + (:(?P<port>[0-9]+))? # colon followed by the port (optional) + / + (?P<path>[^;]+) # path on the remote system, may be absolute or relative, + # and may include the use of '~' to reference the remote home + # directory + (?P<sparam>(;[^;]+)*)? 
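+    # (Editorial note, illustrative and not part of the original change: a
+    #  URL such as ssh://joe:secret@gate.example.com:2222/home/joe/src;x=1
+    #  would populate user, pass, host, port, path and the trailing sparam
+    #  block; all names shown are hypothetical.)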
# parameters block (optional) + $ +''', re.VERBOSE) + +class SSH(Fetch): + '''Class to fetch a module or modules via Secure Shell''' + + def supports(self, url, urldata, d): + return __pattern__.match(url) != None + + def localpath(self, url, urldata, d): + m = __pattern__.match(url) + path = m.group('path') + host = m.group('host') + lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) + return lpath + + def go(self, url, urldata, d): + dldir = data.getVar('DL_DIR', d, 1) + + m = __pattern__.match(url) + path = m.group('path') + host = m.group('host') + port = m.group('port') + user = m.group('user') + password = m.group('pass') + + ldir = os.path.join(dldir, host) + lpath = os.path.join(ldir, os.path.basename(path)) + + if not os.path.exists(ldir): + os.makedirs(ldir) + + if port: + port = '-P %s' % port + else: + port = '' + + if user: + fr = user + if password: + fr += ':%s' % password + fr += '@%s' % host + else: + fr = host + fr += ':%s' % path + + + import commands + cmd = 'scp -B -r %s %s %s/' % ( + port, + commands.mkarg(fr), + commands.mkarg(ldir) + ) + + (exitstatus, output) = commands.getstatusoutput(cmd) + if exitstatus != 0: + print(output) + raise FetchError('Unable to fetch %s' % url) diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py new file mode 100644 index 0000000000..7990ff21fa --- /dev/null +++ b/bitbake/lib/bb/fetch2/svk.py @@ -0,0 +1,104 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +This implementation is for svk. It is based on the svn implementation + +""" + +# Copyright (C) 2006 Holger Hans Peter Freyther +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import logging +import bb +from bb import data +from bb.fetch2 import Fetch +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import logger + +class Svk(Fetch): + """Class to fetch a module or modules from svk repositories""" + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with svk. 
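+
+        Editorial note (illustrative, not part of the original change): a
+        matching entry would look like
+            SRC_URI = "svk://svkhost.example.net/path;module=mymod;rev=5"
+        with hypothetical host, module and revision values.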
+ """ + return ud.type in ['svk'] + + def localpath(self, url, ud, d): + if not "module" in ud.parm: + raise MissingParameterError("svk method needs a 'module' parameter") + else: + ud.module = ud.parm["module"] + + ud.revision = ud.parm.get('rev', "") + + ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def forcefetch(self, url, ud, d): + return ud.date == "now" + + def go(self, loc, ud, d): + """Fetch urls""" + + svkroot = ud.host + ud.path + + svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) + + if ud.revision: + svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) + + # create temp directory + localdata = data.createCopy(d) + data.update_data(localdata) + logger.debug(2, "Fetch: creating temporary directory") + bb.mkdirhier(data.expand('${WORKDIR}', localdata)) + data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) + tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") + tmpfile = tmppipe.readline().strip() + if not tmpfile: + logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") + raise FetchError(ud.module) + + # check out sources there + os.chdir(tmpfile) + logger.info("Fetch " + loc) + logger.debug(1, "Running %s", svkcmd) + myret = os.system(svkcmd) + if myret != 0: + try: + os.rmdir(tmpfile) + except OSError: + pass + raise FetchError(ud.module) + + os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) + # tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) + if myret != 0: + try: + os.unlink(ud.localpath) + except OSError: + pass + raise FetchError(ud.module) + # cleanup + bb.utils.prunedir(tmpfile) diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py new file mode 100644 index 0000000000..1116795e87 --- /dev/null +++ b/bitbake/lib/bb/fetch2/svn.py @@ -0,0 +1,202 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementation for svn. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2004 Marcin Juszkiewicz +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import Fetch +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Svn(Fetch): + """Class to fetch a module or modules from svn repositories""" + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with svn. 
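+
+        Editorial note (illustrative, not part of the original change): a
+        typical matching entry would be
+            SRC_URI = "svn://svn.example.org/trunk;module=mymod;proto=http"
+        with hypothetical host and module values; localpath() below decides
+        between the rev and date parameters when naming the tarball.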
+        """
+        return ud.type in ['svn']
+
+    def urldata_init(self, ud, d):
+        """
+        init svn specific variables within url data
+        """
+        if "module" not in ud.parm:
+            raise MissingParameterError("svn method needs a 'module' parameter")
+
+        ud.module = ud.parm["module"]
+
+        # Create paths to svn checkouts
+        relpath = self._strip_leading_slashes(ud.path)
+        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
+        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+    def localpath(self, url, ud, d):
+        if 'rev' in ud.parm:
+            ud.date = ""
+            ud.revision = ud.parm['rev']
+        elif 'date' in ud.parm:
+            ud.date = ud.parm['date']
+            ud.revision = ""
+        else:
+            #
+            # ***Nasty hack***
+            # If DATE is in the unexpanded PV, use ud.date (which is set from SRCDATE)
+            # Should warn people to switch to SRCREV here
+            #
+            pv = data.getVar("PV", d, 0)
+            if "DATE" in pv:
+                ud.revision = ""
+            else:
+                # use the initialized revision
+                if ud.revision:
+                    ud.date = ""
+
+        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def _buildsvncommand(self, ud, d, command):
+        """
+        Build up an svn commandline based on ud
+        command is "fetch", "update" or "info"
+        """
+
+        basecmd = data.expand('${FETCHCMD_svn}', d)
+
+        proto = ud.parm.get('proto', 'svn')
+
+        svn_rsh = None
+        if proto == "svn+ssh" and "rsh" in ud.parm:
+            svn_rsh = ud.parm["rsh"]
+
+        svnroot = ud.host + ud.path
+
+        # either use the revision, or SRCDATE in braces,
+        options = []
+
+        if ud.user:
+            options.append("--username %s" % ud.user)
+
+        if ud.pswd:
+            options.append("--password %s" % ud.pswd)
+
+        if command == "info":
+            svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
+        else:
+            suffix = ""
+            if ud.revision:
+                options.append("-r %s" % ud.revision)
+                suffix = "@%s" % (ud.revision)
+            elif ud.date:
+                options.append("-r {%s}" % ud.date)
+
+            if command == "fetch":
+                svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
+            elif command == "update":
+                svncmd = "%s update %s" % (basecmd, " ".join(options))
+            else:
+                raise FetchError("Invalid svn command %s" % command)
+
+        if svn_rsh:
+            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+
+        return svncmd
+
+    def go(self, loc, ud, d):
+        """Fetch url"""
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+            svnupdatecmd = self._buildsvncommand(ud, d, "update")
+            logger.info("Update " + loc)
+            # update sources there
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", svnupdatecmd)
+            runfetchcmd(svnupdatecmd, d)
+        else:
+            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+            logger.info("Fetch " + loc)
+            # check out sources there
+            bb.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", svnfetchcmd)
+            runfetchcmd(svnfetchcmd, d)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.svn'"
+
+        os.chdir(ud.pkgdir)
+        # tar them up to a defined filename
+        try:
+            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
+        except:
+            t, v, tb = sys.exc_info()
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise t, v, tb
+
+    def supports_srcrev(self):
+        return True
+
+    def _revision_key(self, url, ud, d):
+        """
+        Return a unique key for the url
+        """
+        return "svn:" + ud.moddir
+
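+    # Editorial note (illustrative, not part of the original change): when a
+    # recipe sets SRCREV = "${AUTOREV}", the generic fetcher code ends up in
+    # _latest_revision() below, which runs the "info" command built by
+    # _buildsvncommand() and scrapes the "Last Changed Rev" field of the
+    # "svn info" output to decide what to build.
+    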
def _latest_revision(self, url, ud, d): + """ + Return the latest upstream revision number + """ + logger.debug(2, "SVN fetcher hitting network for %s", url) + + output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) + + revision = None + for line in output.splitlines(): + if "Last Changed Rev" in line: + revision = line.split(":")[1].strip() + + return revision + + def _sortable_revision(self, url, ud, d): + """ + Return a sortable revision number which in our case is the revision number + """ + + return self._build_revision(url, ud, d) + + def _build_revision(self, url, ud, d): + return ud.revision diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py new file mode 100644 index 0000000000..cf36ccad0a --- /dev/null +++ b/bitbake/lib/bb/fetch2/wget.py @@ -0,0 +1,93 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import logging +import bb +import urllib +from bb import data +from bb.fetch2 import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd + +class Wget(Fetch): + """Class to fetch urls via 'wget'""" + def supports(self, url, ud, d): + """ + Check to see if a given url can be fetched with wget. + """ + return ud.type in ['http', 'https', 'ftp'] + + def localpath(self, url, ud, d): + + url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}]) + ud.basename = os.path.basename(ud.path) + ud.localfile = data.expand(urllib.unquote(ud.basename), d) + + return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + + def go(self, uri, ud, d, checkonly = False): + """Fetch urls""" + + def fetch_uri(uri, ud, d): + if checkonly: + fetchcmd = data.getVar("CHECKCOMMAND", d, 1) + elif os.path.exists(ud.localpath): + # file exists, but we didnt complete it.. trying again.. 
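+                # Editorial note (illustrative, not part of the original
+                # change): FETCHCOMMAND, RESUMECOMMAND and CHECKCOMMAND are
+                # command templates taken from the configuration, along the
+                # lines of the hypothetical
+                #   FETCHCOMMAND_wget = "wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
+                # and the ${URI} and ${FILE} placeholders are substituted
+                # textually below rather than expanded by the datastore.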
+ fetchcmd = data.getVar("RESUMECOMMAND", d, 1) + else: + fetchcmd = data.getVar("FETCHCOMMAND", d, 1) + + uri = uri.split(";")[0] + uri_decoded = list(decodeurl(uri)) + uri_type = uri_decoded[0] + uri_host = uri_decoded[1] + + fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) + fetchcmd = fetchcmd.replace("${FILE}", ud.basename) + logger.info("fetch " + uri) + logger.debug(2, "executing " + fetchcmd) + runfetchcmd(fetchcmd, d) + + # Sanity check since wget can pretend it succeed when it didn't + # Also, this used to happen if sourceforge sent us to the mirror page + if not os.path.exists(ud.localpath) and not checkonly: + logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath) + return False + + return True + + localdata = data.createCopy(d) + data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) + data.update_data(localdata) + + if fetch_uri(uri, ud, localdata): + return True + + raise FetchError(uri) + + + def checkstatus(self, uri, ud, d): + return self.go(uri, ud, d, True) diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py index 13ee569e9a..00dc139c88 100644 --- a/bitbake/lib/bb/msg.py +++ b/bitbake/lib/bb/msg.py @@ -33,17 +33,22 @@ import bb.event class BBLogFormatter(logging.Formatter): """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is""" + DEBUG = logging.DEBUG VERBOSE = 19 + NOTE = logging.INFO PLAIN = 21 + ERROR = logging.ERROR + WARNING = logging.WARNING + CRITICAL = logging.CRITICAL + levelnames = { + DEBUG : 'DEBUG', PLAIN : '', + NOTE : 'NOTE', VERBOSE: 'NOTE', - - logging.DEBUG : 'DEBUG', - logging.INFO : 'NOTE', - logging.WARNING : 'WARNING', - logging.ERROR : 'ERROR', - logging.CRITICAL: 'ERROR', + WARNING : 'WARNING', + ERROR : 'ERROR', + CRITICAL: 'ERROR', } def getLevelName(self, levelno): diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py index e34f1fe894..8fffe1e8f0 100644 --- a/bitbake/lib/bb/parse/ast.py +++ b/bitbake/lib/bb/parse/ast.py @@ -40,13 +40,14 @@ class StatementGroup(list): statement.eval(data) class AstNode(object): - pass + def __init__(self, filename, lineno): + self.filename = filename + self.lineno = lineno class IncludeNode(AstNode): - def __init__(self, what_file, fn, lineno, force): + def __init__(self, filename, lineno, what_file, force): + AstNode.__init__(self, filename, lineno) self.what_file = what_file - self.from_fn = fn - self.from_lineno = lineno self.force = force def eval(self, data): @@ -54,16 +55,17 @@ class IncludeNode(AstNode): Include the file and evaluate the statements """ s = bb.data.expand(self.what_file, data) - logger.debug(2, "CONF %s:%s: including %s", self.from_fn, self.from_lineno, s) + logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) # TODO: Cache those includes... maybe not here though if self.force: - bb.parse.ConfHandler.include(self.from_fn, s, data, "include required") + bb.parse.ConfHandler.include(self.filename, s, data, "include required") else: - bb.parse.ConfHandler.include(self.from_fn, s, data, False) + bb.parse.ConfHandler.include(self.filename, s, data, False) class ExportNode(AstNode): - def __init__(self, var): + def __init__(self, filename, lineno, var): + AstNode.__init__(self, filename, lineno) self.var = var def eval(self, data): @@ -76,7 +78,8 @@ class DataNode(AstNode): this need to be re-evaluated... we might be able to do that faster with multiple classes. 
""" - def __init__(self, groupd): + def __init__(self, filename, lineno, groupd): + AstNode.__init__(self, filename, lineno) self.groupd = groupd def getFunc(self, key, data): @@ -119,19 +122,18 @@ class DataNode(AstNode): else: bb.data.setVar(key, val, data) -class MethodNode: - def __init__(self, func_name, body, lineno, fn): +class MethodNode(AstNode): + def __init__(self, filename, lineno, func_name, body): + AstNode.__init__(self, filename, lineno) self.func_name = func_name self.body = body - self.fn = fn - self.lineno = lineno def eval(self, data): if self.func_name == "__anonymous": - funcname = ("__anon_%s_%s" % (self.lineno, self.fn.translate(string.maketrans('/.+-', '____')))) + funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____')))) if not funcname in bb.methodpool._parsed_fns: text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) - bb.methodpool.insert_method(funcname, text, self.fn) + bb.methodpool.insert_method(funcname, text, self.filename) anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or [] anonfuncs.append(funcname) bb.data.setVar('__BBANONFUNCS', anonfuncs, data) @@ -140,25 +142,26 @@ class MethodNode: bb.data.setVar(self.func_name, '\n'.join(self.body), data) class PythonMethodNode(AstNode): - def __init__(self, funcname, root, body, fn): - self.func_name = funcname - self.root = root + def __init__(self, filename, lineno, function, define, body): + AstNode.__init__(self, filename, lineno) + self.function = function + self.define = define self.body = body - self.fn = fn def eval(self, data): # Note we will add root to parsedmethods after having parse # 'this' file. This means we will not parse methods from # bb classes twice text = '\n'.join(self.body) - if not bb.methodpool.parsed_module(self.root): - bb.methodpool.insert_method(self.root, text, self.fn) - bb.data.setVarFlag(self.func_name, "func", 1, data) - bb.data.setVarFlag(self.func_name, "python", 1, data) - bb.data.setVar(self.func_name, text, data) + if not bb.methodpool.parsed_module(self.define): + bb.methodpool.insert_method(self.define, text, self.filename) + bb.data.setVarFlag(self.function, "func", 1, data) + bb.data.setVarFlag(self.function, "python", 1, data) + bb.data.setVar(self.function, text, data) class MethodFlagsNode(AstNode): - def __init__(self, key, m): + def __init__(self, filename, lineno, key, m): + AstNode.__init__(self, filename, lineno) self.key = key self.m = m @@ -178,7 +181,8 @@ class MethodFlagsNode(AstNode): bb.data.delVarFlag(self.key, "fakeroot", data) class ExportFuncsNode(AstNode): - def __init__(self, fns, classes): + def __init__(self, filename, lineno, fns, classes): + AstNode.__init__(self, filename, lineno) self.n = fns.split() self.classes = classes @@ -217,7 +221,8 @@ class ExportFuncsNode(AstNode): bb.data.setVarFlag(var, 'export_func', '1', data) class AddTaskNode(AstNode): - def __init__(self, func, before, after): + def __init__(self, filename, lineno, func, before, after): + AstNode.__init__(self, filename, lineno) self.func = func self.before = before self.after = after @@ -248,7 +253,8 @@ class AddTaskNode(AstNode): bb.data.setVarFlag(entry, "deps", [var] + existing, data) class BBHandlerNode(AstNode): - def __init__(self, fns): + def __init__(self, filename, lineno, fns): + AstNode.__init__(self, filename, lineno) self.hs = fns.split() def eval(self, data): @@ -259,48 +265,49 @@ class BBHandlerNode(AstNode): bb.data.setVar('__BBHANDLERS', bbhands, data) class InheritNode(AstNode): - def __init__(self, 
classes): + def __init__(self, filename, lineno, classes): + AstNode.__init__(self, filename, lineno) self.classes = classes def eval(self, data): bb.parse.BBHandler.inherit(self.classes, data) -def handleInclude(statements, m, fn, lineno, force): - statements.append(IncludeNode(m.group(1), fn, lineno, force)) +def handleInclude(statements, filename, lineno, m, force): + statements.append(IncludeNode(filename, lineno, m.group(1), force)) -def handleExport(statements, m): - statements.append(ExportNode(m.group(1))) +def handleExport(statements, filename, lineno, m): + statements.append(ExportNode(filename, lineno, m.group(1))) -def handleData(statements, groupd): - statements.append(DataNode(groupd)) +def handleData(statements, filename, lineno, groupd): + statements.append(DataNode(filename, lineno, groupd)) -def handleMethod(statements, func_name, lineno, fn, body): - statements.append(MethodNode(func_name, body, lineno, fn)) +def handleMethod(statements, filename, lineno, func_name, body): + statements.append(MethodNode(filename, lineno, func_name, body)) -def handlePythonMethod(statements, funcname, root, body, fn): - statements.append(PythonMethodNode(funcname, root, body, fn)) +def handlePythonMethod(statements, filename, lineno, funcname, root, body): + statements.append(PythonMethodNode(filename, lineno, funcname, root, body)) -def handleMethodFlags(statements, key, m): - statements.append(MethodFlagsNode(key, m)) +def handleMethodFlags(statements, filename, lineno, key, m): + statements.append(MethodFlagsNode(filename, lineno, key, m)) -def handleExportFuncs(statements, m, classes): - statements.append(ExportFuncsNode(m.group(1), classes)) +def handleExportFuncs(statements, filename, lineno, m, classes): + statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes)) -def handleAddTask(statements, m): +def handleAddTask(statements, filename, lineno, m): func = m.group("func") before = m.group("before") after = m.group("after") if func is None: return - statements.append(AddTaskNode(func, before, after)) + statements.append(AddTaskNode(filename, lineno, func, before, after)) -def handleBBHandlers(statements, m): - statements.append(BBHandlerNode(m.group(1))) +def handleBBHandlers(statements, filename, lineno, m): + statements.append(BBHandlerNode(filename, lineno, m.group(1))) -def handleInherit(statements, m): +def handleInherit(statements, filename, lineno, m): classes = m.group(1) - statements.append(InheritNode(classes.split())) + statements.append(InheritNode(filename, lineno, classes.split())) def finalize(fn, d, variant = None): for lazykey in bb.data.getVar("__lazy_assigned", d) or (): diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index 81554b9435..31d1e21c67 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py @@ -28,7 +28,7 @@ from __future__ import absolute_import import re, bb, os import logging -import bb.fetch, bb.build, bb.utils +import bb.build, bb.utils from bb import data from . 
import ConfHandler @@ -172,7 +172,7 @@ def feeder(lineno, s, fn, root, statements): if __infunc__: if s == '}': __body__.append('') - ast.handleMethod(statements, __infunc__, lineno, fn, __body__) + ast.handleMethod(statements, fn, lineno, __infunc__, __body__) __infunc__ = "" __body__ = [] else: @@ -185,7 +185,8 @@ def feeder(lineno, s, fn, root, statements): __body__.append(s) return else: - ast.handlePythonMethod(statements, __inpython__, root, __body__, fn) + ast.handlePythonMethod(statements, fn, lineno, __inpython__, + root, __body__) __body__ = [] __inpython__ = False @@ -206,7 +207,7 @@ def feeder(lineno, s, fn, root, statements): m = __func_start_regexp__.match(s) if m: __infunc__ = m.group("func") or "__anonymous" - ast.handleMethodFlags(statements, __infunc__, m) + ast.handleMethodFlags(statements, fn, lineno, __infunc__, m) return m = __def_regexp__.match(s) @@ -218,22 +219,22 @@ def feeder(lineno, s, fn, root, statements): m = __export_func_regexp__.match(s) if m: - ast.handleExportFuncs(statements, m, classes) + ast.handleExportFuncs(statements, fn, lineno, m, classes) return m = __addtask_regexp__.match(s) if m: - ast.handleAddTask(statements, m) + ast.handleAddTask(statements, fn, lineno, m) return m = __addhandler_regexp__.match(s) if m: - ast.handleBBHandlers(statements, m) + ast.handleBBHandlers(statements, fn, lineno, m) return m = __inherit_regexp__.match(s) if m: - ast.handleInherit(statements, m) + ast.handleInherit(statements, fn, lineno, m) return return ConfHandler.feeder(lineno, s, fn, statements) diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index d90f5d868e..fc239a3540 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py @@ -113,22 +113,22 @@ def feeder(lineno, s, fn, statements): m = __config_regexp__.match(s) if m: groupd = m.groupdict() - ast.handleData(statements, groupd) + ast.handleData(statements, fn, lineno, groupd) return m = __include_regexp__.match(s) if m: - ast.handleInclude(statements, m, fn, lineno, False) + ast.handleInclude(statements, fn, lineno, m, False) return m = __require_regexp__.match(s) if m: - ast.handleInclude(statements, m, fn, lineno, True) + ast.handleInclude(statements, fn, lineno, m, True) return m = __export_regexp__.match(s) if m: - ast.handleExport(statements, m) + ast.handleExport(statements, fn, lineno, m) return raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 187720fc46..2b37619ae3 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py @@ -22,13 +22,12 @@ Handles preparation and execution of a queue of tasks # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +import copy import os import sys -import subprocess import signal import stat import fcntl -import copy import logging import bb from bb import msg, data, event @@ -36,12 +35,6 @@ from bb import msg, data, event bblogger = logging.getLogger("BitBake") logger = logging.getLogger("BitBake.RunQueue") -try: - import cPickle as pickle -except ImportError: - import pickle - logger.info("Importing cPickle failed. 
Falling back to a very slow implementation.") - class RunQueueStats: """ Holds statistics on the tasks handled by the associated runQueue @@ -93,28 +86,28 @@ class RunQueueScheduler(object): """ self.rq = runqueue self.rqdata = rqdata - numTasks = len(self.rq.runq_fnid) + numTasks = len(self.rqdata.runq_fnid) self.prio_map = [] self.prio_map.extend(range(numTasks)) - def next_buildable_tasks(self): + def next_buildable_task(self): """ Return the id of the first task we find that is buildable """ - for tasknum in range(len(self.rqdata.runq_fnid)): + for tasknum in xrange(len(self.rqdata.runq_fnid)): taskid = self.prio_map[tasknum] if self.rq.runq_running[taskid] == 1: continue if self.rq.runq_buildable[taskid] == 1: - yield taskid + return taskid def next(self): """ Return the id of the task we should build next """ if self.rq.stats.active < self.rq.number_tasks: - return next(self.next_buildable_tasks(), None) + return self.next_buildable_task() class RunQueueSchedulerSpeed(RunQueueScheduler): """ @@ -127,13 +120,12 @@ class RunQueueSchedulerSpeed(RunQueueScheduler): """ The priority map is sorted by task weight. """ - from copy import deepcopy self.rq = runqueue self.rqdata = rqdata - sortweight = sorted(deepcopy(self.rqdata.runq_weight)) - copyweight = deepcopy(self.rqdata.runq_weight) + sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight)) + copyweight = copy.deepcopy(self.rqdata.runq_weight) self.prio_map = [] for weight in sortweight: @@ -155,12 +147,11 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed): def __init__(self, runqueue, rqdata): RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata) - from copy import deepcopy #FIXME - whilst this groups all fnids together it does not reorder the #fnid groups optimally. - basemap = deepcopy(self.prio_map) + basemap = copy.deepcopy(self.prio_map) self.prio_map = [] while (len(basemap) > 0): entry = basemap.pop(0) @@ -190,25 +181,6 @@ class RunQueueData: self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or "" self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split() - self.schedulers = set(obj for obj in globals().itervalues() - if type(obj) is type and issubclass(obj, RunQueueScheduler)) - - user_schedulers = bb.data.getVar("BB_SCHEDULERS", cfgData, True) - if user_schedulers: - for sched in user_schedulers.split(): - if not "." 
in sched: - bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched) - continue - - modname, name = sched.rsplit(".", 1) - try: - module = __import__(modname, fromlist=(name,)) - except ImportError, exc: - logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) - raise SystemExit(1) - else: - self.schedulers.add(getattr(module, name)) - self.reset() def reset(self): @@ -313,7 +285,7 @@ class RunQueueData: if dep in explored_deps[revdep]: scan = True if scan: - find_chains(revdep, deepcopy(prev_chain)) + find_chains(revdep, copy.deepcopy(prev_chain)) for dep in explored_deps[revdep]: if dep not in total_deps: total_deps.append(dep) @@ -715,20 +687,15 @@ class RunQueueData: stampfnwhitelist.append(fn) self.stampfnwhitelist = stampfnwhitelist - #self.dump_data(taskData) - # Interate over the task list looking for tasks with a 'setscene' function - self.runq_setscene = [] for task in range(len(self.runq_fnid)): setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False) if not setscene: continue - #bb.note("Found setscene for %s %s" % (self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task])) self.runq_setscene.append(task) # Interate over the task list and call into the siggen code - dealtwith = set() todeal = set(range(len(self.runq_fnid))) while len(todeal) > 0: @@ -744,7 +711,7 @@ class RunQueueData: hashdata = {} hashdata["hashes"] = {} hashdata["deps"] = {} - for task in range(len(self.runq_fnid)): + for task in xrange(len(self.runq_fnid)): hashdata["hashes"][self.taskData.fn_index[self.runq_fnid[task]] + "." + self.runq_task[task]] = self.runq_hash[task] deps = [] for dep in self.runq_depends[task]: @@ -764,24 +731,24 @@ class RunQueueData: Dump some debug information on the internal data structures """ logger.debug(3, "run_tasks:") - for task in range(len(self.rqdata.runq_task)): - logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, - taskQueue.fn_index[self.rqdata.runq_fnid[task]], - self.rqdata.runq_task[task], - self.rqdata.runq_weight[task], - self.rqdata.runq_depends[task], - self.rqdata.runq_revdeps[task])) + for task in xrange(len(self.rqdata.runq_task)): + logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, + taskQueue.fn_index[self.rqdata.runq_fnid[task]], + self.rqdata.runq_task[task], + self.rqdata.runq_weight[task], + self.rqdata.runq_depends[task], + self.rqdata.runq_revdeps[task]) logger.debug(3, "sorted_tasks:") - for task1 in range(len(self.rqdata.runq_task)): + for task1 in xrange(len(self.rqdata.runq_task)): if task1 in self.prio_map: task = self.prio_map[task1] - logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, - taskQueue.fn_index[self.rqdata.runq_fnid[task]], - self.rqdata.runq_task[task], - self.rqdata.runq_weight[task], - self.rqdata.runq_depends[task], - self.rqdata.runq_revdeps[task])) + logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task, + taskQueue.fn_index[self.rqdata.runq_fnid[task]], + self.rqdata.runq_task[task], + self.rqdata.runq_weight[task], + self.rqdata.runq_depends[task], + self.rqdata.runq_revdeps[task]) class RunQueue: def __init__(self, cooker, cfgData, dataCache, taskData, targets): @@ -809,7 +776,7 @@ class RunQueue: if self.stamppolicy == "whitelist": stampwhitelist = self.rqdata.stampfnwhitelist - for task in range(len(self.rqdata.runq_fnid)): + for task in xrange(len(self.rqdata.runq_fnid)): unchecked[task] = "" if len(self.rqdata.runq_depends[task]) == 0: 
buildable.append(task) @@ -824,7 +791,7 @@ class RunQueue: if revdep in unchecked: buildable.append(revdep) - for task in range(len(self.rqdata.runq_fnid)): + for task in xrange(len(self.rqdata.runq_fnid)): if task not in unchecked: continue fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] @@ -909,7 +876,7 @@ class RunQueue: fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] if taskname is None: taskname = self.rqdata.runq_task[task] - + stampfile = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn], fn, taskname) # If the stamp is missing its not current @@ -919,7 +886,7 @@ class RunQueue: # If its a 'nostamp' task, it's not current taskdep = self.rqdata.dataCache.task_deps[fn] if 'nostamp' in taskdep and taskname in taskdep['nostamp']: - logger.debug(2, "%s.%s is nostamp\n" % (fn, taskname)) + logger.debug(2, "%s.%s is nostamp\n", fn, taskname) return False if taskname != "do_setscene" and taskname.endswith("_setscene"): @@ -939,10 +906,10 @@ class RunQueue: continue if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): if not t2: - logger.debug(2, "Stampfile %s does not exist" % (stampfile2)) + logger.debug(2, 'Stampfile %s does not exist', stampfile2) iscurrent = False if t1 < t2: - logger.debug(2, "Stampfile %s < %s" % (stampfile, stampfile2)) + logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2) iscurrent = False return iscurrent @@ -1014,7 +981,7 @@ class RunQueue: bb.note("Reparsing files to collect dependency data") for task in range(len(self.rqdata.runq_fnid)): if self.rqdata.runq_fnid[task] not in done: - fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] + fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) done.add(self.rqdata.runq_fnid[task]) @@ -1087,7 +1054,7 @@ class RunQueueExecute: self.rq.state = runQueueComplete return - def fork_off_task(self, fn, task, taskname): + def fork_off_task(self, fn, task, taskname, quieterrors=False): the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data) env = bb.data.export_vars(the_data) @@ -1115,10 +1082,9 @@ class RunQueueExecute: sys.stdout.flush() sys.stderr.flush() try: - pipeinfd, pipeoutfd = os.pipe() - pipein = os.fdopen(pipeinfd, 'rb', 4096) - pipeout = os.fdopen(pipeoutfd, 'wb', 4096) - + pipein, pipeout = os.pipe() + pipein = os.fdopen(pipein, 'rb', 4096) + pipeout = os.fdopen(pipeout, 'wb', 0) pid = os.fork() except OSError as e: bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) @@ -1148,7 +1114,7 @@ class RunQueueExecute: #newso = open(logout, 'w') #os.dup2(newso.fileno(), sys.stdout.fileno()) #os.dup2(newso.fileno(), sys.stderr.fileno()) - if taskname.endswith("_setscene"): + if quieterrors: the_data.setVarFlag(taskname, "quieterrors", "1") bb.data.setVar("BB_WORKERCONTEXT", "1", the_data) @@ -1219,14 +1185,38 @@ class RunQueueExecuteTasks(RunQueueExecute): event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData) - for scheduler in self.rqdata.schedulers: + schedulers = self.get_schedulers() + for scheduler in schedulers: if self.scheduler == scheduler.name: self.sched = scheduler(self, self.rqdata) logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name) break else: - bb.fatal("Invalid scheduler '%s'. 
Available schedulers: %s" % - (self.scheduler, ", ".join(obj.name for obj in self.rqdata.schedulers))) + bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % + (self.scheduler, ", ".join(obj.name for obj in schedulers))) + + + def get_schedulers(self): + schedulers = set(obj for obj in globals().values() + if type(obj) is type and + issubclass(obj, RunQueueScheduler)) + + user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True) + if user_schedulers: + for sched in user_schedulers.split(): + if not "." in sched: + bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched) + continue + + modname, name = sched.rsplit(".", 1) + try: + module = __import__(modname, fromlist=(name,)) + except ImportError, exc: + logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) + raise SystemExit(1) + else: + schedulers.add(getattr(module, name)) + return schedulers def task_completeoutright(self, task): """ @@ -1283,17 +1273,17 @@ class RunQueueExecuteTasks(RunQueueExecute): # nothing to do self.rq.state = runQueueCleanUp - for task in iter(self.sched.next, None): + task = self.sched.next() + if task is not None: fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]] taskname = self.rqdata.runq_task[task] if self.rq.check_stamp_task(task, taskname): - logger.debug(2, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task))) + logger.debug(2, "Stamp current task %s (%s)", task, + self.rqdata.get_user_idstring(task)) self.task_skip(task) return True - bb.event.fire(runQueueTaskStarted(task, self.stats, self.rq), self.cfgData) - taskdep = self.rqdata.dataCache.task_deps[fn] if 'noexec' in taskdep and taskname in taskdep['noexec']: startevent = runQueueTaskStarted(task, self.stats, self.rq, @@ -1457,12 +1447,11 @@ class RunQueueExecuteScenequeue(RunQueueExecute): for task in xrange(len(self.sq_revdeps)): if task not in valid_new and task not in noexec: - logger.debug(2, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task]))) + logger.debug(2, 'No package found, so skipping setscene task %s', + self.rqdata.get_user_idstring(task)) self.task_failoutright(task) - #print(str(valid)) - - logger.info("Executing SetScene Tasks") + logger.info('Executing SetScene Tasks') self.rq.state = runQueueSceneRun @@ -1523,11 +1512,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute): # Find the next setscene to run for nexttask in xrange(self.stats.total): if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1: - #bb.note("Comparing %s to %s" % (self.sq_revdeps[nexttask], self.scenequeue_covered)) - #if len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered): - # bb.note("Skipping task %s" % nexttask) - # self.scenequeue_skip(nexttask) - # return True task = nexttask break if task is not None: @@ -1536,7 +1520,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute): taskname = self.rqdata.runq_task[realtask] + "_setscene" if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]): - logger.debug(2, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task))) + logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant', + task, self.rqdata.get_user_idstring(task)) self.task_failoutright(task) return True @@ -1547,7 +1532,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute): return True if 
self.rq.check_stamp_task(realtask, taskname): - logger.debug(2, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask))) + logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies', + task, self.rqdata.get_user_idstring(realtask)) self.task_skip(task) return True @@ -1577,11 +1563,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute): for task in oldcovered: self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task]) - bb.debug(1, "We can skip tasks %s" % self.rq.scenequeue_covered) + logger.debug(1, 'We can skip tasks %s', self.rq.scenequeue_covered) self.rq.state = runQueueRunInit return True + def fork_off_task(self, fn, task, taskname): + return RunQueueExecute.fork_off_task(self, fn, task, taskname, quieterrors=True) + class TaskFailure(Exception): """ Exception raised when a task in a runqueue fails @@ -1632,12 +1621,12 @@ class runQueueTaskCompleted(runQueueEvent): """ #def check_stamp_fn(fn, taskname, d): -# rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) +# rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) # fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d) -# fnid = rq.rqdata.taskData.getfn_id(fn) -# taskid = rq.get_task_id(fnid, taskname) +# fnid = rqexe.rqdata.taskData.getfn_id(fn) +# taskid = rqexe.rqdata.get_task_id(fnid, taskname) # if taskid is not None: -# return rq.check_stamp_task(taskid) +# return rqexe.rq.check_stamp_task(taskid) # return None class runQueuePipe(): @@ -1645,17 +1634,17 @@ class runQueuePipe(): Abstraction for a pipe between a worker thread and the server """ def __init__(self, pipein, pipeout, d): - self.fd = pipein + self.input = pipein pipeout.close() - fcntl.fcntl(self.fd, fcntl.F_SETFL, fcntl.fcntl(self.fd, fcntl.F_GETFL) | os.O_NONBLOCK) + fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK) self.queue = "" self.d = d def read(self): start = len(self.queue) try: - self.queue = self.queue + self.fd.read(1024) - except IOError: + self.queue = self.queue + self.input.read(1024) + except (OSError, IOError): pass end = len(self.queue) index = self.queue.find("</event>") @@ -1670,4 +1659,4 @@ class runQueuePipe(): continue if len(self.queue) > 0: print("Warning, worker left partial message: %s" % self.queue) - self.fd.close() + self.input.close() diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 7d7a203b83..4dc09b3f9e 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py @@ -178,6 +178,17 @@ class SignatureGeneratorBasic(SignatureGenerator): bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k])) self.dump_sigtask(fn, task, dataCache.stamp[fn], True) +class SignatureGeneratorBasicHash(SignatureGeneratorBasic): + name = "basichash" + + def stampfile(self, stampbase, fn, taskname): + if taskname != "do_setscene" and taskname.endswith("_setscene"): + k = fn + "." + taskname[:-9] + else: + k = fn + "." 
+ taskname + h = self.taskhash[k] + return "%s.%s.%s" % (stampbase, taskname, h) + def dump_this_task(outfile, d): fn = d.getVar("BB_FILENAME", True) task = "do_" + d.getVar("BB_CURRENTTASK", True) diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py index 7fb7f84e5b..34180fb93e 100644 --- a/bitbake/lib/bb/ui/knotty.py +++ b/bitbake/lib/bb/ui/knotty.py @@ -111,10 +111,10 @@ def main(server, eventHandler): print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task)) if isinstance(event, logging.LogRecord): - if event.levelno >= logging.CRITICAL: - return_value = 1 - if event.levelno is logging.ERROR: + if event.levelno >= format.ERROR: return_value = 1 + if event.taskpid != 0 and event.levelno <= format.NOTE: + continue logger.handle(event) continue diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 48ed0d72e5..5b3710f84f 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py @@ -291,17 +291,6 @@ def join_deps(deps): result.append(dep) return ", ".join(result) -def extend_deps(dest, src): - """ - Extend the results from explode_dep_versions by appending all of the items - in the second list, avoiding duplicates. - """ - for dep in src: - if dep not in dest: - dest[dep] = src[dep] - elif dest[dep] != src[dep]: - dest[dep] = src[dep] - def _print_trace(body, line): """ Print the Environment of a Text Body @@ -355,16 +344,14 @@ def better_exec(code, context, text, realfile = "<code>"): if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: raise - logger.exception("Error executing python function in '%s'", code.co_filename) - - # print the Header of the Error Message - logger.error("There was an error when executing a python function in: %s" % code.co_filename) - logger.error("Exception:%s Message:%s" % (t, value)) + import traceback + exception = traceback.format_exception_only(t, value) + logger.error('Error executing a python function in %s:\n%s', + realfile, ''.join(exception)) # Strip 'us' from the stack (better_exec call) tb = tb.tb_next - import traceback textarray = text.split('\n') linefailed = traceback.tb_lineno(tb) @@ -490,9 +477,9 @@ def sha256_file(filename): s.update(line) return s.hexdigest() -# Variables which are preserved from the original environment *and* exported -# into our worker context -def preserved_envvars_export_list(): +def preserved_envvars_exported(): + """Variables which are taken from the environment and placed in and exported + from the metadata""" return [ 'BB_TASKHASH', 'HOME', @@ -505,9 +492,9 @@ def preserved_envvars_export_list(): 'USERNAME', ] -# Variables which are preserved from the original environment *and* exported -# into our worker context for interactive tasks (e.g. 
requiring X) -def preserved_envvars_export_interactive_list(): +def preserved_envvars_exported_interactive(): + """Variables which are taken from the environment and placed in and exported + from the metadata, for interactive tasks""" return [ 'COLORTERM', 'DBUS_SESSION_BUS_ADDRESS', @@ -525,8 +512,8 @@ def preserved_envvars_export_interactive_list(): 'XDG_SESSION_COOKIE', ] -# Variables which are preserved from the original environment into the datastore -def preserved_envvars_list(): +def preserved_envvars(): + """Variables which are taken from the environment and placed in the metadata""" v = [ 'BBPATH', 'BB_PRESERVE_ENV', @@ -535,7 +522,7 @@ def preserved_envvars_list(): 'LANG', '_', ] - return v + preserved_envvars_export_list() + preserved_envvars_export_interactive_list() + return v + preserved_envvars_exported() + preserved_envvars_exported_interactive() def filter_environment(good_vars): """ @@ -557,8 +544,8 @@ def filter_environment(good_vars): return removed_vars -def create_intereactive_env(d): - for k in preserved_envvars_export_interactive_list(): +def create_interactive_env(d): + for k in preserved_envvars_exported_interactive(): os.setenv(k, bb.data.getVar(k, d, True)) def clean_environment(): @@ -570,7 +557,7 @@ def clean_environment(): if 'BB_ENV_WHITELIST' in os.environ: good_vars = os.environ['BB_ENV_WHITELIST'].split() else: - good_vars = preserved_envvars_list() + good_vars = preserved_envvars() if 'BB_ENV_EXTRAWHITE' in os.environ: good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) filter_environment(good_vars) diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 71ed5b6433..031583442d 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass @@ -534,9 +534,9 @@ python do_package_qa () { checks = [package_qa_check_rpath, package_qa_check_dev, package_qa_check_perm, package_qa_check_arch, - package_qa_check_desktop, + package_qa_check_desktop, package_qa_hash_style, package_qa_check_dbg] - # package_qa_check_buildpaths, package_qa_hash_style + # package_qa_check_buildpaths, walk_sane = True rdepends_sane = True for package in packages.split(): diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass index 7ebe5d69aa..df2a8a43bc 100644 --- a/meta/classes/kernel-yocto.bbclass +++ b/meta/classes/kernel-yocto.bbclass @@ -121,7 +121,7 @@ do_kernel_configme() { echo "[INFO] doing kernel configme" cd ${S} - configme --reconfig + configme --reconfig --output ${B} if [ $? -ne 0 ]; then echo "ERROR. 
Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}" exit 1 diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index 30fcc7947b..376e3cf384 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf @@ -449,8 +449,9 @@ BUILDSDK_LDFLAGS = "-L${STAGING_LIBDIR} \ -Wl,-rpath-link,${STAGING_DIR_HOST}${base_libdir} \ -Wl,-rpath,${base_libdir} -Wl,-O1" +TARGET_LINK_HASH_STYLE ??= "" export LDFLAGS = "${TARGET_LDFLAGS}" -export TARGET_LDFLAGS = "-Wl,-O1" +export TARGET_LDFLAGS = "-Wl,-O1 ${TARGET_LINK_HASH_STYLE}" #export TARGET_LDFLAGS = "-L${STAGING_DIR_TARGET}${libdir} \ # -Wl,-rpath-link,${STAGING_DIR_TARGET}${libdir} \ # -Wl,-O1" @@ -536,7 +537,7 @@ UPDATECOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} update -d -P ${CVSCOOPTS}" UPDATECOMMAND_svn = "/usr/bin/env svn update ${SVNCOOPTS}" SRCDATE = "${DATE}" SRCREV = "INVALID" -AUTOREV = "${SRCPV}" +AUTOREV = "${@bb.fetch.get_autorev(d)}" SRCPV = "${@bb.fetch.get_srcrev(d)}" SRC_URI = "file://${FILE}" diff --git a/meta/conf/distro/include/poky-default-revisions.inc b/meta/conf/distro/include/poky-default-revisions.inc index c11faf9316..e094109756 100644 --- a/meta/conf/distro/include/poky-default-revisions.inc +++ b/meta/conf/distro/include/poky-default-revisions.inc @@ -57,7 +57,7 @@ SRCREV_pn-gypsy ??= "147" SRCREV_pn-inputproto ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" SRCREV_pn-inputproto-native ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" SRCREV_pn-inputproto-nativesdk ??= "7203036522ba9d4b224d282d6afc2d0b947711ee" -SRCREV_pn-kern-tools-native ??= "72683bf61fdb83a1c0b4110763f803ff3e39f8ca" +SRCREV_pn-kern-tools-native ??= "f553af044615ba75c2b4b7ef1f382e6f75087213" SRCREV_pn-libdrm ??= "3f3c5be6f908272199ccf53f108b1124bfe0a00e" SRCREV_pn-libfakekey ??= "2031" SRCREV_pn-libgdbus ??= "aeab6e3c0185b271ca343b439470491b99cc587f" @@ -93,20 +93,20 @@ SRCREV_machine_pn-linux-yocto-stable_atom-pc ?= "35521a5a70316785a67aca1de1d39a7 SRCREV_machine_pn-linux-yocto-stable_routerstationpro ?= "b323ab98f055df012277f09d444951619fda24e3" SRCREV_machine_pn-linux-yocto-stable_mpc8315e-rdb ?= "0d19c4ab6185b2e093beb5f366ccff4746197166" SRCREV_machine_pn-linux-yocto-stable_beagleboard ?= "35521a5a70316785a67aca1de1d39a7b84c49ccf" -SRCREV_meta_pn-linux-yocto-stable ?= "582a28e4bc966ea367cbc2dc1f0de89dd4e7c3d8" +SRCREV_meta_pn-linux-yocto-stable ?= "ed446ecd29e8c0f81ab8630a2db652121eeb0b75" # development SRCREVs -SRCREV_machine_pn-linux-yocto_qemuarm = "605b1b1e0c68e9f78a7fb33aa219327153a4a890" -SRCREV_machine_pn-linux-yocto_qemumips = "0f05ab306ac79684a3352f28db91dfb903c67410" -SRCREV_machine_pn-linux-yocto_qemuppc = "d7f2a74cf32e9d6e26e66699d1422096437d3d0e" -SRCREV_machine_pn-linux-yocto_qemux86 = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" -SRCREV_machine_pn-linux-yocto_qemux86-64 = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" -SRCREV_machine_pn-linux-yocto_emenlow = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" -SRCREV_machine_pn-linux-yocto_atom-pc = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" -SRCREV_machine_pn-linux-yocto_routerstationpro = "cc1d802228803e0ebf27f3d3772de3620159d195" -SRCREV_machine_pn-linux-yocto_mpc8315e-rdb = "be26676bf67b0c4b5f36b57fe9e46f8bfd2a67c9" -SRCREV_machine_pn-linux-yocto_beagleboard = "6635864790ac23db856147fcbc4e8ebbfd30a0ba" -SRCREV_meta_pn-linux-yocto ?= "5955ebea1f0d2fbd67a66ed138ce2b3363adf72a" -SRCREV_pn-linux-libc-headers-yocto ??= "6635864790ac23db856147fcbc4e8ebbfd30a0ba" +SRCREV_machine_pn-linux-yocto_qemuarm ?= "4f86b5a0cb23faa4134cc004730d673f6640614b" +SRCREV_machine_pn-linux-yocto_qemumips ?= 
"2b43d84250a582b61397ad5f59a6445ce1258615" +SRCREV_machine_pn-linux-yocto_qemuppc ?= "20f182db27f3e6fc6bd55bfc9e24c8c023fbff7f" +SRCREV_machine_pn-linux-yocto_qemux86 ?= "a9d833fda90e2f1257888a97e092135610b5f259" +SRCREV_machine_pn-linux-yocto_qemux86-64 ?= "a9d833fda90e2f1257888a97e092135610b5f259" +SRCREV_machine_pn-linux-yocto_emenlow ?= "a9d833fda90e2f1257888a97e092135610b5f259" +SRCREV_machine_pn-linux-yocto_atom-pc ?= "a9d833fda90e2f1257888a97e092135610b5f259" +SRCREV_machine_pn-linux-yocto_routerstationpro ?= "9d24b148fc23a2c8fab0a6add18500453b375d54" +SRCREV_machine_pn-linux-yocto_mpc8315e-rdb ?= "3f01b98238c2ff7913ce9f68f9db5ae9538717bc" +SRCREV_machine_pn-linux-yocto_beagleboard ?= "a9d833fda90e2f1257888a97e092135610b5f259" +SRCREV_meta_pn-linux-yocto ?= "7102097a25c7658e0f4d4dc71844e0ff6c446b25" +SRCREV_pn-linux-libc-headers-yocto ??= "a9d833fda90e2f1257888a97e092135610b5f259" SRCREV_pn-matchbox-config-gtk ??= "2081" SRCREV_pn-matchbox-desktop-sato ??= "76" SRCREV_pn-matchbox-desktop ??= "2096" diff --git a/meta/conf/distro/poky.conf b/meta/conf/distro/poky.conf index e6ef465052..1c5c72104b 100644 --- a/meta/conf/distro/poky.conf +++ b/meta/conf/distro/poky.conf @@ -22,6 +22,8 @@ TARGET_FPU_arm ?= "soft" TARGET_FPU_armeb ?= "soft" TARGET_FPU_nokia800 = "hard" +TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][bb.data.getVar('TARGET_ARCH', d, True) in ['mips', 'mipsel', 'mips64', 'mips64el']]}" + QA_LOGFILE = "${TMPDIR}/qa.log" IMAGE_ROOTFS_SIZE_ext2 ?= "131072" diff --git a/meta/conf/machine/akita.conf b/meta/conf/machine/akita.conf index 0f5eaca9cc..cdd465200c 100644 --- a/meta/conf/machine/akita.conf +++ b/meta/conf/machine/akita.conf @@ -5,7 +5,6 @@ require conf/machine/include/zaurus-2.6.inc require conf/machine/include/zaurus-clamshell.inc -PACKAGE_EXTRA_ARCHS += "iwmmxt" MACHINE_FEATURES += "iwmmxt" IMAGE_FSTYPES ?= "jffs2" diff --git a/meta/conf/machine/atom-pc.conf b/meta/conf/machine/atom-pc.conf index 8cf09b811e..58c9b0da65 100644 --- a/meta/conf/machine/atom-pc.conf +++ b/meta/conf/machine/atom-pc.conf @@ -4,7 +4,6 @@ #@DESCRIPTION: Machine configuration for Intel Atom based PCs. Currently supported machines are the Asus eee901, Acer Aspire One, Toshiba NB305, and Intel BlackSand development board. TARGET_ARCH = "i586" -PACKAGE_EXTRA_ARCHS = "x86 core2" include conf/machine/include/tune-atom.inc diff --git a/meta/conf/machine/beagleboard.conf b/meta/conf/machine/beagleboard.conf index 1b73250c24..cb6417b7fb 100644 --- a/meta/conf/machine/beagleboard.conf +++ b/meta/conf/machine/beagleboard.conf @@ -2,7 +2,6 @@ #@NAME: Beagleboard machine #@DESCRIPTION: Machine configuration for the http://beagleboard.org/ board TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive" XSERVER = "xserver-kdrive-fbdev" diff --git a/meta/conf/machine/cm-x270.conf b/meta/conf/machine/cm-x270.conf index 226942de73..9bf7d8f6ae 100644 --- a/meta/conf/machine/cm-x270.conf +++ b/meta/conf/machine/cm-x270.conf @@ -8,9 +8,9 @@ GUI_MACHINE_CLASS ?= "bigscreen" PREFERRED_PROVIDER_virtual/kernel = "linux" PREFERRED_VERSION_linux ?= "2.6.23" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt" require conf/machine/include/tune-xscale.inc +PACKAGE_EXTRA_ARCHS += "iwmmxt" # Console serial port. 
# If you have a W module, add the following line to your local.conf: diff --git a/meta/conf/machine/depicture.conf b/meta/conf/machine/depicture.conf index 333af4f2f2..f72ab2939e 100644 --- a/meta/conf/machine/depicture.conf +++ b/meta/conf/machine/depicture.conf @@ -3,7 +3,6 @@ #@DESCRIPTION: Machine configuration for running an ARM 920t in the BBD20EUROU board. TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t" require conf/machine/include/qemu.inc require conf/machine/include/tune-arm920t.inc diff --git a/meta/conf/machine/em-x270.conf b/meta/conf/machine/em-x270.conf index 47fb8ca533..9bcec5b742 100644 --- a/meta/conf/machine/em-x270.conf +++ b/meta/conf/machine/em-x270.conf @@ -8,7 +8,6 @@ TARGET_ARCH = "arm" GUI_MACHINE_CLASS ?= "bigscreen" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt " PREFERRED_PROVIDER_virtual/kernel = "linux" require conf/machine/include/tune-xscale.inc diff --git a/meta/conf/machine/htcuniversal.conf b/meta/conf/machine/htcuniversal.conf index 2f5b798bff..72f3bc4aad 100644 --- a/meta/conf/machine/htcuniversal.conf +++ b/meta/conf/machine/htcuniversal.conf @@ -4,9 +4,9 @@ #include conf/machine/include/tune-iwmmxt.inc include conf/machine/include/tune-xscale.inc +PACKAGE_EXTRA_ARCHS += "iwmmxt" TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4t armv5te iwmmxt" PREFERRED_PROVIDER_virtual/kernel = "linux-rp" diff --git a/meta/conf/machine/hx2000.conf b/meta/conf/machine/hx2000.conf index 8b4c39f805..a6775f2c3a 100644 --- a/meta/conf/machine/hx2000.conf +++ b/meta/conf/machine/hx2000.conf @@ -3,7 +3,7 @@ #@DESCRIPTION: Machine configuration for the hx2000 iPAQs with a pxa27x CPU TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te iwmmxt " +PACKAGE_EXTRA_ARCHS += "iwmmxt" PREFERRED_PROVIDER_virtual/kernel = "linux-rp" diff --git a/meta/conf/machine/igep0020.conf b/meta/conf/machine/igep0020.conf index 9ddbad65b3..86adfcf1a0 100644 --- a/meta/conf/machine/igep0020.conf +++ b/meta/conf/machine/igep0020.conf @@ -2,7 +2,6 @@ #@NAME: IGEP v2 machine #@DESCRIPTION: Machine configuration for the IGEP v2 board (http://www.myigep.com) TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" # Floating point option TARGET_FPU_arm = "hard" diff --git a/meta/conf/machine/igep0030.conf b/meta/conf/machine/igep0030.conf index 7ebfaa0ca5..af36982a6a 100644 --- a/meta/conf/machine/igep0030.conf +++ b/meta/conf/machine/igep0030.conf @@ -2,7 +2,6 @@ #@NAME: OMAP3 IGEP module #@DESCRIPTION: Machine configuration for the OMAP3 IGEP module (http://www.myigep.com) TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" # Floating point option TARGET_FPU_arm = "hard" diff --git a/meta/conf/machine/include/tune-arm1136jf-s.inc b/meta/conf/machine/include/tune-arm1136jf-s.inc index be5aedc509..a7717510e6 100644 --- a/meta/conf/machine/include/tune-arm1136jf-s.inc +++ b/meta/conf/machine/include/tune-arm1136jf-s.inc @@ -4,5 +4,6 @@ FEED_ARCH = "armv6" BASE_PACKAGE_ARCH = "armv6" ARM_INSTRUCTION_SET = "${@['thumb','arm'][bb.data.getVar('CPU_FEATURES', d, 1).find('thumb') == -1]}" +PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6" require conf/machine/include/tune-thumb.inc diff --git a/meta/conf/machine/include/tune-arm920t.inc b/meta/conf/machine/include/tune-arm920t.inc index 1f7a04bb3b..fee5c586c9 100644 --- a/meta/conf/machine/include/tune-arm920t.inc +++ b/meta/conf/machine/include/tune-arm920t.inc @@ -1,4 +1,4 @@ FEED_ARCH = "armv4t" BASE_PACKAGE_ARCH = "armv4t" TARGET_CC_ARCH = "-march=armv4t 
-mtune=arm920t" - +PACKAGE_EXTRA_ARCHS += "armv4 armv4t" diff --git a/meta/conf/machine/include/tune-arm926ejs.inc b/meta/conf/machine/include/tune-arm926ejs.inc index e80ef10490..f41e460999 100644 --- a/meta/conf/machine/include/tune-arm926ejs.inc +++ b/meta/conf/machine/include/tune-arm926ejs.inc @@ -1,5 +1,5 @@ FEED_ARCH = "armv5te" - +PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te" # For gcc 3.x you need: #TARGET_CC_ARCH = "-march=armv5te -mtune=arm926ejs" # For gcc 4.x you need: diff --git a/meta/conf/machine/include/tune-arm9tdmi.inc b/meta/conf/machine/include/tune-arm9tdmi.inc index 39e4493014..d788b5304f 100644 --- a/meta/conf/machine/include/tune-arm9tdmi.inc +++ b/meta/conf/machine/include/tune-arm9tdmi.inc @@ -1,2 +1,4 @@ +FEED_ARCH = "armv4t" BASE_PACKAGE_ARCH = "armv4t" +PACKAGE_EXTRA_ARCHS += "armv4 armv4t" TARGET_CC_ARCH = "-mcpu=arm9tdmi -mtune=arm9tdmi" diff --git a/meta/conf/machine/include/tune-armv7.inc b/meta/conf/machine/include/tune-armv7.inc index 96c8c64904..379a3eb011 100644 --- a/meta/conf/machine/include/tune-armv7.inc +++ b/meta/conf/machine/include/tune-armv7.inc @@ -3,4 +3,5 @@ # This will NOT compile programs in 'ARM' mode, which is what you really want TARGET_CC_ARCH = "-march=armv7 -mfpu=vfp -mfloat-abi=softfp" FEED_ARCH = "armv7" +PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6 armv7" BASE_PACKAGE_ARCH = "armv7" diff --git a/meta/conf/machine/include/tune-atom.inc b/meta/conf/machine/include/tune-atom.inc index 0f09e8d56f..a401856c8b 100644 --- a/meta/conf/machine/include/tune-atom.inc +++ b/meta/conf/machine/include/tune-atom.inc @@ -2,3 +2,4 @@ BASE_PACKAGE_ARCH = "core2" TARGET_CC_ARCH = "-m32 -march=core2 -msse3 -mtune=generic -mfpmath=sse" #MOBLIN_CFLAGS = "-Os -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector --param=ssp-buffer-size=4 -fasynchronous-unwind-tables" +PACKAGE_EXTRA_ARCHS += "x86 i386 i486 i586 core2" diff --git a/meta/conf/machine/include/tune-c3.inc b/meta/conf/machine/include/tune-c3.inc index 107341eb62..1d636e89c6 100644 --- a/meta/conf/machine/include/tune-c3.inc +++ b/meta/conf/machine/include/tune-c3.inc @@ -1,3 +1,4 @@ TARGET_CC_ARCH = "-march=c3 -mtune=c3" +PACKAGE_EXTRA_ARCHS += "i386 i486 i586" BASE_PACKAGE_ARCH = "i586" - +FEED_ARCH = "i586" diff --git a/meta/conf/machine/include/tune-cortexa8.inc b/meta/conf/machine/include/tune-cortexa8.inc index 2c40b70560..3ec1c03500 100644 --- a/meta/conf/machine/include/tune-cortexa8.inc +++ b/meta/conf/machine/include/tune-cortexa8.inc @@ -10,3 +10,4 @@ TARGET_CC_ARCH = "-march=armv7-a -mtune=cortex-a8 -mfpu=neon -mfloat-abi=softfp FEED_ARCH = "armv7a" BASE_PACKAGE_ARCH = "armv7a" +PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te armv6 armv7 armv7a" diff --git a/meta/conf/machine/include/tune-ep9312.inc b/meta/conf/machine/include/tune-ep9312.inc index 2cfdf5270f..d0c5bb2e98 100644 --- a/meta/conf/machine/include/tune-ep9312.inc +++ b/meta/conf/machine/include/tune-ep9312.inc @@ -2,5 +2,7 @@ TARGET_CC_ARCH = "-march=ep9312 -mtune=ep9312 -mcpu=ep9312" # add "-mfp=maverick" for newer gcc versions > 4.0 #set arch to ep9312 for all generated packages +PACKAGE_EXTRA_ARCHS += "armv4t ep9312" BASE_PACKAGE_ARCH = "ep9312" +FEED_ARCH = "ep9312" diff --git a/meta/conf/machine/include/tune-iwmmxt.inc b/meta/conf/machine/include/tune-iwmmxt.inc index 49e560ea1a..f7291be805 100644 --- a/meta/conf/machine/include/tune-iwmmxt.inc +++ b/meta/conf/machine/include/tune-iwmmxt.inc @@ -2,5 +2,6 @@ # Please use tune-xscale for PXA255/PXA26x based processors. 
TARGET_CC_ARCH = "-march=iwmmxt -mcpu=iwmmxt -mtune=iwmmxt" BASE_PACKAGE_ARCH = "iwmmxt" -PACKAGE_EXTRA_ARCHS += "iwmmxt" +PACKAGE_EXTRA_ARCHS += "armv4 armv4t armv5te iwmmxt" +FEED_ARCH = "iwmmxt" diff --git a/meta/conf/machine/include/tune-mips32.inc b/meta/conf/machine/include/tune-mips32.inc index 93da66b158..67edca2530 100644 --- a/meta/conf/machine/include/tune-mips32.inc +++ b/meta/conf/machine/include/tune-mips32.inc @@ -1 +1,3 @@ TARGET_CC_ARCH = "-march=mips32" +FEED_ARCH = "${TARGET_ARCH}" +BASE_PACKAGE_ARCH = "${TARGET_ARCH}" diff --git a/meta/conf/machine/include/tune-ppc603e.inc b/meta/conf/machine/include/tune-ppc603e.inc index a4a68d60e3..15a72d10a1 100644 --- a/meta/conf/machine/include/tune-ppc603e.inc +++ b/meta/conf/machine/include/tune-ppc603e.inc @@ -1,2 +1,4 @@ -TARGET_CC_ARCH = "-mcpu=603e -mhard-float" +TARGET_CC_ARCH = "-mcpu=603e -mhard-float" BASE_PACKAGE_ARCH = "ppc603e" +FEED_ARCH = "ppc603e" +PACKAGE_EXTRA_ARCHS += "ppc603e" diff --git a/meta/conf/machine/include/tune-ppce300c2.inc b/meta/conf/machine/include/tune-ppce300c2.inc index 067a7b100c..2956875186 100644 --- a/meta/conf/machine/include/tune-ppce300c2.inc +++ b/meta/conf/machine/include/tune-ppce300c2.inc @@ -1,2 +1,3 @@ TARGET_CC_ARCH = "-mcpu=e300c2 -msoft-float" BASE_PACKAGE_ARCH = "ppce300" +PACKAGE_EXTRA_ARCHS += "ppce300" diff --git a/meta/conf/machine/include/tune-ppce500.inc b/meta/conf/machine/include/tune-ppce500.inc index 2482196424..44f8742f9f 100644 --- a/meta/conf/machine/include/tune-ppce500.inc +++ b/meta/conf/machine/include/tune-ppce500.inc @@ -1,2 +1,4 @@ TARGET_CC_ARCH = "-mcpu=8540" BASE_PACKAGE_ARCH = "ppce500" +FEED_ARCH = "ppce500" +PACKAGE_EXTRA_ARCHS += "ppce500" diff --git a/meta/conf/machine/include/tune-sh3.inc b/meta/conf/machine/include/tune-sh3.inc index 192dd8fd22..32801a61c3 100644 --- a/meta/conf/machine/include/tune-sh3.inc +++ b/meta/conf/machine/include/tune-sh3.inc @@ -1 +1,3 @@ TARGET_CC_ARCH = "-ml -m3" +FEED_ARCH = "sh3" +BASE_PACKAGE_ARCH = "sh3" diff --git a/meta/conf/machine/include/tune-sh4.inc b/meta/conf/machine/include/tune-sh4.inc index 866f7f89d5..51b40691bc 100644 --- a/meta/conf/machine/include/tune-sh4.inc +++ b/meta/conf/machine/include/tune-sh4.inc @@ -1 +1,7 @@ +# NOTE: If you want to optimize to sh4a, conf/machine/include/tune-sh4a.inc. +# But it is not compatible for sh4. +# The binary optimized by m4a doesn't operate on sh4. It works on sh4a only. 
+ TARGET_CC_ARCH = "-ml -m4" +FEED_ARCH = "sh4" +BASE_PACKAGE_ARCH = "sh4" diff --git a/meta/conf/machine/include/tune-supersparc.inc b/meta/conf/machine/include/tune-supersparc.inc index 10133c6c21..a402e7cd2b 100644 --- a/meta/conf/machine/include/tune-supersparc.inc +++ b/meta/conf/machine/include/tune-supersparc.inc @@ -1,2 +1,3 @@ TARGET_CC_ARCH = "-mcpu=supersparc" -BASE_PACKAGE_ARCH = "supersparc" +BASE_PACKAGE_ARCH = "supersparc" +FEED_ARCH = "supersparc" diff --git a/meta/conf/machine/include/tune-xscale.inc b/meta/conf/machine/include/tune-xscale.inc index 4f96814a72..f1ef3d46af 100644 --- a/meta/conf/machine/include/tune-xscale.inc +++ b/meta/conf/machine/include/tune-xscale.inc @@ -5,6 +5,7 @@ INHERIT += "siteinfo" TARGET_CC_ARCH = "-march=armv5te -mtune=xscale" TARGET_CC_KERNEL_ARCH = "-march=armv5te -mtune=xscale" BASE_PACKAGE_ARCH = "${@['armv5teb', 'armv5te'][bb.data.getVar('SITEINFO_ENDIANESS', d, 1) == 'le']}" +PACKAGE_EXTRA_ARCHS += "${@['armv4b armv4tb armv5teb', 'armv4 armv4t armv5te'][bb.data.getVar('SITEINFO_ENDIANESS', d, 1) == 'le']}" # webkit-gtk has alignment issues with double instructions on armv5 so # disable them here diff --git a/meta/conf/machine/mpc8315e-rdb.conf b/meta/conf/machine/mpc8315e-rdb.conf index 1b7982a5dd..919a95d9ef 100644 --- a/meta/conf/machine/mpc8315e-rdb.conf +++ b/meta/conf/machine/mpc8315e-rdb.conf @@ -2,7 +2,6 @@ #@DESCRIPTION: Machine configuration for running TARGET_ARCH = "powerpc" -PACKAGE_EXTRA_ARCHS = "ppc603e" TARGET_FPU = "spe" require conf/machine/include/tune-ppc603e.inc diff --git a/meta/conf/machine/mx31ads.conf b/meta/conf/machine/mx31ads.conf index 04a238d600..8b3e3dbc1a 100644 --- a/meta/conf/machine/mx31ads.conf +++ b/meta/conf/machine/mx31ads.conf @@ -2,7 +2,6 @@ #@NAME: Freescale imx31 ADS Board #@DESCRIPTION: Machine configuration for the imx31 based Freescale ADS Board TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6" PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" diff --git a/meta/conf/machine/mx31phy.conf b/meta/conf/machine/mx31phy.conf index 28b255ccec..eea673ab16 100644 --- a/meta/conf/machine/mx31phy.conf +++ b/meta/conf/machine/mx31phy.conf @@ -2,7 +2,6 @@ #@NAME: Phytec phyCORE-iMX31 Board #@DESCRIPTION: Machine configuration for the imx31 based Phytec phyCORE-iMX31 TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6" PREFERRED_PROVIDER_virtual/kernel = "linux-mx31" diff --git a/meta/conf/machine/omap-3430ldp.conf b/meta/conf/machine/omap-3430ldp.conf index 0be0546b80..759d58f015 100644 --- a/meta/conf/machine/omap-3430ldp.conf +++ b/meta/conf/machine/omap-3430ldp.conf @@ -2,7 +2,6 @@ #@NAME: TI OMAP 3430 LDP #@DESCRIPTION: Machine configuration for the TI OMAP 3430 LDP (aka OMAP Zoom) TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" PREFERRED_PROVIDER_virtual/kernel = "linux-omap" diff --git a/meta/conf/machine/omap-3430sdp.conf b/meta/conf/machine/omap-3430sdp.conf index 5d87bd1483..651d6a9968 100644 --- a/meta/conf/machine/omap-3430sdp.conf +++ b/meta/conf/machine/omap-3430sdp.conf @@ -2,7 +2,6 @@ #@NAME: TI OMAP 3430 SDP #@DESCRIPTION: Machine configuration for the TI OMAP 3430 SDP TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" PREFERRED_PROVIDER_virtual/kernel = "linux-omap" diff --git a/meta/conf/machine/overo.conf b/meta/conf/machine/overo.conf index dde4bff476..9fefd73b48 100644 --- a/meta/conf/machine/overo.conf +++ b/meta/conf/machine/overo.conf @@ -5,7 +5,6 @@ include 
conf/machine/include/tune-cortexa8.inc TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7 armv7a" MACHINE_FEATURES = "kernel26 apm usbgadget usbhost vfat alsa" PREFERRED_VERSION_u-boot = "git" diff --git a/meta/conf/machine/qemuarm.conf b/meta/conf/machine/qemuarm.conf index 400ecc8245..9fb18932ca 100644 --- a/meta/conf/machine/qemuarm.conf +++ b/meta/conf/machine/qemuarm.conf @@ -3,7 +3,6 @@ #@DESCRIPTION: arm_versaile_926ejs TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te" require conf/machine/include/qemu.inc require conf/machine/include/tune-arm926ejs.inc diff --git a/meta/conf/machine/qemuppc.conf b/meta/conf/machine/qemuppc.conf index 0bbeac7da6..83d0841159 100644 --- a/meta/conf/machine/qemuppc.conf +++ b/meta/conf/machine/qemuppc.conf @@ -3,7 +3,6 @@ #@DESCRIPTION: Machine configuration for running an PPC system under qemu emulation TARGET_ARCH = "powerpc" -PACKAGE_EXTRA_ARCHS = "ppc603e" require conf/machine/include/qemu.inc require conf/machine/include/tune-ppc603e.inc diff --git a/meta/conf/machine/qemux86-64.conf b/meta/conf/machine/qemux86-64.conf index fd34709b78..182759adfe 100644 --- a/meta/conf/machine/qemux86-64.conf +++ b/meta/conf/machine/qemux86-64.conf @@ -4,7 +4,6 @@ TARGET_ARCH = "x86_64" TRANSLATED_TARGET_ARCH = "x86-64" -PACKAGE_EXTRA_ARCHS = "x86" PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" diff --git a/meta/conf/machine/qemux86.conf b/meta/conf/machine/qemux86.conf index cb2a32d7bd..f1a0939ebc 100644 --- a/meta/conf/machine/qemux86.conf +++ b/meta/conf/machine/qemux86.conf @@ -3,7 +3,6 @@ #@DESCRIPTION: Machine configuration for running a common x86 TARGET_ARCH = "i586" -PACKAGE_EXTRA_ARCHS = "x86" PREFERRED_PROVIDER_virtual/libgl = "mesa-dri" PREFERRED_PROVIDER_virtual/libx11 ?= "libx11-trim" diff --git a/meta/conf/machine/spitz.conf b/meta/conf/machine/spitz.conf index 4a794331e8..faa00e991f 100644 --- a/meta/conf/machine/spitz.conf +++ b/meta/conf/machine/spitz.conf @@ -9,7 +9,6 @@ MACHINE_FEATURES += "iwmmxt" PIVOTBOOT_EXTRA_RDEPENDS += "pivotinit ${PCMCIA_MANAGER}" -PACKAGE_EXTRA_ARCHS += "iwmmxt" IMAGE_FSTYPES ?= "tar.gz ext3" IMAGE_ROOTFS_SIZE_ext3 = "250000" diff --git a/meta/conf/machine/zoom2.conf b/meta/conf/machine/zoom2.conf index 142a40c5a1..dec90c2c7b 100644 --- a/meta/conf/machine/zoom2.conf +++ b/meta/conf/machine/zoom2.conf @@ -2,7 +2,6 @@ #@NAME: Logic PD Zoom2 #@DESCRIPTION: Machine configuration for the OMAP3-powered Logic PD Zoom2 TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te armv6 armv7a" PREFERRED_PROVIDER_virtual/kernel = "linux-omap-zoomsync" diff --git a/meta/conf/machine/zylonite.conf b/meta/conf/machine/zylonite.conf index 644675e849..e82f041ff2 100644 --- a/meta/conf/machine/zylonite.conf +++ b/meta/conf/machine/zylonite.conf @@ -4,8 +4,9 @@ require conf/machine/include/tune-xscale.inc +PACKAGE_EXTRA_ARCHS += "iwmmxt" + TARGET_ARCH = "arm" -PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5e armv5te iwmmxt" EXTRA_IMAGECMD_jffs2 = "--little-endian --eraseblock=0x20000 -n -x lzo -p" diff --git a/meta/recipes-devtools/bison/bison_2.4.3.bb b/meta/recipes-devtools/bison/bison_2.4.3.bb index 03fd463fd8..fa2f15400a 100644 --- a/meta/recipes-devtools/bison/bison_2.4.3.bb +++ b/meta/recipes-devtools/bison/bison_2.4.3.bb @@ -10,7 +10,7 @@ SECTION = "devel" PRIORITY = "optional" DEPENDS = "gettext bison-native" -PR = "r0" +PR = "r1" BASE_SRC_URI = "${GNU_MIRROR}/bison/bison-${PV}.tar.gz \ file://m4.patch;patch=1" @@ -27,4 +27,10 @@ 
SRC_URI_virtclass-native = "${BASE_SRC_URI}" inherit autotools acpaths = "-I ${S}/m4" +do_install_append_virtclass-native() { + create_wrapper ${D}/${bindir}/bison \ + M4=${STAGING_BINDIR_NATIVE}/m4 \ + BISON_PKGDATADIR=${STAGING_DATADIR_NATIVE}/bison +} +#bison-native encodes the staging M4 path BBCLASSEXTEND = "native" diff --git a/meta/recipes-devtools/flex/flex.inc b/meta/recipes-devtools/flex/flex.inc index 93fa7e81a4..54c400c044 100644 --- a/meta/recipes-devtools/flex/flex.inc +++ b/meta/recipes-devtools/flex/flex.inc @@ -11,3 +11,6 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/flex/flex-${PV}.tar.bz2 " inherit autotools +do_install_append_virtclass-native() { + create_wrapper ${D}/${bindir}/flex M4=${STAGING_BINDIR_NATIVE}/m4 +} diff --git a/meta/recipes-devtools/flex/flex_2.5.35.bb b/meta/recipes-devtools/flex/flex_2.5.35.bb index 4fdce33ccf..28a4f1f06b 100644 --- a/meta/recipes-devtools/flex/flex_2.5.35.bb +++ b/meta/recipes-devtools/flex/flex_2.5.35.bb @@ -1,5 +1,5 @@ require flex.inc -PR = "r1" +PR = "r2" LICENSE="BSD" LIC_FILES_CHKSUM = "file://COPYING;md5=e4742cf92e89040b39486a6219b68067" BBCLASSEXTEND = "native" diff --git a/meta/recipes-devtools/perl/perl_5.8.8.bb b/meta/recipes-devtools/perl/perl_5.8.8.bb index 8e2f0c0af7..5cd4d4fe2f 100644 --- a/meta/recipes-devtools/perl/perl_5.8.8.bb +++ b/meta/recipes-devtools/perl/perl_5.8.8.bb @@ -11,7 +11,7 @@ PRIORITY = "optional" # We need gnugrep (for -I) DEPENDS = "virtual/db perl-native grep-native" DEPENDS += "gdbm" -PR = "r21" +PR = "r22" # Major part of version PVM = "5.8" @@ -114,7 +114,7 @@ do_compile() { sed -i -e 's|/usr/include|${STAGING_INCDIR}|g' ext/Errno/Errno_pm.PL fi cd Cross - oe_runmake perl LD="${TARGET_SYS}-gcc" + oe_runmake perl LD="${CCLD}" } do_install() { diff --git a/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb b/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb index 0bebbfb86e..0547bd62ca 100644 --- a/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb +++ b/meta/recipes-graphics/xorg-app/xvinfo_1.1.1.bb @@ -1,7 +1,7 @@ require xorg-app-common.inc DESCRIPTION = "Print out X-Video extension adaptor information" -LIC_FILES_CHKSUM = "file://COPYING;md5=4641deddaa80fe7ca88e944e1fd94a94" +LIC_FILES_CHKSUM = "file://COPYING;md5=b664101ad7a1dc758a4c4109bf978e68" DEPENDS += " libxv" PE = "1" diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc b/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc index 5cbfd11d18..f9ea0e4124 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc +++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-common.inc @@ -34,4 +34,3 @@ FILES_${PN}-dbg += "${libdir}/xorg/modules/.debug \ SRC_URI += "file://macro_tweak.patch" -COMPATIBLE_MACHINE = "(qemux86*|igep*)" diff --git a/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb b/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb index 7f91bdc978..0daeb0e6c5 100644 --- a/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb +++ b/meta/recipes-multimedia/pulseaudio/libcanberra_0.26.bb @@ -2,7 +2,7 @@ SUMMARY = "Implementation of XDG Sound Theme and Name Specifications" DESCRIPTION = "Libcanberra is an implementation of the XDG Sound Theme and Name Specifications, for generating event sounds on free desktops." 
LICENSE = "LGPLv2.1" LIC_FILES_CHKSUM = "file://LGPL;md5=2d5025d4aa3495befef8f17206a5b0a1 \ - file://src/canberra.h;endline=24;md5=8dd99ba690687f5816f711d9313c8967" + file://src/canberra.h;beginline=7;endline=24;md5=c616c687cf8da540a14f917e0d23ab03" DEPENDS = "gtk+ pulseaudio alsa-lib libtool" PR = "r0" |