author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-02-04 13:20:28 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2011-02-07 09:06:37 +0000 |
commit | 984e90f4d71d866580131c4927b0a77baf1bb9bd (patch) | |
tree | adfe717341c87f2719990a962951492b65c03c1c | |
parent | ca7adf75295c2a6041b891bfa61e0b4bc2f7c860 (diff) | |
download | openembedded-core-984e90f4d71d866580131c4927b0a77baf1bb9bd.tar.gz openembedded-core-984e90f4d71d866580131c4927b0a77baf1bb9bd.tar.bz2 openembedded-core-984e90f4d71d866580131c4927b0a77baf1bb9bd.zip |
meta/classes: Update classes to use new fetcher API
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
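The patch drops the legacy `bb.fetch` calls (explicit `bb.fetch.init()`, checks of `bb.fetch.__version__`, and one `except` clause per fetcher error) in favour of the `bb.fetch2.Fetch` object, whose errors all derive from `bb.fetch2.BBFetchException`. A minimal sketch of the pattern the diff applies in `base_do_fetch` (the wrapper name `do_fetch_sketch` is hypothetical; it assumes a populated BitBake datastore `d` and the Python 2 era syntax used throughout the patch):

```python
import bb
import bb.fetch2

def do_fetch_sketch(d):
    # Hypothetical wrapper around the pattern used in base_do_fetch below.
    # SRC_URI is split into a list; nothing to do if it is empty.
    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        # One Fetch object covers every URI; download() fetches them all.
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()
    except bb.fetch2.BBFetchException, e:
        # A single handler replaces the per-exception boilerplate of bb.fetch.
        raise bb.build.FuncFailed(e)
```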
mode | file | lines changed |
---|---|---|
-rw-r--r-- | meta/classes/base.bbclass | 176 |
-rw-r--r-- | meta/classes/patch.bbclass | 3 |
-rw-r--r-- | meta/classes/sstate.bbclass | 56 |
-rw-r--r-- | meta/classes/utils.bbclass | 11 |
-rw-r--r-- | meta/lib/oe/patch.py | 6 |

5 files changed, 63 insertions, 189 deletions
```diff
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index d8efcc0f8c..edb65eb96b 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -116,163 +116,38 @@ addtask setscene before do_fetch
 addtask fetch
 do_fetch[dirs] = "${DL_DIR}"
 python base_do_fetch() {
-    import sys
+
+    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    if len(src_uri) == 0:
+        return
 
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, 1)
-    if not src_uri:
-        return 1
-
-    try:
-        bb.fetch.init(src_uri.split(),d)
-    except bb.fetch.NoMethodError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("No method: %s" % value)
-    except bb.MalformedUrl:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Malformed URL: %s" % value)
-
-    try:
-        if bb.fetch.__version__ == "1":
-            bb.fetch.go(localdata)
-        else:
-            bb.fetch.download(localdata)
-    except bb.fetch.MissingParameterError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Missing parameters: %s" % value)
-    except bb.fetch.FetchError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Fetch failed: %s" % value)
-    except bb.fetch.MD5SumError:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("MD5 failed: %s" % value)
-    except:
-        (type, value, traceback) = sys.exc_info()
-        raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.download()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
-def subprocess_setup():
-    import signal
-    # Python installs a SIGPIPE handler by default. This is usually not what
-    # non-Python subprocesses expect.
-    # SIGPIPE errors are known issues with gzip/bash
-    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-def oe_unpack_file(file, data, url = None):
-    import subprocess
-    if not url:
-        url = "file://%s" % file
-    dots = file.split(".")
-    if dots[-1] in ['gz', 'bz2', 'Z']:
-        efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
-    else:
-        efile = file
-    cmd = None
-    if file.endswith('.tar'):
-        cmd = 'tar x --no-same-owner -f %s' % file
-    elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-        cmd = 'tar xz --no-same-owner -f %s' % file
-    elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-        cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
-    elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
-        cmd = 'gzip -dc %s > %s' % (file, efile)
-    elif file.endswith('.bz2'):
-        cmd = 'bzip2 -dc %s > %s' % (file, efile)
-    elif file.endswith('.tar.xz'):
-        cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
-    elif file.endswith('.xz'):
-        cmd = 'xz -dc %s > %s' % (file, efile)
-    elif file.endswith('.zip') or file.endswith('.jar'):
-        cmd = 'unzip -q -o'
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
-        if 'dos' in parm:
-            cmd = '%s -a' % cmd
-        cmd = "%s '%s'" % (cmd, file)
-    elif os.path.isdir(file):
-        filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
-        destdir = "."
-        if file[0:len(filesdir)] == filesdir:
-            destdir = file[len(filesdir):file.rfind('/')]
-            destdir = destdir.strip('/')
-            if len(destdir) < 1:
-                destdir = "."
-            elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
-                os.makedirs("%s/%s" % (os.getcwd(), destdir))
-        cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
-    else:
-        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
-        if not 'patch' in parm:
-            # The "destdir" handling was specifically done for FILESPATH
-            # items. So, only do so for file:// entries.
-            if type == "file" and path.find("/") != -1:
-                destdir = path.rsplit("/", 1)[0]
-            else:
-                destdir = "."
-            bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
-            cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
-
-    if not cmd:
-        return True
-
-    dest = os.path.join(os.getcwd(), os.path.basename(file))
-    if os.path.exists(dest):
-        if os.path.samefile(file, dest):
-            return True
-
-    # Change to subdir before executing command
-    save_cwd = os.getcwd();
-    parm = bb.decodeurl(url)[5]
-    if 'subdir' in parm:
-        newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
-        bb.mkdirhier(newdir)
-        os.chdir(newdir)
-
-    cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
-    bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
-    ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
-    os.chdir(save_cwd)
-
-    return ret == 0
-
 addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"
 python base_do_unpack() {
-    import re
+    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
+    if len(src_uri) == 0:
+        return
 
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    urldata = bb.fetch.init([], localdata)
+    rootdir = bb.data.getVar('WORKDIR', localdata, True)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, True)
-    if not src_uri:
-        return
-    for url in src_uri.split():
-        try:
-            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
-        except bb.MalformedUrl, e:
-            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
-        if local is None:
-            continue
-        local = os.path.realpath(local)
-        lockfile = urldata[url].lockfile
-        if lockfile:
-            lf = bb.utils.lockfile(urldata[url].lockfile)
-
-        if bb.fetch.__version__ == "1":
-            ret = oe_unpack_file(local, localdata, url)
-        else:
-            # use bb.fetch2 unpack API
-            ud = urldata[url]
-            rootdir = bb.data.getVar('WORKDIR', localdata, True)
-            ret = ud.method.unpack(ud, rootdir, localdata)
-        if lockfile:
-            bb.utils.unlockfile(lf)
-        if not ret:
-            raise bb.build.FuncFailed("oe_unpack_file failed with return value %s" % ret)
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.unpack(rootdir)
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
 }
 
 GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig"
@@ -550,7 +425,8 @@ python () {
             for s in srcuri.split():
                 if not s.startswith("file://"):
                     continue
-                local = bb.data.expand(bb.fetch.localpath(s, d), d)
+                fetcher = bb.fetch2.Fetch([s], d)
+                local = fetcher.localpath(s)
                 for mp in paths:
                     if local.startswith(mp):
                         #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
@@ -594,14 +470,12 @@ python do_cleanall() {
     dl_dir = bb.data.getVar('DL_DIR', localdata, True)
     dl_dir = os.path.realpath(dl_dir)
 
-    src_uri = bb.data.getVar('SRC_URI', localdata, True)
-    if not src_uri:
+    src_uri = (bb.data.getVar('SRC_URI', localdata, True) or "").split()
+    if len(src_uri) == 0:
         return
 
-    for url in src_uri.split():
-        try:
-            local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
-        except bb.MalformedUrl, e:
-            raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
+    fetcher = bb.fetch2.Fetch(src_uri, localdata)
+    for url in src_uri:
+        local = fetcher.localpath(url)
         if local is None:
             continue
         local = os.path.realpath(local)
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index ee8a2026fc..80fd45f0e3 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -58,9 +58,8 @@ python patch_do_patch() {
             continue
 
         if not local:
-            bb.fetch.init([url],d)
             url = bb.encodeurl((type, host, path, user, pswd, []))
-            local = os.path.join('/', bb.fetch.localpath(url, d))
+            local = os.path.join('/', bb.fetch2.localpath(url, d))
         local = bb.data.expand(local, d)
 
         if "striplevel" in parm:
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index a754821c08..e4564e4b07 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -326,40 +326,40 @@ def sstate_package(ss, d):
     return
 
 def pstaging_fetch(sstatepkg, d):
-    import bb.fetch
-
-    # only try and fetch if the user has configured a mirror
+    # Only try and fetch if the user has configured a mirror
     mirrors = bb.data.getVar('SSTATE_MIRRORS', d, True)
-    if mirrors:
-        # Copy the data object and override DL_DIR and SRC_URI
-        localdata = bb.data.createCopy(d)
-        bb.data.update_data(localdata)
+    if not mirrors:
+        return
 
-        dldir = bb.data.expand("${SSTATE_DIR}", localdata)
-        srcuri = "file://" + os.path.basename(sstatepkg)
+    import bb.fetch2
+    # Copy the data object and override DL_DIR and SRC_URI
+    localdata = bb.data.createCopy(d)
+    bb.data.update_data(localdata)
 
-        bb.mkdirhier(dldir)
+    dldir = bb.data.expand("${SSTATE_DIR}", localdata)
+    srcuri = "file://" + os.path.basename(sstatepkg)
 
-        bb.data.setVar('DL_DIR', dldir, localdata)
-        bb.data.setVar('PREMIRRORS', mirrors, localdata)
-        bb.data.setVar('SRC_URI', srcuri, localdata)
+    bb.mkdirhier(dldir)
 
-        # Try a fetch from the sstate mirror, if it fails just return and
-        # we will build the package
-        try:
-            bb.fetch.init([srcuri], localdata)
-            if bb.fetch.__version__ == "1":
-                bb.fetch.go(localdata, [srcuri])
-            else:
-                bb.fetch.download(localdata, [srcuri])
-            # Need to optimise this, if using file:// urls, the fetcher just changes the local path
-            # For now work around by symlinking
-            localpath = bb.data.expand(bb.fetch.localpath(srcuri, localdata), localdata)
-            if localpath != sstatepkg and os.path.exists(localpath):
-                os.symlink(localpath, sstatepkg)
-        except:
-            pass
+    bb.data.setVar('DL_DIR', dldir, localdata)
+    bb.data.setVar('PREMIRRORS', mirrors, localdata)
+    bb.data.setVar('SRC_URI', srcuri, localdata)
+
+    # Try a fetch from the sstate mirror, if it fails just return and
+    # we will build the package
+    try:
+        fetcher = bb.fetch2.Fetch([srcuri], localdata)
+        fetcher.download()
+
+        # Need to optimise this, if using file:// urls, the fetcher just changes the local path
+        # For now work around by symlinking
+        localpath = bb.data.expand(fetcher.localpath(srcuri), localdata)
+        if localpath != sstatepkg and os.path.exists(localpath) and not os.path.exists(sstatepkg):
+            os.symlink(localpath, sstatepkg)
+
+    except bb.fetch2.BBFetchException:
+        pass
 
 def sstate_setscene(d):
     shared_state = sstate_state_fromvars(d)
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index 4d4b9953e2..455b49d54a 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -51,11 +51,12 @@ def machine_paths(d):
 def is_machine_specific(d):
     """Determine whether the current recipe is machine specific"""
     machinepaths = set(machine_paths(d))
-    urldatadict = bb.fetch.init(d.getVar("SRC_URI", True).split(), d, True)
-    for urldata in (urldata for urldata in urldatadict.itervalues()
-                    if urldata.type == "file"):
-        if any(urldata.localpath.startswith(mp + "/") for mp in machinepaths):
-            return True
+    srcuri = d.getVar("SRC_URI", True).split()
+    for url in srcuri:
+        fetcher = bb.fetch2.Fetch([srcuri], d)
+        if url.startswith("file://"):
+            if any(fetcher.localpath(url).startswith(mp + "/") for mp in machinepaths):
+                return True
 
 def oe_popen_env(d):
     env = d.getVar("__oe_popen_env", False)
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index f203d683da..c8eeb8bc4c 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -72,14 +72,14 @@ class PatchSet(object):
             if not patch.get("remote"):
                 raise PatchError("Patch file must be specified in patch import.")
             else:
-                patch["file"] = bb.fetch.localpath(patch["remote"], self.d)
+                patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)
 
         for param in PatchSet.defaults:
             if not patch.get(param):
                 patch[param] = PatchSet.defaults[param]
 
         if patch.get("remote"):
-            patch["file"] = bb.data.expand(bb.fetch.localpath(patch["remote"], self.d), self.d)
+            patch["file"] = bb.data.expand(bb.fetch2.localpath(patch["remote"], self.d), self.d)
 
         patch["filemd5"] = bb.utils.md5_file(patch["file"])
 
@@ -293,7 +293,7 @@ class QuiltTree(PatchSet):
         if type == "file":
             import shutil
             if not patch.get("file") and patch.get("remote"):
-                patch["file"] = bb.fetch.localpath(patch["remote"], self.d)
+                patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)
 
             shutil.copyfile(patch["quiltfile"], patch["file"])
         else:
```
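The unpack and cleanall paths use the same object: `Fetch.unpack()` replaces the old per-URL `oe_unpack_file()` loop, and `Fetch.localpath()` replaces `bb.fetch.localpath()` wherever a URI has to be mapped back to its downloaded file. A hedged usage sketch (the function name `unpack_and_resolve_sketch` is hypothetical; it assumes a populated BitBake datastore `d` inside a task):

```python
import bb
import bb.fetch2

def unpack_and_resolve_sketch(d):
    # Hypothetical helper mirroring base_do_unpack and do_cleanall above.
    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    rootdir = bb.data.getVar('WORKDIR', localdata, True)

    fetcher = bb.fetch2.Fetch(src_uri, localdata)
    # Unpack every fetched URI into the work directory in one call.
    fetcher.unpack(rootdir)

    # localpath() maps a URI back to its file under DL_DIR, as used by
    # do_cleanall and is_machine_specific in this commit.
    for url in src_uri:
        local = fetcher.localpath(url)
        if local is not None:
            bb.note("%s -> %s" % (url, local))
```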