Diffstat
-rw-r--r--  classes/package.bbclass           | 142
-rw-r--r--  classes/package_deb.bbclass       |  34
-rw-r--r--  classes/package_ipk.bbclass       |  29
-rw-r--r--  classes/package_rpm.bbclass       |   5
-rw-r--r--  classes/package_tar.bbclass       |  10
-rw-r--r--  classes/packaged-staging.bbclass  | 389
-rw-r--r--  classes/packaged-staging2.bbclass | 229
-rw-r--r--  conf/sanity.conf                  |   2
-rw-r--r--  packages/gcc/gcc-package.inc      |   4
9 files changed, 277 insertions, 567 deletions
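A pattern repeated throughout the hunks below is that the package-writing classes no longer overwrite the 'depends' varflag of their do_package_write_* tasks but append to whatever is already set. As a quick orientation, here is a minimal standalone Python sketch of that append pattern; the dict-backed store, the helper name and the pre-existing dependency value are illustrative only, not the BitBake API (the classes themselves use bb.data.getVarFlag()/bb.data.setVarFlag()).

# Stand-in for the BitBake datastore's varflags; illustrative only.
def append_task_depends(varflags, task, new_deps):
    """Append inter-task dependencies to a task's 'depends' flag, keeping existing ones."""
    deps = (varflags.get((task, "depends")) or "").split()
    deps.extend(new_deps)
    varflags[(task, "depends")] = " ".join(deps)

if __name__ == "__main__":
    # Hypothetical pre-existing dependency, so the append (rather than overwrite) is visible.
    flags = {("do_package_write_deb", "depends"): "quilt-native:do_populate_staging"}
    append_task_depends(flags, "do_package_write_deb",
                        ["dpkg-native:do_populate_staging",
                         "fakeroot-native:do_populate_staging"])
    print(flags[("do_package_write_deb", "depends")])

The deb, rpm and tar packaging classes in this diff all switch to this shape, as does packaged-staging.bbclass for do_populate_staging and do_prepackaged_stage.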
diff --git a/classes/package.bbclass b/classes/package.bbclass index ec8c3d97e2..e267478561 100644 --- a/classes/package.bbclass +++ b/classes/package.bbclass @@ -291,76 +291,6 @@ python package_do_split_locales() { #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) } -def copyfile(src,dest,newmtime=None,sstat=None): - """ - Copies a file from src to dest, preserving all permissions and - attributes; mtime will be preserved even when moving across - filesystems. Returns true on success and false on failure. - """ - import os, stat, shutil, commands - - #print "copyfile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")" - try: - if not sstat: - sstat=os.lstat(src) - except Exception, e: - print "copyfile: Stating source file failed...", e - return False - - destexists=1 - try: - dstat=os.lstat(dest) - except: - dstat=os.lstat(os.path.dirname(dest)) - destexists=0 - - if destexists: - if stat.S_ISLNK(dstat[stat.ST_MODE]): - try: - os.unlink(dest) - destexists=0 - except Exception, e: - pass - - if stat.S_ISLNK(sstat[stat.ST_MODE]): - try: - target=os.readlink(src) - if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]): - os.unlink(dest) - os.symlink(target,dest) - #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) - return os.lstat(dest) - except Exception, e: - print "copyfile: failed to properly create symlink:", dest, "->", target, e - return False - - if stat.S_ISREG(sstat[stat.ST_MODE]): - try: # For safety copy then move it over. - shutil.copyfile(src,dest+"#new") - os.rename(dest+"#new",dest) - except Exception, e: - print 'copyfile: copy', src, '->', dest, 'failed.', e - return False - else: - #we don't yet handle special, so we need to fall back to /bin/mv - a=commands.getstatusoutput("/bin/cp -f "+"'"+src+"' '"+dest+"'") - if a[0]!=0: - print "copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a - return False # failure - try: - os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) - os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown - except Exception, e: - print "copyfile: Failed to chown/chmod/unlink", dest, e - return False - - if newmtime: - os.utime(dest,(newmtime,newmtime)) - else: - os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME])) - newmtime=sstat[stat.ST_MTIME] - return newmtime - python populate_packages () { import glob, stat, errno, re @@ -462,7 +392,7 @@ python populate_packages () { fpath = os.path.join(root,file) dpath = os.path.dirname(fpath) bb.mkdirhier(dpath) - ret = copyfile(file, fpath) + ret = bb.copyfile(file, fpath) if ret is False or ret == 0: raise bb.build.FuncFailed("File population failed") del localdata @@ -578,7 +508,8 @@ python emit_pkgdata() { os.chdir(root) g = glob('*') if g or allow_empty == "1": - file(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), 'w').close() + packagedfile = bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d) + file(packagedfile, 'w').close() } emit_pkgdata[dirs] = "${PKGDATA_DIR}/runtime" @@ -588,6 +519,8 @@ if [ x"$D" = "x" ]; then fi } +SHLIBSDIR = "${STAGING_DIR}/${HOST_SYS}/shlibs" + python package_do_shlibs() { import os, re, os.path @@ -606,25 +539,14 @@ python package_do_shlibs() { bb.error("WORKDIR not defined") return - staging = bb.data.getVar('STAGING_DIR', d, 1) - if not staging: - bb.error("STAGING_DIR not defined") - return - ver = bb.data.getVar('PV', d, 1) if not ver: bb.error("PV not defined") return - target_sys = bb.data.getVar('TARGET_SYS', d, 1) - if not target_sys: - bb.error("TARGET_SYS not defined") - 
return - pkgdest = bb.data.getVar('PKGDEST', d, 1) - shlibs_dir = os.path.join(staging, target_sys, "shlibs") - old_shlibs_dir = os.path.join(staging, "shlibs") + shlibs_dir = bb.data.getVar('SHLIBSDIR', d, 1) bb.mkdirhier(shlibs_dir) needed = {} @@ -681,7 +603,7 @@ python package_do_shlibs() { shlib_provider = {} list_re = re.compile('^(.*)\.list$') - for dir in [old_shlibs_dir, shlibs_dir]: + for dir in [shlibs_dir]: if not os.path.exists(dir): continue for file in os.listdir(dir): @@ -751,20 +673,9 @@ python package_do_pkgconfig () { bb.error("WORKDIR not defined") return - staging = bb.data.getVar('STAGING_DIR', d, 1) - if not staging: - bb.error("STAGING_DIR not defined") - return - - target_sys = bb.data.getVar('TARGET_SYS', d, 1) - if not target_sys: - bb.error("TARGET_SYS not defined") - return - pkgdest = bb.data.getVar('PKGDEST', d, 1) - shlibs_dir = os.path.join(staging, target_sys, "shlibs") - old_shlibs_dir = os.path.join(staging, "shlibs") + shlibs_dir = bb.data.getVar('SHLIBSDIR', d, 1) bb.mkdirhier(shlibs_dir) pc_re = re.compile('(.*)\.pc$') @@ -814,7 +725,7 @@ python package_do_pkgconfig () { f.write('%s\n' % p) f.close() - for dir in [old_shlibs_dir, shlibs_dir]: + for dir in [shlibs_dir]: if not os.path.exists(dir): continue for file in os.listdir(dir): @@ -882,14 +793,39 @@ python package_depchains() { postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split() prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split() + def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): + + #bb.note('depends for %s is %s' % (base, depends)) + rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "") + + for depend in depends: + if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): + #bb.note("Skipping %s" % depend) + continue + if depend.endswith('-dev'): + depend = depend.replace('-dev', '') + if depend.endswith('-dbg'): + depend = depend.replace('-dbg', '') + pkgname = getname(depend, suffix) + #bb.note("Adding %s for %s" % (pkgname, depend)) + if not pkgname in rreclist: + rreclist.append(pkgname) + + #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) + bb.data.setVar('RRECOMMENDS_%s' % pkg, ' '.join(rreclist), d) + def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): - #bb.note('rdepends for %s is %s' % (base, rdepends)) + #bb.note('rdepends for %s is %s' % (base, rdepends)) rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "") for depend in rdepends: + if depend.endswith('-dev'): + depend = depend.replace('-dev', '') + if depend.endswith('-dbg'): + depend = depend.replace('-dbg', '') pkgname = getname(depend, suffix) - if not pkgname in rreclist and packaged(pkgname, d): + if not pkgname in rreclist: rreclist.append(pkgname) #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) @@ -900,6 +836,10 @@ python package_depchains() { if dep not in list: list.append(dep) + depends = [] + for dep in explode_deps(bb.data.getVar('DEPENDS', d, 1) or ""): + add_dep(depends, dep) + rdepends = [] for dep in explode_deps(bb.data.getVar('RDEPENDS', d, 1) or ""): add_dep(rdepends, dep) @@ -932,6 +872,8 @@ python package_depchains() { for suffix in pkgs: for pkg in pkgs[suffix]: (base, func) = pkgs[suffix][pkg] + if suffix == "-dev": + pkg_adddeprrecs(pkg, base, suffix, func, depends, d) if len(pkgs[suffix]) == 1: pkg_addrrecs(pkg, base, suffix, func, rdepends, d) else: diff --git 
a/classes/package_deb.bbclass b/classes/package_deb.bbclass index b85ffe254f..1a3622c3bb 100644 --- a/classes/package_deb.bbclass +++ b/classes/package_deb.bbclass @@ -54,7 +54,7 @@ python do_package_deb_install () { # env of the fork+execve'd processs # Set up environment - apt_config = os.getenv('APT_CONFIG') + apt_config_backup = os.getenv('APT_CONFIG') os.putenv('APT_CONFIG', os.path.join(stagingdir, 'etc', 'apt', 'apt.conf')) path = os.getenv('PATH') os.putenv('PATH', '%s:%s' % (stagingbindir, os.getenv('PATH'))) @@ -64,12 +64,12 @@ python do_package_deb_install () { commands.getstatusoutput('apt-get install -y %s' % pkgfn) # revert environment - os.putenv('APT_CONFIG', apt_config) + os.putenv('APT_CONFIG', apt_config_backup) os.putenv('PATH', path) } python do_package_deb () { - import sys, re, fcntl, copy + import sys, re, copy workdir = bb.data.getVar('WORKDIR', d, 1) if not workdir: @@ -102,20 +102,12 @@ python do_package_deb () { bb.debug(1, "No packages; nothing to do") return - def lockfile(name): - lf = open(name, "a+") - fcntl.flock(lf.fileno(), fcntl.LOCK_EX) - return lf - - def unlockfile(lf): - fcntl.flock(lf.fileno(), fcntl.LOCK_UN) - lf.close - for pkg in packages.split(): localdata = bb.data.createCopy(d) - root = "%s/install/%s" % (workdir, pkg) + pkgdest = bb.data.getVar('PKGDEST', d, 1) + root = "%s/%s" % (pkgdest, pkg) - lf = lockfile(root + ".lock") + lf = bb.utils.lockfile(root + ".lock") bb.data.setVar('ROOT', '', localdata) bb.data.setVar('ROOT_%s' % pkg, root, localdata) @@ -147,7 +139,7 @@ python do_package_deb () { if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": from bb import note note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1))) - unlockfile(lf) + bb.utils.unlockfile(lf) continue controldir = os.path.join(root, 'DEBIAN') @@ -158,6 +150,7 @@ python do_package_deb () { # import codecs # ctrlfile = codecs.open("someFile", "w", "utf-8") except OSError: + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open control file for writing.") fields = [] @@ -196,6 +189,7 @@ python do_package_deb () { ctrlfile.write(unicode(c % tuple(pullData(fs, localdata)))) except KeyError: (type, value, traceback) = sys.exc_info() + bb.utils.unlockfile(lf) ctrlfile.close() raise bb.build.FuncFailed("Missing field for deb generation: %s" % value) # more fields @@ -231,6 +225,7 @@ python do_package_deb () { try: scriptfile = file(os.path.join(controldir, script), 'w') except OSError: + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open %s script file for writing." 
% script) scriptfile.write("#!/bin/sh\n") scriptfile.write(scriptvar) @@ -242,6 +237,7 @@ python do_package_deb () { try: conffiles = file(os.path.join(controldir, 'conffiles'), 'w') except OSError: + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open conffiles for writing.") for f in conffiles_str.split(): conffiles.write('%s\n' % f) @@ -250,6 +246,7 @@ python do_package_deb () { os.chdir(basedir) ret = os.system("PATH=\"%s\" fakeroot dpkg-deb -b %s %s" % (bb.data.getVar("PATH", localdata, 1), root, pkgoutdir)) if ret != 0: + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("dpkg-deb execution failed") for script in ["preinst", "postinst", "prerm", "postrm", "control" ]: @@ -263,13 +260,16 @@ python do_package_deb () { except OSError: pass - unlockfile(lf) + bb.utils.unlockfile(lf) } python () { import bb if bb.data.getVar('PACKAGES', d, True) != '': - bb.data.setVarFlag('do_package_write_deb', 'depends', 'dpkg-native:do_populate_staging fakeroot-native:do_populate_staging', d) + deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split() + deps.append('dpkg-native:do_populate_staging') + deps.append('fakeroot-native:do_populate_staging') + bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d) } python do_package_write_deb () { diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass index 087bbcbfb4..de7f0fe0c4 100644 --- a/classes/package_ipk.bbclass +++ b/classes/package_ipk.bbclass @@ -114,7 +114,7 @@ package_generate_ipkg_conf () { } python do_package_ipk () { - import sys, re, copy, fcntl + import sys, re, copy workdir = bb.data.getVar('WORKDIR', d, 1) if not workdir: @@ -147,22 +147,12 @@ python do_package_ipk () { bb.debug(1, "No packages; nothing to do") return - def lockfile(name): - lf = open(name, "a+") - fcntl.flock(lf.fileno(), fcntl.LOCK_EX) - return lf - - def unlockfile(lf): - fcntl.flock(lf.fileno(), fcntl.LOCK_UN) - lf.close - - for pkg in packages.split(): localdata = bb.data.createCopy(d) pkgdest = bb.data.getVar('PKGDEST', d, 1) root = "%s/%s" % (pkgdest, pkg) - lf = lockfile(root + ".lock") + lf = bb.utils.lockfile(root + ".lock") bb.data.setVar('ROOT', '', localdata) bb.data.setVar('ROOT_%s' % pkg, root, localdata) @@ -193,7 +183,7 @@ python do_package_ipk () { if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1": from bb import note note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1))) - unlockfile(lf) + bb.utils.unlockfile(lf) continue controldir = os.path.join(root, 'CONTROL') @@ -201,7 +191,7 @@ python do_package_ipk () { try: ctrlfile = file(os.path.join(controldir, 'control'), 'w') except OSError: - unlockfile(lf) + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open control file for writing.") fields = [] @@ -235,7 +225,7 @@ python do_package_ipk () { except KeyError: (type, value, traceback) = sys.exc_info() ctrlfile.close() - unlockfile(lf) + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value) # more fields @@ -272,7 +262,7 @@ python do_package_ipk () { try: scriptfile = file(os.path.join(controldir, script), 'w') except OSError: - unlockfile(lf) + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open %s script file for writing." 
% script) scriptfile.write(scriptvar) scriptfile.close() @@ -283,7 +273,7 @@ python do_package_ipk () { try: conffiles = file(os.path.join(controldir, 'conffiles'), 'w') except OSError: - unlockfile(lf) + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("unable to open conffiles for writing.") for f in conffiles_str.split(): conffiles.write('%s\n' % f) @@ -293,7 +283,7 @@ python do_package_ipk () { ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1), bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir)) if ret != 0: - unlockfile(lf) + bb.utils.unlockfile(lf) raise bb.build.FuncFailed("ipkg-build execution failed") for script in ["preinst", "postinst", "prerm", "postrm", "control" ]: @@ -306,7 +296,7 @@ python do_package_ipk () { os.rmdir(controldir) except OSError: pass - unlockfile(lf) + bb.utils.unlockfile(lf) } python () { @@ -323,5 +313,4 @@ python do_package_write_ipk () { bb.build.exec_func("do_package_ipk", d) } do_package_write_ipk[dirs] = "${D}" -do_package_write_ipk[depends] = "ipkg-utils-native:do_populate_staging" addtask package_write_ipk before do_package_write after do_package diff --git a/classes/package_rpm.bbclass b/classes/package_rpm.bbclass index 7fc5e8ea96..6713f8fcad 100644 --- a/classes/package_rpm.bbclass +++ b/classes/package_rpm.bbclass @@ -134,7 +134,10 @@ python do_package_rpm () { python () { import bb if bb.data.getVar('PACKAGES', d, True) != '': - bb.data.setVarFlag('do_package_write_rpm', 'depends', 'rpm-native:do_populate_staging', d) + deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split() + deps.append('rpm-native:do_populate_staging') + deps.append('fakeroot-native:do_populate_staging') + bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) } diff --git a/classes/package_tar.bbclass b/classes/package_tar.bbclass index cb4c42b261..876cec6cfe 100644 --- a/classes/package_tar.bbclass +++ b/classes/package_tar.bbclass @@ -83,15 +83,12 @@ python do_package_tar () { bb.mkdirhier(pkgoutdir) bb.build.exec_func('package_tar_fn', localdata) tarfn = bb.data.getVar('PKGFN', localdata, 1) -# if os.path.exists(tarfn): -# del localdata -# continue os.chdir(root) from glob import glob if not glob('*'): bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1))) continue - ret = os.system("tar -czvf %s %s" % (tarfn, '.')) + ret = os.system("tar -czf %s %s" % (tarfn, '.')) if ret != 0: bb.error("Creation of tar %s failed." % tarfn) } @@ -99,7 +96,10 @@ python do_package_tar () { python () { import bb if bb.data.getVar('PACKAGES', d, True) != '': - bb.data.setVarFlag('do_package_write_tar', 'depends', 'tar-native:do_populate_staging', d) + deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split() + deps.append('tar-native:do_populate_staging') + deps.append('fakeroot-native:do_populate_staging') + bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) } diff --git a/classes/packaged-staging.bbclass b/classes/packaged-staging.bbclass index 4b4ebb58bf..29ce72d65c 100644 --- a/classes/packaged-staging.bbclass +++ b/classes/packaged-staging.bbclass @@ -1,228 +1,229 @@ # -# Populated ${STAGING} using packages +# Populate builds using prebuilt packages where possible to speed up builds +# and allow staging to be reconstructed. 
# # To use it add that line to conf/local.conf: # -# INHERIT += "packaged-staging" -# -# You also need ipkg-cl and ipkg-make-index installed on your host -# put ipkg-build from org.openembedded.packaged-staging/contrib/ in your $PATH - -# BUGS: -# * does not distinguish between -native, -cross and other packages - -# TODO: -# * also build a feed for native and cross packages -# * make package detection a bit smarter (search for compatible archs) -# * make do_clean clean staging as well - -# Summary: -# This class will have two modes of operation: -# PSTAGE_MODE = 'repopulate': repopulated staging from scratch for each packages -# PSTAGE_MODE = 'append': append each package to staging (current behaviour) - -inherit package +# INHERIT = "packaged-staging" + +python () { + import bb + if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('image', d) and not bb.data.inherits_class('cross', d) and not bb.data.inherits_class('sdk', d): + deps = bb.data.getVarFlag('do_populate_staging', 'depends', d) or "" + deps += " stagemanager-native:do_populate_staging" + bb.data.setVarFlag('do_populate_staging', 'depends', deps, d) + + deps = bb.data.getVarFlag('do_prepackaged_stage', 'depends', d) or "" + deps += " ipkg-native:do_populate_staging ipkg-utils-native:do_populate_staging" + bb.data.setVarFlag('do_prepackaged_stage', 'depends', deps, d) + else: + bb.data.setVar("PSTAGING_DISABLED", "1", d) +} -DEPENDS = "stagemanager-native" +export PSTAGING_DISABLED = "0" DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage" PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}" -PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o " -PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o " -PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o " -PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${PACKAGE_ARCH}.ipk" -PCROSS_PKGNAME = "cross-${PN}_${PV}-${PR}_${BUILD_ARCH}.ipk" - -SPAWNFILE = "${STAGING_DIR}/pkgmaps/${P}-${PR}.spawn" -SPAWNIPK = "${spawn}" - -PSTAGE_TMPDIR_STAGE = "${TMPDIR}/tmp-staging" -PSTAGE_TMPDIR_CROSS = "${TMPDIR}/tmp-cross" - -STAGING_BASEDIR = "${STAGING_LIBDIR}/.." 
- -PACKAGEFUNCS += "do_write_ipk_list" - -python do_write_ipk_list () { - import os, sys - ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1) - stagingdir = bb.data.getVar('STAGING_DIR', d, 1) - tmpdir = bb.data.getVar('TMPDIR', d, 1) - p = bb.data.getVar('P', d, 1) - pr = bb.data.getVar('PR', d, 1) - - packages = bb.data.getVar('PACKAGES', d, 1) - if not packages: - bb.debug(1, "PACKAGES not defined, nothing to package") - return - - if packages == []: - bb.debug(1, "No packages; nothing to do") - return - - # Generate ipk.conf if it or the stamp doesnt exist - listfile = os.path.join(stagingdir,"pkgmaps","%s-%s.spawn" % ( p , pr )) - os.system('mkdir -p ' + stagingdir + '/pkgmaps') - if not os.access(listfile, os.R_OK): - os.system('rm -f ' + listfile) - f = open(listfile,"w") - for spawn in packages.split(): - #check if the packagename has changed due to debian shlib renaming - localdata = bb.data.createCopy(d) - pkgname = bb.data.getVar('PKG_%s' % spawn, localdata, 1) - if not pkgname: - pkgname = spawn - f.write("%s\n" % pkgname) - f.close() -} +PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" +PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" +PSTAGE_REMOVE_CMD = "ipkg-cl remove -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" +PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" +PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${MULTIMACH_ARCH}.ipk" +PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg" do_clean_append() { - """clear the build and temp directories""" + """ + Clear the build and temp directories + """ + bb.note("Uninstalling package from staging...") + path = bb.data.getVar("PATH", d, 1) + removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1) + removepkg = bb.data.expand("staging-${PN}", d) + ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg)) + if ret != 0: + bb.note("Failure removing staging package") + stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d) - if stagepkg == '//': raise bb.build.FuncFailed("wrong DATADIR") - bb.note("removing " + stagepkg) - os.system('rm -rf ' + stagepkg) + bb.note("Removing staging package %s" % stagepkg) + #os.system('rm -rf ' + stagepkg) } - -do_stage_prepend() { - - stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/staging-stamp-cache -u - - stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/cross-stamp-cache -u - - if [ ! -e ${STAGING_BASEDIR} ]; then - mkdir -p ${STAGING_BASEDIR} - fi - - if [ ! -e ${DEPLOY_DIR_PSTAGE} ]; then - mkdir -p ${DEPLOY_DIR_PSTAGE} - fi - - if [ -e ${STAGING_BASEDIR}/usr ]; then - oenote "${STAGING_BASEDIR}/usr already present, leaving it alone" - else - oenote "${STAGING_BASEDIR}/usr not present, symlinking it" - ln -s ${STAGING_BASEDIR}/ ${STAGING_BASEDIR}/usr - fi - +staging_helper () { #assemble appropriate ipkg.conf - if [ -e ${DEPLOY_DIR_PSTAGE}/ipkg.conf ]; then - rm ${DEPLOY_DIR_PSTAGE}/ipkg.conf + conffile=${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf + mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists + if [ ! 
-e $conffile ]; then + ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}" + priority=1 + for arch in $ipkgarchs; do + echo "arch $arch $priority" >> $conffile + priority=$(expr $priority + 5) + done + echo "src oe-staging file:${DEPLOY_DIR_PSTAGE}" >> $conffile + + OLD_PWD=`pwd` + cd ${DEPLOY_DIR_PSTAGE} + ipkg-make-index -p Packages . + cd ${OLD_PWD} + + ${PSTAGE_UPDATE_CMD} fi +} - ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}" - priority=1 - for arch in $ipkgarchs; do - echo "arch $arch $priority" >> ${DEPLOY_DIR_PSTAGE}/ipkg.conf - priority=$(expr $priority + 5) - done - echo "src oe file:${DEPLOY_DIR_IPK}" >> ${DEPLOY_DIR_PSTAGE}/ipkg.conf - export OLD_PWD=`pwd` - cd ${DEPLOY_DIR_IPK} && rm *${BUILD_ARCH}.ipk -f ; ipkg-make-index -p Packages . ; cd ${OLD_PWD} - ${PSTAGE_UPDATE_CMD} ${STAGING_BASEDIR} - - #check for generated packages - if [ -e ${SPAWNFILE} ]; then - oenote "List of spawned packages found: ${P}.spawn" - for spawn in `cat ${SPAWNFILE} | grep -v locale | grep -v dbg | grep -v gconv | grep -v charmap` ; do \ - if [ -e ${DEPLOY_DIR_IPK}/${spawn}_* ]; then - ${PSTAGE_INSTALL_CMD} ${STAGING_BASEDIR} ${spawn} - # clean up .la files to avoid having references to the builddirs in the binaries - for lafile in ${STAGING_LIBDIR}/*.la ; do \ - sed -i s:installed=yes:installed=no:g ${lafile} || true - done - - #fix up linker script to poin to staging - if [ -e ${STAGING_LIBDIR}/libc.so ]; then - sed -i s:\ /lib:\ ${STAGING_LIBDIR}:g ${STAGING_LIBDIR}/libc.so - sed -i s:\ /usr/lib:\ ${STAGING_LIBDIR}:g ${STAGING_LIBDIR}/libc.so - fi - else - oenote "${spawn} not found, probably empty package" - fi - done - exit 0 - else - oenote "Spawn file not found!" 
- fi +python do_prepackaged_stage () { + import os + + if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1": + bb.build.make_stamp("do_prepackaged_stage", d) + return + + bb.note("Uninstalling any existing package from staging...") + path = bb.data.getVar("PATH", d, 1) + removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1) + removepkg = bb.data.expand("staging-${PN}", d) + lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) + ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg)) + bb.utils.unlockfile(lf) + if ret != 0: + bb.note("Failure attempting to remove staging package") + + stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d) + + if os.path.exists(stagepkg): + bb.note("Following speedup\n") + path = bb.data.getVar("PATH", d, 1) + installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1) + + bb.build.exec_func("staging_helper", d) + + bb.debug(1, "Staging stuff already packaged, using that instead") + lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) + ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg)) + bb.utils.unlockfile(lf) + if ret != 0: + bb.note("Failure installing prestage package") + + bb.build.make_stamp("do_prepackaged_stage", d) + bb.build.make_stamp("do_fetch", d) + bb.build.make_stamp("do_unpack", d) + bb.build.make_stamp("do_munge", d) + bb.build.make_stamp("do_patch", d) + bb.build.make_stamp("do_configure", d) + bb.build.make_stamp("do_qa_configure", d) + bb.build.make_stamp("do_rig_locales", d) + bb.build.make_stamp("do_compile", d) + bb.build.make_stamp("do_install", d) + bb.build.make_stamp("do_deploy", d) + bb.build.make_stamp("do_package", d) + bb.build.make_stamp("do_populate_staging", d) + bb.build.make_stamp("do_package_write_deb", d) + bb.build.make_stamp("do_package_write_ipk", d) + bb.build.make_stamp("do_package_write", d) + bb.build.make_stamp("do_package_stage", d) + bb.build.make_stamp("do_qa_staging", d) + + else: + bb.build.make_stamp("do_prepackaged_stage", d) +} +do_prepackaged_stage[cleandirs] = "${PSTAGE_TMPDIR_STAGE}" +do_prepackaged_stage[selfstamp] = "1" +addtask prepackaged_stage before do_fetch - if [ -e ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME} ]; then - oenote "Cross stuff already packaged, using that instead" - ${PSTAGE_INSTALL_CMD} ${CROSS_DIR} ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME} - fi +populate_staging_preamble () { + if [ "$PSTAGING_DISABLED" != "1" ]; then + #mkdir -p ${DEPLOY_DIR_PSTAGE} - if [ -e ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} ]; then - oenote "Staging stuff already packaged, using that instead" - ${PSTAGE_INSTALL_CMD} ${STAGING_DIR} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} - exit 0 + stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u + stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u fi - - - mkdir -p ${STAGING_BINDIR} - mkdir -p ${STAGING_LIBDIR} - mkdir -p ${STAGING_INCDIR} - mkdir -p ${STAGING_DATADIR}/aclocal } -do_stage_append() { +populate_staging_postamble () { + if [ "$PSTAGING_DISABLED" != "1" ]; then + # list the packages currently installed in staging + ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list - mkdir -p ${DEPLOY_DIR_PSTAGE} + set +e + stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging + stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross + set -e + fi +} - # list the packages currently installed in staging - 
${PSTAGE_LIST_CMD} ${STAGING_DIR} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-staging_list - ${PSTAGE_LIST_CMD} ${CROSS_DIR} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-cross_list +do_populate_staging[lockfiles] = "${STAGING_DIR}/staging.lock" +do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}" +python do_populate_staging_prepend() { + bb.build.exec_func("populate_staging_preamble", d) +} - set +e - rm -rf ${PSTAGE_TMPDIR_STAGE} - stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/staging-stamp-cache -u -d ${PSTAGE_TMPDIR_STAGE} - rc=$? - set -e +python do_populate_staging_append() { + bb.build.exec_func("populate_staging_postamble", d) +} - if [ $rc == 5 ]; then - #make a package for staging - mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL +staging_packager () { - echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Architecture: ${PACKAGE_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL - ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE} + echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Architecture: ${MULTIMACH_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control + echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - ${PSTAGE_INSTALL_CMD} ${STAGING_DIR} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} - fi + ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE} + ${PSTAGE_INSTALL_CMD} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} +} - set +e - rm -rf ${PSTAGE_TMPDIR_CROSS} - stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/cross-stamp-cache -u -d ${PSTAGE_TMPDIR_CROSS} - rc=$? 
- set -e - - if [ $rc == 5 ]; then - - #make a package for cross - mkdir -p ${PSTAGE_TMPDIR_CROSS}/CONTROL - - echo "Package: cross-${PN}" > ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Priority: Optional" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Architecture: ${BUILD_ARCH}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control - - ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_CROSS} ${DEPLOY_DIR_PSTAGE} - - ${PSTAGE_INSTALL_CMD} ${CROSS_DIR} ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME} - fi +python do_package_stage () { + if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1": + return + + bb.build.exec_func("read_subpackage_metadata", d) + packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() + if len(packages) > 0: + stagepath = bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1) + if bb.data.inherits_class('package_ipk', d): + ipkpath = os.path.join(stagepath, "deploy", "ipk") + bb.mkdirhier(ipkpath) + if bb.data.inherits_class('package_deb', d): + debpath = os.path.join(stagepath, "deploy", "deb") + bb.mkdirhier(debpath) + + for pkg in packages: + pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1) + if not pkgname: + pkgname = pkg + arch = bb.data.getVar('PACKAGE_ARCH_%s' % pkg, d, 1) + if not arch: + arch = bb.data.getVar('PACKAGE_ARCH', d, 1) + if not packaged(pkg, d): + continue + if bb.data.inherits_class('package_ipk', d): + srcname = bb.data.expand(pkgname + "_${PV}-${PR}_" + arch + ".ipk", d) + srcfile = bb.data.expand("${DEPLOY_DIR_IPK}/" + arch + "/" + srcname, d) + if not os.path.exists(srcfile): + bb.fatal("Package %s does not exist yet it should" % srcfile) + bb.copyfile(srcfile, ipkpath + "/" + srcname) + if bb.data.inherits_class('package_deb', d): + if arch == 'all': + srcname = bb.data.expand(pkgname + "_${PV}-${PR}_all.deb", d) + else: + srcname = bb.data.expand(pkgname + "_${PV}-${PR}_${DPKG_ARCH}.deb", d) + srcfile = bb.data.expand("${DEPLOY_DIR_DEB}/" + arch + "/" + srcname, d) + if not os.path.exists(srcfile): + bb.fatal("Package %s does not exist yet it should" % srcfile) + bb.copyfile(srcfile, debpath + "/" + srcname) + bb.build.exec_func("staging_helper", d) + lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) + bb.build.exec_func("staging_packager", d) + bb.utils.unlockfile(lf) } +addtask package_stage after do_package_write_ipk do_package_write_deb do_package_write do_populate_staging before do_build + diff --git a/classes/packaged-staging2.bbclass b/classes/packaged-staging2.bbclass deleted file mode 100644 index 29ce72d65c..0000000000 --- a/classes/packaged-staging2.bbclass +++ /dev/null @@ -1,229 +0,0 @@ -# -# Populate builds using prebuilt packages where possible to speed up builds -# and allow staging to be reconstructed. 
-# -# To use it add that line to conf/local.conf: -# -# INHERIT = "packaged-staging" - -python () { - import bb - if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('image', d) and not bb.data.inherits_class('cross', d) and not bb.data.inherits_class('sdk', d): - deps = bb.data.getVarFlag('do_populate_staging', 'depends', d) or "" - deps += " stagemanager-native:do_populate_staging" - bb.data.setVarFlag('do_populate_staging', 'depends', deps, d) - - deps = bb.data.getVarFlag('do_prepackaged_stage', 'depends', d) or "" - deps += " ipkg-native:do_populate_staging ipkg-utils-native:do_populate_staging" - bb.data.setVarFlag('do_prepackaged_stage', 'depends', deps, d) - else: - bb.data.setVar("PSTAGING_DISABLED", "1", d) -} - -export PSTAGING_DISABLED = "0" - -DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage" - -PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}" -PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" -PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" -PSTAGE_REMOVE_CMD = "ipkg-cl remove -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" -PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}" -PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${MULTIMACH_ARCH}.ipk" - -PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg" - -do_clean_append() { - """ - Clear the build and temp directories - """ - bb.note("Uninstalling package from staging...") - path = bb.data.getVar("PATH", d, 1) - removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1) - removepkg = bb.data.expand("staging-${PN}", d) - ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg)) - if ret != 0: - bb.note("Failure removing staging package") - - stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d) - bb.note("Removing staging package %s" % stagepkg) - #os.system('rm -rf ' + stagepkg) -} - -staging_helper () { - #assemble appropriate ipkg.conf - conffile=${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf - mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists - if [ ! -e $conffile ]; then - ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}" - priority=1 - for arch in $ipkgarchs; do - echo "arch $arch $priority" >> $conffile - priority=$(expr $priority + 5) - done - echo "src oe-staging file:${DEPLOY_DIR_PSTAGE}" >> $conffile - - OLD_PWD=`pwd` - cd ${DEPLOY_DIR_PSTAGE} - ipkg-make-index -p Packages . 
- cd ${OLD_PWD} - - ${PSTAGE_UPDATE_CMD} - fi -} - -python do_prepackaged_stage () { - import os - - if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1": - bb.build.make_stamp("do_prepackaged_stage", d) - return - - bb.note("Uninstalling any existing package from staging...") - path = bb.data.getVar("PATH", d, 1) - removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1) - removepkg = bb.data.expand("staging-${PN}", d) - lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) - ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg)) - bb.utils.unlockfile(lf) - if ret != 0: - bb.note("Failure attempting to remove staging package") - - stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d) - - if os.path.exists(stagepkg): - bb.note("Following speedup\n") - path = bb.data.getVar("PATH", d, 1) - installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1) - - bb.build.exec_func("staging_helper", d) - - bb.debug(1, "Staging stuff already packaged, using that instead") - lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) - ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg)) - bb.utils.unlockfile(lf) - if ret != 0: - bb.note("Failure installing prestage package") - - bb.build.make_stamp("do_prepackaged_stage", d) - bb.build.make_stamp("do_fetch", d) - bb.build.make_stamp("do_unpack", d) - bb.build.make_stamp("do_munge", d) - bb.build.make_stamp("do_patch", d) - bb.build.make_stamp("do_configure", d) - bb.build.make_stamp("do_qa_configure", d) - bb.build.make_stamp("do_rig_locales", d) - bb.build.make_stamp("do_compile", d) - bb.build.make_stamp("do_install", d) - bb.build.make_stamp("do_deploy", d) - bb.build.make_stamp("do_package", d) - bb.build.make_stamp("do_populate_staging", d) - bb.build.make_stamp("do_package_write_deb", d) - bb.build.make_stamp("do_package_write_ipk", d) - bb.build.make_stamp("do_package_write", d) - bb.build.make_stamp("do_package_stage", d) - bb.build.make_stamp("do_qa_staging", d) - - else: - bb.build.make_stamp("do_prepackaged_stage", d) -} -do_prepackaged_stage[cleandirs] = "${PSTAGE_TMPDIR_STAGE}" -do_prepackaged_stage[selfstamp] = "1" -addtask prepackaged_stage before do_fetch - -populate_staging_preamble () { - if [ "$PSTAGING_DISABLED" != "1" ]; then - #mkdir -p ${DEPLOY_DIR_PSTAGE} - - stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u - stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u - fi -} - -populate_staging_postamble () { - if [ "$PSTAGING_DISABLED" != "1" ]; then - # list the packages currently installed in staging - ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list - - set +e - stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging - stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross - set -e - fi -} - -do_populate_staging[lockfiles] = "${STAGING_DIR}/staging.lock" -do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}" -python do_populate_staging_prepend() { - bb.build.exec_func("populate_staging_preamble", d) -} - -python do_populate_staging_append() { - bb.build.exec_func("populate_staging_postamble", d) -} - - -staging_packager () { - - mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL - - echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Description: ${DESCRIPTION}" >> 
${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Architecture: ${MULTIMACH_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control - - ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE} - ${PSTAGE_INSTALL_CMD} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} -} - -python do_package_stage () { - if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1": - return - - bb.build.exec_func("read_subpackage_metadata", d) - packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() - if len(packages) > 0: - stagepath = bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1) - if bb.data.inherits_class('package_ipk', d): - ipkpath = os.path.join(stagepath, "deploy", "ipk") - bb.mkdirhier(ipkpath) - if bb.data.inherits_class('package_deb', d): - debpath = os.path.join(stagepath, "deploy", "deb") - bb.mkdirhier(debpath) - - for pkg in packages: - pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1) - if not pkgname: - pkgname = pkg - arch = bb.data.getVar('PACKAGE_ARCH_%s' % pkg, d, 1) - if not arch: - arch = bb.data.getVar('PACKAGE_ARCH', d, 1) - if not packaged(pkg, d): - continue - if bb.data.inherits_class('package_ipk', d): - srcname = bb.data.expand(pkgname + "_${PV}-${PR}_" + arch + ".ipk", d) - srcfile = bb.data.expand("${DEPLOY_DIR_IPK}/" + arch + "/" + srcname, d) - if not os.path.exists(srcfile): - bb.fatal("Package %s does not exist yet it should" % srcfile) - bb.copyfile(srcfile, ipkpath + "/" + srcname) - if bb.data.inherits_class('package_deb', d): - if arch == 'all': - srcname = bb.data.expand(pkgname + "_${PV}-${PR}_all.deb", d) - else: - srcname = bb.data.expand(pkgname + "_${PV}-${PR}_${DPKG_ARCH}.deb", d) - srcfile = bb.data.expand("${DEPLOY_DIR_DEB}/" + arch + "/" + srcname, d) - if not os.path.exists(srcfile): - bb.fatal("Package %s does not exist yet it should" % srcfile) - bb.copyfile(srcfile, debpath + "/" + srcname) - bb.build.exec_func("staging_helper", d) - lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d)) - bb.build.exec_func("staging_packager", d) - bb.utils.unlockfile(lf) -} - -addtask package_stage after do_package_write_ipk do_package_write_deb do_package_write do_populate_staging before do_build - diff --git a/conf/sanity.conf b/conf/sanity.conf index fe1e2282e4..0d494f1409 100644 --- a/conf/sanity.conf +++ b/conf/sanity.conf @@ -3,6 +3,6 @@ # See sanity.bbclass # # Expert users can confirm their sanity with "touch conf/sanity.conf" -BB_MIN_VERSION = "1.8.8" +BB_MIN_VERSION = "1.8.10" INHERIT += "sanity" diff --git a/packages/gcc/gcc-package.inc b/packages/gcc/gcc-package.inc index d1b4607de5..3d57d01113 100644 --- a/packages/gcc/gcc-package.inc +++ b/packages/gcc/gcc-package.inc @@ -12,6 +12,10 @@ PACKAGES = "libgcc ${PN} ${PN}-symlinks \ libstdc++-dev libg2c-dev \ ${PN}-doc" +# We really need HOST_SYS here for some packages and TARGET_SYS for others. +# For now, libgcc is most important so we fix for that - RP. +SHLIBSDIR = "${STAGING_DIR}/${TARGET_SYS}/shlibs" + FILES_${PN} = "${bindir}/${TARGET_PREFIX}gcc \ ${bindir}/${TARGET_PREFIX}gccbug \ ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc1 \ |
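For reference, the private fcntl helpers that do_package_deb and do_package_ipk used to define, and which this diff replaces with bb.utils.lockfile()/bb.utils.unlockfile(), amount to the following standalone sketch (assumption: the bb.utils versions provide the same exclusive-flock semantics; the lock path in the usage example is hypothetical).

import fcntl

def lockfile(name):
    """Open (or create) the lock file and take an exclusive flock on it."""
    lf = open(name, "a+")
    fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
    return lf

def unlockfile(lf):
    """Release the lock and close the handle."""
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()  # the removed in-class helpers referenced lf.close without calling it

if __name__ == "__main__":
    lf = lockfile("/tmp/example-pkg.lock")  # hypothetical path
    try:
        pass  # build the package while the per-package lock is held
    finally:
        unlockfile(lf)

Centralising the helpers also goes with the extra bb.utils.unlockfile(lf) calls added on the error paths of do_package_deb, so the per-package lock is released explicitly before a failure is raised.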