Diffstat (limited to 'classes')
-rw-r--r--  classes/base.bbclass              |  32
-rw-r--r--  classes/cross.bbclass             |   2
-rw-r--r--  classes/debian.bbclass            |   2
-rw-r--r--  classes/distutils.bbclass         |   6
-rw-r--r--  classes/kernel.bbclass            |   5
-rw-r--r--  classes/nslu2-image.bbclass       |   2
-rw-r--r--  classes/package.bbclass           |  53
-rw-r--r--  classes/package_ipk.bbclass       |  25
-rw-r--r--  classes/packaged-staging2.bbclass | 229
-rw-r--r--  classes/pkgconfig.bbclass         |   2
-rw-r--r--  classes/rootfs_deb.bbclass        |   8
-rw-r--r--  classes/scons.bbclass             |   1
-rw-r--r--  classes/sip.bbclass               |  10
-rw-r--r--  classes/siteinfo.bbclass          |   1
-rw-r--r--  classes/sourcepkg.bbclass         |   2
15 files changed, 303 insertions, 77 deletions
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 06a49814d2..7526dff65e 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -315,7 +315,9 @@ oe_libinstall() {
             # stop libtool using the final directory name for libraries
             # in staging:
             __runcmd rm -f $destpath/$libname.la
-            __runcmd sed -e 's/^installed=yes$/installed=no/' -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' $dotlai >$destpath/$libname.la
+            __runcmd sed -e 's/^installed=yes$/installed=no/' \
+                -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
+                $dotlai >$destpath/$libname.la
         else
             __runcmd install -m 0644 $dotlai $destpath/$libname.la
         fi
@@ -508,6 +510,13 @@ base_do_fetchall() {
     :
 }
 
+addtask buildall after do_build
+do_buildall[recrdeptask] = "do_build"
+base_do_buildall() {
+    :
+}
+
+
 def oe_unpack_file(file, data, url = None):
     import bb, os
     if not url:
@@ -695,7 +704,7 @@ do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${layout_bindir} ${STAGING_DI
 addtask populate_staging after do_install
 
 python do_populate_staging () {
-    bb.build.exec_func('do_stage', d)
+    bb.build.exec_func('do_stage', d)
 }
 
 addtask install after do_compile
@@ -737,7 +746,7 @@ def explode_deps(s):
 
 def packaged(pkg, d):
     import os, bb
-    return os.access(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), os.R_OK)
+    return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
 
 def read_pkgdatafile(fn):
     pkgdata = {}
@@ -761,16 +770,23 @@ def read_pkgdatafile(fn):
 
     return pkgdata
 
+def get_subpkgedata_fn(pkg, d):
+    import bb, os
+    archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
+    archs.reverse()
+    for arch in archs:
+        fn = bb.data.expand('${STAGING_DIR}/pkgdata/' + arch + '${TARGET_VENDOR}-${TARGET_OS}/runtime/%s' % pkg, d)
+        if os.path.exists(fn):
+            return fn
+    return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
+
 def has_subpkgdata(pkg, d):
     import bb, os
-    fn = bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
-    return os.access(fn, os.R_OK)
+    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
 
 def read_subpkgdata(pkg, d):
     import bb, os
-    fn = bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
-    return read_pkgdatafile(fn)
-
+    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
 
 def has_pkgdata(pn, d):
     import bb, os
diff --git a/classes/cross.bbclass b/classes/cross.bbclass
index 6e5480a2a0..89cb3e9569 100644
--- a/classes/cross.bbclass
+++ b/classes/cross.bbclass
@@ -20,6 +20,8 @@ CXXFLAGS = "${BUILD_CFLAGS}"
 LDFLAGS = "${BUILD_LDFLAGS}"
 LDFLAGS_build-darwin = "-L${STAGING_LIBDIR_NATIVE}"
 
+TOOLCHAIN_OPTIONS = ""
+
 # Overrides for paths
 
 # Path prefixes
diff --git a/classes/debian.bbclass b/classes/debian.bbclass
index a38f10d629..dd0789adae 100644
--- a/classes/debian.bbclass
+++ b/classes/debian.bbclass
@@ -1,5 +1,3 @@
-STAGING_PKGMAPS_DIR = "${STAGING_DIR}/pkgmaps/debian"
-
 # Debian package renaming only occurs when a package is built
 # We therefore have to make sure we build all runtime packages
 # before building the current package to make the packages runtime
diff --git a/classes/distutils.bbclass b/classes/distutils.bbclass
index 8af10a0c8a..18aee09a26 100644
--- a/classes/distutils.bbclass
+++ b/classes/distutils.bbclass
@@ -13,13 +13,15 @@ distutils_do_compile() {
 }
 
 distutils_stage_headers() {
+        install -d ${STAGING_DIR_HOST}${layout_prefix}/lib/${PYTHON_DIR}/site-packages
         BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
         ${STAGING_BINDIR_NATIVE}/python setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS} || \
                 oefatal "python setup.py install_headers execution failed."
 }
 
 distutils_stage_all() {
-        install -d ${STAGING_DIR_HOST}${layout_prefix}/${PYTHON_DIR}/site-packages
+        install -d ${STAGING_DIR_HOST}${layout_prefix}/lib/${PYTHON_DIR}/site-packages
+        # is this missing a lib below?
         PYTHONPATH=${STAGING_DIR_HOST}${layout_prefix}/${PYTHON_DIR}/site-packages \
         BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
         ${STAGING_BINDIR_NATIVE}/python setup.py install ${DISTUTILS_STAGE_ALL_ARGS} || \
@@ -49,7 +51,7 @@ distutils_do_install() {
         done
         fi
 
-        rm -f ${D}${libdir}/${PYTHON_DIR}/site-packages/easy-install.pth
+        rm -f ${D}${libdir}/${PYTHON_DIR}/site-packages/easy-install.pth
 }
 
 EXPORT_FUNCTIONS do_compile do_install
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index 5976f42c71..c00d0f3f58 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -349,6 +349,9 @@ python populate_packages_prepend () {
         # If autoloading is requested, output /etc/modutils/<name> and append
         # appropriate modprobe commands to the postinst
         autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1)
+        if not autoload:
+            # Also, try canonical name with dashes
+            autoload = bb.data.getVar('module_autoload_%s' % basename.replace('_', '-'), d, 1)
         if autoload:
             name = '%s/etc/modutils/%s' % (dvar, basename)
             f = open(name, 'w')
@@ -400,7 +403,7 @@ python populate_packages_prepend () {
     metapkg = "kernel-modules"
     bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
     bb.data.setVar('FILES_' + metapkg, "", d)
-    blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base' ]
+    blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ]
     for l in module_deps.values():
         for i in l:
             pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
diff --git a/classes/nslu2-image.bbclass b/classes/nslu2-image.bbclass
index 33caa54b21..8be1fa762f 100644
--- a/classes/nslu2-image.bbclass
+++ b/classes/nslu2-image.bbclass
@@ -19,5 +19,5 @@ nslu2_pack_image () {
         -o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}-nslu2-16mb.bin
 }
 
-EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware ixp4xx-npe upslug2-native apex-nslu2 apex-nslu2-16mb'
+EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware ixp4xx-npe apex-nslu2 apex-nslu2-16mb'
 IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; "
diff --git a/classes/package.bbclass b/classes/package.bbclass
index 6c61f7bdda..67aeb33a13 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -34,9 +34,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
         return
 
     packages = bb.data.getVar('PACKAGES', d, 1).split()
-    if not packages:
-        # nothing to do
-        return
 
     if postinst:
         postinst = '#!/bin/sh\n' + postinst + '\n'
@@ -189,42 +186,15 @@ def runstrip(file, d):
 # Package data handling routines
 #
 
-STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps"
-
-def add_package_mapping (pkg, new_name, d):
-    import bb, os
-
-    def encode(str):
-        import codecs
-        c = codecs.getencoder("string_escape")
-        return c(str)[0]
-
-    pmap_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1)
-
-    bb.mkdirhier(pmap_dir)
-
-    data_file = os.path.join(pmap_dir, pkg)
-
-    f = open(data_file, 'w')
-    f.write("%s\n" % encode(new_name))
-    f.close()
-
 def get_package_mapping (pkg, d):
     import bb, os
 
-    def decode(str):
-        import codecs
-        c = codecs.getdecoder("string_escape")
-        return c(str)[0]
+    data = read_subpkgdata(pkg, d)
+    key = "PKG_%s" % pkg
 
-    data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d)
+    if key in data:
+        return data[key]
 
-    if os.access(data_file, os.R_OK):
-        f = file(data_file, 'r')
-        lines = f.readlines()
-        f.close()
-        for l in lines:
-            return decode(l).strip()
     return pkg
 
 def runtime_mapping_rename (varname, d):
@@ -258,9 +228,6 @@ python package_do_split_locales() {
         return
 
     packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
-    if not packages:
-        bb.debug(1, "no packages to build; not splitting locales")
-        return
 
     datadir = bb.data.getVar('datadir', d, 1)
     if not datadir:
@@ -410,9 +377,6 @@ python populate_packages () {
     bb.mkdirhier(dvar)
 
     packages = bb.data.getVar('PACKAGES', d, 1)
-    if not packages:
-        bb.debug(1, "PACKAGES not defined, nothing to package")
-        return
 
     pn = bb.data.getVar('PN', d, 1)
     if not pn:
@@ -516,8 +480,6 @@ python populate_packages () {
         pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
         if pkgname is None:
             bb.data.setVar('PKG_%s' % pkg, pkg, d)
-        else:
-            add_package_mapping(pkg, pkgname, d)
 
     dangling_links = {}
     pkg_files = {}
@@ -604,6 +566,8 @@ python emit_pkgdata() {
         sf.close()
 
         allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, 1)
+        if not allow_empty:
+            allow_empty = bb.data.getVar('ALLOW_EMPTY', d, 1)
         root = "%s/install/%s" % (workdir, pkg)
         os.chdir(root)
         g = glob('*')
@@ -903,10 +867,7 @@ python package_depchains() {
     prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split()
 
     def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
-        def packaged(pkg, d):
-            return os.access(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), os.R_OK)
-
-        #bb.note('rdepends for %s is %s' % (base, rdepends))
+        #bb.note('rdepends for %s is %s' % (base, rdepends))
 
         rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "")
 
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index a12899e2a1..16d6050573 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -32,7 +32,7 @@ python package_ipk_install () {
 
     # Generate ipk.conf if it or the stamp doesnt exist
     conffile = os.path.join(stagingdir,"ipkg.conf")
-    if not os.access(conffile, os.R_OK):
+    if not os.access(conffile, os.R_OK):
         ipkg_archs = bb.data.getVar('PACKAGE_ARCHS',d)
         if ipkg_archs is None:
             bb.error("PACKAGE_ARCHS missing")
@@ -114,7 +114,7 @@ package_generate_ipkg_conf () {
 }
 
 python do_package_ipk () {
-    import sys, re, fcntl, copy
+    import sys, re, copy, fcntl
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
@@ -127,10 +127,6 @@ python do_package_ipk () {
         bb.error("DEPLOY_DIR_IPK not defined, unable to package")
         return
 
-    arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
-    outdir = "%s/%s" % (outdir, arch)
-    bb.mkdirhier(outdir)
-
     dvar = bb.data.getVar('D', d, 1)
     if not dvar:
         bb.error("D not defined, unable to package")
@@ -160,9 +156,11 @@ python do_package_ipk () {
         fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
         lf.close
 
+
     for pkg in packages.split():
         localdata = bb.data.createCopy(d)
-        root = "%s/install/%s" % (workdir, pkg)
+        pkgdest = bb.data.getVar('PKGDEST', d, 1)
+        root = "%s/%s" % (pkgdest, pkg)
 
         lf = lockfile(root + ".lock")
 
@@ -181,7 +179,8 @@ python do_package_ipk () {
         bb.data.update_data(localdata)
         basedir = os.path.join(os.path.dirname(root))
 
-        pkgoutdir = outdir
+        arch = bb.data.getVar('PACKAGE_ARCH', localdata, 1)
+        pkgoutdir = "%s/%s" % (outdir, arch)
         bb.mkdirhier(pkgoutdir)
         os.chdir(root)
         from glob import glob
@@ -202,6 +201,7 @@ python do_package_ipk () {
         try:
             ctrlfile = file(os.path.join(controldir, 'control'), 'w')
         except OSError:
+            unlockfile(lf)
             raise bb.build.FuncFailed("unable to open control file for writing.")
 
         fields = []
@@ -235,6 +235,7 @@ python do_package_ipk () {
         except KeyError:
             (type, value, traceback) = sys.exc_info()
             ctrlfile.close()
+            unlockfile(lf)
             raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value)
         # more fields
 
@@ -271,6 +272,7 @@ python do_package_ipk () {
             try:
                 scriptfile = file(os.path.join(controldir, script), 'w')
             except OSError:
+                unlockfile(lf)
                 raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
             scriptfile.write(scriptvar)
             scriptfile.close()
@@ -281,6 +283,7 @@ python do_package_ipk () {
         try:
             conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
         except OSError:
+            unlockfile(lf)
             raise bb.build.FuncFailed("unable to open conffiles for writing.")
         for f in conffiles_str.split():
             conffiles.write('%s\n' % f)
@@ -290,6 +293,7 @@ python do_package_ipk () {
         ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
                                                   bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir))
         if ret != 0:
+            unlockfile(lf)
             raise bb.build.FuncFailed("ipkg-build execution failed")
 
         for script in ["preinst", "postinst", "prerm", "postrm", "control" ]:
@@ -308,7 +312,10 @@ python do_package_ipk () {
 python () {
     import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
-        bb.data.setVarFlag('do_package_write_ipk', 'depends', 'ipkg-utils-native:do_populate_staging fakeroot-native:do_populate_staging', d)
+        deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
+        deps.append('ipkg-utils-native:do_populate_staging')
+        deps.append('fakeroot-native:do_populate_staging')
+        bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
 }
 
 python do_package_write_ipk () {
diff --git a/classes/packaged-staging2.bbclass b/classes/packaged-staging2.bbclass
new file mode 100644
index 0000000000..29ce72d65c
--- /dev/null
+++ b/classes/packaged-staging2.bbclass
@@ -0,0 +1,229 @@
+#
+# Populate builds using prebuilt packages where possible to speed up builds
+# and allow staging to be reconstructed.
+#
+# To use it add that line to conf/local.conf:
+#
+# INHERIT = "packaged-staging"
+
+python () {
+    import bb
+    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('image', d) and not bb.data.inherits_class('cross', d) and not bb.data.inherits_class('sdk', d):
+        deps = bb.data.getVarFlag('do_populate_staging', 'depends', d) or ""
+        deps += " stagemanager-native:do_populate_staging"
+        bb.data.setVarFlag('do_populate_staging', 'depends', deps, d)
+
+        deps = bb.data.getVarFlag('do_prepackaged_stage', 'depends', d) or ""
+        deps += " ipkg-native:do_populate_staging ipkg-utils-native:do_populate_staging"
+        bb.data.setVarFlag('do_prepackaged_stage', 'depends', deps, d)
+    else:
+        bb.data.setVar("PSTAGING_DISABLED", "1", d)
+}
+
+export PSTAGING_DISABLED = "0"
+
+DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage"
+
+PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}"
+PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_REMOVE_CMD = "ipkg-cl remove -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${MULTIMACH_ARCH}.ipk"
+
+PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"
+
+do_clean_append() {
+    """
+    Clear the build and temp directories
+    """
+    bb.note("Uninstalling package from staging...")
+    path = bb.data.getVar("PATH", d, 1)
+    removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
+    removepkg = bb.data.expand("staging-${PN}", d)
+    ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
+    if ret != 0:
+        bb.note("Failure removing staging package")
+
+    stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
+    bb.note("Removing staging package %s" % stagepkg)
+    #os.system('rm -rf ' + stagepkg)
+}
+
+staging_helper () {
+    #assemble appropriate ipkg.conf
+    conffile=${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf
+    mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists
+    if [ ! -e $conffile ]; then
+        ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}"
+        priority=1
+        for arch in $ipkgarchs; do
+            echo "arch $arch $priority" >> $conffile
+            priority=$(expr $priority + 5)
+        done
+        echo "src oe-staging file:${DEPLOY_DIR_PSTAGE}" >> $conffile
+
+        OLD_PWD=`pwd`
+        cd ${DEPLOY_DIR_PSTAGE}
+        ipkg-make-index -p Packages .
+        cd ${OLD_PWD}
+
+        ${PSTAGE_UPDATE_CMD}
+    fi
+}
+
+python do_prepackaged_stage () {
+    import os
+
+    if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
+        bb.build.make_stamp("do_prepackaged_stage", d)
+        return
+
+    bb.note("Uninstalling any existing package from staging...")
+    path = bb.data.getVar("PATH", d, 1)
+    removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
+    removepkg = bb.data.expand("staging-${PN}", d)
+    lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+    ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
+    bb.utils.unlockfile(lf)
+    if ret != 0:
+        bb.note("Failure attempting to remove staging package")
+
+    stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
+
+    if os.path.exists(stagepkg):
+        bb.note("Following speedup\n")
+        path = bb.data.getVar("PATH", d, 1)
+        installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1)
+
+        bb.build.exec_func("staging_helper", d)
+
+        bb.debug(1, "Staging stuff already packaged, using that instead")
+        lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+        ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg))
+        bb.utils.unlockfile(lf)
+        if ret != 0:
+            bb.note("Failure installing prestage package")
+
+        bb.build.make_stamp("do_prepackaged_stage", d)
+        bb.build.make_stamp("do_fetch", d)
+        bb.build.make_stamp("do_unpack", d)
+        bb.build.make_stamp("do_munge", d)
+        bb.build.make_stamp("do_patch", d)
+        bb.build.make_stamp("do_configure", d)
+        bb.build.make_stamp("do_qa_configure", d)
+        bb.build.make_stamp("do_rig_locales", d)
+        bb.build.make_stamp("do_compile", d)
+        bb.build.make_stamp("do_install", d)
+        bb.build.make_stamp("do_deploy", d)
+        bb.build.make_stamp("do_package", d)
+        bb.build.make_stamp("do_populate_staging", d)
+        bb.build.make_stamp("do_package_write_deb", d)
+        bb.build.make_stamp("do_package_write_ipk", d)
+        bb.build.make_stamp("do_package_write", d)
+        bb.build.make_stamp("do_package_stage", d)
+        bb.build.make_stamp("do_qa_staging", d)
+
+    else:
+        bb.build.make_stamp("do_prepackaged_stage", d)
+}
+do_prepackaged_stage[cleandirs] = "${PSTAGE_TMPDIR_STAGE}"
+do_prepackaged_stage[selfstamp] = "1"
+addtask prepackaged_stage before do_fetch
+
+populate_staging_preamble () {
+    if [ "$PSTAGING_DISABLED" != "1" ]; then
+        #mkdir -p ${DEPLOY_DIR_PSTAGE}
+
+        stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u
+        stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u
+    fi
+}
+
+populate_staging_postamble () {
+    if [ "$PSTAGING_DISABLED" != "1" ]; then
+        # list the packages currently installed in staging
+        ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list
+
+        set +e
+        stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging
+        stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross
+        set -e
+    fi
+}
+
+do_populate_staging[lockfiles] = "${STAGING_DIR}/staging.lock"
+do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}"
+python do_populate_staging_prepend() {
+    bb.build.exec_func("populate_staging_preamble", d)
+}
+
+python do_populate_staging_append() {
+    bb.build.exec_func("populate_staging_postamble", d)
+}
+
+
+staging_packager () {
+
+    mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL
+
+    echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Architecture: ${MULTIMACH_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+    echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+
+    ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE}
+    ${PSTAGE_INSTALL_CMD} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}
+}
+
+python do_package_stage () {
+    if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
+        return
+
+    bb.build.exec_func("read_subpackage_metadata", d)
+    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
+    if len(packages) > 0:
+        stagepath = bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1)
+        if bb.data.inherits_class('package_ipk', d):
+            ipkpath = os.path.join(stagepath, "deploy", "ipk")
+            bb.mkdirhier(ipkpath)
+        if bb.data.inherits_class('package_deb', d):
+            debpath = os.path.join(stagepath, "deploy", "deb")
+            bb.mkdirhier(debpath)
+
+        for pkg in packages:
+            pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
+            if not pkgname:
+                pkgname = pkg
+            arch = bb.data.getVar('PACKAGE_ARCH_%s' % pkg, d, 1)
+            if not arch:
+                arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
+            if not packaged(pkg, d):
+                continue
+            if bb.data.inherits_class('package_ipk', d):
+                srcname = bb.data.expand(pkgname + "_${PV}-${PR}_" + arch + ".ipk", d)
+                srcfile = bb.data.expand("${DEPLOY_DIR_IPK}/" + arch + "/" + srcname, d)
+                if not os.path.exists(srcfile):
+                    bb.fatal("Package %s does not exist yet it should" % srcfile)
+                bb.copyfile(srcfile, ipkpath + "/" + srcname)
+            if bb.data.inherits_class('package_deb', d):
+                if arch == 'all':
+                    srcname = bb.data.expand(pkgname + "_${PV}-${PR}_all.deb", d)
+                else:
+                    srcname = bb.data.expand(pkgname + "_${PV}-${PR}_${DPKG_ARCH}.deb", d)
+                srcfile = bb.data.expand("${DEPLOY_DIR_DEB}/" + arch + "/" + srcname, d)
+                if not os.path.exists(srcfile):
+                    bb.fatal("Package %s does not exist yet it should" % srcfile)
+                bb.copyfile(srcfile, debpath + "/" + srcname)
+    bb.build.exec_func("staging_helper", d)
+    lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+    bb.build.exec_func("staging_packager", d)
+    bb.utils.unlockfile(lf)
+}
+
+addtask package_stage after do_package_write_ipk do_package_write_deb do_package_write do_populate_staging before do_build
+
diff --git a/classes/pkgconfig.bbclass b/classes/pkgconfig.bbclass
index c50e0b484f..d2176d8b39 100644
--- a/classes/pkgconfig.bbclass
+++ b/classes/pkgconfig.bbclass
@@ -24,7 +24,7 @@ def get_pkgconfig_mangle(d):
 
 do_install_append () {
     for pc in `find ${D} -name '*.pc' -type f | grep -v -- '-uninstalled.pc$'`; do
-        sed -i ${@get_pkgconfig_mangle(d)} -e 's:${D}::g' ${pc}
+        sed -i ${@get_pkgconfig_mangle(d)} -e 's:${D}::g' -e 's:${STAGING_LIBDIR}:${libdir}:g' -e 's:${STAGING_INCDIR}:${includedir}:g' -e 's:${STAGING_DIR_TARGET}:${prefix}:g' ${pc}
     done
 }
 
diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass
index 935ef6e3f7..853ea225fc 100644
--- a/classes/rootfs_deb.bbclass
+++ b/classes/rootfs_deb.bbclass
@@ -10,8 +10,12 @@ fakeroot rootfs_deb_do_rootfs () {
     mkdir -p ${IMAGE_ROOTFS}/var/dpkg/info
     mkdir -p ${IMAGE_ROOTFS}/var/dpkg/updates
 
+    mkdir -p ${STAGING_ETCDIR_NATIVE}/apt/
+
     rm -f ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev
     rm -f ${STAGING_ETCDIR_NATIVE}/apt/preferences
+    > ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev
+    > ${STAGING_ETCDIR_NATIVE}/apt/preferences
     > ${IMAGE_ROOTFS}/var/dpkg/status
     > ${IMAGE_ROOTFS}/var/dpkg/available
     # > ${STAGING_DIR}/var/dpkg/status
@@ -36,9 +40,9 @@ fakeroot rootfs_deb_do_rootfs () {
         priority=$(expr $priority + 5)
     done
 
-    tac ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev > ${STAGING_ETCDIR_NATIVE}/apt/sources.list
+    tac ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev > ${STAGING_DIR}/etc/apt/sources.list
 
-    cat "${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample" \
+    cat "${STAGING_DIR}/etc/apt/apt.conf.sample" \
         | sed -e 's#Architecture ".*";#Architecture "${TARGET_ARCH}";#' \
         > "${STAGING_ETCDIR_NATIVE}/apt/apt-rootfs.conf"
 
diff --git a/classes/scons.bbclass b/classes/scons.bbclass
index 534b3bd4c7..6d0a783dc2 100644
--- a/classes/scons.bbclass
+++ b/classes/scons.bbclass
@@ -6,6 +6,7 @@ scons_do_compile() {
 }
 
 scons_do_install() {
+    install -d ${D}${prefix}
     ${STAGING_BINDIR_NATIVE}/scons PREFIX=${D}${prefix} prefix=${D}${prefix} install || \
       oefatal "scons install execution failed."
 }
diff --git a/classes/sip.bbclass b/classes/sip.bbclass
index a258fda629..6f77f460dc 100644
--- a/classes/sip.bbclass
+++ b/classes/sip.bbclass
@@ -1,8 +1,10 @@
 # Build Class for Sip based Python Bindings
 # (C) Michael 'Mickey' Lauer <mickey@Vanille.de>
 #
-DEPENDS =+ "sip-native"
-RDEPENDS += "python-sip"
+
+# yes, python-sip is actually a build-time dependency, since
+# the recipe installs sip.h
+DEPENDS =+ "sip-native python-sip"
 
 # default stuff, do not uncomment
 # EXTRA_SIPTAGS = "-tWS_X11 -tQt_4_3_0"
@@ -34,8 +36,8 @@ sip_do_generate() {
     for module in $MODULES
     do
         install -d ${module}/
-        echo "calling 'sip4 -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
-        sip4 -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf \
+        echo "calling 'sip -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
+        sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf \
             sip/${module}/${module}mod.sip || die "Error calling sip on ${module}"
         cat ${module}/${module}.sbf | sed s,target,TARGET, \
             | sed s,sources,SOURCES, \
diff --git a/classes/siteinfo.bbclass b/classes/siteinfo.bbclass
index c41a3ff369..a3b67b5798 100644
--- a/classes/siteinfo.bbclass
+++ b/classes/siteinfo.bbclass
@@ -25,6 +25,7 @@ def get_siteinfo_list(d):
        "armeb-linux-gnueabi":       "endian-big bit-32 common-glibc arm-common armeb-linux",\
        "armeb-linux-uclibc":        "endian-big bit-32 common-uclibc arm-common",\
        "armeb-linux-uclibcgnueabi": "endian-big bit-32 common-uclibc arm-common armeb-linux-uclibc",\
+       "arm-darwin":                "endian-little bit-32 common-darwin",\
        "arm-linux":                 "endian-little bit-32 common-glibc arm-common",\
        "arm-linux-gnueabi":         "endian-little bit-32 common-glibc arm-common arm-linux",\
        "arm-linux-uclibc":          "endian-little bit-32 common-uclibc arm-common",\
diff --git a/classes/sourcepkg.bbclass b/classes/sourcepkg.bbclass
index 390d3684d4..bbc9f187ec 100644
--- a/classes/sourcepkg.bbclass
+++ b/classes/sourcepkg.bbclass
@@ -106,6 +106,6 @@ EXPORT_FUNCTIONS do_create_orig_tgz do_archive_bb do_dumpdata do_create_diff_gz
 
 addtask create_orig_tgz after do_unpack before do_patch
 addtask archive_bb after do_patch before do_dumpdata
-addtask dumpdata after archive_bb before do_create_diff_gz
+addtask dumpdata after do_archive_bb before do_create_diff_gz
 addtask create_diff_gz after do_dump_data before do_configure
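A note on usage (editorial, not part of the diff above): the header of the new packaged-staging2.bbclass says the class is enabled from conf/local.conf via INHERIT. A minimal local.conf sketch, assuming the class is inherited under the name of the file added here; the comment inside the class itself still says "packaged-staging", so the exact name to use depends on how the file ends up installed:

    INHERIT += "packaged-staging2"

Similarly, the do_buildall task added to base.bbclass declares recrdeptask = "do_build", so it is presumably driven as something like `bitbake <target> -c buildall` to run do_build across the target's recursive dependency tree.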