Diffstat (limited to 'classes')
-rw-r--r--  classes/autotools.bbclass           60
-rw-r--r--  classes/base.bbclass                10
-rw-r--r--  classes/bootimg.bbclass              8
-rw-r--r--  classes/cross-canadian.bbclass       2
-rw-r--r--  classes/cross.bbclass               39
-rw-r--r--  classes/gconf.bbclass               25
-rw-r--r--  classes/gettext.bbclass              5
-rw-r--r--  classes/image.bbclass                8
-rw-r--r--  classes/insane.bbclass              23
-rw-r--r--  classes/kernel.bbclass               2
-rw-r--r--  classes/mono.bbclass                 2
-rw-r--r--  classes/native.bbclass               4
-rw-r--r--  classes/package.bbclass              2
-rw-r--r--  classes/package_deb.bbclass          4
-rw-r--r--  classes/package_ipk.bbclass          6
-rw-r--r--  classes/package_rpm.bbclass          4
-rw-r--r--  classes/package_tar.bbclass          4
-rw-r--r--  classes/packaged-staging.bbclass   128
-rw-r--r--  classes/patch.bbclass                2
-rw-r--r--  classes/relocatable.bbclass         93
-rw-r--r--  classes/rootfs_deb.bbclass           2
-rw-r--r--  classes/rootfs_ipk.bbclass           2
-rw-r--r--  classes/rootfs_rpm.bbclass           4
-rw-r--r--  classes/sanity.bbclass               9
-rw-r--r--  classes/sip.bbclass                  2
-rw-r--r--  classes/sip3.bbclass                 2
-rw-r--r--  classes/srctree.bbclass             12
-rw-r--r--  classes/staging.bbclass             26
28 files changed, 358 insertions, 132 deletions
diff --git a/classes/autotools.bbclass b/classes/autotools.bbclass
index d47630ef54..9bb4f6c43e 100644
--- a/classes/autotools.bbclass
+++ b/classes/autotools.bbclass
@@ -44,26 +44,28 @@ def autotools_set_crosscompiling(d):
# EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"
+CONFIGUREOPTS = " --build=${BUILD_SYS} \
+ --host=${HOST_SYS} \
+ --target=${TARGET_SYS} \
+ --prefix=${prefix} \
+ --exec_prefix=${exec_prefix} \
+ --bindir=${bindir} \
+ --sbindir=${sbindir} \
+ --libexecdir=${libexecdir} \
+ --datadir=${datadir} \
+ --sysconfdir=${sysconfdir} \
+ --sharedstatedir=${sharedstatedir} \
+ --localstatedir=${localstatedir} \
+ --libdir=${libdir} \
+ --includedir=${includedir} \
+ --oldincludedir=${oldincludedir} \
+ --infodir=${infodir} \
+ --mandir=${mandir}"
+
oe_runconf () {
if [ -x ${S}/configure ] ; then
cfgcmd="${S}/configure \
- --build=${BUILD_SYS} \
- --host=${HOST_SYS} \
- --target=${TARGET_SYS} \
- --prefix=${prefix} \
- --exec_prefix=${exec_prefix} \
- --bindir=${bindir} \
- --sbindir=${sbindir} \
- --libexecdir=${libexecdir} \
- --datadir=${datadir} \
- --sysconfdir=${sysconfdir} \
- --sharedstatedir=${sharedstatedir} \
- --localstatedir=${localstatedir} \
- --libdir=${libdir} \
- --includedir=${includedir} \
- --oldincludedir=${oldincludedir} \
- --infodir=${infodir} \
- --mandir=${mandir} \
+ ${CONFIGUREOPTS} \
${EXTRA_OECONF} \
$@"
oenote "Running $cfgcmd..."
@@ -127,6 +129,10 @@ autotools_do_configure() {
oenote Executing glib-gettextize --force --copy
echo "no" | glib-gettextize --force --copy
fi
+ else if grep "^[[:space:]]*AM_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
+ cp ${STAGING_DATADIR}/gettext/config.rpath ${S}/
+ fi
+
fi
mkdir -p m4
oenote Executing autoreconf --verbose --install --force ${EXTRA_AUTORECONF} $acpaths
@@ -154,21 +160,17 @@ PACKAGE_PREPROCESS_FUNCS += "autotools_prepackage_lamangler"
autotools_prepackage_lamangler () {
for i in `find ${PKGD} -name "*.la"` ; do \
- sed -i -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${libdir}/\1,g' $i
- sed -i -e s:${CROSS_DIR}/${HOST_SYS}::g $i
- sed -i -e s:${CROSS_DIR}::g $i
- sed -i -e s:${STAGING_LIBDIR}:${libdir}:g $i
- if [ -n "${STAGING_DIR_HOST}" ]; then
- sed -i -e s:${STAGING_DIR_HOST}::g $i
- fi
- sed -i -e s:${STAGING_DIR}::g $i
- sed -i -e s:${S}::g $i
- sed -i -e s:${T}::g $i
- sed -i -e s:${D}::g $i
- done
+ sed -i -e 's:${STAGING_LIBDIR}:${libdir}:g;' \
+ -e 's:${D}::g;' \
+ -e 's:-I${WORKDIR}\S*: :g;' \
+ -e 's:-L${WORKDIR}\S*: :g;' \
+ $i
+ done
}
+# STAGE_TEMP_PREFIX is used for a speedup by packaged-staging
STAGE_TEMP="${WORKDIR}/temp-staging"
+STAGE_TEMP_PREFIX = ""
autotools_stage_includes() {
if [ "${INHIBIT_AUTO_STAGE_INCLUDES}" != "1" ]
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 4d5d73df86..3c854c6e7b 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -249,10 +249,9 @@ python base_do_unpack() {
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
- src_uri = bb.data.getVar('SRC_URI', localdata)
+ src_uri = bb.data.getVar('SRC_URI', localdata, True)
if not src_uri:
return
- src_uri = bb.data.expand(src_uri, localdata)
for url in src_uri.split():
try:
local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
@@ -326,7 +325,7 @@ python base_eventhandler() {
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
-do_configure[deptask] = "do_populate_staging"
+do_configure[deptask] = "do_populate_sysroot"
base_do_configure() {
:
}
@@ -341,7 +340,6 @@ base_do_compile() {
fi
}
-
addtask install after do_compile
do_install[dirs] = "${D} ${S} ${B}"
# Remove and re-create ${D} so that is it guaranteed to be empty
@@ -395,7 +393,7 @@ python () {
srcuri = bb.data.getVar('SRC_URI', d, 1)
if "git://" in srcuri:
depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
- depends = depends + " git-native:do_populate_staging"
+ depends = depends + " git-native:do_populate_sysroot"
bb.data.setVarFlag('do_fetch', 'depends', depends, d)
# unzip-native should already be staged before unpacking ZIP recipes
@@ -404,7 +402,7 @@ python () {
if ".zip" in src_uri or need_unzip == "1":
depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
- depends = depends + " unzip-native:do_populate_staging"
+ depends = depends + " unzip-native:do_populate_sysroot"
bb.data.setVarFlag('do_unpack', 'depends', depends, d)
# 'multimachine' handling
diff --git a/classes/bootimg.bbclass b/classes/bootimg.bbclass
index d58cf1f424..35b0d76b4e 100644
--- a/classes/bootimg.bbclass
+++ b/classes/bootimg.bbclass
@@ -12,10 +12,10 @@
# ${APPEND} - an override list of append strings for each label
# ${SYSLINUX_OPTS} - additional options to add to the syslinux file ';' delimited
-do_bootimg[depends] += "dosfstools-native:do_populate_staging \
- syslinux-native:do_populate_staging \
- mtools-native:do_populate_staging \
- cdrtools-native:do_populate_staging"
+do_bootimg[depends] += "dosfstools-native:do_populate_sysroot \
+ syslinux-native:do_populate_sysroot \
+ mtools-native:do_populate_sysroot \
+ cdrtools-native:do_populate_sysroot"
PACKAGES = " "
diff --git a/classes/cross-canadian.bbclass b/classes/cross-canadian.bbclass
index bd738ecd4d..a149fd44b7 100644
--- a/classes/cross-canadian.bbclass
+++ b/classes/cross-canadian.bbclass
@@ -31,6 +31,8 @@ CFLAGS = "${BUILDSDK_CFLAGS}"
CXXFLAGS = "${BUILDSDK_CFLAGS}"
LDFLAGS = "${BUILDSDK_LDFLAGS}"
+DEPENDS_GETTEXT = "gettext-native gettext-nativesdk"
+
# Change to place files in SDKPATH
prefix = "${SDKPATH}"
exec_prefix = "${SDKPATH}"
diff --git a/classes/cross.bbclass b/classes/cross.bbclass
index 9a3d39cb49..69f6511753 100644
--- a/classes/cross.bbclass
+++ b/classes/cross.bbclass
@@ -1,3 +1,6 @@
+# Disabled for now since the relocation paths are too long
+#inherit relocatable
+
# Cross packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"
@@ -32,6 +35,16 @@ LDFLAGS_build-darwin = "-L${STAGING_LIBDIR_NATIVE}"
TOOLCHAIN_OPTIONS = ""
+DEPENDS_GETTEXT = "gettext-native"
+
+# Path mangling needed by the cross packaging
+# Note that we use := here to ensure that libdir and includedir are
+# target paths, not CROSS_DIR paths.
+target_libdir := "${libdir}"
+target_includedir := "${includedir}"
+target_base_libdir := "${base_libdir}"
+target_prefix := "${prefix}"
+
# Overrides for paths
prefix = "${CROSS_DIR}"
base_prefix = "${prefix}"
@@ -39,10 +52,30 @@ exec_prefix = "${prefix}"
base_sbindir = "${base_prefix}/bin"
sbindir = "${exec_prefix}/bin"
+do_install () {
+ oe_runmake 'DESTDIR=${D}' install
+}
+
do_stage () {
- oe_runmake install
+ autotools_stage_all
}
-do_install () {
- :
+#
+# Override the default sysroot staging copy since this won't look like a target system
+#
+sysroot_stage_all() {
+ sysroot_stage_dir ${D} ${SYSROOT_DESTDIR}
+ install -d ${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}${target_base_libdir}/
+ install -d ${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}${target_libdir}/
+ if [ -d ${SYSROOT_DESTDIR}${target_base_libdir} ]; then
+ sysroot_stage_libdir ${SYSROOT_DESTDIR}${target_base_libdir} ${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}${target_base_libdir}
+ fi
+ if [ -d ${SYSROOT_DESTDIR}${target_libdir} ]; then
+ sysroot_stage_libdir ${SYSROOT_DESTDIR}${target_libdir} ${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}${target_libdir}
+ fi
}
+
+#
+# Cross .la files have more path issues we have to correct
+SYSROOTEXTRALIBDIRSED = '-e "/^libdir=/s,.*,libdir=${STAGING_DIR_TARGET}${target_libdir},g"'
+
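A note on the ":=" assignments in the cross.bbclass hunk above: BitBake expands ":=" immediately at parse time, so target_libdir and friends capture the target paths before the prefix/CROSS_DIR overrides further down take effect. A rough sketch with simplified, hypothetical definitions:

    prefix = "/usr"
    libdir = "${prefix}/lib"
    target_libdir := "${libdir}"    # expanded now -> "/usr/lib"
    prefix = "${CROSS_DIR}"         # later override; with a plain "=", target_libdir
                                    # would have followed libdir under CROSS_DIR instead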
diff --git a/classes/gconf.bbclass b/classes/gconf.bbclass
index f55ae2c2ae..30e7ef7591 100644
--- a/classes/gconf.bbclass
+++ b/classes/gconf.bbclass
@@ -5,14 +5,14 @@ if [ "$1" = configure ]; then
if [ "x$D" != "x" ]; then
exit 1
fi
- SCHEMA_LOCATION=/etc/gconf/schemas
- for SCHEMA in ${SCHEMA_FILES}; do
- if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
- HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
- gconftool-2 \
- --makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
- fi
- done
+ SCHEMA_LOCATION=/etc/gconf/schemas
+ for SCHEMA in ${SCHEMA_FILES}; do
+ if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
+ HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
+ gconftool-2 \
+ --makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
+ fi
+ done
fi
}
@@ -30,7 +30,7 @@ fi
}
python populate_packages_append () {
- import os.path, re
+ import re
packages = bb.data.getVar('PACKAGES', d, 1).split()
pkgdest = bb.data.getVar('PKGDEST', d, 1)
@@ -45,10 +45,9 @@ python populate_packages_append () {
if schemas != []:
bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
- postinst = '#!/bin/sh\n'
- pkgpostinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
- if pkgpostinst:
- postinst += pkgpostinst
+ postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
postinst += bb.data.getVar('gconf_postinst', d, 1)
bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
diff --git a/classes/gettext.bbclass b/classes/gettext.bbclass
index b2ff2849fa..a40e74f819 100644
--- a/classes/gettext.bbclass
+++ b/classes/gettext.bbclass
@@ -1,4 +1,3 @@
-DEPENDS =+ "gettext-native"
def gettext_after_parse(d):
# Remove the NLS bits if USE_NLS is no.
if bb.data.getVar('USE_NLS', d, 1) == 'no':
@@ -12,5 +11,7 @@ python () {
gettext_after_parse(d)
}
-DEPENDS =+ "gettext-native"
+DEPENDS_GETTEXT = "gettext gettext-native"
+
+DEPENDS =+ "${DEPENDS_GETTEXT}"
EXTRA_OECONF += "--enable-nls"
diff --git a/classes/image.bbclass b/classes/image.bbclass
index 614d368c9b..f695d3c4df 100644
--- a/classes/image.bbclass
+++ b/classes/image.bbclass
@@ -36,7 +36,7 @@ IMAGE_BASENAME[export] = "1"
export PACKAGE_INSTALL ?= "${IMAGE_INSTALL} ${IMAGE_BOOT}"
# We need to recursively follow RDEPENDS and RRECOMMENDS for images
-do_rootfs[recrdeptask] += "do_deploy do_populate_staging"
+do_rootfs[recrdeptask] += "do_deploy do_populate_sysroot"
# Images are generally built explicitly, do not need to be part of world.
EXCLUDE_FROM_WORLD = "1"
@@ -47,7 +47,7 @@ PID = "${@os.getpid()}"
PACKAGE_ARCH = "${MACHINE_ARCH}"
-do_rootfs[depends] += "makedevs-native:do_populate_staging fakeroot-native:do_populate_staging"
+do_rootfs[depends] += "makedevs-native:do_populate_sysroot fakeroot-native:do_populate_sysroot"
python () {
import bb
@@ -55,9 +55,9 @@ python () {
deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or ""
for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split():
for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []):
- deps += " %s:do_populate_staging" % dep
+ deps += " %s:do_populate_sysroot" % dep
for dep in (bb.data.getVar('EXTRA_IMAGEDEPENDS', d, True) or "").split():
- deps += " %s:do_populate_staging" % dep
+ deps += " %s:do_populate_sysroot" % dep
bb.data.setVarFlag('do_rootfs', 'depends', deps, d)
runtime_mapping_rename("PACKAGE_INSTALL", d)
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index 923751d029..2118a27fbd 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -445,7 +445,7 @@ python do_package_qa () {
# The Staging Func, to check all staging
-addtask qa_staging after do_populate_staging before do_build
+addtask qa_staging after do_populate_sysroot before do_build
python do_qa_staging() {
bb.debug(2, "QA checking staging")
@@ -456,6 +456,7 @@ python do_qa_staging() {
# Check broken config.log files
addtask qa_configure after do_configure before do_compile
python do_qa_configure() {
+ configs = []
bb.debug(1, "Checking sanity of the config.log file")
for root, dirs, files in os.walk(bb.data.getVar('WORKDIR', d, True)):
statement = "grep 'CROSS COMPILE Badness:' %s > /dev/null" % \
@@ -464,4 +465,24 @@ python do_qa_configure() {
if os.system(statement) == 0:
bb.fatal("""This autoconf log indicates errors, it looked at host includes.
Rerun configure task after fixing this. The path was '%s'""" % root)
+
+ if "configure.ac" in files:
+ configs.append(os.path.join(root,"configure.ac"))
+ if "configure.in" in files:
+ configs.append(os.path.join(root, "configure.in"))
+
+ if "gettext" not in bb.data.getVar('P', d, True):
+ if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
+ gt = "gettext-native"
+ elif bb.data.inherits_class('cross-canadian', d):
+ gt = "gettext-nativesdk"
+ else:
+ gt = "gettext"
+ deps = bb.utils.explode_deps(bb.data.getVar('DEPENDS', d, True) or "")
+ if gt not in deps:
+ for config in configs:
+ gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
+ if os.system(gnu) == 0:
+ bb.note("""Gettext required but not in DEPENDS for file %s.
+Missing inherit gettext?""" % config)
}
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index f27b24ebcc..989d3e3a21 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -549,6 +549,6 @@ do_deploy() {
}
do_deploy[dirs] = "${S}"
-do_deploy[depends] += "fakeroot-native:do_populate_staging"
+do_deploy[depends] += "fakeroot-native:do_populate_sysroot"
addtask deploy before do_build after do_package
diff --git a/classes/mono.bbclass b/classes/mono.bbclass
index cf8dd15bdf..c3dc87c905 100644
--- a/classes/mono.bbclass
+++ b/classes/mono.bbclass
@@ -152,7 +152,7 @@ do_mono_stage() {
fi
done
}
-addtask mono_stage after do_package before do_populate_staging
+addtask mono_stage after do_package before do_populate_sysroot
def mono_after_parse(d):
import bb
diff --git a/classes/native.bbclass b/classes/native.bbclass
index 6698b61cfc..b53ae1d102 100644
--- a/classes/native.bbclass
+++ b/classes/native.bbclass
@@ -1,3 +1,6 @@
+# We want native packages to be relocatable
+inherit relocatable
+
# Native packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"
@@ -37,6 +40,7 @@ STAGING_BINDIR = "${STAGING_BINDIR_NATIVE}"
STAGING_BINDIR_CROSS = "${STAGING_BINDIR_NATIVE}"
STAGING_DIR_JAVA = "${STAGING_DATADIR_JAVA_NATIVE}"
+DEPENDS_GETTEXT = "gettext-native"
# Don't use site files for native builds
export CONFIG_SITE = ""
diff --git a/classes/package.bbclass b/classes/package.bbclass
index 910d282a01..77a20bf9c5 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -138,7 +138,7 @@ python () {
if bb.data.getVar('PACKAGES', d, True) != '':
deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
- deps += " %s:do_populate_staging" % dep
+ deps += " %s:do_populate_sysroot" % dep
bb.data.setVarFlag('do_package', 'depends', deps, d)
deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split()
diff --git a/classes/package_deb.bbclass b/classes/package_deb.bbclass
index 9fb16740d8..837efd8438 100644
--- a/classes/package_deb.bbclass
+++ b/classes/package_deb.bbclass
@@ -254,8 +254,8 @@ python do_package_deb () {
python () {
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
- deps.append('dpkg-native:do_populate_staging')
- deps.append('fakeroot-native:do_populate_staging')
+ deps.append('dpkg-native:do_populate_sysroot')
+ deps.append('fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
}
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index 031c658762..435d59fb06 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -71,7 +71,7 @@ do_package_update_index_ipk[lockfiles] = "${DEPLOY_DIR_IPK}.lock"
do_package_update_index_ipk[nostamp] = "1"
do_package_update_index_ipk[recrdeptask] += "do_package_write_ipk"
do_package_update_index_ipk[recrdeptask] += "do_package_write_ipk"
-do_package_update_index_ipk[depends] += "ipkg-utils-native:do_populate_staging"
+do_package_update_index_ipk[depends] += "ipkg-utils-native:do_populate_sysroot"
#
# Update the Packages index files in ${DEPLOY_DIR_IPK}
@@ -312,8 +312,8 @@ python do_package_ipk () {
python () {
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
- deps.append('ipkg-utils-native:do_populate_staging')
- deps.append('fakeroot-native:do_populate_staging')
+ deps.append('ipkg-utils-native:do_populate_sysroot')
+ deps.append('fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d)
}
diff --git a/classes/package_rpm.bbclass b/classes/package_rpm.bbclass
index ab09bb24d3..56fa006bb9 100644
--- a/classes/package_rpm.bbclass
+++ b/classes/package_rpm.bbclass
@@ -209,8 +209,8 @@ python () {
not bb.data.inherits_class('native', d) and \
not bb.data.inherits_class('cross', d):
deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split()
- deps.append('rpm-native:do_populate_staging')
- deps.append('fakeroot-native:do_populate_staging')
+ deps.append('rpm-native:do_populate_sysroot')
+ deps.append('fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
}
diff --git a/classes/package_tar.bbclass b/classes/package_tar.bbclass
index b905e170f4..4ba69c77d4 100644
--- a/classes/package_tar.bbclass
+++ b/classes/package_tar.bbclass
@@ -91,8 +91,8 @@ python do_package_tar () {
python () {
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
- deps.append('tar-native:do_populate_staging')
- deps.append('fakeroot-native:do_populate_staging')
+ deps.append('tar-native:do_populate_sysroot')
+ deps.append('fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
}
diff --git a/classes/packaged-staging.bbclass b/classes/packaged-staging.bbclass
index f3648d2f6a..56f9dc0dcc 100644
--- a/classes/packaged-staging.bbclass
+++ b/classes/packaged-staging.bbclass
@@ -10,14 +10,16 @@
#
# bitbake.conf set PSTAGING_ACTIVE = "0", this class sets to "1" if we're active
-#
+#
PSTAGE_PKGVERSION = "${PV}-${PR}"
PSTAGE_PKGARCH = "${BUILD_SYS}"
PSTAGE_EXTRAPATH ?= "/${OELAYOUT_ABI}/${DISTRO_PR}/"
PSTAGE_PKGPATH = "${DISTRO}${PSTAGE_EXTRAPATH}"
PSTAGE_PKGPN = "${@bb.data.expand('staging-${PN}-${MULTIMACH_ARCH}${TARGET_VENDOR}-${TARGET_OS}', d).replace('_', '-')}"
PSTAGE_PKGNAME = "${PSTAGE_PKGPN}_${PSTAGE_PKGVERSION}_${PSTAGE_PKGARCH}.ipk"
-PSTAGE_PKG ?= "${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGPATH}/${PSTAGE_PKGNAME}"
+PSTAGE_PKG = "${PSTAGE_DIR}/${PSTAGE_PKGPATH}/${PSTAGE_PKGNAME}"
+PSTAGE_WORKDIR = "${TMPDIR}/pstage"
+PSTAGE_SCAN_CMD ?= "find ${PSTAGE_TMPDIR_STAGE} \( -name "*.la" -o -name "*-config"\) -type f"
PSTAGE_NATIVEDEPENDS = "\
shasum-native \
@@ -41,10 +43,12 @@ python () {
# These classes encode staging paths into the binary data so can only be
# reused if the path doesn't change/
- if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d):
+ if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d):
path = bb.data.getVar('PSTAGE_PKGPATH', d, 1)
path = path + bb.data.getVar('TMPDIR', d, 1).replace('/', '-')
bb.data.setVar('PSTAGE_PKGPATH', path, d)
+        scan_cmd = "grep -Irl ${STAGING_DIR} ${PSTAGE_TMPDIR_STAGE}"
+ bb.data.setVar('PSTAGE_SCAN_CMD', scan_cmd, d)
# PSTAGE_NATIVEDEPENDS lists the packages we need before we can use packaged
# staging. There will always be some packages we depend on.
@@ -65,7 +69,7 @@ python () {
# as inactive.
if pstage_allowed:
deps = bb.data.getVarFlag('do_setscene', 'depends', d) or ""
- deps += " stagemanager-native:do_populate_staging"
+ deps += " stagemanager-native:do_populate_sysroot"
bb.data.setVarFlag('do_setscene', 'depends', deps, d)
policy = bb.data.getVar("BB_STAMP_POLICY", d, True)
@@ -79,8 +83,7 @@ python () {
bb.data.setVar("PSTAGING_ACTIVE", "0", d)
}
-DEPLOY_DIR_PSTAGE ?= "${DEPLOY_DIR}/pstage"
-PSTAGE_MACHCONFIG = "${DEPLOY_DIR_PSTAGE}/opkg.conf"
+PSTAGE_MACHCONFIG = "${PSTAGE_WORKDIR}/opkg.conf"
PSTAGE_PKGMANAGER = "stage-manager-ipkg"
@@ -99,6 +102,9 @@ def pstage_manualclean(srcname, destvarname, d):
for walkroot, dirs, files in os.walk(src):
bb.debug("rm %s" % walkroot)
for file in files:
+ # Avoid breaking the held lock
+ if (file == "staging.lock"):
+ continue
filepath = os.path.join(walkroot, file).replace(src, dest)
_package_unlink(filepath)
@@ -127,7 +133,7 @@ def pstage_cleanpackage(pkgname, d):
bb.note("Failure removing staging package")
else:
bb.debug(1, "Manually removing any installed files from staging...")
- pstage_manualclean("staging", "STAGING_DIR", d)
+ pstage_manualclean("sysroots", "STAGING_DIR", d)
pstage_manualclean("cross", "CROSS_DIR", d)
pstage_manualclean("deploy", "DEPLOY_DIR", d)
@@ -143,13 +149,15 @@ do_clean_prepend() {
stagepkg = bb.data.expand("${PSTAGE_PKG}", d)
bb.note("Removing staging package %s" % base_path_out(stagepkg, d))
- os.system('rm -rf ' + stagepkg)
+ # Add a wildcard to the end of stagepkg to also get its md5
+ # if it's a fetched package
+ os.system('rm -rf ' + stagepkg + '*')
}
staging_helper () {
# Assemble appropriate opkg.conf
conffile=${PSTAGE_MACHCONFIG}
- mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists
+ mkdir -p ${PSTAGE_WORKDIR}/pstaging_lists
if [ ! -e $conffile ]; then
ipkgarchs="${BUILD_SYS}"
priority=1
@@ -167,7 +175,28 @@ staging_helper () {
fi
}
-PSTAGE_TASKS_COVERED = "fetch unpack munge patch configure qa_configure rig_locales compile sizecheck install deploy package populate_staging package_write_deb package_write_ipk package_write package_stage qa_staging"
+def staging_fetch(stagepkg, d):
+ import bb.fetch
+
+ # only try and fetch if the user has configured a mirror
+ if bb.data.getVar('PSTAGE_MIRROR', d) != "":
+ # Copy the data object and override DL_DIR and SRC_URI
+ pd = d.createCopy()
+ dldir = bb.data.expand("${PSTAGE_DIR}/${PSTAGE_PKGPATH}", pd)
+ mirror = bb.data.expand("${PSTAGE_MIRROR}/${PSTAGE_PKGPATH}/", pd)
+ srcuri = mirror + os.path.basename(stagepkg)
+ bb.data.setVar('DL_DIR', dldir, pd)
+ bb.data.setVar('SRC_URI', srcuri, pd)
+
+ # Try a fetch from the pstage mirror, if it fails just return and
+ # we will build the package
+ try:
+ bb.fetch.init([srcuri], pd)
+ bb.fetch.go(pd, [srcuri])
+ except:
+ return
+
+PSTAGE_TASKS_COVERED = "fetch unpack munge patch configure qa_configure rig_locales compile sizecheck install deploy package populate_sysroot package_write_deb package_write_ipk package_write package_stage qa_staging"
SCENEFUNCS += "packagestage_scenefunc"
@@ -183,6 +212,8 @@ python packagestage_scenefunc () {
pstage_cleanpackage(removepkg, d)
stagepkg = bb.data.expand("${PSTAGE_PKG}", d)
+ if not os.path.exists(stagepkg):
+ staging_fetch(stagepkg, d)
if os.path.exists(stagepkg):
path = bb.data.getVar("PATH", d, 1)
@@ -246,11 +277,14 @@ python packagestage_scenefunc () {
if stageok:
bb.note("Staging package found, using it for %s." % file)
installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1)
+ lf = bb.utils.lockfile(bb.data.expand("${SYSROOT_LOCK}", d))
ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg))
bb.utils.unlockfile(lf)
if ret != 0:
bb.note("Failure installing prestage package")
+ bb.build.exec_func("staging_package_libtoolhack", d)
+
bb.build.make_stamp("do_stage_package_populated", d)
else:
bb.note("Staging package found but invalid for %s" % file)
@@ -277,26 +311,26 @@ python packagedstage_stampfixing_eventhandler() {
_package_unlink(stamp)
}
-populate_staging_preamble () {
+populate_sysroot_preamble () {
if [ "$PSTAGING_ACTIVE" = "1" ]; then
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u || true
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u || true
+ stage-manager -p ${STAGING_DIR} -c ${PSTAGE_WORKDIR}/stamp-cache-staging -u || true
+ stage-manager -p ${CROSS_DIR} -c ${PSTAGE_WORKDIR}/stamp-cache-cross -u || true
fi
}
-populate_staging_postamble () {
+populate_sysroot_postamble () {
if [ "$PSTAGING_ACTIVE" = "1" ]; then
# list the packages currently installed in staging
- # ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list
+ # ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${PSTAGE_WORKDIR}/installed-list
# exitcode == 5 is ok, it means the files change
set +e
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging
+ stage-manager -p ${STAGING_DIR} -c ${PSTAGE_WORKDIR}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/sysroots
exitcode=$?
if [ "$exitcode" != "5" -a "$exitcode" != "0" ]; then
exit $exitcode
fi
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross/${BASE_PACKAGE_ARCH}
+ stage-manager -p ${CROSS_DIR} -c ${PSTAGE_WORKDIR}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross/${BASE_PACKAGE_ARCH}
if [ "$exitcode" != "5" -a "$exitcode" != "0" ]; then
exit $exitcode
fi
@@ -306,27 +340,27 @@ populate_staging_postamble () {
packagedstaging_fastpath () {
if [ "$PSTAGING_ACTIVE" = "1" ]; then
- mkdir -p ${PSTAGE_TMPDIR_STAGE}/staging/
- mkdir -p ${PSTAGE_TMPDIR_STAGE}/cross/
- cp -fpPR ${SYSROOT_DESTDIR}${STAGING_DIR}/* ${PSTAGE_TMPDIR_STAGE}/staging/ || /bin/true
- cp -fpPR ${SYSROOT_DESTDIR}${CROSS_DIR}/* ${PSTAGE_TMPDIR_STAGE}/cross/${BASE_PACKAGE_ARCH}/ || /bin/true
+ mkdir -p ${PSTAGE_TMPDIR_STAGE}/sysroots/
+ mkdir -p ${PSTAGE_TMPDIR_STAGE}/cross/${BASE_PACKAGE_ARCH}/
+ cp -fpPR ${SYSROOT_DESTDIR}/${STAGING_DIR}/* ${PSTAGE_TMPDIR_STAGE}/sysroots/ || /bin/true
+ cp -fpPR ${SYSROOT_DESTDIR}/${CROSS_DIR}/* ${PSTAGE_TMPDIR_STAGE}/cross/${BASE_PACKAGE_ARCH}/ || /bin/true
fi
}
-do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}"
-python populate_staging_prehook() {
- bb.build.exec_func("populate_staging_preamble", d)
+do_populate_sysroot[dirs] =+ "${PSTAGE_DIR}"
+python populate_sysroot_prehook() {
+ bb.build.exec_func("populate_sysroot_preamble", d)
}
-python populate_staging_posthook() {
- bb.build.exec_func("populate_staging_postamble", d)
+python populate_sysroot_posthook() {
+ bb.build.exec_func("populate_sysroot_postamble", d)
}
staging_packager () {
mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL
- mkdir -p ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGPATH}
+ mkdir -p ${PSTAGE_DIR}/${PSTAGE_PKGPATH}
echo "Package: ${PSTAGE_PKGPN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
echo "Version: ${PSTAGE_PKGVERSION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
@@ -337,13 +371,22 @@ staging_packager () {
echo "Architecture: ${PSTAGE_PKGARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
# Protect against empty SRC_URI
- if [ "${SRC_URI}" != "" ] ; then
- echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- else
- echo "Source: OpenEmbedded" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ srcuri="${SRC_URI}"
+ if [ "$srcuri" == "" ]; then
+ srcuri="OpenEmbedded"
fi
+	echo "Source: $srcuri" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+
+ # Deal with libtool not supporting sysroots
+ # Need to remove hardcoded paths and fix these when we install the
+ # staging packages.
+ # Could someone please add sysroot support to libtool!
+ for i in `${PSTAGE_SCAN_CMD}` ; do \
+ sed -i -e s:${STAGING_DIR}:FIXMESTAGINGDIR:g $i
+ echo $i | sed -e 's:${PSTAGE_TMPDIR_STAGE}/::' >> ${PSTAGE_TMPDIR_STAGE}/sysroots/fixmepath
+ done
- ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGPATH}
+ ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${PSTAGE_DIR}/${PSTAGE_PKGPATH}
}
staging_package_installer () {
@@ -365,6 +408,23 @@ staging_package_installer () {
find -type f | grep -v ./CONTROL | sed -e 's/^\.//' > ${TMPDIR}${libdir_native}/opkg/info/${PSTAGE_PKGPN}.list
}
+python staging_package_libtoolhack () {
+ # Deal with libtool not supporting sysroots and add our new
+ # staging location
+ tmpdir = bb.data.getVar('TMPDIR', d, True)
+ staging = bb.data.getVar('STAGING_DIR', d, True)
+ fixmefn = staging + "/fixmepath"
+ try:
+ fixmefd = open(fixmefn,"r")
+ fixmefiles = fixmefd.readlines()
+ fixmefd.close()
+ os.system('rm -f ' + fixmefn)
+ for file in fixmefiles:
+ os.system("sed -i -e s:FIXMESTAGINGDIR:%s:g %s" % (staging, tmpdir + '/' + file))
+ except IOError:
+ pass
+}
+
python do_package_stage () {
if bb.data.getVar("PSTAGING_ACTIVE", d, 1) != "1":
return
@@ -447,9 +507,9 @@ python do_package_stage () {
}
#
-# Note an assumption here is that do_deploy runs before do_package_write/do_populate_staging
+# Note an assumption here is that do_deploy runs before do_package_write/do_populate_sysroot
#
-addtask package_stage after do_package_write do_populate_staging before do_build
+addtask package_stage after do_package_write do_populate_sysroot before do_build
do_package_stage_all () {
:
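A quick illustration of the libtool workaround introduced above (paths and file names here are hypothetical): staging_packager rewrites the absolute staging path to a placeholder in every file matched by PSTAGE_SCAN_CMD (by default *.la and *-config files), and staging_package_libtoolhack rewrites the placeholder back to the local STAGING_DIR when the staging package is installed:

    # packaging time (staging_packager)
    sed -i -e s:/OE/tmp/sysroots:FIXMESTAGINGDIR:g usr/lib/libfoo.la
    # install time (staging_package_libtoolhack), for each file listed in sysroots/fixmepath
    sed -i -e s:FIXMESTAGINGDIR:/OE/tmp/sysroots:g /OE/tmp/sysroots/usr/lib/libfoo.la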
diff --git a/classes/patch.bbclass b/classes/patch.bbclass
index cc3d8dd5da..1af374b39b 100644
--- a/classes/patch.bbclass
+++ b/classes/patch.bbclass
@@ -3,7 +3,7 @@
# Point to an empty file so any user's custom settings don't break things
QUILTRCFILE ?= "${STAGING_BINDIR_NATIVE}/quiltrc"
-PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_staging"
+PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot"
python patch_do_patch() {
import oe.patch
diff --git a/classes/relocatable.bbclass b/classes/relocatable.bbclass
new file mode 100644
index 0000000000..eb5b9e62ed
--- /dev/null
+++ b/classes/relocatable.bbclass
@@ -0,0 +1,93 @@
+SYSROOT_PREPROCESS_FUNCS += "relocatable_binaries_preprocess"
+
+CHRPATH_BIN ?= "chrpath"
+PREPROCESS_RELOCATE_DIRS ?= ""
+
+def process_dir (directory, d):
+ import subprocess as sub
+ import stat
+
+ cmd = bb.data.expand('${CHRPATH_BIN}', d)
+ tmpdir = bb.data.getVar('TMPDIR', d)
+ basedir = bb.data.expand('${base_prefix}', d)
+
+ #bb.debug("Checking %s for binaries to process" % directory)
+ if not os.path.exists(directory):
+ return
+
+ dirs = os.listdir(directory)
+ for file in dirs:
+ fpath = directory + "/" + file
+ if os.path.islink(fpath):
+ # Skip symlinks
+ continue
+
+ if os.path.isdir(fpath):
+ process_dir(fpath, d)
+ else:
+ #bb.note("Testing %s for relocatability" % fpath)
+
+ # We need read and write permissions for chrpath, if we don't have
+ # them then set them temporarily. Take a copy of the files
+ # permissions so that we can restore them afterwards.
+ perms = os.stat(fpath)[stat.ST_MODE]
+ if os.access(fpath, os.W_OK|os.R_OK):
+ perms = None
+ else:
+ # Temporarily make the file writeable so we can chrpath it
+ os.chmod(fpath, perms|stat.S_IRWXU)
+
+ p = sub.Popen([cmd, '-l', fpath],stdout=sub.PIPE,stderr=sub.PIPE)
+ err, out = p.communicate()
+            # If chrpath returned successfully, process stderr for results
+ if p.returncode != 0:
+ continue
+
+ # Throw away everything other than the rpath list
+ curr_rpath = err.partition("RPATH=")[2]
+ #bb.note("Current rpath for %s is %s" % (fpath, curr_rpath.strip()))
+ rpaths = curr_rpath.split(":")
+ new_rpaths = []
+ for rpath in rpaths:
+ # If rpath is already dynamic continue
+ if rpath.find("$ORIGIN") != -1:
+ continue
+ # If the rpath shares a root with base_prefix determine a new dynamic rpath from the
+ # base_prefix shared root
+ if rpath.find(basedir) != -1:
+ depth = fpath.partition(basedir)[2].count('/')
+ libpath = rpath.partition(basedir)[2].strip()
+ # otherwise (i.e. cross packages) determine a shared root based on the TMPDIR
+ # NOTE: This will not work reliably for cross packages, particularly in the case
+ # where your TMPDIR is a short path (i.e. /usr/poky) as chrpath cannot insert an
+ # rpath longer than that which is already set.
+ else:
+ depth = fpath.rpartition(tmpdir)[2].count('/')
+ libpath = rpath.partition(tmpdir)[2].strip()
+
+ base = "$ORIGIN"
+ while depth > 1:
+ base += "/.."
+ depth-=1
+ new_rpaths.append("%s%s" % (base, libpath))
+
+ # if we have modified some rpaths call chrpath to update the binary
+ if len(new_rpaths):
+ args = ":".join(new_rpaths)
+ #bb.note("Setting rpath for %s to %s" %(fpath,args))
+ sub.call([cmd, '-r', args, fpath])
+
+ if perms:
+ os.chmod(fpath, perms)
+
+def rpath_replace (path, d):
+ bindirs = bb.data.expand("${bindir} ${sbindir} ${base_sbindir} ${base_bindir} ${libdir} ${base_libdir} ${PREPROCESS_RELOCATE_DIRS}", d).split()
+
+ for bindir in bindirs:
+ #bb.note ("Processing directory " + bindir)
+ directory = path + "/" + bindir
+ process_dir (directory, d)
+
+python relocatable_binaries_preprocess() {
+ rpath_replace(bb.data.expand('${SYSROOT_DESTDIR}', d), d)
+}
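For context on the new relocatable.bbclass above, a rough illustration of the chrpath calls that process_dir automates (the binary name and paths are hypothetical):

    # list the RPATH currently recorded in the binary (an absolute path under TMPDIR)
    chrpath -l sysroot-destdir/usr/bin/foo
    # rewrite it relative to the binary's own location so the sysroot can be moved
    chrpath -r '$ORIGIN/../lib' sysroot-destdir/usr/bin/foo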
diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass
index dc2b2cb02c..e077127144 100644
--- a/classes/rootfs_deb.bbclass
+++ b/classes/rootfs_deb.bbclass
@@ -4,7 +4,7 @@
ROOTFS_PKGMANAGE = "run-postinsts dpkg"
ROOTFS_PKGMANAGE_BOOTSTRAP = "run-postinsts"
-do_rootfs[depends] += "dpkg-native:do_populate_staging apt-native:do_populate_staging"
+do_rootfs[depends] += "dpkg-native:do_populate_sysroot apt-native:do_populate_sysroot"
do_rootfs[recrdeptask] += "do_package_write_deb"
fakeroot rootfs_deb_do_rootfs () {
diff --git a/classes/rootfs_ipk.bbclass b/classes/rootfs_ipk.bbclass
index 5483c7e077..9fa1d5ec6c 100644
--- a/classes/rootfs_ipk.bbclass
+++ b/classes/rootfs_ipk.bbclass
@@ -5,7 +5,7 @@
# See image.bbclass for a usage of this.
#
-do_rootfs[depends] += "opkg-native:do_populate_staging"
+do_rootfs[depends] += "opkg-native:do_populate_sysroot"
IPKG_TMP_DIR = "${IMAGE_ROOTFS}-tmp"
IPKG_ARGS = "-f ${IPKGCONF_TARGET} -o ${IMAGE_ROOTFS} -t ${IPKG_TMP_DIR} ${@base_conditional("PACKAGE_INSTALL_NO_DEPS", "1", "-nodeps", "", d)}"
diff --git a/classes/rootfs_rpm.bbclass b/classes/rootfs_rpm.bbclass
index e3e306450e..c018a03e2f 100644
--- a/classes/rootfs_rpm.bbclass
+++ b/classes/rootfs_rpm.bbclass
@@ -6,7 +6,7 @@ ROOTFS_PKGMANAGE = "rpm yum"
ROOTFS_PKGMANAGE_BOOTSTRAP = "run-postinsts"
-do_rootfs[depends] += "rpm-native:do_populate_staging yum-native:do_populate_staging createrepo-native:do_populate_staging fakechroot-native:do_populate_staging"
+do_rootfs[depends] += "rpm-native:do_populate_sysroot yum-native:do_populate_sysroot createrepo-native:do_populate_sysroot fakechroot-native:do_populate_sysroot"
do_rootfs[recrdeptask] += "do_package_write_rpm"
YUMCONF = "${IMAGE_ROOTFS}/etc/yum.conf"
@@ -203,7 +203,7 @@ python () {
flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
flags = flags.replace("do_package_write_rpm", "")
flags = flags.replace("do_deploy", "")
- flags = flags.replace("do_populate_staging", "")
+ flags = flags.replace("do_populate_sysroot", "")
bb.data.setVarFlag('do_rootfs', 'recrdeptask', flags, d)
bb.data.setVar('RPM_PREPROCESS_COMMANDS', "rpm_insert_feed_uris", d)
bb.data.setVar('RPM_POSTPROCESS_COMMANDS', '', d)
diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass
index 4051d49557..0a1e8683cf 100644
--- a/classes/sanity.bbclass
+++ b/classes/sanity.bbclass
@@ -83,7 +83,7 @@ def check_sanity(e):
if not check_app_exists('${BUILD_PREFIX}g++', e.data):
missing = missing + "C++ Compiler (${BUILD_PREFIX}g++),"
- required_utilities = "patch help2man diffstat texi2html makeinfo cvs svn bzip2 tar gzip gawk md5sum"
+ required_utilities = "patch help2man diffstat texi2html makeinfo cvs svn bzip2 tar gzip gawk md5sum chrpath"
# If we'll be running qemu, perform some sanity checks
if data.getVar('ENABLE_BINARY_LOCALE_GENERATION', e.data, True):
@@ -141,6 +141,13 @@ def check_sanity(e):
if not abi.isdigit():
f = file(abifile, "w")
f.write(current_abi)
+ elif abi == "3" and current_abi == "4":
+ bb.note("Converting staging from layout version 2 to layout version 3")
+ os.system(bb.data.expand("mv ${TMPDIR}/staging ${TMPDIR}/sysroots", e.data))
+ os.system(bb.data.expand("ln -s sysroots ${TMPDIR}/staging", e.data))
+ os.system(bb.data.expand("cd ${TMPDIR}/stamps; for i in */*do_populate_staging; do new=`echo $i | sed -e 's/do_populate_staging/do_populate_sysroot/'`; mv $i $new; done", e.data))
+ f = file(abifile, "w")
+ f.write(current_abi)
elif (abi != current_abi):
# Code to convert from one ABI to another could go here if possible.
messages = messages + "Error, TMPDIR has changed ABI (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi)
diff --git a/classes/sip.bbclass b/classes/sip.bbclass
index 30c08b1ed9..6bc248aeb3 100644
--- a/classes/sip.bbclass
+++ b/classes/sip.bbclass
@@ -55,7 +55,7 @@ sip_do_generate() {
done
}
-do_generate[deptask] = "do_populate_staging"
+do_generate[deptask] = "do_populate_sysroot"
EXPORT_FUNCTIONS do_generate
diff --git a/classes/sip3.bbclass b/classes/sip3.bbclass
index 7ecc63bf02..c2c4725377 100644
--- a/classes/sip3.bbclass
+++ b/classes/sip3.bbclass
@@ -53,7 +53,7 @@ sip3_do_generate() {
done
}
-do_generate[deptask] = "do_populate_staging"
+do_generate[deptask] = "do_populate_sysroot"
EXPORT_FUNCTIONS do_generate
diff --git a/classes/srctree.bbclass b/classes/srctree.bbclass
index dbf8ad2c3f..7232c26b12 100644
--- a/classes/srctree.bbclass
+++ b/classes/srctree.bbclass
@@ -26,8 +26,8 @@ def merge_tasks(d):
"""
merge_tasks performs two operations:
1) removes do_patch and its deps from the build entirely.
- 2) merges all of the operations that occur prior to do_populate_staging
- into do_populate_staging.
+ 2) merges all of the operations that occur prior to do_populate_sysroot
+ into do_populate_sysroot.
This is necessary, because of recipe variants (normal, native, cross,
sdk). If a bitbake run happens to want to build more than one of
@@ -50,7 +50,7 @@ def merge_tasks(d):
__gather_taskdeps(task, items)
return items
- newtask = "do_populate_staging"
+ newtask = "do_populate_sysroot"
mergedtasks = gather_taskdeps(newtask)
mergedtasks.pop()
deltasks = gather_taskdeps("do_patch")
@@ -86,14 +86,14 @@ def merge_tasks(d):
depends = (d.getVarFlag(task, "depends") or ""
for task in mergedtasks[:-1]
if not task in deltasks)
- d.setVarFlag("do_populate_staging", "depends", " ".join(depends))
+ d.setVarFlag("do_populate_sysroot", "depends", " ".join(depends))
python () {
merge_tasks(d)
}
# Manually run do_install & all of its deps, then do_stage
-python do_populate_staging () {
+python do_populate_sysroot () {
from os.path import exists
from bb.build import exec_task, exec_func
from bb import note
@@ -112,4 +112,4 @@ python do_populate_staging () {
rec_exec_task("do_install", set())
exec_func("do_stage", d)
}
-do_populate_staging[lockfiles] += "${S}/.lock"
+do_populate_sysroot[lockfiles] += "${S}/.lock"
diff --git a/classes/staging.bbclass b/classes/staging.bbclass
index 32038a5076..e80644961c 100644
--- a/classes/staging.bbclass
+++ b/classes/staging.bbclass
@@ -1,9 +1,9 @@
-python populate_staging_prehook () {
- return
+python populate_sysroot_prehook () {
+ return
}
-python populate_staging_posthook () {
- return
+python populate_sysroot_posthook () {
+ return
}
packagedstaging_fastpath () {
@@ -38,6 +38,7 @@ sysroot_stage_dir() {
fi
}
+SYSROOTEXTRALIBDIRSED ?= ""
sysroot_stage_libdir() {
src="$1"
dest="$2"
@@ -52,6 +53,7 @@ sysroot_stage_libdir() {
sed -e 's/^installed=yes$/installed=no/' \
-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
+ ${SYSROOTEXTRALIBDIRSED} \
-i $src/$i
done
sysroot_stage_dir $src $dest
@@ -68,6 +70,7 @@ sysroot_stage_dirs() {
sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
+ sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
if [ "${prefix}/lib" != "${libdir}" ]; then
# python puts its files in here, make sure they are staged as well
autotools_stage_dir $from/${prefix}/lib $to${STAGING_DIR_HOST}${prefix}/lib
@@ -94,6 +97,8 @@ def is_legacy_staging(d):
legacy = True
if stagefunc is None:
legacy = False
+ elif stagefunc.strip() == "use_do_install_for_stage":
+ legacy = False
elif stagefunc.strip() == "autotools_stage_all":
legacy = False
elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
@@ -106,21 +111,22 @@ def is_legacy_staging(d):
legacy = True
return legacy
-do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
+do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
${STAGING_DIR_TARGET}/${includedir} \
${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
${STAGING_INCDIR_NATIVE} \
${STAGING_DATADIR} \
${S} ${B}"
-# Could be compile but populate_staging and do_install shouldn't run at the same time
-addtask populate_staging after do_install before do_build
+# Could be compile but populate_sysroot and do_install shouldn't run at the same time
+addtask populate_sysroot after do_install
SYSROOT_PREPROCESS_FUNCS ?= ""
SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
-python do_populate_staging () {
+
+python do_populate_sysroot () {
#
# if do_stage exists, we're legacy. In that case run the do_stage,
# modify the SYSROOT_DESTDIR variable and then run the staging preprocess
@@ -147,11 +153,11 @@ python do_populate_staging () {
if bb.data.getVarFlags('do_stage', d) is None:
bb.fatal("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
lock = bb.utils.lockfile(lockfile)
- bb.build.exec_func('populate_staging_prehook', d)
+ bb.build.exec_func('populate_sysroot_prehook', d)
bb.build.exec_func('do_stage', d)
for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
bb.build.exec_func(f, d)
- bb.build.exec_func('populate_staging_posthook', d)
+ bb.build.exec_func('populate_sysroot_posthook', d)
bb.utils.unlockfile(lock)
else:
dest = bb.data.getVar('D', d, True)