author     John Lee <john_lee@openmoko.org>                    2008-02-27 07:50:52 +0000
committer  Openmoko anonymizer key <devel@lists.openmoko.org>  2008-02-27 07:50:52 +0000
commit     d6f3635f2e2baa680d878461fc8fa9b68c51162b (patch)
tree       a07627fc6746f21a02a543381681f1cb1320b38d /classes
parent     b14906e8fa695a85b9ddf27838c959be1c8a5fad (diff)
parent     9d22cb8106117d2d923bf7b2c263bf9e31b5305f (diff)
merge of '8dccbf106093aaa7b62b2a96962945185534c923'
and 'c50825ba187c292f591cf43c91216cebffb5e290'
Diffstat (limited to 'classes')
-rw-r--r--  classes/base.bbclass               90
-rw-r--r--  classes/bootimg.bbclass            14
-rw-r--r--  classes/debian.bbclass              8
-rw-r--r--  classes/efl_base.bbclass            2
-rw-r--r--  classes/image.bbclass              14
-rw-r--r--  classes/java.bbclass               60
-rw-r--r--  classes/kernel-arch.bbclass         2
-rw-r--r--  classes/kernel.bbclass             27
-rw-r--r--  classes/native.bbclass              6
-rw-r--r--  classes/nylon-mirrors.bbclass       8
-rw-r--r--  classes/oplinux-mirrors.bbclass    16
-rw-r--r--  classes/package.bbclass           144
-rw-r--r--  classes/package_deb.bbclass        34
-rw-r--r--  classes/package_ipk.bbclass        29
-rw-r--r--  classes/package_rpm.bbclass         5
-rw-r--r--  classes/package_tar.bbclass        10
-rw-r--r--  classes/packaged-staging.bbclass  389
-rw-r--r--  classes/packaged-staging2.bbclass 229
-rw-r--r--  classes/patch.bbclass               2
-rw-r--r--  classes/qpf.bbclass                32
-rw-r--r--  classes/rm_work.bbclass            20
-rw-r--r--  classes/sanity.bbclass             66
-rw-r--r--  classes/sdl.bbclass                 2
-rw-r--r--  classes/seppuku.bbclass             2
-rw-r--r--  classes/tinderclient.bbclass        2
25 files changed, 543 insertions(+), 670 deletions(-)
diff --git a/classes/base.bbclass b/classes/base.bbclass
index d48d2a5726..089cd5acd3 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -85,7 +85,7 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
def base_dep_prepend(d):
- import bb;
+ import bb
#
# Ideally this will check a flag so we will operate properly in
# the case where host == build == target, for now we don't work in
@@ -128,6 +128,14 @@ def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
else:
return falsevalue
+def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
+ import bb
+ result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
+ if result <= 0:
+ return truevalue
+ else:
+ return falsevalue
+
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
import bb
matches = 0
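
The helper added above gives classes a version-aware counterpart to base_less_or_equal, using bb.vercmp so that "2.6.9" sorts below "2.6.17" rather than comparing as strings. A usage sketch (this exact form appears in the kernel.bbclass hunk further down):

    # Empty value on kernels <= 2.6.17, otherwise recommend the fix package
    RRECOMMENDS_kernel-module-hostap-cs += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'
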
@@ -404,7 +412,6 @@ python do_listtasks() {
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
-do_clean[bbdepcmd] = ""
python base_do_clean() {
"""clear the build and temp directories"""
dir = bb.data.expand("${WORKDIR}", d)
@@ -417,27 +424,33 @@ python base_do_clean() {
os.system('rm -f '+ dir)
}
+#Uncomment this for bitbake 1.8.12
+#addtask rebuild after do_${BB_DEFAULT_TASK}
addtask rebuild
do_rebuild[dirs] = "${TOPDIR}"
do_rebuild[nostamp] = "1"
-do_rebuild[bbdepcmd] = ""
python base_do_rebuild() {
"""rebuild a package"""
- bb.build.exec_task('do_clean', d)
- bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
+ from bb import __version__
+ try:
+ from distutils.version import LooseVersion
+ except ImportError:
+	def LooseVersion(v): print "WARNING: base.bbclass can't compare versions without python-distutils"; return 1
+ if (LooseVersion(__version__) < LooseVersion('1.8.11')):
+ bb.build.exec_func('do_clean', d)
+ bb.build.exec_task('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1), d)
}
addtask mrproper
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
-do_mrproper[bbdepcmd] = ""
python base_do_mrproper() {
"""clear downloaded sources, build and temp directories"""
dir = bb.data.expand("${DL_DIR}", d)
if dir == '/': bb.build.FuncFailed("wrong DATADIR")
bb.debug(2, "removing " + dir)
os.system('rm -rf ' + dir)
- bb.build.exec_task('do_clean', d)
+ bb.build.exec_func('do_clean', d)
}
addtask fetch
@@ -658,6 +671,17 @@ python base_eventhandler() {
if pesteruser:
bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
+ #
+ # Handle removing stamps for 'rebuild' task
+ #
+ if name.startswith("StampUpdate"):
+ for (fn, task) in e.targets:
+ #print "%s %s" % (task, fn)
+ if task == "do_rebuild":
+ dir = "%s.*" % e.stampPrefix[fn]
+ bb.note("Removing stamps: " + dir)
+ os.system('rm -f '+ dir)
+
if not data in e.__dict__:
return NotHandled
@@ -672,7 +696,6 @@ python base_eventhandler() {
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
-do_configure[bbdepcmd] = "do_populate_staging"
do_configure[deptask] = "do_populate_staging"
base_do_configure() {
:
@@ -680,7 +703,6 @@ base_do_configure() {
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
-do_compile[bbdepcmd] = "do_populate_staging"
base_do_compile() {
if [ -e Makefile -o -e makefile ]; then
oe_runmake || die "make failed"
@@ -774,8 +796,10 @@ def get_subpkgedata_fn(pkg, d):
import bb, os
archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
archs.reverse()
+ pkgdata = bb.data.expand('${STAGING_DIR}/pkgdata/', d)
+ targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
for arch in archs:
- fn = bb.data.expand('${STAGING_DIR}/pkgdata/' + arch + '${TARGET_VENDOR}-${TARGET_OS}/runtime/%s' % pkg, d)
+ fn = pkgdata + arch + targetdir + pkg
if os.path.exists(fn):
return fn
return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
@@ -811,6 +835,20 @@ python read_subpackage_metadata () {
bb.data.setVar(key, sdata[key], d)
}
+# Make sure MACHINE isn't exported
+# (breaks binutils at least)
+MACHINE[unexport] = "1"
+
+# Make sure TARGET_ARCH isn't exported
+# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
+# in them, undocumented)
+TARGET_ARCH[unexport] = "1"
+
+# Make sure DISTRO isn't exported
+# (breaks sysvinit at least)
+DISTRO[unexport] = "1"
+
+
def base_after_parse(d):
import bb, os, exceptions
@@ -830,8 +868,6 @@ def base_after_parse(d):
if this_machine and not re.match(need_machine, this_machine):
raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
-
-
pn = bb.data.getVar('PN', d, 1)
# OBSOLETE in bitbake 1.7.4
@@ -843,22 +879,6 @@ def base_after_parse(d):
if use_nls != None:
bb.data.setVar('USE_NLS', use_nls, d)
- # Make sure MACHINE isn't exported
- # (breaks binutils at least)
- bb.data.delVarFlag('MACHINE', 'export', d)
- bb.data.setVarFlag('MACHINE', 'unexport', 1, d)
-
- # Make sure TARGET_ARCH isn't exported
- # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
- # in them, undocumented)
- bb.data.delVarFlag('TARGET_ARCH', 'export', d)
- bb.data.setVarFlag('TARGET_ARCH', 'unexport', 1, d)
-
- # Make sure DISTRO isn't exported
- # (breaks sysvinit at least)
- bb.data.delVarFlag('DISTRO', 'export', d)
- bb.data.setVarFlag('DISTRO', 'unexport', 1, d)
-
# Git packages should DEPEND on git-native
srcuri = bb.data.getVar('SRC_URI', d, 1)
if "git://" in srcuri:
@@ -888,7 +908,7 @@ def base_after_parse(d):
if len(paths) == 0:
return
- for s in bb.data.getVar('SRC_URI', d, 1).split():
+ for s in srcuri.split():
if not s.startswith("file://"):
continue
local = bb.data.expand(bb.fetch.localpath(s, d), d)
@@ -899,7 +919,19 @@ def base_after_parse(d):
return
python () {
+ import bb
+ from bb import __version__
base_after_parse(d)
+
+ # Remove this for bitbake 1.8.12
+ try:
+ from distutils.version import LooseVersion
+ except ImportError:
+	def LooseVersion(v): print "WARNING: base.bbclass can't compare versions without python-distutils"; return 1
+ if (LooseVersion(__version__) >= LooseVersion('1.8.11')):
+ deps = bb.data.getVarFlag('do_rebuild', 'deps', d) or []
+ deps.append('do_' + bb.data.getVar('BB_DEFAULT_TASK', d, 1))
+ bb.data.setVarFlag('do_rebuild', 'deps', deps, d)
}
def check_app_exists(app, d):
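
The anonymous python block above gates the new do_rebuild dependency wiring on the running bitbake version; distutils' LooseVersion gives the tolerant, component-wise comparison that a plain string compare would get wrong. A standalone sketch of the property being relied on:

    from distutils.version import LooseVersion

    # String comparison would claim "1.8.11" < "1.8.9"; LooseVersion
    # splits on the dots and compares numerically where it can.
    print LooseVersion("1.8.11") >= LooseVersion("1.8.9")    # True
    print "1.8.11" >= "1.8.9"                                # False
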
diff --git a/classes/bootimg.bbclass b/classes/bootimg.bbclass
index 0c96001096..d58cf1f424 100644
--- a/classes/bootimg.bbclass
+++ b/classes/bootimg.bbclass
@@ -13,9 +13,9 @@
# ${SYSLINUX_OPTS} - additional options to add to the syslinux file ';' delimited
do_bootimg[depends] += "dosfstools-native:do_populate_staging \
- syslinux-native:do_populate_staging \
- mtools-native:do_populate_staging \
- cdrtools-native:do_populate_staging"
+ syslinux-native:do_populate_staging \
+ mtools-native:do_populate_staging \
+ cdrtools-native:do_populate_staging"
PACKAGES = " "
@@ -31,6 +31,8 @@ SYSLINUXCFG = "${HDDDIR}/syslinux.cfg"
SYSLINUXMENU = "${HDDDIR}/menu"
inherit syslinux
+
+IMAGE_POSTPROCESS_COMMAND ?= ""
build_boot_bin() {
install -d ${HDDDIR}
@@ -49,13 +51,15 @@ build_boot_bin() {
BLOCKS=`du -bks ${HDDDIR} | cut -f 1`
SIZE=`expr $BLOCKS + ${BOOTIMG_EXTRA_SPACE}`
+ install -d ${DEPLOY_DIR_IMAGE}
+
mkdosfs -F 12 -n ${BOOTIMG_VOLUME_ID} -d ${HDDDIR} \
-C ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.hddimg $SIZE
syslinux ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.hddimg
chmod 644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.hddimg
- #Create an ISO if we have an INITRD
+ # Create an ISO if we have an INITRD
if [ -n "${INITRD}" ] && [ -s "${INITRD}" ] && [ "${NOISO}" != "1" ] ; then
install -d ${ISODIR}
@@ -78,6 +82,8 @@ build_boot_bin() {
cp ${STAGING_DATADIR_NATIVE}/syslinux/isolinux.bin \
${ISODIR}
+ ${IMAGE_POSTPROCESS_COMMAND}
+
mkisofs -V ${BOOTIMG_VOLUME_ID} \
-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.iso \
-b isolinux/isolinux.bin -c isolinux/boot.cat -r \
diff --git a/classes/debian.bbclass b/classes/debian.bbclass
index dd0789adae..0afe9fcc39 100644
--- a/classes/debian.bbclass
+++ b/classes/debian.bbclass
@@ -3,6 +3,9 @@
# before building the current package to make the packages runtime
# depends are correct
#
+# Custom library package names can be defined setting
+# DEBIANNAME_ + pkgname to the desired name.
+#
# Better expressed as ensure all RDEPENDS package before we package
# This means we can't have circular RDEPENDS/RRECOMMENDS
do_package_write_ipk[rdeptask] = "do_package"
@@ -88,7 +91,10 @@ python debian_package_name_hook () {
for pkg in packages.split():
if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
continue
- if pkg == orig_pkg:
+ debian_pn = bb.data.getVar('DEBIANNAME_' + pkg, d)
+ if debian_pn:
+ newpkg = debian_pn
+ elif pkg == orig_pkg:
newpkg = pkgname
else:
newpkg = pkg.replace(orig_pkg, devname, 1)
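
With the hook change above, a recipe can pin the Debianized name of any of its packages instead of accepting the automatically derived library name. A hypothetical recipe fragment (names illustrative):

    # Ship the "libfoo" package as "libfoo2" no matter what
    # debian_package_name_hook would otherwise compute.
    DEBIANNAME_libfoo = "libfoo2"
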
diff --git a/classes/efl_base.bbclass b/classes/efl_base.bbclass
index 3865441779..023b2edf74 100644
--- a/classes/efl_base.bbclass
+++ b/classes/efl_base.bbclass
@@ -3,7 +3,7 @@ inherit autotools pkgconfig
SECTION = "e/libs"
HOMEPAGE = "http://www.enlightenment.org"
SRCNAME = "${@bb.data.getVar('PN', d, 1).replace('-native', '')}"
-SRC_URI = "http://download.enlightenment.org/snapshots/2007-07-10/${SRCNAME}-${PV}.tar.gz"
+SRC_URI = "http://download.enlightenment.org/snapshots/2008-01-25/${SRCNAME}-${PV}.tar.gz"
S = "${WORKDIR}/${SRCNAME}-${PV}"
do_stage() {
diff --git a/classes/image.bbclass b/classes/image.bbclass
index ca9c9458e1..67a3a4fa10 100644
--- a/classes/image.bbclass
+++ b/classes/image.bbclass
@@ -35,9 +35,9 @@ python () {
}
#
-# Get a list of files containing device tables to create.
+# Get a list of files containing tables of devices to be created.
# * IMAGE_DEVICE_TABLE is the old name to an absolute path to a device table file
-# * IMAGE_DEVICE_TABLES is a new name for a file, or list of files, seached
+# * IMAGE_DEVICE_TABLES is a new name for a file, or list of files, searched
# for in the BBPATH
# If neither are specified then the default name of files/device_table-minimal.txt
# is searched for in the BBPATH (same as the old version.)
@@ -90,6 +90,7 @@ fakeroot do_rootfs () {
${IMAGE_PREPROCESS_COMMAND}
export TOPDIR=${TOPDIR}
+ export DISTRO=${USERDISTRO}
export MACHINE=${MACHINE}
for type in ${IMAGE_FSTYPES}; do
@@ -109,6 +110,14 @@ fakeroot do_rootfs () {
${MACHINE_POSTPROCESS_COMMAND}
}
+do_deploy_to[nostamp] = "1"
+do_deploy_to () {
+ # A standalone task to deploy built image to the location specified
+ # by DEPLOY_TO variable (likely passed via environment).
+ # Assumes ${IMAGE_FSTYPES} is a single value!
+ cp "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${IMAGE_FSTYPES}" ${DEPLOY_TO}
+}
+
insert_feed_uris () {
echo "Building feeds for [${DISTRO}].."
@@ -201,3 +210,4 @@ rootfs_update_timestamp () {
EXPORT_FUNCTIONS zap_root_password create_etc_timestamp remove_init_link do_rootfs make_zimage_symlink_relative set_image_autologin rootfs_update_timestamp
addtask rootfs before do_build after do_install
+addtask deploy_to after do_rootfs
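
Because do_deploy_to is nostamp and takes its destination from the environment, it can be rerun at will once an image exists. A hypothetical invocation (target name and path illustrative; DEPLOY_TO must be visible to bitbake's environment handling):

    DEPLOY_TO=/media/card/rootfs.ext2 bitbake -c deploy_to my-image
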
diff --git a/classes/java.bbclass b/classes/java.bbclass
index 7fa6dc1786..41d52fe425 100644
--- a/classes/java.bbclass
+++ b/classes/java.bbclass
@@ -60,3 +60,63 @@ oe_jarinstall() {
shift
done
}
+
+# Creates a simple wrapper script for your Java program.
+# The script is written to ${PN} by default.
+#
+# Parameters are as follows:
+# [options] <main class> [jar files ...]
+#
+# Options are
+# -o <name> where name is the output file name
+#
+# It can only take jar files from ${datadir_java}!
+oe_java_simple_wrapper() {
+ delimiter=
+ mainclass=
+ classpath=
+ output=${PN}
+
+ while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -o)
+ shift
+ output=$1
+ ;;
+ -*)
+ oefatal "oe_java_simple_wrapper: unknown option: $1"
+ ;;
+ *)
+ if [ $mainclass ]
+ then
+ classpath=$classpath$delimiter${datadir_java}/$1
+ delimiter=":"
+ else
+ mainclass=$1
+ fi
+ ;;
+ esac
+ shift
+ done
+
+ oenote "Creating simple Java wrapper script"
+ oenote "Output File: $output"
+ oenote "Main Class: $mainclass"
+ oenote "Classpath: $classpath"
+
+ echo "#!/bin/sh" > $output
+ echo "# This file is autogenerated by the oe_java_simple_wrapper function of OpenEmbedded" >> $output
+ echo >> $output
+ echo "# You can provide additional VM arguments by setting the VMARGS environment variable." >> $output
+ echo "CLASSPATH_ARG=\"-cp $classpath\"" >> $output
+ echo >> $output
+ echo "MAIN_CLASS=$mainclass" >> $output
+ echo >> $output
+ echo "# Allows overriding the VM by setting the JAVA environment variable." >> $output
+ echo "if [ x\${JAVA} = x ]" >> $output
+ echo "then" >> $output
+ echo " JAVA=java" >> $output
+ echo "fi" >> $output
+ echo >> $output
+ echo "exec \${JAVA} \${VMARGS} \${CLASSPATH_ARG} \${MAIN_CLASS} \${@}" >> $output
+}
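
A usage sketch for the new helper from a recipe's install step (recipe and jar names illustrative); jar arguments are resolved against ${datadir_java}, as noted in the function's comment:

    do_install_append() {
        # Produces ${D}${bindir}/myapp launching org.example.Main with
        # ${datadir_java}/myapp.jar and ${datadir_java}/libfoo.jar on the classpath
        oe_java_simple_wrapper -o ${D}${bindir}/myapp org.example.Main myapp.jar libfoo.jar
    }
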
diff --git a/classes/kernel-arch.bbclass b/classes/kernel-arch.bbclass
index 9208c3507a..5e5d9a94a8 100644
--- a/classes/kernel-arch.bbclass
+++ b/classes/kernel-arch.bbclass
@@ -5,7 +5,7 @@
#
valid_archs = "alpha cris ia64 \
- x86_64,i386 \
+ x86_64 i386 x86 \
m68knommu m68k ppc powerpc ppc64 \
sparc sparc64 \
arm arm26 \
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index 8724ad2cf1..459c553ffe 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -11,6 +11,10 @@ PACKAGES_DYNAMIC += "kernel-image-*"
export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"
KERNEL_IMAGETYPE ?= "zImage"
+# Base filename under which users see built kernel (i.e. deploy name)
+KERNEL_IMAGE_BASE_NAME = "${KERNEL_IMAGETYPE}-${PV}-${PR}-${MACHINE}"
+# Symlink basename pointing to the most recently built kernel for a machine
+KERNEL_IMAGE_SYMLINK_NAME = "${KERNEL_IMAGETYPE}-${MACHINE}"
KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"
@@ -30,6 +34,7 @@ HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"
+# Where built kernel lies in the kernel tree
KERNEL_OUTPUT = "arch/${ARCH}/boot/${KERNEL_IMAGETYPE}"
KERNEL_IMAGEDEST = "boot"
@@ -60,6 +65,22 @@ kernel_do_compile() {
fi
}
+INITRAMFS_SYMLINK_NAME ?= "initramfs-${MACHINE}"
+INITRAMFS_IMAGE_TARGET ?= "initramfs-image"
+
+do_builtin_initramfs() {
+ unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
+ cp "${DEPLOY_DIR_IMAGE}/${INITRAMFS_SYMLINK_NAME}" usr/initramfs_data.cpio.gz
+ oe_runmake ${KERNEL_IMAGETYPE} CC="${KERNEL_CC}" LD="${KERNEL_LD}"
+ install -d ${DEPLOY_DIR_IMAGE}
+ install -m 0644 ${KERNEL_OUTPUT} ${DEPLOY_DIR_IMAGE}/${KERNEL_IMAGE_BASE_NAME}-initramfs.bin
+	# Remove the injected initramfs, in case someone later runs "-c compile -f"
+ rm usr/initramfs_data.cpio.gz
+}
+addtask builtin_initramfs after do_compile
+do_builtin_initramfs[nostamp] = "1"
+do_builtin_initramfs[depends] = "${INITRAMFS_IMAGE_TARGET}:do_rootfs"
+
kernel_do_stage() {
ASMDIR=`readlink include/asm`
@@ -107,6 +128,10 @@ kernel_do_stage() {
if [ -e arch/${ARCH}/Makefile ]; then
install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
install -m 0644 arch/${ARCH}/Makefile* ${STAGING_KERNEL_DIR}/arch/${ARCH}
+ # Otherwise check arch/x86/Makefile for i386 and x86_64 on kernels >= 2.6.24
+ elif [ -e arch/x86/Makefile ]; then
+ install -d ${STAGING_KERNEL_DIR}/arch/x86
+ install -m 0644 arch/x86/Makefile* ${STAGING_KERNEL_DIR}/arch/x86
fi
cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
@@ -180,6 +205,8 @@ FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
FILES_kernel-dev = "/boot/System.map* /boot/config*"
FILES_kernel-vmlinux = "/boot/vmlinux*"
RDEPENDS_kernel = "kernel-base"
+RRECOMMENDS_kernel-module-hostap-cs += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'
+RRECOMMENDS_kernel-module-orinoco-cs += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'
# Allow machines to override this dependency if kernel image files are
# not wanted in images as standard
RDEPENDS_kernel-base ?= "kernel-image"
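
The new do_builtin_initramfs task copies a previously deployed initramfs into the kernel tree as usr/initramfs_data.cpio.gz and relinks the image. A hypothetical configuration selecting the image that provides the archive, plus an on-demand invocation (values and invocation illustrative):

    # In local.conf or a machine config:
    INITRAMFS_IMAGE_TARGET = "my-initramfs-image"
    INITRAMFS_SYMLINK_NAME = "initramfs-${MACHINE}"

    # then: bitbake -c builtin_initramfs virtual/kernel
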
diff --git a/classes/native.bbclass b/classes/native.bbclass
index d51c675909..934303cc0c 100644
--- a/classes/native.bbclass
+++ b/classes/native.bbclass
@@ -49,9 +49,9 @@ export STRIP = "${HOST_PREFIX}strip"
# Path prefixes
-base_prefix = "${exec_prefix}"
-prefix = "${STAGING_DIR}"
-exec_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"
+base_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"
+prefix = "${base_prefix}"
+exec_prefix = "${base_prefix}"
# Base paths
base_bindir = "${base_prefix}/bin"
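
The effect of the prefix reshuffle is that every native path now roots under one per-build-host directory instead of straddling ${STAGING_DIR} and its ${BUILD_ARCH}-${BUILD_OS} subdirectory. With illustrative values the expansions become:

    # Assuming STAGING_DIR = "/oe/tmp/staging" on an i686-linux build host:
    base_prefix = "/oe/tmp/staging/i686-linux"
    prefix      = "/oe/tmp/staging/i686-linux"
    exec_prefix = "/oe/tmp/staging/i686-linux"
    base_bindir = "/oe/tmp/staging/i686-linux/bin"
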
diff --git a/classes/nylon-mirrors.bbclass b/classes/nylon-mirrors.bbclass
index 25a8b87d9b..02fddc01ba 100644
--- a/classes/nylon-mirrors.bbclass
+++ b/classes/nylon-mirrors.bbclass
@@ -1,6 +1,6 @@
MIRRORS_append () {
-ftp://.*/.*/ http://meshcube.org/nylon/stable/sources/
-https?://.*/.*/ http://meshcube.org/nylon/stable/sources/
-ftp://.*/.*/ http://meshcube.org/nylon/unstable/sources/
-https?://.*/.*/ http://meshcube.org/nylon/unstable/sources/
+ftp://.*/.* http://meshcube.org/nylon/stable/sources/
+https?://.*/.* http://meshcube.org/nylon/stable/sources/
+ftp://.*/.* http://meshcube.org/nylon/unstable/sources/
+https?://.*/.* http://meshcube.org/nylon/unstable/sources/
} \ No newline at end of file
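
Each MIRRORS line pairs a pattern matched against the source URI with a mirror base. Dropping the trailing slash lets URIs whose file sits directly under the host root match as well. A rough illustration with Python's re (bitbake's real matching in bb.fetch is more involved; URL illustrative):

    import re

    uri = "ftp://ftp.example.org/foo-1.0.tar.gz"   # no directory component
    print bool(re.match(r"ftp://.*/.*", uri))      # True  - new pattern
    print bool(re.match(r"ftp://.*/.*/", uri))     # False - old pattern needed a second slash
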
diff --git a/classes/oplinux-mirrors.bbclass b/classes/oplinux-mirrors.bbclass
index c76e822ce3..59c199485c 100644
--- a/classes/oplinux-mirrors.bbclass
+++ b/classes/oplinux-mirrors.bbclass
@@ -2,13 +2,13 @@
# Released under the MIT license (see packages/COPYING)
MIRRORS_append () {
-ftp://.*/.*/ http://digital-opsis.com/oplinux/stable/sources/
-https?://.*/.*/ http://digital-opsis.com/oplinux/stable/sources/
-ftp://.*/.*/ http://digital-opsis.com/oplinux/unstable/sources/
-https?://.*/.*/ http://digital-opsis.com/oplinux/unstable/sources/
+ftp://.*/.* http://digital-opsis.com/oplinux/stable/sources/
+https?://.*/.* http://digital-opsis.com/oplinux/stable/sources/
+ftp://.*/.* http://digital-opsis.com/oplinux/unstable/sources/
+https?://.*/.* http://digital-opsis.com/oplinux/unstable/sources/
-ftp://.*/.*/ http://digital-opsis.com/oplinux-uclibc/stable/sources/
-https?://.*/.*/ http://digital-opsis.com/oplinux-uclibc/stable/sources/
-ftp://.*/.*/ http://digital-opsis.com/oplinux-uclibc/unstable/sources/
-https?://.*/.*/ http://digital-opsis.com/oplinux-uclibc/unstable/sources/
+ftp://.*/.* http://digital-opsis.com/oplinux-uclibc/stable/sources/
+https?://.*/.* http://digital-opsis.com/oplinux-uclibc/stable/sources/
+ftp://.*/.* http://digital-opsis.com/oplinux-uclibc/unstable/sources/
+https?://.*/.* http://digital-opsis.com/oplinux-uclibc/unstable/sources/
}
diff --git a/classes/package.bbclass b/classes/package.bbclass
index ec8c3d97e2..cb4417fc7f 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -291,76 +291,6 @@ python package_do_split_locales() {
#bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
-def copyfile(src,dest,newmtime=None,sstat=None):
- """
- Copies a file from src to dest, preserving all permissions and
- attributes; mtime will be preserved even when moving across
- filesystems. Returns true on success and false on failure.
- """
- import os, stat, shutil, commands
-
- #print "copyfile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
- try:
- if not sstat:
- sstat=os.lstat(src)
- except Exception, e:
- print "copyfile: Stating source file failed...", e
- return False
-
- destexists=1
- try:
- dstat=os.lstat(dest)
- except:
- dstat=os.lstat(os.path.dirname(dest))
- destexists=0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists=0
- except Exception, e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target=os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target,dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- return os.lstat(dest)
- except Exception, e:
- print "copyfile: failed to properly create symlink:", dest, "->", target, e
- return False
-
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- try: # For safety copy then move it over.
- shutil.copyfile(src,dest+"#new")
- os.rename(dest+"#new",dest)
- except Exception, e:
- print 'copyfile: copy', src, '->', dest, 'failed.', e
- return False
- else:
- #we don't yet handle special, so we need to fall back to /bin/mv
- a=commands.getstatusoutput("/bin/cp -f "+"'"+src+"' '"+dest+"'")
- if a[0]!=0:
- print "copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a
- return False # failure
- try:
- os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- except Exception, e:
- print "copyfile: Failed to chown/chmod/unlink", dest, e
- return False
-
- if newmtime:
- os.utime(dest,(newmtime,newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime=sstat[stat.ST_MTIME]
- return newmtime
-
python populate_packages () {
import glob, stat, errno, re
@@ -462,7 +392,7 @@ python populate_packages () {
fpath = os.path.join(root,file)
dpath = os.path.dirname(fpath)
bb.mkdirhier(dpath)
- ret = copyfile(file, fpath)
+ ret = bb.copyfile(file, fpath)
if ret is False or ret == 0:
raise bb.build.FuncFailed("File population failed")
del localdata
@@ -578,16 +508,19 @@ python emit_pkgdata() {
os.chdir(root)
g = glob('*')
if g or allow_empty == "1":
- file(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), 'w').close()
+ packagedfile = bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d)
+ file(packagedfile, 'w').close()
}
emit_pkgdata[dirs] = "${PKGDATA_DIR}/runtime"
ldconfig_postinst_fragment() {
if [ x"$D" = "x" ]; then
- ldconfig
+ [ -x /sbin/ldconfig ] && /sbin/ldconfig
fi
}
+SHLIBSDIR = "${STAGING_DIR}/${HOST_SYS}/shlibs"
+
python package_do_shlibs() {
import os, re, os.path
@@ -606,25 +539,14 @@ python package_do_shlibs() {
bb.error("WORKDIR not defined")
return
- staging = bb.data.getVar('STAGING_DIR', d, 1)
- if not staging:
- bb.error("STAGING_DIR not defined")
- return
-
ver = bb.data.getVar('PV', d, 1)
if not ver:
bb.error("PV not defined")
return
- target_sys = bb.data.getVar('TARGET_SYS', d, 1)
- if not target_sys:
- bb.error("TARGET_SYS not defined")
- return
-
pkgdest = bb.data.getVar('PKGDEST', d, 1)
- shlibs_dir = os.path.join(staging, target_sys, "shlibs")
- old_shlibs_dir = os.path.join(staging, "shlibs")
+ shlibs_dir = bb.data.getVar('SHLIBSDIR', d, 1)
bb.mkdirhier(shlibs_dir)
needed = {}
@@ -681,7 +603,7 @@ python package_do_shlibs() {
shlib_provider = {}
list_re = re.compile('^(.*)\.list$')
- for dir in [old_shlibs_dir, shlibs_dir]:
+ for dir in [shlibs_dir]:
if not os.path.exists(dir):
continue
for file in os.listdir(dir):
@@ -751,20 +673,9 @@ python package_do_pkgconfig () {
bb.error("WORKDIR not defined")
return
- staging = bb.data.getVar('STAGING_DIR', d, 1)
- if not staging:
- bb.error("STAGING_DIR not defined")
- return
-
- target_sys = bb.data.getVar('TARGET_SYS', d, 1)
- if not target_sys:
- bb.error("TARGET_SYS not defined")
- return
-
pkgdest = bb.data.getVar('PKGDEST', d, 1)
- shlibs_dir = os.path.join(staging, target_sys, "shlibs")
- old_shlibs_dir = os.path.join(staging, "shlibs")
+ shlibs_dir = bb.data.getVar('SHLIBSDIR', d, 1)
bb.mkdirhier(shlibs_dir)
pc_re = re.compile('(.*)\.pc$')
@@ -814,7 +725,7 @@ python package_do_pkgconfig () {
f.write('%s\n' % p)
f.close()
- for dir in [old_shlibs_dir, shlibs_dir]:
+ for dir in [shlibs_dir]:
if not os.path.exists(dir):
continue
for file in os.listdir(dir):
@@ -882,14 +793,39 @@ python package_depchains() {
postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split()
prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split()
+ def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
+
+ #bb.note('depends for %s is %s' % (base, depends))
+ rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "")
+
+ for depend in depends:
+ if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
+ #bb.note("Skipping %s" % depend)
+ continue
+ if depend.endswith('-dev'):
+ depend = depend.replace('-dev', '')
+ if depend.endswith('-dbg'):
+ depend = depend.replace('-dbg', '')
+ pkgname = getname(depend, suffix)
+ #bb.note("Adding %s for %s" % (pkgname, depend))
+ if not pkgname in rreclist:
+ rreclist.append(pkgname)
+
+ #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
+ bb.data.setVar('RRECOMMENDS_%s' % pkg, ' '.join(rreclist), d)
+
def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
- #bb.note('rdepends for %s is %s' % (base, rdepends))
+ #bb.note('rdepends for %s is %s' % (base, rdepends))
rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "")
for depend in rdepends:
+ if depend.endswith('-dev'):
+ depend = depend.replace('-dev', '')
+ if depend.endswith('-dbg'):
+ depend = depend.replace('-dbg', '')
pkgname = getname(depend, suffix)
- if not pkgname in rreclist and packaged(pkgname, d):
+ if not pkgname in rreclist:
rreclist.append(pkgname)
#bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
@@ -900,6 +836,10 @@ python package_depchains() {
if dep not in list:
list.append(dep)
+ depends = []
+ for dep in explode_deps(bb.data.getVar('DEPENDS', d, 1) or ""):
+ add_dep(depends, dep)
+
rdepends = []
for dep in explode_deps(bb.data.getVar('RDEPENDS', d, 1) or ""):
add_dep(rdepends, dep)
@@ -932,6 +872,8 @@ python package_depchains() {
for suffix in pkgs:
for pkg in pkgs[suffix]:
(base, func) = pkgs[suffix][pkg]
+ if suffix == "-dev":
+ pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
if len(pkgs[suffix]) == 1:
pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
else:
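
The net effect of pkg_adddeprrecs plus the new depends list: a recipe's -dev package now RRECOMMENDS the -dev variants of its build-time DEPENDS, with -native, -cross and virtual/ entries skipped and -dev/-dbg suffixes normalised away first. A worked sketch (names illustrative):

    # Given: DEPENDS = "zlib libpng-dev virtual/libc foo-native"
    # the loop above yields, for the ${PN}-dev package:
    #   RRECOMMENDS_${PN}-dev += "zlib-dev libpng-dev"
    # (virtual/libc and foo-native are filtered out; libpng-dev is
    #  first stripped to libpng, then re-suffixed by getname())
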
diff --git a/classes/package_deb.bbclass b/classes/package_deb.bbclass
index b85ffe254f..1a3622c3bb 100644
--- a/classes/package_deb.bbclass
+++ b/classes/package_deb.bbclass
@@ -54,7 +54,7 @@ python do_package_deb_install () {
# env of the fork+execve'd process
# Set up environment
- apt_config = os.getenv('APT_CONFIG')
+ apt_config_backup = os.getenv('APT_CONFIG')
os.putenv('APT_CONFIG', os.path.join(stagingdir, 'etc', 'apt', 'apt.conf'))
path = os.getenv('PATH')
os.putenv('PATH', '%s:%s' % (stagingbindir, os.getenv('PATH')))
@@ -64,12 +64,12 @@ python do_package_deb_install () {
commands.getstatusoutput('apt-get install -y %s' % pkgfn)
# revert environment
- os.putenv('APT_CONFIG', apt_config)
+ os.putenv('APT_CONFIG', apt_config_backup)
os.putenv('PATH', path)
}
python do_package_deb () {
- import sys, re, fcntl, copy
+ import sys, re, copy
workdir = bb.data.getVar('WORKDIR', d, 1)
if not workdir:
@@ -102,20 +102,12 @@ python do_package_deb () {
bb.debug(1, "No packages; nothing to do")
return
- def lockfile(name):
- lf = open(name, "a+")
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
- return lf
-
- def unlockfile(lf):
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close
-
for pkg in packages.split():
localdata = bb.data.createCopy(d)
- root = "%s/install/%s" % (workdir, pkg)
+ pkgdest = bb.data.getVar('PKGDEST', d, 1)
+ root = "%s/%s" % (pkgdest, pkg)
- lf = lockfile(root + ".lock")
+ lf = bb.utils.lockfile(root + ".lock")
bb.data.setVar('ROOT', '', localdata)
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
@@ -147,7 +139,7 @@ python do_package_deb () {
if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
from bb import note
note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
continue
controldir = os.path.join(root, 'DEBIAN')
@@ -158,6 +150,7 @@ python do_package_deb () {
# import codecs
# ctrlfile = codecs.open("someFile", "w", "utf-8")
except OSError:
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open control file for writing.")
fields = []
@@ -196,6 +189,7 @@ python do_package_deb () {
ctrlfile.write(unicode(c % tuple(pullData(fs, localdata))))
except KeyError:
(type, value, traceback) = sys.exc_info()
+ bb.utils.unlockfile(lf)
ctrlfile.close()
raise bb.build.FuncFailed("Missing field for deb generation: %s" % value)
# more fields
@@ -231,6 +225,7 @@ python do_package_deb () {
try:
scriptfile = file(os.path.join(controldir, script), 'w')
except OSError:
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
scriptfile.write("#!/bin/sh\n")
scriptfile.write(scriptvar)
@@ -242,6 +237,7 @@ python do_package_deb () {
try:
conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
except OSError:
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open conffiles for writing.")
for f in conffiles_str.split():
conffiles.write('%s\n' % f)
@@ -250,6 +246,7 @@ python do_package_deb () {
os.chdir(basedir)
ret = os.system("PATH=\"%s\" fakeroot dpkg-deb -b %s %s" % (bb.data.getVar("PATH", localdata, 1), root, pkgoutdir))
if ret != 0:
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("dpkg-deb execution failed")
for script in ["preinst", "postinst", "prerm", "postrm", "control" ]:
@@ -263,13 +260,16 @@ python do_package_deb () {
except OSError:
pass
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
}
python () {
import bb
if bb.data.getVar('PACKAGES', d, True) != '':
- bb.data.setVarFlag('do_package_write_deb', 'depends', 'dpkg-native:do_populate_staging fakeroot-native:do_populate_staging', d)
+ deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
+ deps.append('dpkg-native:do_populate_staging')
+ deps.append('fakeroot-native:do_populate_staging')
+ bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d)
}
python do_package_write_deb () {
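
The open-coded fcntl helpers are replaced by bb.utils.lockfile/unlockfile throughout, which is why each early-exit path above gains an explicit bb.utils.unlockfile(lf) before raising. The locking discipline, in minimal form (a try/finally sketch; the class itself unlocks explicitly on every path):

    lf = bb.utils.lockfile(root + ".lock")   # blocks until the lock is acquired
    try:
        pass  # write control files, run dpkg-deb, ...
    finally:
        bb.utils.unlockfile(lf)              # released even if packaging fails
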
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index 087bbcbfb4..de7f0fe0c4 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -114,7 +114,7 @@ package_generate_ipkg_conf () {
}
python do_package_ipk () {
- import sys, re, copy, fcntl
+ import sys, re, copy
workdir = bb.data.getVar('WORKDIR', d, 1)
if not workdir:
@@ -147,22 +147,12 @@ python do_package_ipk () {
bb.debug(1, "No packages; nothing to do")
return
- def lockfile(name):
- lf = open(name, "a+")
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
- return lf
-
- def unlockfile(lf):
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close
-
-
for pkg in packages.split():
localdata = bb.data.createCopy(d)
pkgdest = bb.data.getVar('PKGDEST', d, 1)
root = "%s/%s" % (pkgdest, pkg)
- lf = lockfile(root + ".lock")
+ lf = bb.utils.lockfile(root + ".lock")
bb.data.setVar('ROOT', '', localdata)
bb.data.setVar('ROOT_%s' % pkg, root, localdata)
@@ -193,7 +183,7 @@ python do_package_ipk () {
if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
from bb import note
note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
continue
controldir = os.path.join(root, 'CONTROL')
@@ -201,7 +191,7 @@ python do_package_ipk () {
try:
ctrlfile = file(os.path.join(controldir, 'control'), 'w')
except OSError:
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open control file for writing.")
fields = []
@@ -235,7 +225,7 @@ python do_package_ipk () {
except KeyError:
(type, value, traceback) = sys.exc_info()
ctrlfile.close()
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value)
# more fields
@@ -272,7 +262,7 @@ python do_package_ipk () {
try:
scriptfile = file(os.path.join(controldir, script), 'w')
except OSError:
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
scriptfile.write(scriptvar)
scriptfile.close()
@@ -283,7 +273,7 @@ python do_package_ipk () {
try:
conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
except OSError:
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open conffiles for writing.")
for f in conffiles_str.split():
conffiles.write('%s\n' % f)
@@ -293,7 +283,7 @@ python do_package_ipk () {
ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir))
if ret != 0:
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("ipkg-build execution failed")
for script in ["preinst", "postinst", "prerm", "postrm", "control" ]:
@@ -306,7 +296,7 @@ python do_package_ipk () {
os.rmdir(controldir)
except OSError:
pass
- unlockfile(lf)
+ bb.utils.unlockfile(lf)
}
python () {
@@ -323,5 +313,4 @@ python do_package_write_ipk () {
bb.build.exec_func("do_package_ipk", d)
}
do_package_write_ipk[dirs] = "${D}"
-do_package_write_ipk[depends] = "ipkg-utils-native:do_populate_staging"
addtask package_write_ipk before do_package_write after do_package
diff --git a/classes/package_rpm.bbclass b/classes/package_rpm.bbclass
index 7fc5e8ea96..6713f8fcad 100644
--- a/classes/package_rpm.bbclass
+++ b/classes/package_rpm.bbclass
@@ -134,7 +134,10 @@ python do_package_rpm () {
python () {
import bb
if bb.data.getVar('PACKAGES', d, True) != '':
- bb.data.setVarFlag('do_package_write_rpm', 'depends', 'rpm-native:do_populate_staging', d)
+ deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split()
+ deps.append('rpm-native:do_populate_staging')
+ deps.append('fakeroot-native:do_populate_staging')
+ bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d)
}
diff --git a/classes/package_tar.bbclass b/classes/package_tar.bbclass
index cb4c42b261..876cec6cfe 100644
--- a/classes/package_tar.bbclass
+++ b/classes/package_tar.bbclass
@@ -83,15 +83,12 @@ python do_package_tar () {
bb.mkdirhier(pkgoutdir)
bb.build.exec_func('package_tar_fn', localdata)
tarfn = bb.data.getVar('PKGFN', localdata, 1)
-# if os.path.exists(tarfn):
-# del localdata
-# continue
os.chdir(root)
from glob import glob
if not glob('*'):
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
continue
- ret = os.system("tar -czvf %s %s" % (tarfn, '.'))
+ ret = os.system("tar -czf %s %s" % (tarfn, '.'))
if ret != 0:
bb.error("Creation of tar %s failed." % tarfn)
}
@@ -99,7 +96,10 @@ python do_package_tar () {
python () {
import bb
if bb.data.getVar('PACKAGES', d, True) != '':
- bb.data.setVarFlag('do_package_write_tar', 'depends', 'tar-native:do_populate_staging', d)
+ deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
+ deps.append('tar-native:do_populate_staging')
+ deps.append('fakeroot-native:do_populate_staging')
+ bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
}
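
package_deb, package_rpm and package_tar now all use the same read-modify-write idiom on the task's 'depends' varflag, so packaging backends stack their native-tool dependencies instead of clobbering whatever another class set:

    # Before (overwrites anything already on the flag):
    bb.data.setVarFlag('do_package_write_tar', 'depends', 'tar-native:do_populate_staging', d)
    # After (preserves existing entries):
    deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
    deps.append('tar-native:do_populate_staging')
    bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d)
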
diff --git a/classes/packaged-staging.bbclass b/classes/packaged-staging.bbclass
index 4b4ebb58bf..29ce72d65c 100644
--- a/classes/packaged-staging.bbclass
+++ b/classes/packaged-staging.bbclass
@@ -1,228 +1,229 @@
#
-# Populated ${STAGING} using packages
+# Populate builds using prebuilt packages where possible to speed up builds
+# and allow staging to be reconstructed.
#
# To use it, add this line to conf/local.conf:
#
-# INHERIT += "packaged-staging"
-#
-# You also need ipkg-cl and ipkg-make-index installed on your host
-# put ipkg-build from org.openembedded.packaged-staging/contrib/ in your $PATH
-
-# BUGS:
-# * does not distinguish between -native, -cross and other packages
-
-# TODO:
-# * also build a feed for native and cross packages
-# * make package detection a bit smarter (search for compatible archs)
-# * make do_clean clean staging as well
-
-# Summary:
-# This class will have two modes of operation:
-# PSTAGE_MODE = 'repopulate': repopulated staging from scratch for each packages
-# PSTAGE_MODE = 'append': append each package to staging (current behaviour)
-
-inherit package
+# INHERIT = "packaged-staging"
+
+python () {
+ import bb
+ if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('image', d) and not bb.data.inherits_class('cross', d) and not bb.data.inherits_class('sdk', d):
+ deps = bb.data.getVarFlag('do_populate_staging', 'depends', d) or ""
+ deps += " stagemanager-native:do_populate_staging"
+ bb.data.setVarFlag('do_populate_staging', 'depends', deps, d)
+
+ deps = bb.data.getVarFlag('do_prepackaged_stage', 'depends', d) or ""
+ deps += " ipkg-native:do_populate_staging ipkg-utils-native:do_populate_staging"
+ bb.data.setVarFlag('do_prepackaged_stage', 'depends', deps, d)
+ else:
+ bb.data.setVar("PSTAGING_DISABLED", "1", d)
+}
-DEPENDS = "stagemanager-native"
+export PSTAGING_DISABLED = "0"
DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage"
PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}"
-PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o "
-PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o "
-PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg.conf -o "
-PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${PACKAGE_ARCH}.ipk"
-PCROSS_PKGNAME = "cross-${PN}_${PV}-${PR}_${BUILD_ARCH}.ipk"
-
-SPAWNFILE = "${STAGING_DIR}/pkgmaps/${P}-${PR}.spawn"
-SPAWNIPK = "${spawn}"
-
-PSTAGE_TMPDIR_STAGE = "${TMPDIR}/tmp-staging"
-PSTAGE_TMPDIR_CROSS = "${TMPDIR}/tmp-cross"
-
-STAGING_BASEDIR = "${STAGING_LIBDIR}/.."
-
-PACKAGEFUNCS += "do_write_ipk_list"
-
-python do_write_ipk_list () {
- import os, sys
- ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
- stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
- tmpdir = bb.data.getVar('TMPDIR', d, 1)
- p = bb.data.getVar('P', d, 1)
- pr = bb.data.getVar('PR', d, 1)
-
- packages = bb.data.getVar('PACKAGES', d, 1)
- if not packages:
- bb.debug(1, "PACKAGES not defined, nothing to package")
- return
-
- if packages == []:
- bb.debug(1, "No packages; nothing to do")
- return
-
- # Generate ipk.conf if it or the stamp doesnt exist
- listfile = os.path.join(stagingdir,"pkgmaps","%s-%s.spawn" % ( p , pr ))
- os.system('mkdir -p ' + stagingdir + '/pkgmaps')
- if not os.access(listfile, os.R_OK):
- os.system('rm -f ' + listfile)
- f = open(listfile,"w")
- for spawn in packages.split():
- #check if the packagename has changed due to debian shlib renaming
- localdata = bb.data.createCopy(d)
- pkgname = bb.data.getVar('PKG_%s' % spawn, localdata, 1)
- if not pkgname:
- pkgname = spawn
- f.write("%s\n" % pkgname)
- f.close()
-}
+PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_REMOVE_CMD = "ipkg-cl remove -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
+PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${MULTIMACH_ARCH}.ipk"
+PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"
do_clean_append() {
- """clear the build and temp directories"""
+ """
+ Clear the build and temp directories
+ """
+ bb.note("Uninstalling package from staging...")
+ path = bb.data.getVar("PATH", d, 1)
+ removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
+ removepkg = bb.data.expand("staging-${PN}", d)
+ ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
+ if ret != 0:
+ bb.note("Failure removing staging package")
+
stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
- if stagepkg == '//': raise bb.build.FuncFailed("wrong DATADIR")
- bb.note("removing " + stagepkg)
- os.system('rm -rf ' + stagepkg)
+ bb.note("Removing staging package %s" % stagepkg)
+ #os.system('rm -rf ' + stagepkg)
}
-
-do_stage_prepend() {
-
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/staging-stamp-cache -u
-
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/cross-stamp-cache -u
-
- if [ ! -e ${STAGING_BASEDIR} ]; then
- mkdir -p ${STAGING_BASEDIR}
- fi
-
- if [ ! -e ${DEPLOY_DIR_PSTAGE} ]; then
- mkdir -p ${DEPLOY_DIR_PSTAGE}
- fi
-
- if [ -e ${STAGING_BASEDIR}/usr ]; then
- oenote "${STAGING_BASEDIR}/usr already present, leaving it alone"
- else
- oenote "${STAGING_BASEDIR}/usr not present, symlinking it"
- ln -s ${STAGING_BASEDIR}/ ${STAGING_BASEDIR}/usr
- fi
-
+staging_helper () {
#assemble appropriate ipkg.conf
- if [ -e ${DEPLOY_DIR_PSTAGE}/ipkg.conf ]; then
- rm ${DEPLOY_DIR_PSTAGE}/ipkg.conf
+ conffile=${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf
+ mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists
+ if [ ! -e $conffile ]; then
+ ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}"
+ priority=1
+ for arch in $ipkgarchs; do
+ echo "arch $arch $priority" >> $conffile
+ priority=$(expr $priority + 5)
+ done
+ echo "src oe-staging file:${DEPLOY_DIR_PSTAGE}" >> $conffile
+
+ OLD_PWD=`pwd`
+ cd ${DEPLOY_DIR_PSTAGE}
+ ipkg-make-index -p Packages .
+ cd ${OLD_PWD}
+
+ ${PSTAGE_UPDATE_CMD}
fi
+}
- ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}"
- priority=1
- for arch in $ipkgarchs; do
- echo "arch $arch $priority" >> ${DEPLOY_DIR_PSTAGE}/ipkg.conf
- priority=$(expr $priority + 5)
- done
- echo "src oe file:${DEPLOY_DIR_IPK}" >> ${DEPLOY_DIR_PSTAGE}/ipkg.conf
- export OLD_PWD=`pwd`
- cd ${DEPLOY_DIR_IPK} && rm *${BUILD_ARCH}.ipk -f ; ipkg-make-index -p Packages . ; cd ${OLD_PWD}
- ${PSTAGE_UPDATE_CMD} ${STAGING_BASEDIR}
-
- #check for generated packages
- if [ -e ${SPAWNFILE} ]; then
- oenote "List of spawned packages found: ${P}.spawn"
- for spawn in `cat ${SPAWNFILE} | grep -v locale | grep -v dbg | grep -v gconv | grep -v charmap` ; do \
- if [ -e ${DEPLOY_DIR_IPK}/${spawn}_* ]; then
- ${PSTAGE_INSTALL_CMD} ${STAGING_BASEDIR} ${spawn}
- # clean up .la files to avoid having references to the builddirs in the binaries
- for lafile in ${STAGING_LIBDIR}/*.la ; do \
- sed -i s:installed=yes:installed=no:g ${lafile} || true
- done
-
- #fix up linker script to poin to staging
- if [ -e ${STAGING_LIBDIR}/libc.so ]; then
- sed -i s:\ /lib:\ ${STAGING_LIBDIR}:g ${STAGING_LIBDIR}/libc.so
- sed -i s:\ /usr/lib:\ ${STAGING_LIBDIR}:g ${STAGING_LIBDIR}/libc.so
- fi
- else
- oenote "${spawn} not found, probably empty package"
- fi
- done
- exit 0
- else
- oenote "Spawn file not found!"
- fi
+python do_prepackaged_stage () {
+ import os
+
+ if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
+ bb.build.make_stamp("do_prepackaged_stage", d)
+ return
+
+ bb.note("Uninstalling any existing package from staging...")
+ path = bb.data.getVar("PATH", d, 1)
+ removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
+ removepkg = bb.data.expand("staging-${PN}", d)
+ lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+ ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
+ bb.utils.unlockfile(lf)
+ if ret != 0:
+ bb.note("Failure attempting to remove staging package")
+
+ stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
+
+ if os.path.exists(stagepkg):
+ bb.note("Following speedup\n")
+ path = bb.data.getVar("PATH", d, 1)
+ installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1)
+
+ bb.build.exec_func("staging_helper", d)
+
+ bb.debug(1, "Staging stuff already packaged, using that instead")
+ lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+ ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg))
+ bb.utils.unlockfile(lf)
+ if ret != 0:
+ bb.note("Failure installing prestage package")
+
+ bb.build.make_stamp("do_prepackaged_stage", d)
+ bb.build.make_stamp("do_fetch", d)
+ bb.build.make_stamp("do_unpack", d)
+ bb.build.make_stamp("do_munge", d)
+ bb.build.make_stamp("do_patch", d)
+ bb.build.make_stamp("do_configure", d)
+ bb.build.make_stamp("do_qa_configure", d)
+ bb.build.make_stamp("do_rig_locales", d)
+ bb.build.make_stamp("do_compile", d)
+ bb.build.make_stamp("do_install", d)
+ bb.build.make_stamp("do_deploy", d)
+ bb.build.make_stamp("do_package", d)
+ bb.build.make_stamp("do_populate_staging", d)
+ bb.build.make_stamp("do_package_write_deb", d)
+ bb.build.make_stamp("do_package_write_ipk", d)
+ bb.build.make_stamp("do_package_write", d)
+ bb.build.make_stamp("do_package_stage", d)
+ bb.build.make_stamp("do_qa_staging", d)
+
+ else:
+ bb.build.make_stamp("do_prepackaged_stage", d)
+}
+do_prepackaged_stage[cleandirs] = "${PSTAGE_TMPDIR_STAGE}"
+do_prepackaged_stage[selfstamp] = "1"
+addtask prepackaged_stage before do_fetch
- if [ -e ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME} ]; then
- oenote "Cross stuff already packaged, using that instead"
- ${PSTAGE_INSTALL_CMD} ${CROSS_DIR} ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME}
- fi
+populate_staging_preamble () {
+ if [ "$PSTAGING_DISABLED" != "1" ]; then
+ #mkdir -p ${DEPLOY_DIR_PSTAGE}
- if [ -e ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME} ]; then
- oenote "Staging stuff already packaged, using that instead"
- ${PSTAGE_INSTALL_CMD} ${STAGING_DIR} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}
- exit 0
+ stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u
+ stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u
fi
-
-
- mkdir -p ${STAGING_BINDIR}
- mkdir -p ${STAGING_LIBDIR}
- mkdir -p ${STAGING_INCDIR}
- mkdir -p ${STAGING_DATADIR}/aclocal
}
-do_stage_append() {
+populate_staging_postamble () {
+ if [ "$PSTAGING_DISABLED" != "1" ]; then
+ # list the packages currently installed in staging
+ ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list
- mkdir -p ${DEPLOY_DIR_PSTAGE}
+ set +e
+ stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging
+ stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross
+ set -e
+ fi
+}
- # list the packages currently installed in staging
- ${PSTAGE_LIST_CMD} ${STAGING_DIR} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-staging_list
- ${PSTAGE_LIST_CMD} ${CROSS_DIR} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-cross_list
+do_populate_staging[lockfiles] = "${STAGING_DIR}/staging.lock"
+do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}"
+python do_populate_staging_prepend() {
+ bb.build.exec_func("populate_staging_preamble", d)
+}
- set +e
- rm -rf ${PSTAGE_TMPDIR_STAGE}
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/staging-stamp-cache -u -d ${PSTAGE_TMPDIR_STAGE}
- rc=$?
- set -e
+python do_populate_staging_append() {
+ bb.build.exec_func("populate_staging_postamble", d)
+}
- if [ $rc == 5 ]; then
- #make a package for staging
- mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL
+staging_packager () {
- echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Architecture: ${PACKAGE_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL
- ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE}
+ echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Architecture: ${MULTIMACH_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
+ echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- ${PSTAGE_INSTALL_CMD} ${STAGING_DIR} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}
- fi
+ ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE}
+ ${PSTAGE_INSTALL_CMD} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}
+}
- set +e
- rm -rf ${PSTAGE_TMPDIR_CROSS}
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/cross-stamp-cache -u -d ${PSTAGE_TMPDIR_CROSS}
- rc=$?
- set -e
-
- if [ $rc == 5 ]; then
-
- #make a package for cross
- mkdir -p ${PSTAGE_TMPDIR_CROSS}/CONTROL
-
- echo "Package: cross-${PN}" > ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Priority: Optional" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Architecture: ${BUILD_ARCH}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
- echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_CROSS}/CONTROL/control
-
- ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_CROSS} ${DEPLOY_DIR_PSTAGE}
-
- ${PSTAGE_INSTALL_CMD} ${CROSS_DIR} ${DEPLOY_DIR_PSTAGE}/${PCROSS_PKGNAME}
- fi
+python do_package_stage () {
+ if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
+ return
+
+ bb.build.exec_func("read_subpackage_metadata", d)
+ packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
+ if len(packages) > 0:
+ stagepath = bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1)
+ if bb.data.inherits_class('package_ipk', d):
+ ipkpath = os.path.join(stagepath, "deploy", "ipk")
+ bb.mkdirhier(ipkpath)
+ if bb.data.inherits_class('package_deb', d):
+ debpath = os.path.join(stagepath, "deploy", "deb")
+ bb.mkdirhier(debpath)
+
+ for pkg in packages:
+ pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
+ if not pkgname:
+ pkgname = pkg
+ arch = bb.data.getVar('PACKAGE_ARCH_%s' % pkg, d, 1)
+ if not arch:
+ arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
+ if not packaged(pkg, d):
+ continue
+ if bb.data.inherits_class('package_ipk', d):
+ srcname = bb.data.expand(pkgname + "_${PV}-${PR}_" + arch + ".ipk", d)
+ srcfile = bb.data.expand("${DEPLOY_DIR_IPK}/" + arch + "/" + srcname, d)
+ if not os.path.exists(srcfile):
+ bb.fatal("Package %s does not exist yet it should" % srcfile)
+ bb.copyfile(srcfile, ipkpath + "/" + srcname)
+ if bb.data.inherits_class('package_deb', d):
+ if arch == 'all':
+ srcname = bb.data.expand(pkgname + "_${PV}-${PR}_all.deb", d)
+ else:
+ srcname = bb.data.expand(pkgname + "_${PV}-${PR}_${DPKG_ARCH}.deb", d)
+ srcfile = bb.data.expand("${DEPLOY_DIR_DEB}/" + arch + "/" + srcname, d)
+ if not os.path.exists(srcfile):
+ bb.fatal("Package %s does not exist yet it should" % srcfile)
+ bb.copyfile(srcfile, debpath + "/" + srcname)
+ bb.build.exec_func("staging_helper", d)
+ lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
+ bb.build.exec_func("staging_packager", d)
+ bb.utils.unlockfile(lf)
}
+addtask package_stage after do_package_write_ipk do_package_write_deb do_package_write do_populate_staging before do_build
+
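
staging_helper assembles one ipkg configuration per machine, priorities rising in steps of five through BUILD_ARCH, all/any/noarch, the target arches and finally MACHINE. For a hypothetical i686 build host targeting an armv4t machine, the generated ${DEPLOY_DIR_PSTAGE}/ipkg-<machine>.conf would come out roughly as (arch list illustrative and deduplicated for brevity):

    arch i686 1
    arch all 6
    arch any 11
    arch noarch 16
    arch arm 21
    arch armv4t 26
    arch fic-gta01 31
    src oe-staging file:${DEPLOY_DIR_PSTAGE}
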
diff --git a/classes/packaged-staging2.bbclass b/classes/packaged-staging2.bbclass
deleted file mode 100644
index 29ce72d65c..0000000000
--- a/classes/packaged-staging2.bbclass
+++ /dev/null
@@ -1,229 +0,0 @@
-#
-# Populate builds using prebuilt packages where possible to speed up builds
-# and allow staging to be reconstructed.
-#
-# To use it add that line to conf/local.conf:
-#
-# INHERIT = "packaged-staging"
-
-python () {
- import bb
- if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('image', d) and not bb.data.inherits_class('cross', d) and not bb.data.inherits_class('sdk', d):
- deps = bb.data.getVarFlag('do_populate_staging', 'depends', d) or ""
- deps += " stagemanager-native:do_populate_staging"
- bb.data.setVarFlag('do_populate_staging', 'depends', deps, d)
-
- deps = bb.data.getVarFlag('do_prepackaged_stage', 'depends', d) or ""
- deps += " ipkg-native:do_populate_staging ipkg-utils-native:do_populate_staging"
- bb.data.setVarFlag('do_prepackaged_stage', 'depends', deps, d)
- else:
- bb.data.setVar("PSTAGING_DISABLED", "1", d)
-}
-
-export PSTAGING_DISABLED = "0"
-
-DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage"
-
-PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}"
-PSTAGE_INSTALL_CMD = "ipkg-cl install -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
-PSTAGE_UPDATE_CMD = "ipkg-cl update -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
-PSTAGE_REMOVE_CMD = "ipkg-cl remove -force-depends -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
-PSTAGE_LIST_CMD = "ipkg-cl list_installed -f ${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf -o ${TMPDIR}"
-PSTAGE_PKGNAME = "staging-${PN}_${PV}-${PR}_${MULTIMACH_ARCH}.ipk"
-
-PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"
-
-do_clean_append() {
- """
- Clear the build and temp directories
- """
- bb.note("Uninstalling package from staging...")
- path = bb.data.getVar("PATH", d, 1)
- removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
- removepkg = bb.data.expand("staging-${PN}", d)
- ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
- if ret != 0:
- bb.note("Failure removing staging package")
-
- stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
- bb.note("Removing staging package %s" % stagepkg)
- #os.system('rm -rf ' + stagepkg)
-}
-
-staging_helper () {
- #assemble appropriate ipkg.conf
- conffile=${DEPLOY_DIR_PSTAGE}/ipkg-${MACHINE}.conf
- mkdir -p ${DEPLOY_DIR_PSTAGE}/pstaging_lists
- if [ ! -e $conffile ]; then
- ipkgarchs="${BUILD_ARCH} all any noarch ${TARGET_ARCH} ${PACKAGE_ARCHS} ${PACKAGE_EXTRA_ARCHS} ${MACHINE}"
- priority=1
- for arch in $ipkgarchs; do
- echo "arch $arch $priority" >> $conffile
- priority=$(expr $priority + 5)
- done
- echo "src oe-staging file:${DEPLOY_DIR_PSTAGE}" >> $conffile
-
- OLD_PWD=`pwd`
- cd ${DEPLOY_DIR_PSTAGE}
- ipkg-make-index -p Packages .
- cd ${OLD_PWD}
-
- ${PSTAGE_UPDATE_CMD}
- fi
-}
-
-python do_prepackaged_stage () {
- import os
-
- if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
- bb.build.make_stamp("do_prepackaged_stage", d)
- return
-
- bb.note("Uninstalling any existing package from staging...")
- path = bb.data.getVar("PATH", d, 1)
- removecmd = bb.data.getVar("PSTAGE_REMOVE_CMD", d, 1)
- removepkg = bb.data.expand("staging-${PN}", d)
- lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
- ret = os.system("PATH=\"%s\" %s %s" % (path, removecmd, removepkg))
- bb.utils.unlockfile(lf)
- if ret != 0:
- bb.note("Failure attempting to remove staging package")
-
- stagepkg = bb.data.expand("${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}", d)
-
- if os.path.exists(stagepkg):
- bb.note("Following speedup\n")
- path = bb.data.getVar("PATH", d, 1)
- installcmd = bb.data.getVar("PSTAGE_INSTALL_CMD", d, 1)
-
- bb.build.exec_func("staging_helper", d)
-
- bb.debug(1, "Staging stuff already packaged, using that instead")
- lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
- ret = os.system("PATH=\"%s\" %s %s" % (path, installcmd, stagepkg))
- bb.utils.unlockfile(lf)
- if ret != 0:
- bb.note("Failure installing prestage package")
-
- bb.build.make_stamp("do_prepackaged_stage", d)
- bb.build.make_stamp("do_fetch", d)
- bb.build.make_stamp("do_unpack", d)
- bb.build.make_stamp("do_munge", d)
- bb.build.make_stamp("do_patch", d)
- bb.build.make_stamp("do_configure", d)
- bb.build.make_stamp("do_qa_configure", d)
- bb.build.make_stamp("do_rig_locales", d)
- bb.build.make_stamp("do_compile", d)
- bb.build.make_stamp("do_install", d)
- bb.build.make_stamp("do_deploy", d)
- bb.build.make_stamp("do_package", d)
- bb.build.make_stamp("do_populate_staging", d)
- bb.build.make_stamp("do_package_write_deb", d)
- bb.build.make_stamp("do_package_write_ipk", d)
- bb.build.make_stamp("do_package_write", d)
- bb.build.make_stamp("do_package_stage", d)
- bb.build.make_stamp("do_qa_staging", d)
-
- else:
- bb.build.make_stamp("do_prepackaged_stage", d)
-}
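
do_prepackaged_stage above is the heart of the class: any stale staging
package is uninstalled first, and if a matching staging ipk already exists in
DEPLOY_DIR_PSTAGE it is installed under the staging lock and stamps are
written for every task whose output the package already provides, so bitbake
skips straight past them. A condensed sketch of that decision (helper names
and the task list are illustrative; note the sketch falls back to a normal
build when the install fails, where the class above only logs the failure
and stamps anyway):

import os

def try_prepackaged_stage(stagepkg, install_cmd, make_stamp):
    """Return True when a cached staging package made a rebuild unnecessary."""
    if not os.path.exists(stagepkg):
        return False                    # cache miss: run the normal build
    if os.system("%s %s" % (install_cmd, stagepkg)) != 0:
        return False                    # broken package: build from scratch
    # stamp every task whose results the package already contains
    for task in ("do_fetch", "do_unpack", "do_patch", "do_configure",
                 "do_compile", "do_install", "do_populate_staging"):
        make_stamp(task)
    return True
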
-do_prepackaged_stage[cleandirs] = "${PSTAGE_TMPDIR_STAGE}"
-do_prepackaged_stage[selfstamp] = "1"
-addtask prepackaged_stage before do_fetch
-
-populate_staging_preamble () {
- if [ "$PSTAGING_DISABLED" != "1" ]; then
- #mkdir -p ${DEPLOY_DIR_PSTAGE}
-
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u
- fi
-}
-
-populate_staging_postamble () {
- if [ "$PSTAGING_DISABLED" != "1" ]; then
- # list the packages currently installed in staging
- ${PSTAGE_LIST_CMD} | awk '{print $1}' > ${DEPLOY_DIR_PSTAGE}/installed-list
-
- set +e
- stage-manager -p ${STAGING_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-staging -u -d ${PSTAGE_TMPDIR_STAGE}/staging
- stage-manager -p ${CROSS_DIR} -c ${DEPLOY_DIR_PSTAGE}/stamp-cache-cross -u -d ${PSTAGE_TMPDIR_STAGE}/cross
- set -e
- fi
-}
-
-do_populate_staging[lockfiles] = "${STAGING_DIR}/staging.lock"
-do_populate_staging[dirs] =+ "${DEPLOY_DIR_PSTAGE}"
-python do_populate_staging_prepend() {
- bb.build.exec_func("populate_staging_preamble", d)
-}
-
-python do_populate_staging_append() {
- bb.build.exec_func("populate_staging_postamble", d)
-}
-
-
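
The preamble/postamble pair brackets do_populate_staging with stage-manager
runs: the preamble refreshes the mtime caches for STAGING_DIR and CROSS_DIR,
and the postamble runs stage-manager again with -d so that only files this
recipe added or changed get copied into the staging-package work area.
Conceptually it is a snapshot-and-diff; a from-scratch sketch of that idea
(not stage-manager's actual implementation, which persists its cache in the
stamp-cache files between runs):

import os, shutil

def snapshot(root):
    # map file path -> mtime for everything under root
    state = {}
    for dirpath, dirnames, filenames in os.walk(root):
        for fn in filenames:
            path = os.path.join(dirpath, fn)
            state[path] = os.stat(path).st_mtime
    return state

def copy_updates(root, before, dest):
    # copy anything new or modified since the 'before' snapshot into dest
    for path, mtime in snapshot(root).items():
        if before.get(path) != mtime:
            target = os.path.join(dest, path[len(root) + 1:])
            targetdir = os.path.dirname(target)
            if not os.path.isdir(targetdir):
                os.makedirs(targetdir)
            shutil.copy2(path, target)
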
-staging_packager () {
-
- mkdir -p ${PSTAGE_TMPDIR_STAGE}/CONTROL
-
- echo "Package: staging-${PN}" > ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Version: ${PV}-${PR}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Description: ${DESCRIPTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Section: ${SECTION}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Priority: Optional" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Maintainer: ${MAINTAINER}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Architecture: ${MULTIMACH_ARCH}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
- echo "Source: ${SRC_URI}" >> ${PSTAGE_TMPDIR_STAGE}/CONTROL/control
-
- ${PSTAGE_BUILD_CMD} ${PSTAGE_TMPDIR_STAGE} ${DEPLOY_DIR_PSTAGE}
- ${PSTAGE_INSTALL_CMD} ${DEPLOY_DIR_PSTAGE}/${PSTAGE_PKGNAME}
-}
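
staging_packager is little more than a control-file writer followed by the
ipkg build and install commands; the metadata ipkg needs is a short list of
"Name: value" fields. The same emission step in Python, with placeholder
values:

def write_control(path, fields):
    # fields is an ordered list of (name, value) pairs, matching the
    # sequence of echo >> lines in staging_packager above
    f = open(path, "w")
    for name, value in fields:
        f.write("%s: %s\n" % (name, value))
    f.close()

write_control("staging-pkg/CONTROL/control", [
    ("Package", "staging-zlib"),            # placeholder values
    ("Version", "1.2.3-r0"),
    ("Architecture", "all"),
    ("Maintainer", "nobody@example.com"),
])
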
-
-python do_package_stage () {
- if bb.data.getVar("PSTAGING_DISABLED", d, 1) == "1":
- return
-
- bb.build.exec_func("read_subpackage_metadata", d)
- packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
- if len(packages) > 0:
- stagepath = bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1)
- if bb.data.inherits_class('package_ipk', d):
- ipkpath = os.path.join(stagepath, "deploy", "ipk")
- bb.mkdirhier(ipkpath)
- if bb.data.inherits_class('package_deb', d):
- debpath = os.path.join(stagepath, "deploy", "deb")
- bb.mkdirhier(debpath)
-
- for pkg in packages:
- pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
- if not pkgname:
- pkgname = pkg
- arch = bb.data.getVar('PACKAGE_ARCH_%s' % pkg, d, 1)
- if not arch:
- arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
- if not packaged(pkg, d):
- continue
- if bb.data.inherits_class('package_ipk', d):
- srcname = bb.data.expand(pkgname + "_${PV}-${PR}_" + arch + ".ipk", d)
- srcfile = bb.data.expand("${DEPLOY_DIR_IPK}/" + arch + "/" + srcname, d)
- if not os.path.exists(srcfile):
- bb.fatal("Package %s does not exist yet it should" % srcfile)
- bb.copyfile(srcfile, ipkpath + "/" + srcname)
- if bb.data.inherits_class('package_deb', d):
- if arch == 'all':
- srcname = bb.data.expand(pkgname + "_${PV}-${PR}_all.deb", d)
- else:
- srcname = bb.data.expand(pkgname + "_${PV}-${PR}_${DPKG_ARCH}.deb", d)
- srcfile = bb.data.expand("${DEPLOY_DIR_DEB}/" + arch + "/" + srcname, d)
- if not os.path.exists(srcfile):
- bb.fatal("Package %s does not exist yet it should" % srcfile)
- bb.copyfile(srcfile, debpath + "/" + srcname)
- bb.build.exec_func("staging_helper", d)
- lf = bb.utils.lockfile(bb.data.expand("${STAGING_DIR}/staging.lock", d))
- bb.build.exec_func("staging_packager", d)
- bb.utils.unlockfile(lf)
-}
-
-addtask package_stage after do_package_write_ipk do_package_write_deb do_package_write do_populate_staging before do_build
-
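
do_package_stage rounds the staging package out by copying every binary
package the recipe produced into the staging tree, so installing the staging
ipk later also restocks the deploy directory. A condensed sketch of the ipk
branch of that loop (the deb branch and the packaged() skip are omitted;
layout strings are stand-ins):

import os, shutil

def stage_ipk_packages(packages, deploy_dir, stage_dir, version):
    ipkpath = os.path.join(stage_dir, "deploy", "ipk")
    if not os.path.isdir(ipkpath):
        os.makedirs(ipkpath)
    for pkgname, arch in packages:          # e.g. [("libz1", "armv5te")]
        srcname = "%s_%s_%s.ipk" % (pkgname, version, arch)
        srcfile = os.path.join(deploy_dir, arch, srcname)
        if not os.path.exists(srcfile):
            raise RuntimeError("Package %s does not exist but it should" % srcfile)
        shutil.copyfile(srcfile, os.path.join(ipkpath, srcname))
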
diff --git a/classes/patch.bbclass b/classes/patch.bbclass
index 0cc202820f..6f83d9c88b 100644
--- a/classes/patch.bbclass
+++ b/classes/patch.bbclass
@@ -526,7 +526,7 @@ python patch_do_patch() {
bb.note("Patch '%s' applies to earlier revisions" % pname)
continue
- bb.note("Applying patch '%s'" % pname)
+ bb.note("Applying patch '%s' (%s)" % (pname, unpacked))
try:
patchset.Import({"file":unpacked, "remote":url, "strippath": pnum}, True)
except:
diff --git a/classes/qpf.bbclass b/classes/qpf.bbclass
deleted file mode 100644
index 67761bd4be..0000000000
--- a/classes/qpf.bbclass
+++ /dev/null
@@ -1,32 +0,0 @@
-PACKAGE_ARCH = "all"
-
-do_configure() {
- :
-}
-
-do_compile() {
- :
-}
-
-pkg_postinst_fonts() {
-. /etc/profile
-${sbindir}/update-qtfontdir
-}
-
-pkg_postrm_fonts() {
-. /etc/profile
-${sbindir}/update-qtfontdir -f
-}
-
-python populate_packages_prepend() {
- postinst = bb.data.getVar('pkg_postinst_fonts', d, 1)
- postrm = bb.data.getVar('pkg_postrm_fonts', d, 1)
- fontdir = bb.data.getVar('palmtopdir', d, 1) + '/lib/fonts'
- pkgregex = "^([a-z-]*_[0-9]*).*.qpf$"
- pkgpattern = bb.data.getVar('QPF_PKGPATTERN', d, 1) or 'qpf-%s'
- pkgdescription = bb.data.getVar('QPF_DESCRIPTION', d, 1) or 'QPF font %s'
-
- do_split_packages(d, root=fontdir, file_regex=pkgregex, output_pattern=pkgpattern,
- description=pkgdescription, postinst=postinst, postrm=postrm, recursive=True, hook=None,
- extra_depends='qpf-font-common')
-}
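
The deleted class delegated the real work to do_split_packages: every .qpf
file under palmtopdir/lib/fonts whose name matches the regex becomes its own
qpf-<family>_<size> package, with the update-qtfontdir hooks attached. A
quick demonstration of what the regex extracts (filenames invented; the
sketch escapes the final dot, which the original pattern left bare so it
matched any character):

import re

pkgregex = re.compile(r"^([a-z-]*_[0-9]*).*\.qpf$")
pkgpattern = "qpf-%s"

for fn in ("helvetica_120_50_t10.qpf", "fixed_70.qpf", "README"):
    m = pkgregex.match(fn)
    if m:
        print(pkgpattern % m.group(1))   # qpf-helvetica_120, qpf-fixed_70
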
diff --git a/classes/rm_work.bbclass b/classes/rm_work.bbclass
index 4efd39c557..97de5e2774 100644
--- a/classes/rm_work.bbclass
+++ b/classes/rm_work.bbclass
@@ -25,3 +25,23 @@ do_rm_work_all () {
}
do_rm_work_all[recrdeptask] = "do_rm_work"
addtask rm_work_all after do_rm_work
+
+
+addhandler rmwork_stampfixing_eventhandler
+python rmwork_stampfixing_eventhandler() {
+ from bb.event import getName
+ import os
+
+ if getName(e) == "StampUpdate":
+ for (fn, task) in e.targets:
+ if task == 'rm_work_all':
+ continue
+ stamp = "%s.do_rm_work" % e.stampPrefix[fn]
+ if os.path.exists(stamp):
+ dir = "%s.*" % e.stampPrefix[fn]
+ bb.note("Removing stamps: " + dir)
+ os.system('rm -f '+ dir)
+
+ return NotHandled
+}
+
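
The new handler watches StampUpdate events: once a recipe has been through
do_rm_work its WORKDIR is gone, so any surviving task stamps would let
bitbake skip tasks whose output no longer exists. Removing the whole stamp
family forces a clean re-run. The test-and-clear step, sketched standalone
with glob in place of the shell rm (stamp_prefix stands in for
e.stampPrefix[fn]):

import glob, os

def clear_stale_stamps(stamp_prefix):
    # a do_rm_work stamp means this recipe's WORKDIR was already deleted
    if os.path.exists("%s.do_rm_work" % stamp_prefix):
        for stamp in glob.glob("%s.*" % stamp_prefix):
            os.remove(stamp)
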
diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass
index 9994febf0d..44c74a0354 100644
--- a/classes/sanity.bbclass
+++ b/classes/sanity.bbclass
@@ -31,7 +31,7 @@ def check_sanity(e):
from distutils.version import LooseVersion
except ImportError:
def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
- import os
+ import os, commands
# Check the bitbake version meets minimum requirements
minversion = data.getVar('BB_MIN_VERSION', e.data , True)
@@ -64,8 +64,8 @@ def check_sanity(e):
# Check that the MACHINE is valid, if it is set
if data.getVar('MACHINE', e.data, True):
- if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
- messages = messages + 'Please set a valid MACHINE in your local.conf\n'
+ if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
+ messages = messages + 'Please set a valid MACHINE in your local.conf\n'
# Check that the DISTRO is valid
# need to take into account DISTRO renaming DISTRO
@@ -85,6 +85,19 @@ def check_sanity(e):
required_utilities = "patch help2man diffstat texi2html makeinfo cvs svn bzip2 tar gzip gawk md5sum"
+ if data.getVar('TARGET_ARCH', e.data, True) == "arm":
+ # qemu-native needs gcc 3.x
+ if "qemu-native" not in assume_provided and "gcc3-native" in assume_provided:
+ gcc_version = commands.getoutput("${BUILD_PREFIX}gcc --version | head -n 1 | cut -f 3 -d ' '")
+
+ if not check_gcc3(e.data) and gcc_version[0] != '3':
+	        messages = messages + "gcc3-native was in ASSUME_PROVIDED but the gcc-3.x binary can't be found in PATH\n"
+ missing = missing + "gcc-3.x (needed for qemu-native),"
+
+ if "qemu-native" in assume_provided:
+ if not check_app_exists("qemu-arm", e.data):
+	        messages = messages + "qemu-native was in ASSUME_PROVIDED but the QEMU binaries (qemu-arm) can't be found in PATH\n"
+
for util in required_utilities.split():
if not check_app_exists( util, e.data ):
missing = missing + "%s," % util
@@ -100,7 +113,40 @@ def check_sanity(e):
oes_bb_conf = data.getVar( 'OES_BITBAKE_CONF', e.data, True )
if not oes_bb_conf:
- messages = messages + 'You do not include OpenEmbeddeds version of conf/bitbake.conf\n'
+		messages = messages + 'You do not include the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n'
+
+ #
+ # Check that TMPDIR hasn't changed location since the last time we were run
+ #
+ tmpdir = data.getVar('TMPDIR', e.data, True)
+ checkfile = os.path.join(tmpdir, "saved_tmpdir")
+ if os.path.exists(checkfile):
+        f = file(checkfile, "r")
+        saved_tmpdir = f.read().strip()
+        if (saved_tmpdir != tmpdir):
+            messages = messages + "Error, TMPDIR has changed location from %s. You need to either move it back or rebuild\n" % saved_tmpdir
+ else:
+ f = file(checkfile, "w")
+ f.write(tmpdir)
+ f.close()
+
+ #
+ # Check the 'ABI' of TMPDIR
+ #
+ current_abi = data.getVar('SANITY_ABI', e.data, True)
+ abifile = data.getVar('SANITY_ABIFILE', e.data, True)
+ if os.path.exists(abifile):
+ f = file(abifile, "r")
+ abi = f.read().strip()
+ if not abi.isdigit():
+ f = file(abifile, "w")
+ f.write(current_abi)
+ elif (abi != current_abi):
+ # Code to convert from one ABI to another could go here if possible.
+ messages = messages + "Error, TMPDIR has changed ABI (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi)
+ else:
+ f = file(abifile, "w")
+ f.write(current_abi)
+ f.close()
if messages != "":
raise_sanity_error(messages)
@@ -110,17 +156,7 @@ python check_sanity_eventhandler() {
from bb import note, error, data, __version__
from bb.event import getName
- try:
- from distutils.version import LooseVersion
- except ImportError:
- def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
-
- if (LooseVersion(bb.__version__) > LooseVersion("1.8.6")):
- if getName(e) == "ConfigParsed":
- check_sanity(e)
- return NotHandled
-
- if getName(e) == "BuildStarted":
+ if getName(e) == "ConfigParsed":
check_sanity(e)
return NotHandled
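
The two new checks persist a breadcrumb inside TMPDIR itself: the
directory's own absolute path, and the SANITY_ABI number describing its
layout. If either disagrees on a later run, the tree was moved or its format
changed, and the build must be fixed up or redone. A compact sketch of both
checks (file names are illustrative; the real paths come from TMPDIR and
SANITY_ABIFILE):

import os

def read_or_seed(path, value):
    # return the previously saved value, seeding the file on first run
    if os.path.exists(path):
        f = open(path)
        saved = f.read().strip()
        f.close()
        return saved
    f = open(path, "w")
    f.write(value)
    f.close()
    return value

def check_tmpdir_sanity(tmpdir, current_abi):
    messages = ""
    saved = read_or_seed(os.path.join(tmpdir, "saved_tmpdir"), tmpdir)
    if saved != tmpdir:
        messages += "TMPDIR has moved from %s; move it back or rebuild\n" % saved
    abi = read_or_seed(os.path.join(tmpdir, "abi_version"), current_abi)
    if abi.isdigit() and abi != current_abi:
        messages += "TMPDIR ABI changed from %s to %s; rebuild needed\n" % (abi, current_abi)
    return messages
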
diff --git a/classes/sdl.bbclass b/classes/sdl.bbclass
index 23cbf10919..dc4cd8ffa2 100644
--- a/classes/sdl.bbclass
+++ b/classes/sdl.bbclass
@@ -8,6 +8,8 @@ APPDESKTOP ?= "${WORKDIR}/${PN}.desktop"
APPNAME ?= "${PN}"
APPIMAGE ?= "${WORKDIR}/${PN}.png"
+export SDL_CONFIG = "${STAGING_BINDIR_CROSS}/sdl-config"
+
sdl_do_sdl_install() {
install -d ${D}${datadir}/applications
install -d ${D}${datadir}/pixmaps
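
Exporting SDL_CONFIG points configure scripts at the staged cross sdl-config
rather than whatever happens to be first on the build host's PATH. The
consumer side is just an environment lookup with a PATH fallback, roughly
(a sketch of the convention, not code from this class):

import os

sdl_config = os.environ.get("SDL_CONFIG", "sdl-config")
cflags = os.popen("%s --cflags" % sdl_config).read().strip()
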
diff --git a/classes/seppuku.bbclass b/classes/seppuku.bbclass
index 834bee34b4..2e097a812e 100644
--- a/classes/seppuku.bbclass
+++ b/classes/seppuku.bbclass
@@ -303,7 +303,7 @@ python seppuku_eventhandler() {
if name == "PkgFailed":
if not bb.data.getVar('SEPPUKU_AUTOBUILD', data, True) == "0":
- build.exec_task('do_clean', data)
+ build.exec_func('do_clean', data)
elif name == "TaskFailed":
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
diff --git a/classes/tinderclient.bbclass b/classes/tinderclient.bbclass
index bc004efb26..0b7fc1d843 100644
--- a/classes/tinderclient.bbclass
+++ b/classes/tinderclient.bbclass
@@ -328,7 +328,7 @@ def tinder_do_tinder_report(event):
log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('PF', event.data, True)
elif name == "PkgFailed":
if not data.getVar('TINDER_AUTOBUILD', event.data, True) == "0":
- build.exec_task('do_clean', event.data)
+ build.exec_func('do_clean', event.data)
log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('PF', event.data, True)
status = 200
# remember the failure for the -k case
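
This hunk and the seppuku one above make the same change: do_clean is now
run through build.exec_func rather than build.exec_task, matching the task
API in newer bitbake where exec_task takes different arguments and is no
longer the right way to trigger a function from an event handler. A
version-guarded form, assuming the same 1.8.11 boundary this commit uses
elsewhere (the helper name is hypothetical):

from bb import build, __version__

def run_do_clean(data):
    try:
        from distutils.version import LooseVersion
        old_api = LooseVersion(__version__) < LooseVersion('1.8.11')
    except ImportError:
        old_api = False       # cannot compare versions: assume a new bitbake
    if old_api:
        build.exec_task('do_clean', data)   # old task-level API
    else:
        build.exec_func('do_clean', data)   # 1.8.11+: run the function directly
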