Diffstat (limited to 'classes')
-rw-r--r--  classes/base.bbclass                                        57
-rw-r--r--  classes/distutils-base.bbclass                               6
-rw-r--r--  classes/gtk-icon-cache.bbclass                               4
-rw-r--r--  classes/insane.bbclass                                       4
-rw-r--r--  classes/nslu2-image.bbclass                                 13
-rw-r--r--  classes/openmoko2.bbclass                                    2
-rw-r--r--  classes/package.bbclass                                    108
-rw-r--r--  classes/package_deb.bbclass                                  9
-rw-r--r--  classes/package_ipk.bbclass                                  2
-rw-r--r--  classes/rm_work.bbclass                                     18
-rw-r--r--  classes/rootfs_deb.bbclass                                  21
-rw-r--r--  classes/sanity.bbclass                                      10
-rw-r--r--  classes/seppuku.bbclass                                     37
-rw-r--r--  classes/sip.bbclass                                         14
-rw-r--r--  classes/sip3.bbclass (renamed from classes/sip4.bbclass)    16
-rw-r--r--  classes/task.bbclass                                        27
-rw-r--r--  classes/tinderclient.bbclass                                 7
-rw-r--r--  classes/update-rc.d.bbclass                                 10
18 files changed, 230 insertions, 135 deletions
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 45a0282265..9998982bd1 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -82,6 +82,9 @@ def base_dep_prepend(d):
if bb.data.getVar('PN', d, True) == "shasum-native":
deps = ""
+ # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
+ # we need that built is the responsibility of the patch function / class, not
+ # the application.
if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
if (bb.data.getVar('HOST_SYS', d, 1) !=
bb.data.getVar('BUILD_SYS', d, 1)):
@@ -272,8 +275,12 @@ oe_libinstall() {
# If such file doesn't exist, try to cut version suffix
if [ ! -f "$lafile" ]; then
- libname=`echo "$libname" | sed 's/-[0-9.]*$//'`
- lafile=$libname.la
+ libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
+			lafile1=$libname1.la
+ if [ -f "$lafile1" ]; then
+ libname=$libname1
+ lafile=$lafile1
+ fi
fi
if [ -f "$lafile" ]; then
@@ -367,18 +374,6 @@ oe_machinstall() {
fi
}
-addtask showdata
-do_showdata[nostamp] = "1"
-python do_showdata() {
- import sys
- # emit variables and shell functions
- bb.data.emit_env(sys.__stdout__, d, True)
- # emit the metadata which isnt valid shell
- for e in d.keys():
- if bb.data.getVarFlag(e, 'python', d):
- sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
-}
-
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
@@ -579,6 +574,8 @@ python base_do_unpack() {
except bb.MalformedUrl, e:
raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
# dont need any parameters for extraction, strip them off
+ # RP: Insane. localpath shouldn't have parameters
+	# RP: Schedule for removal with bitbake 1.8.8
local = re.sub(';.*$', '', local)
local = os.path.realpath(local)
ret = oe_unpack_file(local, localdata, url)
@@ -807,6 +804,7 @@ def base_after_parse(d):
pn = bb.data.getVar('PN', d, 1)
+
# OBSOLETE in bitbake 1.7.4
srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
if srcdate != None:
@@ -816,9 +814,15 @@ def base_after_parse(d):
if use_nls != None:
bb.data.setVar('USE_NLS', use_nls, d)
- # Make sure MACHINE *isn't* exported
+ # Make sure MACHINE isn't exported
+ # (breaks binutils at least)
bb.data.delVarFlag('MACHINE', 'export', d)
bb.data.setVarFlag('MACHINE', 'unexport', 1, d)
+
+ # Make sure DISTRO isn't exported
+ # (breaks sysvinit at least)
+ bb.data.delVarFlag('DISTRO', 'export', d)
+ bb.data.setVarFlag('DISTRO', 'unexport', 1, d)
# Git packages should DEPEND on git-native
srcuri = bb.data.getVar('SRC_URI', d, 1)
@@ -826,27 +830,38 @@ def base_after_parse(d):
depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
depends = depends + " git-native:do_populate_staging"
bb.data.setVarFlag('do_fetch', 'depends', depends, d)
-
mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
if (old_arch == mach_arch):
# Nothing to do
return
+
+ #
+ # We always try to scan SRC_URI for urls with machine overrides
+ # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
+ #
override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
-
- if not override or override == '0':
+ if override == '0':
return
paths = []
- for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
- paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
+ for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
+ path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
+ if os.path.isdir(path):
+ paths.append(path)
+ if len(paths) == 0:
+ return
+
for s in bb.data.getVar('SRC_URI', d, 1).split():
+ if not s.startswith("file://"):
+ continue
local = bb.data.expand(bb.fetch.localpath(s, d), d)
for mp in paths:
if local.startswith(mp):
#bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
- bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
+ bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
return
#
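
For reference, the PACKAGE_ARCH override introduced above only fires when a file:// entry in SRC_URI resolves below an existing ${FILE_DIRNAME}/<subdir>/${MACHINE} directory. A rough standalone sketch of that decision in plain Python follows; the recipe layout, the package name and the simplified localpath lookup are assumptions standing in for bb.data/bb.fetch, not the class's actual API.

import os

def is_machine_specific(file_dirname, machine, src_uri, pn="foo", pv="1.0", pr="r0"):
    # Candidate override directories, mirroring ${PF}, ${P}, ${PN}, files and "".
    subdirs = ["%s-%s-%s" % (pn, pv, pr), "%s-%s" % (pn, pv), pn, "files", ""]
    paths = [os.path.join(file_dirname, p, machine) for p in subdirs
             if os.path.isdir(os.path.join(file_dirname, p, machine))]
    if not paths:
        return False                     # no machine directories, keep PACKAGE_ARCH as-is
    for uri in src_uri.split():
        if not uri.startswith("file://"):
            continue                     # only local files can act as machine overrides
        relpath = uri[len("file://"):]
        # Stand-in for bb.fetch.localpath(): try machine-specific dirs first,
        # roughly the way FILESPATH resolution would.
        local = None
        for p in subdirs:
            for base in (os.path.join(file_dirname, p, machine),
                         os.path.join(file_dirname, p)):
                cand = os.path.join(base, relpath)
                if os.path.exists(cand):
                    local = cand
                    break
            if local:
                break
        if local and any(local.startswith(mp) for mp in paths):
            return True                  # the class would set PACKAGE_ARCH = "${MACHINE_ARCH}"
    return False
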
diff --git a/classes/distutils-base.bbclass b/classes/distutils-base.bbclass
index c3f325768d..5150be76b9 100644
--- a/classes/distutils-base.bbclass
+++ b/classes/distutils-base.bbclass
@@ -5,14 +5,14 @@ RDEPENDS += "python-core"
def python_dir(d):
import os, bb
staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
- if os.path.exists( "%s/python2.3" % staging_incdir ): return "python2.3"
- if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
if os.path.exists( "%s/python2.5" % staging_incdir ): return "python2.5"
+ if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
+ if os.path.exists( "%s/python2.3" % staging_incdir ): return "python2.3"
raise "No Python in STAGING_INCDIR. Forgot to build python-native ?"
PYTHON_DIR = "${@python_dir(d)}"
FILES_${PN} = "${bindir}/* ${libdir}/* ${libdir}/${PYTHON_DIR}/*"
FILES_${PN}-dbg = "${libdir}/${PYTHON_DIR}/site-packages/.debug \
- ${libdir}/${PYTHON_DIR}/site-packages/./*/debug \
+ ${libdir}/${PYTHON_DIR}/site-packages/*/.debug \
${libdir}/${PYTHON_DIR}/site-packages/*/*/.debug"
diff --git a/classes/gtk-icon-cache.bbclass b/classes/gtk-icon-cache.bbclass
index 855a72a2f7..b86562890a 100644
--- a/classes/gtk-icon-cache.bbclass
+++ b/classes/gtk-icon-cache.bbclass
@@ -1,6 +1,8 @@
FILES_${PN} += "${datadir}/icons/hicolor"
-RDEPENDS += " hicolor-icon-theme "
+RDEPENDS += "hicolor-icon-theme"
+# This could run on the host as icon cache files are architecture independent,
+# but there is no gtk-update-icon-cache built natively.
gtk-icon-cache_postinst() {
if [ "x$D" != "x" ]; then
exit 1
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index 08c1058edf..d54d6c7b9e 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -61,7 +61,7 @@ def package_qa_get_machine_dict():
"avr32": (6317, 0, 0, False, True),
},
"uclinux-uclibc" : {
- "bfin": ( 0, 0, 0, True, True),
+ "bfin": ( 106, 0, 0, True, True),
},
"linux-gnueabi" : {
"arm" : (40, 0, 0, True, True),
@@ -233,7 +233,7 @@ def package_qa_check_rpath(file,name,d):
bb.error("QA Issue package %s contains bad RPATH %s in file %s" % (name, line, file))
#bb.note("Fixing RPATH for you in %s" % file)
#os.popen("%s -r /lib %s" % (chrpath,file))
- return False
+ #return False
return True
def package_qa_check_devdbg(path, name,d):
diff --git a/classes/nslu2-image.bbclass b/classes/nslu2-image.bbclass
index 14bf989055..edd23ae07f 100644
--- a/classes/nslu2-image.bbclass
+++ b/classes/nslu2-image.bbclass
@@ -3,18 +3,23 @@ nslu2_pack_image () {
install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
${STAGING_LIBDIR}/nslu2-binaries/Trailer \
${STAGING_LIBDIR}/nslu2-binaries/SysConf \
+ ${STAGING_LOADER_DIR}/apex-nslu2.bin \
+ ${STAGING_LOADER_DIR}/apex-nslu2-16mb.bin \
${DEPLOY_DIR_IMAGE}/slug/
- install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-nslu2${SITEINFO_ENDIANESS} \
+ install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-ixp4xx${SITEINFO_ENDIANESS} \
${DEPLOY_DIR_IMAGE}/slug/vmlinuz
install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 \
${DEPLOY_DIR_IMAGE}/slug/flashdisk.jffs2
install -m 0644 ${STAGING_FIRMWARE_DIR}/NPE-B ${DEPLOY_DIR_IMAGE}/slug/
cd ${DEPLOY_DIR_IMAGE}/slug
- slugimage -p -b RedBoot -s SysConf -k vmlinuz \
- -r Ramdisk:1,Flashdisk:flashdisk.jffs2 -m NPE-B -t Trailer \
+ slugimage -p -b RedBoot -s SysConf -k vmlinuz -L apex-nslu2.bin \
+ -r Flashdisk:flashdisk.jffs2 -m NPE-B -t Trailer \
-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}-nslu2.bin
+ slugimage -F -p -b RedBoot -s SysConf -k vmlinuz -L apex-nslu2-16mb.bin \
+ -r Flashdisk:flashdisk.jffs2 -m NPE-B -t Trailer \
+ -o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}-nslu2-16mb.bin
rm -rf ${DEPLOY_DIR_IMAGE}/slug
}
-EXTRA_IMAGEDEPENDS_nslu2 += 'slugimage-native nslu2-linksys-firmware ixp4xx-npe upslug2-native'
+EXTRA_IMAGEDEPENDS_nslu2 += 'slugimage-native nslu2-linksys-firmware ixp4xx-npe upslug2-native apex-nslu2 apex-nslu2-16mb'
IMAGE_POSTPROCESS_COMMAND_nslu2 += "nslu2_pack_image; "
diff --git a/classes/openmoko2.bbclass b/classes/openmoko2.bbclass
index 17b3bbafa6..872dd4915c 100644
--- a/classes/openmoko2.bbclass
+++ b/classes/openmoko2.bbclass
@@ -18,6 +18,7 @@ def openmoko_two_get_subdir(d):
elif section == "panel-plugin": return "panel-plugins"
elif section == "inputmethods": return "inputmethods"
elif section == "daemons": return "daemons"
+ elif section == "misc": return "misc"
else: return section
LICENSE = "${@openmoko_two_get_license(d)}"
@@ -30,4 +31,3 @@ FILES_${PN} += "${datadir}/icons"
# SVNREV = "r${SRCREV}"
SVNREV = "${SRCDATE}"
-
diff --git a/classes/package.bbclass b/classes/package.bbclass
index e044395347..95e4acd4d6 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -131,6 +131,9 @@ python () {
for dep in (bb.data.getVar('PACKAGE_EXTRA_DEPENDS', d, True) or "").split():
deps += " %s:do_populate_staging" % dep
bb.data.setVarFlag('do_package_write', 'depends', deps, d)
+
+ # shlibs requires any DEPENDS to have already packaged for the *.list files
+ bb.data.setVarFlag('do_package', 'deptask', 'do_package', d)
}
# file(1) output to match to consider a file an unstripped executable
@@ -309,9 +312,15 @@ python package_do_split_locales() {
bb.data.setVar('PACKAGES', ' '.join(packages), d)
- rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
- rdep.append('%s-locale*' % pn)
- bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
+ # Disabled by RP 18/06/07
+ # Wildcards aren't supported in debian
+ # They break with ipkg since glibc-locale* will mean that
+ # glibc-localedata-translit* won't install as a dependency
+ # for some other package which breaks meta-toolchain
+ # Probably breaks since virtual-locale- isn't provided anywhere
+ #rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
+ #rdep.append('%s-locale*' % pn)
+ #bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
python populate_packages () {
@@ -360,7 +369,7 @@ python populate_packages () {
for pkg in packages.split():
if pkg in package_list:
bb.error("-------------------")
- bb.error("%s is listed in PACKAGES mutliple times, this leads to packaging errors." % pkg)
+ bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
bb.error("Please fix the metadata/report this as bug to OE bugtracker.")
bb.error("-------------------")
else:
@@ -402,7 +411,6 @@ python populate_packages () {
bb.mkdirhier(root)
filesvar = bb.data.getVar('FILES', localdata, 1) or ""
files = filesvar.split()
- cleandirs = []
for file in files:
if os.path.isabs(file):
file = '.' + file
@@ -411,8 +419,6 @@ python populate_packages () {
newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
if newfiles:
files += newfiles
- if file != "./":
- cleandirs = [file] + cleandirs
continue
globbed = glob.glob(file)
if globbed:
@@ -424,16 +430,9 @@ python populate_packages () {
fpath = os.path.join(root,file)
dpath = os.path.dirname(fpath)
bb.mkdirhier(dpath)
-# if file in cleandirs:
-# cleandirs.remove(file)
ret = bb.movefile(file,fpath)
if ret is None or ret == 0:
raise bb.build.FuncFailed("File population failed")
-# for dir in cleandirs:
-# if os.path.isdir(dir):
-# os.rmdir(dir)
-# else:
-# bb.note("ERROR: directory %s went away unexpectedly during package population" % dir)
del localdata
os.chdir(workdir)
@@ -824,54 +823,81 @@ python read_shlibdeps () {
python package_depchains() {
"""
For a given set of prefix and postfix modifiers, make those packages
- RRECOMMENDS on the corresponding packages for its DEPENDS.
+ RRECOMMENDS on the corresponding packages for its RDEPENDS.
Example: If package A depends upon package B, and A's .bb emits an
A-dev package, this would make A-dev Recommends: B-dev.
+
+ If only one of a given suffix is specified, it will take the RRECOMMENDS
+ based on the RDEPENDS of *all* other packages. If more than one of a given
+    suffix is specified, it will only use the RDEPENDS of the single parent
+ package.
"""
packages = bb.data.getVar('PACKAGES', d, 1)
postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split()
prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split()
- def pkg_addrrecs(pkg, base, func, d):
- rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + base, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
- # bb.note('rdepends for %s is %s' % (base, rdepends))
+ def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
+ def packaged(pkg, d):
+ return os.access(bb.data.expand('${STAGING_DIR}/pkgdata/runtime/%s.packaged' % pkg, d), os.R_OK)
+
+ #bb.note('rdepends for %s is %s' % (base, rdepends))
+
rreclist = explode_deps(bb.data.getVar('RRECOMMENDS_' + pkg, d, 1) or bb.data.getVar('RRECOMMENDS', d, 1) or "")
for depend in rdepends:
- split_depend = depend.split(' (')
- name = split_depend[0].strip()
- func(rreclist, name)
+ pkgname = getname(depend, suffix)
+ if not pkgname in rreclist and packaged(pkgname, d):
+ rreclist.append(pkgname)
+ #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
bb.data.setVar('RRECOMMENDS_%s' % pkg, ' '.join(rreclist), d)
- def packaged(pkg, d):
- return os.access(bb.data.expand('${STAGING_DIR}/pkgdata/runtime/%s.packaged' % pkg, d), os.R_OK)
+ def add_dep(list, dep):
+ dep = dep.split(' (')[0].strip()
+ if dep not in list:
+ list.append(dep)
+
+ rdepends = []
+ for dep in explode_deps(bb.data.getVar('RDEPENDS', d, 1) or ""):
+ add_dep(rdepends, dep)
for pkg in packages.split():
- for postfix in postfixes:
- def func(list, name):
- pkg = '%s%s' % (name, postfix)
- if not pkg in list:
- if packaged(pkg, d):
- list.append(pkg)
+ for dep in explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or ""):
+ add_dep(rdepends, dep)
+
+ #bb.note('rdepends is %s' % rdepends)
- base = pkg[:-len(postfix)]
+ def post_getname(name, suffix):
+ return '%s%s' % (name, suffix)
+ def pre_getname(name, suffix):
+ return '%s%s' % (suffix, name)
+
+ pkgs = {}
+ for pkg in packages.split():
+ for postfix in postfixes:
if pkg.endswith(postfix):
- pkg_addrrecs(pkg, base, func, d)
- continue
+ if not postfix in pkgs:
+ pkgs[postfix] = {}
+ pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
for prefix in prefixes:
- def func(list, name):
- pkg = '%s%s' % (prefix, name)
- if not pkg in list:
- if packaged(pkg, d):
- list.append(pkg)
-
- base = pkg[len(prefix):]
if pkg.startswith(prefix):
- pkg_addrrecs(pkg, base, func, d)
+ if not prefix in pkgs:
+ pkgs[prefix] = {}
+				pkgs[prefix][pkg] = (pkg[len(prefix):], pre_getname)
+
+ for suffix in pkgs:
+ for pkg in pkgs[suffix]:
+ (base, func) = pkgs[suffix][pkg]
+ if len(pkgs[suffix]) == 1:
+ pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
+ else:
+ rdeps = []
+ for dep in explode_deps(bb.data.getVar('RDEPENDS_' + base, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or ""):
+ add_dep(rdeps, dep)
+ pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
@@ -887,8 +913,6 @@ python package_do_package () {
for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
bb.build.exec_func(f, d)
}
-# shlibs requires any DEPENDS to have already packaged for the *.list files
-do_package[deptask] = "do_package"
do_package[dirs] = "${D}"
addtask package before do_build after do_install
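
To make the docstring above concrete, the grouping rule can be reduced to a few lines of plain Python: a suffix owned by a single package inherits recommendations from the union of all RDEPENDS, while a suffix shared by several packages only uses each base package's own RDEPENDS. Package names are invented and the pkgdata "packaged" check is reduced to a set; this is a sketch of the behaviour, not the class code.

def depchain_recommends(packages, rdepends_map, suffixes, packaged):
    # Union of every package's RDEPENDS, used when a suffix has a single owner.
    all_rdeps = sorted({d for deps in rdepends_map.values() for d in deps})
    out = {}
    for suffix in suffixes:
        group = [p for p in packages if p.endswith(suffix)]
        for pkg in group:
            base = pkg[:-len(suffix)]
            rdeps = all_rdeps if len(group) == 1 else rdepends_map.get(base, [])
            # Only recommend packages that were actually produced.
            out[pkg] = [d + suffix for d in rdeps if d + suffix in packaged]
    return out

# A single -dev package picks up the -dev flavour of every runtime dependency.
print(depchain_recommends(["foo", "foo-dev"],
                          {"foo": ["libbar", "libbaz"]},
                          ["-dev"],
                          {"libbar-dev", "libbaz-dev"}))
# -> {'foo-dev': ['libbar-dev', 'libbaz-dev']}
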
diff --git a/classes/package_deb.bbclass b/classes/package_deb.bbclass
index d172fb1766..c322af1f15 100644
--- a/classes/package_deb.bbclass
+++ b/classes/package_deb.bbclass
@@ -1,3 +1,7 @@
+#
+# Copyright 2006-2007 OpenedHand Ltd.
+#
+
inherit package
PACKAGE_EXTRA_DEPENDS += "dpkg-native fakeroot-native"
@@ -126,12 +130,13 @@ python do_package_deb () {
del g[g.index('./DEBIAN')]
except ValueError:
pass
- if not g and not bb.data.getVar('ALLOW_EMPTY', localdata):
+ if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
from bb import note
note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
continue
controldir = os.path.join(root, 'DEBIAN')
bb.mkdirhier(controldir)
+ os.chmod(controldir, 0755)
try:
ctrlfile = file(os.path.join(controldir, 'control'), 'wb')
# import codecs
@@ -150,7 +155,7 @@ python do_package_deb () {
fields.append(["Priority: %s\n", ['PRIORITY']])
fields.append(["Maintainer: %s\n", ['MAINTAINER']])
fields.append(["Architecture: %s\n", ['TARGET_ARCH']])
- fields.append(["OE: %s\n", ['P']])
+ fields.append(["OE: %s\n", ['PN']])
fields.append(["Homepage: %s\n", ['HOMEPAGE']])
# Package, Version, Maintainer, Description - mandatory
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index b5cc6af3bb..9200055495 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -131,7 +131,7 @@ python do_package_ipk () {
del g[g.index('./CONTROL')]
except ValueError:
pass
- if not g and not bb.data.getVar('ALLOW_EMPTY', localdata):
+ if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
from bb import note
note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
continue
diff --git a/classes/rm_work.bbclass b/classes/rm_work.bbclass
index e3c92b8572..45812bbb81 100644
--- a/classes/rm_work.bbclass
+++ b/classes/rm_work.bbclass
@@ -6,6 +6,9 @@
# INHERIT += "rm_work"
#
+RMWORK_ORIG_TASK := "${BB_DEFAULT_TASK}"
+BB_DEFAULT_TASK = "rm_work_all"
+
do_rm_work () {
cd ${WORKDIR}
for dir in *
@@ -17,13 +20,12 @@ do_rm_work () {
fi
done
}
+# Uncomment me when we can use bitbake 1.8.8
+#addtask rm_work after do_${RMWORK_ORIG_TASK}
+addtask rm_work after do_build
-addtask rmall after do_rm_work
-do_rmall[recrdeptask] = "do_rm_work"
-do_rmall() {
- :
+do_rm_work_all () {
+ :
}
-
-
-addtask rm_work before do_build
-addtask rm_work after do_populate_staging
+do_rm_work_all[recrdeptask] = "do_rm_work"
+addtask rm_work_all after do_rm_work
diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass
index f444541509..67fa661308 100644
--- a/classes/rootfs_deb.bbclass
+++ b/classes/rootfs_deb.bbclass
@@ -1,3 +1,6 @@
+#
+# Copyright 2006-2007 Openedhand Ltd.
+#
do_rootfs[depends] += "dpkg-native:do_populate_staging apt-native:do_populate_staging"
@@ -52,21 +55,21 @@ fakeroot rootfs_deb_do_rootfs () {
}
if [ ! -z "${LINGUAS_INSTALL}" ]; then
- apt-get install glibc-localedata-i18n
- if [ $? -eq 1 ]; then
- exit 1
+ apt-get install glibc-localedata-i18n --force-yes --allow-unauthenticated
+ if [ $? -ne 0 ]; then
+			exit 1
fi
for i in ${LINGUAS_INSTALL}; do
- apt-get install $i
- if [ $? -eq 1 ]; then
- exit 1
+ apt-get install $i --force-yes --allow-unauthenticated
+ if [ $? -ne 0 ]; then
+			exit 1
fi
done
fi
if [ ! -z "${PACKAGE_INSTALL}" ]; then
for i in ${PACKAGE_INSTALL}; do
- apt-get install $i
+ apt-get install $i --force-yes --allow-unauthenticated
if [ $? -eq 1 ]; then
exit 1
fi
@@ -134,3 +137,7 @@ rootfs_deb_log_check() {
true
}
+remove_packaging_data_files() {
+ rm -rf ${IMAGE_ROOTFS}/usr/lib/ipkg/
+ rm -rf ${IMAGE_ROOTFS}/usr/dpkg/
+}
diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass
index 016b0d500b..479abce7fa 100644
--- a/classes/sanity.bbclass
+++ b/classes/sanity.bbclass
@@ -65,8 +65,9 @@ def check_sanity(e):
if data.getVar('TARGET_OS', e.data, True) == 'INVALID':
messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'
+ assume_provided = data.getVar('ASSUME_PROVIDED', e.data , True).split()
# Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
- if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split():
+ if "diffstat-native" not in assume_provided:
messages = messages + 'Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n'
# Check that the MACHINE is valid
@@ -89,7 +90,7 @@ def check_sanity(e):
if not check_app_exists('${BUILD_PREFIX}g++', e.data):
missing = missing + "C++ Compiler (${BUILD_PREFIX}g++),"
- required_utilities = "patch help2man diffstat texi2html makeinfo cvs svn git bzip2 tar gzip gawk md5sum bison"
+ required_utilities = "patch help2man diffstat texi2html makeinfo cvs svn bzip2 tar gzip gawk md5sum"
for util in required_utilities.split():
if not check_app_exists( util, e.data ):
@@ -99,6 +100,11 @@ def check_sanity(e):
missing = missing.rstrip(',')
messages = messages + "Please install following missing utilities: %s\n" % missing
+ omask = os.umask(022)
+ if omask & 0755:
+ messages = messages + "Please use a umask which allows a+rx and u+rwx\n"
+ os.umask(omask)
+
oes_bb_conf = data.getVar( 'OES_BITBAKE_CONF', e.data, True )
if not oes_bb_conf:
messages = messages + 'You do not include OpenEmbeddeds version of conf/bitbake.conf\n'
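
The umask check added above works because os.umask() returns the previous mask; any bit it shares with 0755 means newly created files would lose a+rx or u+rwx permissions. A minimal illustration, with example values:

import os

def umask_too_strict():
    omask = os.umask(0o022)      # temporarily set 022, get the caller's mask back
    os.umask(omask)              # restore it straight away
    return bool(omask & 0o755)   # True for e.g. 077, False for 022 or 002

print(umask_too_strict())
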
diff --git a/classes/seppuku.bbclass b/classes/seppuku.bbclass
index 937c973ad5..7241ae3e7a 100644
--- a/classes/seppuku.bbclass
+++ b/classes/seppuku.bbclass
@@ -129,7 +129,9 @@ def seppuku_find_bug_report(debug_file, opener, query, product, component, bugna
component = urllib.quote(component)
bugname = urllib.quote(bugname)
- result = opener.open("%(query)s?product=%(product)s&component=%(component)s&short_desc_type=substring&short_desc=%(bugname)s" % vars())
+ file = "%(query)sproduct=%(product)s&component=%(component)s&short_desc_type=substring&short_desc=%(bugname)s" % vars()
+ print >> debug_file, "Trying %s" % file
+ result = opener.open(file)
if result.code != 200:
raise "Can not query the bugzilla at all"
txt = result.read()
@@ -137,7 +139,7 @@ def seppuku_find_bug_report(debug_file, opener, query, product, component, bugna
scanner.feed(txt)
if len(scanner.result()) == 0:
print >> debug_file, "Scanner failed to scan the html site"
- print >> debug_file, "%(query)s?product=%(product)s&component=%(component)s&short_desc_type=substring&short_desc=%(bugname)s" % vars()
+ print >> debug_file, "%(query)sproduct=%(product)s&component=%(component)s&short_desc_type=substring&short_desc=%(bugname)s" % vars()
print >> debug_file, txt
return (False,None)
else: # silently pick the first result
@@ -218,9 +220,9 @@ def seppuku_file_bug(poster, file, product, component, bugname, text):
# scan the result for a bug number
# it will look like
- # '<a href="show_bug.cgi?id=308">Back To BUG# 308</a>'
+ # '<title>Bug 2742 Submitted</title>'
import re
- res = re.findall(("\>Back To BUG\# (?P<int>\d+)\</a\>"), result.read() )
+ res = re.findall(("\>Bug (?P<int>\d+) Submitted"), result.read() )
if result.code != 200 or len(res) != 1:
return None
else:
@@ -234,7 +236,7 @@ def seppuku_create_attachment(debug, poster, attach_query, product, component, b
if not bug_number:
import bb
- bb.note("Can't create an attachment, the bug is not present")
+ bb.note("Can't create an attachment, no bugnumber passed to method")
return False
import urllib2
@@ -267,6 +269,13 @@ python seppuku_eventhandler() {
from bb import data, mkdirhier, build
import bb, os, glob
+ event = e
+ data = e.data
+ name = getName(event)
+ if name == "MsgNote":
+ # avoid recursion
+ return NotHandled
+
# Try to load our exotic libraries
try:
import MultipartPostHandler
@@ -280,13 +289,10 @@ python seppuku_eventhandler() {
bb.note("Failed to import the cookielib and urllib2, make sure to use python2.4")
return NotHandled
- event = e
- data = e.data
- name = getName(event)
if name == "PkgFailed":
if not bb.data.getVar('SEPPUKU_AUTOBUILD', data, True) == "0":
build.exec_task('do_clean', data)
- elif name == "TaskFailed" or name == "NoProvider":
+ elif name == "TaskFailed":
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
poster = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj),MultipartPostHandler.MultipartPostHandler)
@@ -316,15 +322,12 @@ python seppuku_eventhandler() {
"pr" : bb.data.getVar("PR", data, True),
"task" : e.task }
log_file = glob.glob("%s/log.%s.*" % (bb.data.getVar('T', event.data, True), event.task))
- text = "The package failed to build at %s" % bb.data.getVar('DATETIME', data, True)
+ text = "The package failed to build at %s for machine %s" % (bb.data.getVar('DATETIME', data, True), bb.data.getVar( 'MACHINE', data, True ) )
if len(log_file) != 0:
print >> debug_file, "Adding log file %s" % log_file[0]
file = open(log_file[0], 'r')
else:
print >> debug_file, "No log file found for the glob"
- elif name == "NoProvider":
- bugname = "noprovider for %s runtime: %s" % (event.getItem, event.getisRuntime)
- text = "Please fix it"
else:
print >> debug_file, "Unknown name '%s'" % name
assert False
@@ -345,11 +348,11 @@ python seppuku_eventhandler() {
else:
bug_number = seppuku_file_bug(poster, newbug, product, component, bugname, text)
if not bug_number:
- print >> debug_file, "Filing a bugreport failed"
- else:
- print >> debug_file, "The new bug_number: '%s'" % bug_number
+            print >> debug_file, "Couldn't acquire a new bug_number, filing a bugreport failed"
+ else:
+ print >> debug_file, "The new bug_number: '%s'" % bug_number
- if file:
+ if bug_number and file:
if not seppuku_create_attachment(debug_file, poster, attach, product, component, bug_number, text, file):
print >> debug_file, "Failed to attach the build log"
else:
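
The reworked query above assumes SEPPUKU_QUERY already carries the trailing '?', so the parameters are appended to it directly. A small sketch of the resulting URL, with a hypothetical Bugzilla host and example field values:

import urllib.parse

query = "http://bugs.example.org/buglist.cgi?"   # stand-in for SEPPUKU_QUERY, note the trailing '?'
params = {"product": "Openembedded", "component": "Packages",
          "short_desc_type": "substring", "short_desc": "foo-1.0-r0 do_compile failed"}
print(query + urllib.parse.urlencode(params))
# http://bugs.example.org/buglist.cgi?product=Openembedded&component=Packages&short_desc_type=substring&short_desc=foo-1.0-r0+do_compile+failed
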
diff --git a/classes/sip.bbclass b/classes/sip.bbclass
index adf179b130..a258fda629 100644
--- a/classes/sip.bbclass
+++ b/classes/sip.bbclass
@@ -1,11 +1,11 @@
# Build Class for Sip based Python Bindings
# (C) Michael 'Mickey' Lauer <mickey@Vanille.de>
#
-
-DEPENDS =+ "sip-native python-sip"
+DEPENDS =+ "sip-native"
+RDEPENDS += "python-sip"
# default stuff, do not uncomment
-# EXTRA_SIPTAGS = "-tWS_QWS -tQtPE_1_6_0 -tQt_2_3_1"
+# EXTRA_SIPTAGS = "-tWS_X11 -tQt_4_3_0"
sip_do_generate() {
if [ -z "${SIP_MODULES}" ]; then
@@ -33,10 +33,10 @@ sip_do_generate() {
for module in $MODULES
do
- install -d ${module}/
- oenote "calling 'sip -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
- sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip \
- || die "Error calling sip on ${module}"
+ install -d ${module}/
+ echo "calling 'sip4 -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
+ sip4 -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf \
+ sip/${module}/${module}mod.sip || die "Error calling sip on ${module}"
cat ${module}/${module}.sbf | sed s,target,TARGET, \
| sed s,sources,SOURCES, \
| sed s,headers,HEADERS, \
diff --git a/classes/sip4.bbclass b/classes/sip3.bbclass
index ca2b1dae20..1dd42ba86b 100644
--- a/classes/sip4.bbclass
+++ b/classes/sip3.bbclass
@@ -1,13 +1,13 @@
# Build Class for Sip based Python Bindings
# (C) Michael 'Mickey' Lauer <mickey@Vanille.de>
#
-DEPENDS =+ "sip4-native"
-RDEPENDS += "python-sip4"
+
+DEPENDS =+ "sip-native python-sip"
# default stuff, do not uncomment
-# EXTRA_SIPTAGS = "-tWS_X11 -tQt_4_1_1"
+# EXTRA_SIPTAGS = "-tWS_QWS -tQtPE_1_6_0 -tQt_2_3_1"
-sip4_do_generate() {
+sip3_do_generate() {
if [ -z "${SIP_MODULES}" ]; then
MODULES="`ls sip/*mod.sip`"
else
@@ -33,10 +33,10 @@ sip4_do_generate() {
for module in $MODULES
do
- install -d ${module}/
- echo "calling 'sip4 -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
- sip4 -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf \
- sip/${module}/${module}mod.sip || die "Error calling sip on ${module}"
+ install -d ${module}/
+ oenote "calling 'sip -I sip -I ${STAGING_SIPDIR} ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.pro.in sip/${module}/${module}mod.sip'"
+ sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip \
+ || die "Error calling sip on ${module}"
cat ${module}/${module}.sbf | sed s,target,TARGET, \
| sed s,sources,SOURCES, \
| sed s,headers,HEADERS, \
diff --git a/classes/task.bbclass b/classes/task.bbclass
new file mode 100644
index 0000000000..4edd704829
--- /dev/null
+++ b/classes/task.bbclass
@@ -0,0 +1,27 @@
+# Task packages are only used to pull in other packages
+# via their dependencies. They are empty.
+ALLOW_EMPTY = "1"
+
+# By default, only the task package itself is in PACKAGES.
+# -dbg and -dev flavours are handled by the anonfunc below.
+# This means that task recipes used to build multiple task
+# packages have to modify PACKAGES after inheriting task.bbclass.
+PACKAGES = "${PN}"
+
+# By default, task packages do not depend on a certain architecture.
+# Only if dependencies are modified by MACHINE_FEATURES does PACKAGE_ARCH
+# need to be set to MACHINE_ARCH after inheriting task.bbclass.
+PACKAGE_ARCH = "all"
+
+# This automatically adds -dbg and -dev flavours of all PACKAGES
+# to the list. Their dependencies (RRECOMMENDS) are handled as usual
+# by package_depchains in a following step.
+python () {
+ packages = bb.data.getVar('PACKAGES', d, 1).split()
+ genpackages = []
+ for pkg in packages:
+ for postfix in ['-dbg', '-dev']:
+ genpackages.append(pkg+postfix)
+ bb.data.setVar('PACKAGES', ' '.join(packages+genpackages), d)
+}
+
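
Outside BitBake, the anonymous Python function in the new class boils down to fanning PACKAGES out into -dbg and -dev flavours; the package name below is only an example:

packages = "task-base".split()            # whatever PACKAGES contains
genpackages = [pkg + postfix for pkg in packages for postfix in ("-dbg", "-dev")]
print(" ".join(packages + genpackages))   # task-base task-base-dbg task-base-dev
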
diff --git a/classes/tinderclient.bbclass b/classes/tinderclient.bbclass
index d1d9f49fac..bc004efb26 100644
--- a/classes/tinderclient.bbclass
+++ b/classes/tinderclient.bbclass
@@ -24,6 +24,7 @@ def tinder_form_data(bound, dict, log):
output = []
# for each key in the dictionary
for name in dict:
+ assert dict[name]
output.append( "--" + bound )
output.append( 'Content-Disposition: form-data; name="%s"' % name )
output.append( "" )
@@ -60,7 +61,7 @@ def tinder_format_http_post(d,status,log):
"os" : os.uname()[0],
"os_version" : os.uname()[2],
"compiler" : "gcc",
- "clobber" : data.getVar('TINDER_CLOBBER', d, True),
+ "clobber" : data.getVar('TINDER_CLOBBER', d, True) or "0",
"srcdate" : data.getVar('SRCDATE', d, True),
"PN" : data.getVar('PN', d, True),
"PV" : data.getVar('PV', d, True),
@@ -370,9 +371,9 @@ def tinder_do_tinder_report(event):
addhandler tinderclient_eventhandler
python tinderclient_eventhandler() {
from bb import note, error, data
- from bb.event import NotHandled
+ from bb.event import NotHandled, getName
- if e.data is None:
+ if e.data is None or getName(e) == "MsgNote":
return NotHandled
do_tinder_report = data.getVar('TINDER_REPORT', e.data, True)
diff --git a/classes/update-rc.d.bbclass b/classes/update-rc.d.bbclass
index 9821eec5b2..3051b7933f 100644
--- a/classes/update-rc.d.bbclass
+++ b/classes/update-rc.d.bbclass
@@ -7,17 +7,15 @@ INIT_D_DIR = "${sysconfdir}/init.d"
updatercd_postinst() {
if test "x$D" != "x"; then
- D="-r $D"
+ OPT="-r $D"
else
- D="-s"
+ OPT="-s"
fi
-update-rc.d $D ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
+update-rc.d $OPT ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
}
updatercd_prerm() {
-if test "x$D" != "x"; then
- D="-r $D"
-else
+if test "x$D" = "x"; then
${INIT_D_DIR}/${INITSCRIPT_NAME} stop
fi
}