Diffstat (limited to 'classes')
37 files changed, 397 insertions, 208 deletions
diff --git a/classes/angstrom-mirrors.bbclass b/classes/angstrom-mirrors.bbclass
index e44a78c983..10bf75044f 100644
--- a/classes/angstrom-mirrors.bbclass
+++ b/classes/angstrom-mirrors.bbclass
@@ -1,4 +1,4 @@
 MIRRORS_append () {
-ftp://.*/.*/     http://www.angstrom-distribution.org/unstable/sources/
-https?$://.*/.*/ http://www.angstrom-distribution.org/unstable/sources/
+ftp://.*/.*      http://www.angstrom-distribution.org/unstable/sources/
+https?$://.*/.*  http://www.angstrom-distribution.org/unstable/sources/
 }
diff --git a/classes/autotools.bbclass b/classes/autotools.bbclass
index 8e4fba9400..991e472b14 100644
--- a/classes/autotools.bbclass
+++ b/classes/autotools.bbclass
@@ -174,6 +174,8 @@ autotools_stage_all() {
 	fi
 	if [ -d ${STAGE_TEMP}/${libdir} ]
 	then
+		find ${STAGE_TEMP}/${libdir} -name '*.la' -exec sed -i s,installed=yes,installed=no, {} \;
+
 		for i in ${STAGE_TEMP}/${libdir}/*.la
 		do
 			if [ ! -f "$i" ]; then
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 7526dff65e..c8ee75627f 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -128,6 +128,14 @@ def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
     else:
         return falsevalue
 
+def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
+    import bb
+    result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
+    if result <= 0:
+        return truevalue
+    else:
+        return falsevalue
+
 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
     import bb
     matches = 0
@@ -848,6 +856,12 @@ def base_after_parse(d):
     bb.data.delVarFlag('MACHINE', 'export', d)
     bb.data.setVarFlag('MACHINE', 'unexport', 1, d)
 
+    # Make sure TARGET_ARCH isn't exported
+    # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
+    # in them, undocumented)
+    bb.data.delVarFlag('TARGET_ARCH', 'export', d)
+    bb.data.setVarFlag('TARGET_ARCH', 'unexport', 1, d)
+
     # Make sure DISTRO isn't exported
     # (breaks sysvinit at least)
     bb.data.delVarFlag('DISTRO', 'export', d)
@@ -896,6 +910,23 @@ python () {
     base_after_parse(d)
 }
 
+def check_app_exists(app, d):
+    from bb import which, data
+
+    app = data.expand(app, d)
+    path = data.getVar('PATH', d, 1)
+    return len(which(path, app)) != 0
+
+def check_gcc3(data):
+
+    gcc3_versions = 'gcc-3.4 gcc34 gcc-3.4.4 gcc-3.4.6 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
+
+    for gcc3 in gcc3_versions.split():
+        if check_app_exists(gcc3, data):
+            return gcc3
+
+    return False
+
 # Patch handling
 inherit patch
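The new base_version_less_or_equal() helper runs bb.vercmp() on a variable's value and returns one of the two supplied strings, so it can be used inline in recipe metadata; check_gcc3() builds on check_app_exists() to locate a host gcc 3.x. A minimal usage sketch, following the same pattern kernel.bbclass adopts further down in this commit (the package name is taken from there, the surrounding recipe context is illustrative):

    # recommend a suspend fixup only for kernels newer than 2.6.17 (illustrative recipe snippet)
    RRECOMMENDS_${PN} += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'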
diff --git a/classes/binconfig.bbclass b/classes/binconfig.bbclass
index 4e425a76d6..5ce9ff6f0d 100644
--- a/classes/binconfig.bbclass
+++ b/classes/binconfig.bbclass
@@ -20,6 +20,8 @@ def get_binconfig_mangle(d):
         s += " -e 's:OEEXECPREFIX:${STAGING_DIR_HOST}${layout_exec_prefix}:'"
         s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'"
         s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'"
+        if bb.data.getVar("OE_BINCONFIG_EXTRA_MANGLE", d):
+            s += bb.data.getVar("OE_BINCONFIG_EXTRA_MANGLE", d)
     return s
 
 BINCONFIG_GLOB ?= "*-config"
diff --git a/classes/cmake.bbclass b/classes/cmake.bbclass
new file mode 100644
index 0000000000..823635c24d
--- /dev/null
+++ b/classes/cmake.bbclass
@@ -0,0 +1,10 @@
+DEPENDS += " cmake-native "
+
+# We want the staging and installing functions from autotools
+inherit autotools
+
+cmake_do_configure() {
+	cmake . -DCMAKE_INSTALL_PREFIX:PATH=${prefix}
+}
+
+EXPORT_FUNCTIONS do_configure
diff --git a/classes/concatenated-image.bbclass b/classes/concatenated-image.bbclass
new file mode 100644
index 0000000000..5cf8d33c05
--- /dev/null
+++ b/classes/concatenated-image.bbclass
@@ -0,0 +1,38 @@
+
+#
+# define the FLASH_KERNEL_SIZE and FLASH_ROOT_SIZE in your machine.conf,
+# and this class builds a simple, padded concatenated image of
+# <kernel><padding><rootfs> and performs error checking that either
+# kernel or rootfs isn't too large to fit.
+#
+concat_pack_image() {
+	# find latest kernel - is there a more general way to do this?
+	KERNEL=`ls -tr ${DEPLOY_DIR_IMAGE}/${KERNEL_IMAGETYPE}* | tail -n 1`
+	if [ -z "$KERNEL" ]; then
+		oefatal "No kernel found in ${DEPLOY_DIR_IMAGE}. Was expecting a ${KERNEL_IMAGETYPE}\* file."
+		exit 1
+	fi
+	ROOTFS=${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2
+	OUTPUT=${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.flash.img
+	PADFILE=${DEPLOY_DIR_IMAGE}/padfile.zzz
+	KERNEL_SIZE_MAX_DEC=`echo ${FLASH_KERNEL_SIZE} | awk --non-decimal-data '{printf "%d\n", $1}' `
+	ROOT_SIZE_MAX_DEC=`echo ${FLASH_ROOT_SIZE} | awk --non-decimal-data '{printf "%d\n", $1}' `
+	KERNEL_SIZE=`ls -l $KERNEL | awk '{print $5}'`
+	if [ $KERNEL_SIZE -gt $KERNEL_SIZE_MAX_DEC ]; then
+		oefatal "Kernel too large at $KERNEL_SIZE bytes. Max is $KERNEL_SIZE_MAX_DEC."
+		exit 1
+	fi
+	ROOT_SIZE=`ls -l $ROOTFS | awk '{print $5}'`
+	if [ $ROOT_SIZE -gt $ROOT_SIZE_MAX_DEC ]; then
+		oefatal "Rootfs is too large at $ROOT_SIZE bytes. Max is $ROOT_SIZE_MAX_DEC."
+		exit 1
+	fi
+	PAD_SIZE=`echo "$KERNEL_SIZE_MAX_DEC - $KERNEL_SIZE" | bc `
+	dd if=/dev/zero of=$PADFILE bs=$PAD_SIZE count=1 2>>/dev/null
+	cat $KERNEL $PADFILE $ROOTFS > $OUTPUT
+	rm -f $PADFILE
+	ls -l $OUTPUT
+}
+
+IMAGE_POSTPROCESS_COMMAND += "concat_pack_image; "
+
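concat_pack_image() takes both partition sizes from the machine configuration and accepts hexadecimal values thanks to awk --non-decimal-data. A sketch of the machine-side setup, with placeholder sizes for a hypothetical 16 MiB flash split:

    # hypothetical machine.conf entries; the sizes are placeholders
    FLASH_KERNEL_SIZE = "0x200000"
    FLASH_ROOT_SIZE = "0xe00000"

The image recipe then pulls the class in with 'inherit concatenated-image' so the IMAGE_POSTPROCESS_COMMAND hook runs.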
diff --git a/classes/debian.bbclass b/classes/debian.bbclass
index dd0789adae..0afe9fcc39 100644
--- a/classes/debian.bbclass
+++ b/classes/debian.bbclass
@@ -3,6 +3,9 @@
 # before building the current package to make the packages runtime
 # depends are correct
 #
+# Custom library package names can be defined setting
+# DEBIANNAME_ + pkgname to the desired name.
+#
 # Better expressed as ensure all RDEPENDS package before we package
 # This means we can't have circular RDEPENDS/RRECOMMENDS
 
 do_package_write_ipk[rdeptask] = "do_package"
@@ -88,7 +91,10 @@ python debian_package_name_hook () {
     for pkg in packages.split():
         if (bb.data.getVar('PKG_' + pkg, d) or bb.data.getVar('DEBIAN_NOAUTONAME_' + pkg, d)):
             continue
-        if pkg == orig_pkg:
+        debian_pn = bb.data.getVar('DEBIANNAME_' + pkg, d)
+        if debian_pn:
+            newpkg = debian_pn
+        elif pkg == orig_pkg:
             newpkg = pkgname
         else:
             newpkg = pkg.replace(orig_pkg, devname, 1)
diff --git a/classes/efl_base.bbclass b/classes/efl_base.bbclass
index 3865441779..023b2edf74 100644
--- a/classes/efl_base.bbclass
+++ b/classes/efl_base.bbclass
@@ -3,7 +3,7 @@ inherit autotools pkgconfig
 SECTION = "e/libs"
 HOMEPAGE = "http://www.enlightenment.org"
 SRCNAME = "${@bb.data.getVar('PN', d, 1).replace('-native', '')}"
-SRC_URI = "http://download.enlightenment.org/snapshots/2007-07-10/${SRCNAME}-${PV}.tar.gz"
+SRC_URI = "http://download.enlightenment.org/snapshots/2008-01-25/${SRCNAME}-${PV}.tar.gz"
 S = "${WORKDIR}/${SRCNAME}-${PV}"
 
 do_stage() {
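With the debian_package_name_hook change, a recipe can pin the runtime name of one of its packages instead of accepting the automatic Debian-style renaming. An illustrative override (the target name is hypothetical):

    # keep the library package under a fixed name instead of the auto-derived one
    DEBIANNAME_${PN} = "libfoo2"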
diff --git a/classes/image.bbclass b/classes/image.bbclass
index 312307c831..ca9c9458e1 100644
--- a/classes/image.bbclass
+++ b/classes/image.bbclass
@@ -125,6 +125,17 @@ insert_feed_uris () {
 		# insert new feed-sources
 		echo "src/gz $feed_name $feed_uri" >> ${IMAGE_ROOTFS}/etc/ipkg/${feed_name}-feed.conf
 	done
+
+	# Allow to use package deploy directory contents as quick devel-testing
+	# feed. This creates individual feed configs for each arch subdir of those
+	# specified as compatible for the current machine.
+	# NOTE: Development-helper feature, NOT a full-fledged feed.
+	if [ -n "${FEED_DEPLOYDIR_BASE_URI}" ]; then
+		for arch in ${PACKAGE_ARCHS}
+		do
+			echo "src/gz local-$arch ${FEED_DEPLOYDIR_BASE_URI}/$arch" >> ${IMAGE_ROOTFS}/etc/ipkg/local-$arch-feed.conf
+		done
+	fi
 }
 
 log_check() {
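The new block in insert_feed_uris() only runs when FEED_DEPLOYDIR_BASE_URI is set, and then writes one feed line per entry in PACKAGE_ARCHS. A local.conf sketch for pointing a test image at a deploy directory exported over HTTP (the URL is a placeholder):

    # development-only helper feed, served from tmp/deploy/ipk on the build host
    FEED_DEPLOYDIR_BASE_URI = "http://192.168.2.1:8000/ipk"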
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index 473fe6ebbf..97cf036dd9 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -8,28 +8,25 @@
 # -Check the RUNTIME path for the $TMPDIR
 # -Check if .la files wrongly point to workdir
 # -Check if .pc files wrongly point to workdir
-# -Check if packages contains .debug directories or .so files where they should be in -dev or -dbg
+# -Check if packages contains .debug directories or .so files
+#  where they should be in -dev or -dbg
 # -Check if config.log contains traces to broken autoconf tests
-#
 #
 # We need to have the scanelf utility as soon as
-# possible and this is contained within the pax-utils-native
+# possible and this is contained within the pax-utils-native.
+# The package.bbclass can help us here.
 #
-
-
-# We play a special package function
 inherit package
 PACKAGE_DEPENDS += "pax-utils-native desktop-file-utils-native"
-#PACKAGE_DEPENDS += chrpath-native"
 PACKAGEFUNCS += " do_package_qa "
 
 #
 # dictionary for elf headers
 #
-# feel free to add and correct.
+# feel free to add and correct.
 #
 #           TARGET_OS  TARGET_ARCH   MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
 def package_qa_get_machine_dict():
@@ -75,7 +72,7 @@
                         "arm" :       (40,     0,    0,          True,          True),
                         "armeb":      (40,     0,    0,          False,         True),
                       },
-
+
        }
@@ -102,11 +99,10 @@ def package_qa_get_elf(path, bits32):
         ELFDATA2LSB    = 1
         ELFDATA2MSB    = 2
 
-        def my_assert(expectation, result):
+        def my_assert(self, expectation, result):
             if not expectation == result:
-                #print "'%x','%x'" % (ord(expectation), ord(result))
+                #print "'%x','%x' %s" % (ord(expectation), ord(result), self.name)
                 raise Exception("This does not work as expected")
-        my_assert = staticmethod(my_assert)
 
         def __init__(self, name):
             self.name = name
@@ -115,16 +111,16 @@ def package_qa_get_elf(path, bits32):
             self.file = file(self.name, "r")
             self.data = self.file.read(ELFFile.EI_NIDENT+4)
 
-            ELFFile.my_assert(len(self.data), ELFFile.EI_NIDENT+4)
-            ELFFile.my_assert(self.data[0], chr(0x7f) )
-            ELFFile.my_assert(self.data[1], 'E')
-            ELFFile.my_assert(self.data[2], 'L')
-            ELFFile.my_assert(self.data[3], 'F')
+            self.my_assert(len(self.data), ELFFile.EI_NIDENT+4)
+            self.my_assert(self.data[0], chr(0x7f) )
+            self.my_assert(self.data[1], 'E')
+            self.my_assert(self.data[2], 'L')
+            self.my_assert(self.data[3], 'F')
             if bits32 :
-                ELFFile.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS32)) # only 32 bits
+                self.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS32))
             else:
-                ELFFile.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS64)) # only 64 bits
-            ELFFile.my_assert(self.data[ELFFile.EI_VERSION], chr(ELFFile.EV_CURRENT) )
+                self.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS64))
+            self.my_assert(self.data[ELFFile.EI_VERSION], chr(ELFFile.EV_CURRENT) )
 
             self.sex = self.data[ELFFile.EI_DATA]
             if self.sex == chr(ELFFile.ELFDATANONE):
@@ -160,20 +156,19 @@ def package_qa_get_elf(path, bits32):
 
     return ELFFile(path)
 
-#
-#
 # Known Error classes
 # 0 - non dev contains .so
 # 1 - package contains a dangerous RPATH
 # 2 - package depends on debug package
 # 3 - non dbg contains .so
 # 4 - wrong architecture
-# 5 - .la contains installed=yes or reference to the workdir
+# 5 - .la contains installed=yes
 # 6 - .pc contains reference to /usr/include or workdir
-#
-#
+# 7 - the desktop file is not valid
+# 8 - .la contains reference to the workdir
 
 def package_qa_clean_path(path,d):
+    """ Remove the common prefix from the path. In this case it is the TMPDIR"""
     import bb
     return path.replace(bb.data.getVar('TMPDIR',d,True),"")
@@ -183,14 +178,15 @@ def package_qa_make_fatal_error(error_class, name, path,d):
 
     TODO: Load a whitelist of known errors
     """
-    if error_class == 0:
-        return False
-    else:
-        return True
+    return not error_class in [0, 5, 7, 8]
 
 def package_qa_write_error(error_class, name, path, d):
+    """
+    Log the error
+    """
     import bb, os
     if not bb.data.getVar('QA_LOG', d):
+        bb.note("a QA error occured but will not be logged because QA_LOG is not set")
         return
 
     ERROR_NAMES =[
@@ -201,44 +197,45 @@
         "wrong architecture",
         "evil hides inside the .la",
         "evil hides inside the .pc",
+        "the desktop file is not valid",
+        ".la contains reference to the workdir",
     ]
-
     log_path = os.path.join( bb.data.getVar('T', d, True), "log.qa_package" )
     f = file( log_path, "a+")
-    print >> f, "%s, %s, %s" % (ERROR_NAMES[error_class], name, package_qa_clean_path(path,d))
+    print >> f, "%s, %s, %s" % \
+             (ERROR_NAMES[error_class], name, package_qa_clean_path(path,d))
     f.close()
 
+def package_qa_handle_error(error_class, error_msg, name, path, d):
+    import bb
+    bb.error("QA Issue: %s" % error_msg)
+    package_qa_write_error(error_class, name, path, d)
+    return not package_qa_make_fatal_error(error_class, name, path, d)
 
 def package_qa_check_rpath(file,name,d):
     """
    Check for dangerous RPATHs
     """
     import bb, os
+    sane = True
     scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf')
-    #chrpath = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'chrpath')
     bad_dir = bb.data.getVar('TMPDIR', d, True) + "/work"
     bad_dir_test = bb.data.getVar('TMPDIR', d, True)
 
     if not os.path.exists(scanelf):
         bb.fatal("Can not check RPATH, scanelf (part of pax-utils-native) not found")
-    #if not os.path.exists(chrpath):
-    #    bb.fatal("Can not fix RPATH, chrpath (part of chrpath-native) not found")
+
     if not bad_dir in bb.data.getVar('WORKDIR', d, True):
         bb.fatal("This class assumed that WORKDIR is ${TMPDIR}/work... Not doing any check")
 
-    #bb.note("%s -B -F%%r#F %s" % (scanelf,file))
     output = os.popen("%s -B -F%%r#F '%s'" % (scanelf,file))
     txt = output.readline().split()
-    #bb.note("???%s???" % bad_dir_test)
     for line in txt:
-        #bb.note("===%s===" % line)
         if bad_dir in line:
-            package_qa_write_error( 1, name, file, d)
-            bb.error("QA Issue package %s contains bad RPATH %s in file %s" % (name, line, file))
-            #bb.note("Fixing RPATH for you in %s" % file)
-            #os.popen("%s -r /lib %s" % (chrpath,file))
-            #return False
-    return True
+            error_msg = "package %s contains bad RPATH %s in file %s" % (name, line, file)
+            sane = package_qa_handle_error(1, error_msg, name, file, d)
+
+    return sane
 
 def package_qa_check_devdbg(path, name,d):
     """
@@ -251,17 +248,15 @@
 
     if not "-dev" in name:
         if path[-3:] == ".so" and os.path.islink(path):
-            package_qa_write_error( 0, name, path, d )
-            bb.error("QA Issue: non -dev package contains symlink .so: %s path '%s'" % (name, package_qa_clean_path(path,d)))
-            if package_qa_make_fatal_error( 0, name, path, d ):
-                sane = False
+            error_msg = "non -dev package contains symlink .so: %s path '%s'" % \
+                     (name, package_qa_clean_path(path,d))
+            sane = package_qa_handle_error(0, error_msg, name, path, d)
 
     if not "-dbg" in name:
         if '.debug' in path:
-            package_qa_write_error( 3, name, path, d )
-            bb.error("QA Issue: non debug package contains .debug directory: %s path %s" % (name, package_qa_clean_path(path,d)))
-            if package_qa_make_fatal_error( 3, name, path, d ):
-                sane = False
+            error_msg = "non debug package contains .debug directory: %s path %s" % \
+                     (name, package_qa_clean_path(path,d))
+            sane = package_qa_handle_error(3, error_msg, name, path, d)
 
     return sane
@@ -277,9 +272,10 @@ def package_qa_check_arch(path,name,d):
     Check if archs are compatible
     """
     import bb, os
+    sane = True
     target_os   = bb.data.getVar('TARGET_OS',   d, True)
     target_arch = bb.data.getVar('TARGET_ARCH', d, True)
-
+
     # FIXME: Cross package confuse this check, so just skip them
     if bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d):
         return True
@@ -290,29 +286,24 @@
         return True
 
     #if this will throw an exception, then fix the dict above
-    (machine, osabi, abiversion, littleendian, bits32) = package_qa_get_machine_dict()[target_os][target_arch]
+    (machine, osabi, abiversion, littleendian, bits32) \
+        = package_qa_get_machine_dict()[target_os][target_arch]
     elf = package_qa_get_elf(path, bits32)
     try:
         elf.open()
     except:
-        # just for debbugging to check the parser, remove once convinced...
         return True
 
+    # Check the architecture and endiannes of the binary
    if not machine == elf.machine():
-        bb.error("Architecture did not match (%d to %d) on %s" %(machine, elf.machine(), package_qa_clean_path(path,d)))
-        return not package_qa_make_fatal_error( 4, name, path, d )
+        error_msg = "Architecture did not match (%d to %d) on %s" % \
+                 (machine, elf.machine(), package_qa_clean_path(path,d))
+        sane = package_qa_handle_error(4, error_msg, name, path, d)
     elif not littleendian == elf.isLittleEndian():
-        bb.error("Endiannes did not match (%d to %d) on %s" % (littleendian, elf.isLittleEndian(), package_qa_clean_path(path,d)))
-        return not package_qa_make_fatal_error( 4, name, path, d )
-
-    return True
+        error_msg = "Endiannes did not match (%d to %d) on %s" % \
+                 (littleendian, elf.isLittleEndian(), package_qa_clean_path(path,d))
+        sane = package_qa_handle_error(4, error_msg, name, path, d)
 
-def package_qa_check_pcla(path,name,d):
-    """
-    .pc and .la files should not point to the WORKDIR
-    """
-    sane = True
-    # TODO
     return sane
 
 def package_qa_check_desktop(path, name, d):
     """
     Run all desktop files through desktop-file-validate.
     """
     import bb, os
+    sane = True
     if path.endswith(".desktop"):
-        validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True), 'desktop-file-validate')
+        validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True), \
+                   'desktop-file-validate')
         output = os.popen("%s %s" % (validate, path))
-        # This only produces output on errors
         for l in output:
-            bb.error(l.strip())
-    return True
+            sane = package_qa_handle_error(7, l.strip(), name, path, d)
+
+    return sane
 
 def package_qa_check_staged(path,d):
     """
@@ -355,16 +348,17 @@
             path = os.path.join(root,file)
             if file[-2:] == "la":
                 file_content = open(path).read()
-                if installed in file_content or workdir in file_content:
-                    bb.error("QA issue: %s failed sanity test (reference to workdir or installed)" % file )
-                    if package_qa_make_fatal_error( 5, "staging", path, d):
-                        sane = True
+                if installed in file_content:
+                    error_msg = "%s failed sanity test (installed) in path %s" % (file,root)
+                    sane = package_qa_handle_error(5, error_msg, "staging", path, d)
+                if workdir in file_content:
+                    error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
+                    sane = package_qa_handle_error(8, error_msg, "staging", path, d)
             elif file[-2:] == "pc":
                 file_content = open(path).read()
                 if workdir in file_content:
-                    bb.error("QA issue: %s failed sanity test (reference to workdir)" % file )
-                    if package_qa_make_fatal_error( 6, "staging", path, d):
-                        sane = False
+                    error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
+                    sane = package_qa_handle_error(6, error_msg, "staging", path, d)
 
     return sane
@@ -382,7 +376,6 @@ def package_qa_walk(path, funcs, package,d):
 
     return sane
 
-
 def package_qa_check_rdepends(pkg, workdir, d):
     import bb
     sane = True
@@ -414,10 +407,8 @@ def package_qa_check_rdepends(pkg, workdir, d):
         # Now do the sanity check!!!
         for rdepend in rdepends:
             if "-dbg" in rdepend:
-                package_qa_write_error( 2, pkgname, rdepend, d )
-                bb.error("QA issue: %s rdepends on %s" % (pkgname,rdepend))
-                if package_qa_make_fatal_error( 2, pkgname, rdepend, d ):
-                    sane = False
+                error_msg = "%s rdepends on %s" % (pkgname,rdepend)
+                sane = package_qa_handle_error(2, error_msg, pkgname, rdepend, d)
 
     return sane
@@ -431,6 +422,9 @@ python do_package_qa () {
     if not packages:
         return
 
+    checks = [package_qa_check_rpath, package_qa_check_devdbg,
+              package_qa_check_perm, package_qa_check_arch,
+              package_qa_check_desktop]
     walk_sane = True
     rdepends_sane = True
     for package in packages.split():
@@ -440,7 +434,7 @@ python do_package_qa () {
 
         bb.note("Checking Package: %s" % package)
         path = "%s/install/%s" % (workdir, package)
-        if not package_qa_walk(path, [package_qa_check_rpath, package_qa_check_devdbg, package_qa_check_perm, package_qa_check_arch, package_qa_check_desktop], package, d):
+        if not package_qa_walk(path, checks, package, d):
             walk_sane = False
         if not package_qa_check_rdepends(package, workdir, d):
             rdepends_sane = False
@@ -466,7 +460,10 @@ python do_qa_configure() {
     bb.note("Checking sanity of the config.log file")
     import os
     for root, dirs, files in os.walk(bb.data.getVar('WORKDIR', d, True)):
+        statement = "grep 'CROSS COMPILE Badness:' %s > /dev/null" % \
+                    os.path.join(root,"config.log")
         if "config.log" in files:
-            if os.system("grep 'CROSS COMPILE Badness:' %s > /dev/null" % (os.path.join(root,"config.log"))) == 0:
-                bb.fatal("This autoconf log indicates errors, it looked at host includes. Rerun configure task after fixing this. Path was '%s'" % root)
+            if os.system(statement) == 0:
+                bb.fatal("""This autoconf log indicates errors, it looked at host includes.
+Rerun configure task after fixing this. The path was '%s'""" % root)
 }
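package_qa_handle_error() now routes every issue through package_qa_write_error(), and error classes 0, 5, 7 and 8 are reported without failing the build. The log file in ${T}/log.qa_package is only written when QA_LOG is set, e.g. in local.conf (the value itself is not inspected, the variable just has to be defined):

    # keep a per-recipe record of QA issues in ${T}/log.qa_package
    QA_LOG = "1"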
diff --git a/classes/java-library.bbclass b/classes/java-library.bbclass
new file mode 100644
index 0000000000..8aecfef1b9
--- /dev/null
+++ b/classes/java-library.bbclass
@@ -0,0 +1,37 @@
+# Inherit this bbclass for each java recipe that builds a Java library (jar file[s]).
+#
+# It automatically adds important build dependencies, defines JPN (Java Package Name)
+# a package named ${JPN} whose contents are those of ${datadir}/java (the jar location).
+#
+# The JPN is basically lib${PN}-java but takes care of the fact that ${PN} already
+# starts with "lib" and/or ends with "-java". In case the "lib" prefix is part of
+# your package's normal name (e.g. liberator) the guessing is wrong and you have
+# to set JPN manually!
+
+inherit java
+
+def java_package_name(d):
+    import bb;
+
+    pre=""
+    post=""
+
+    pn = bb.data.getVar('PN', d, 1)
+    if not pn.startswith("lib"):
+        pre='lib'
+
+    if not pn.endswith("-java"):
+        post='-java'
+
+    return pre + pn + post
+
+JPN ?= "${@java_package_name(d)}"
+
+DEPENDS_prepend = "virtual/javac-native fastjar-native "
+
+PACKAGES = "${JPN}"
+
+PACKAGE_ARCH_${JPN} = "all"
+
+FILES_${JPN} = "${datadir_java}"
+
diff --git a/classes/java.bbclass b/classes/java.bbclass
new file mode 100644
index 0000000000..7fa6dc1786
--- /dev/null
+++ b/classes/java.bbclass
@@ -0,0 +1,62 @@
+# Defines the commonly used target directories and provides a convenience
+# function to install jar files.
+
+# Jar location on target
+datadir_java ?= ${datadir}/java
+
+# JNI library location on target
+libdir_jni ?= ${libdir}/jni
+
+STAGING_DATADIR_JAVA ?= ${STAGING_DATADIR}/java
+STAGING_LIBDIR_JNI ?= ${STAGING_LIBDIR}/jni
+
+oe_jarinstall() {
+  # Purpose: Install a jar file and create all the given symlinks to it.
+  # Example:
+  # oe_jarinstall foo-1.3.jar foo.jar
+  # Installs foo-1.3.jar and creates symlink foo.jar.
+  #
+  # oe_jarinstall -s foo-1.3.jar foo.jar
+  # Installs foo-1.3.jar to staging and creates symlink foo.jar.
+  #
+  # oe_jarinstall -r foo-1.3.jar foo_1_3.jar foo.jar
+  # Installs foo_1_3.jar as foo-1.3.jar and creates a symlink to this.
+  #
+  dir=${D}${datadir_java}
+  destname=""
+  while [ "$#" -gt 0 ]; do
+    case "$1" in
+    -s)
+      dir=${STAGING_DATADIR_JAVA}
+      ;;
+    -r)
+      shift
+      destname=$1
+      ;;
+    -*)
+      oefatal "oe_jarinstall: unknown option: $1"
+      ;;
+    *)
+      break;
+      ;;
+    esac
+    shift
+  done
+
+  jarname=$1
+  destname=${destname:-`basename $jarname`}
+  shift
+
+  install -d $dir
+  install -m 0644 $jarname $dir/$destname
+
+  # Creates symlinks out of the remaining arguments.
+  while [ "$#" -gt 0 ]; do
+    if [ -e $dir/$1 ]; then
+      oewarn "file was in the way. removing:" $dir/$1
+      rm $dir/$1
+    fi
+    ln -s $destname $dir/$1
+    shift
+  done
+}
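oe_jarinstall's behaviour is documented in the comments above; a recipe-side sketch reusing the example jar names from those comments, with java-library providing the packaging (assumed recipe context, not part of this commit):

    inherit java-library

    do_install() {
        oe_jarinstall ${S}/foo-1.3.jar foo.jar
    }

    do_stage() {
        oe_jarinstall -s ${S}/foo-1.3.jar foo.jar
    }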
diff --git a/classes/kernel-arch.bbclass b/classes/kernel-arch.bbclass
index 9208c3507a..b45f3f9696 100644
--- a/classes/kernel-arch.bbclass
+++ b/classes/kernel-arch.bbclass
@@ -5,7 +5,7 @@
 #
 
 valid_archs = "alpha cris ia64 \
-               x86_64,i386 \
+               x86_64,i386 x86 \
               m68knommu m68k ppc powerpc ppc64 \
               sparc sparc64 \
               arm arm26 \
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index c00d0f3f58..047a3ec047 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -80,25 +80,12 @@ kernel_do_stage() {
 	mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
 	cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/
 
-	if [ -d drivers/crypto ]; then
-		mkdir -p ${STAGING_KERNEL_DIR}/drivers/crypto
-		cp -fR drivers/crypto/* ${STAGING_KERNEL_DIR}/drivers/crypto/
-	fi
-
-	if [ -d include/media ]; then
-		mkdir -p ${STAGING_KERNEL_DIR}/include/media
-		cp -fR include/media/* ${STAGING_KERNEL_DIR}/include/media/
-	fi
-
-	if [ -d include/acpi ]; then
-		mkdir -p ${STAGING_KERNEL_DIR}/include/acpi
-		cp -fR include/acpi/* ${STAGING_KERNEL_DIR}/include/acpi/
-	fi
-
-	if [ -d include/sound ]; then
-		mkdir -p ${STAGING_KERNEL_DIR}/include/sound
-		cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
-	fi
+	for entry in drivers/crypto include/media include/acpi include/sound include/video; do
+		if [ -d $entry ]; then
+			mkdir -p ${STAGING_KERNEL_DIR}/$entry
+			cp -fR $entry/* ${STAGING_KERNEL_DIR}/$entry/
+		fi
+	done
 
 	if [ -d drivers/sound ]; then
 		# 2.4 alsa needs some headers from this directory
@@ -193,6 +180,8 @@ FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
 FILES_kernel-dev = "/boot/System.map* /boot/config*"
 FILES_kernel-vmlinux = "/boot/vmlinux*"
 RDEPENDS_kernel = "kernel-base"
+RRECOMMENDS_kernel-module-hostap-cs += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'
+RRECOMMENDS_kernel-module-orinoco-cs += '${@base_version_less_or_equal("KERNEL_VERSION", "2.6.17", "", "apm-wifi-suspendfix", d)}'
 # Allow machines to override this dependency if kernel image files are
 # not wanted in images as standard
 RDEPENDS_kernel-base ?= "kernel-image"
@@ -244,6 +233,8 @@ module_autoload_ipsec = "ipsec"
 module_autoload_ircomm-tty = "ircomm-tty"
 module_autoload_rfcomm = "rfcomm"
 module_autoload_sa1100-rtc = "sa1100-rtc"
+# sa1100-rtc was renamed in 2.6.23 onwards
+module_autoload_rtc-sa1100 = "rtc-sa1100"
 
 # alias defaults (alphabetically sorted)
 module_conf_af_packet = "alias net-pf-17 af_packet"
@@ -418,3 +409,17 @@ python populate_packages_prepend () {
 		packages.append(metapkg)
 		bb.data.setVar('PACKAGES', ' '.join(packages), d)
 }
+
+# Support checking the kernel size since some kernels need to reside in partitions
+# with a fixed length or there is a limit in transferring the kernel to memory
+do_sizecheck() {
+	if [ ! -z "${KERNEL_IMAGE_MAXSIZE}" ]; then
+		size=`ls -l arch/${ARCH}/boot/${KERNEL_IMAGETYPE} | awk '{ print $5}'`
+		if [ $size -ge ${KERNEL_IMAGE_MAXSIZE} ]; then
+			rm arch/${ARCH}/boot/${KERNEL_IMAGETYPE}
+			die "This kernel (size=$size > ${KERNEL_IMAGE_MAXSIZE}) is too big for your device. Please reduce the size of the kernel by making more of it modular."
+		fi
+	fi
+}
+
+addtask sizecheck before do_install after do_compile
diff --git a/classes/module-base.bbclass b/classes/module-base.bbclass
index da5bd01dae..c98baceeab 100644
--- a/classes/module-base.bbclass
+++ b/classes/module-base.bbclass
@@ -10,6 +10,7 @@ export KERNEL_SOURCE = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-source')
 KERNEL_OBJECT_SUFFIX = "${@[".o", ".ko"][base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion') > "2.6.0"]}"
 KERNEL_CCSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ccsuffix')}"
 KERNEL_LDSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ldsuffix')}"
+KERNEL_ARSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-arsuffix')}"
 
 # Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
 # specific options necessary for building the kernel and modules.
@@ -17,9 +18,12 @@ TARGET_CC_KERNEL_ARCH ?= ""
 HOST_CC_KERNEL_ARCH ?= "${TARGET_CC_KERNEL_ARCH}"
 TARGET_LD_KERNEL_ARCH ?= ""
 HOST_LD_KERNEL_ARCH ?= "${TARGET_LD_KERNEL_ARCH}"
+TARGET_AR_KERNEL_ARCH ?= ""
+HOST_AR_KERNEL_ARCH ?= "${TARGET_AR_KERNEL_ARCH}"
 
 KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX} ${HOST_CC_KERNEL_ARCH}"
 KERNEL_LD = "${LD}${KERNEL_LDSUFFIX} ${HOST_LD_KERNEL_ARCH}"
+KERNEL_AR = "${AR}${KERNEL_ARSUFFIX} ${HOST_AR_KERNEL_ARCH}"
 
 # kernel modules are generally machine specific
 PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/classes/module.bbclass b/classes/module.bbclass
index 7083076b5f..1d0f1dd4f8 100644
--- a/classes/module.bbclass
+++ b/classes/module.bbclass
@@ -9,7 +9,8 @@ module_do_compile() {
 		   KERNEL_SRC=${STAGING_KERNEL_DIR} \
 		   KERNEL_VERSION=${KERNEL_VERSION} \
 		   CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
-		   ${MAKE_TARGETS}
+		   AR="${KERNEL_AR}" \
+		   ${MAKE_TARGETS}
 }
 
 module_do_install() {
diff --git a/classes/native.bbclass b/classes/native.bbclass
index d51c675909..934303cc0c 100644
--- a/classes/native.bbclass
+++ b/classes/native.bbclass
@@ -49,9 +49,9 @@ export STRIP = "${HOST_PREFIX}strip"
 
 # Path prefixes
-base_prefix = "${exec_prefix}"
-prefix = "${STAGING_DIR}"
-exec_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"
+base_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"
+prefix = "${base_prefix}"
+exec_prefix = "${base_prefix}"
 
 # Base paths
 base_bindir = "${base_prefix}/bin"
diff --git a/classes/nslu2-mirrors.bbclass b/classes/nslu2-mirrors.bbclass
index 1181edc716..b5ad01adea 100644
--- a/classes/nslu2-mirrors.bbclass
+++ b/classes/nslu2-mirrors.bbclass
@@ -1,4 +1,4 @@
 MIRRORS_append () {
-ftp://.*/.*/  http://sources.nslu2-linux.org/sources/
-http://.*/.*/ http://sources.nslu2-linux.org/sources/
+ftp://.*/.*  http://sources.nslu2-linux.org/sources/
+https?://.*/.* http://sources.nslu2-linux.org/sources/
 }
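do_sizecheck() is a no-op unless KERNEL_IMAGE_MAXSIZE is defined; the comparison uses the byte count reported by ls -l. A machine.conf sketch for a device with a 1 MiB kernel partition (the figure is a placeholder):

    # abort the build if the kernel image no longer fits its partition
    KERNEL_IMAGE_MAXSIZE = "1048576"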
diff --git a/classes/nylon-mirrors.bbclass b/classes/nylon-mirrors.bbclass
index 2986bd8f80..25a8b87d9b 100644
--- a/classes/nylon-mirrors.bbclass
+++ b/classes/nylon-mirrors.bbclass
@@ -1,6 +1,6 @@
 MIRRORS_append () {
 ftp://.*/.*/	http://meshcube.org/nylon/stable/sources/
-http://.*/.*/	http://meshcube.org/nylon/stable/sources/
+https?://.*/.*/	http://meshcube.org/nylon/stable/sources/
 ftp://.*/.*/	http://meshcube.org/nylon/unstable/sources/
-http://.*/.*/	http://meshcube.org/nylon/unstable/sources/
+https?://.*/.*/	http://meshcube.org/nylon/unstable/sources/
 }
\ No newline at end of file
diff --git a/classes/openmoko-base.bbclass b/classes/openmoko-base.bbclass
index 8643daa7a4..184477b1c0 100644
--- a/classes/openmoko-base.bbclass
+++ b/classes/openmoko-base.bbclass
@@ -1,6 +1,6 @@
 HOMEPAGE = "http://www.openmoko.org"
 LICENSE ?= "GPL"
-OPENMOKO_RELEASE ?= "OM-2007"
+OPENMOKO_RELEASE ?= "OM-2007.2"
 OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"
 
 def openmoko_base_get_subdir(d):
diff --git a/classes/opie_i18n.bbclass b/classes/opie_i18n.bbclass
index cb3d07de75..107d090451 100644
--- a/classes/opie_i18n.bbclass
+++ b/classes/opie_i18n.bbclass
@@ -13,7 +13,7 @@ SRC_URI += "${HANDHELDS_CVS};module=opie/i18n"
 DEPENDS += "opie-i18n"
 
 die () {
-	echo -e "opie_18n: ERROR: $1"
+	printf "opie_18n: ERROR: $1\n"
 	exit 1
 }
 
@@ -60,21 +60,21 @@ do_build_opie_i18n () {
 		package_name2="`echo "${PN}"| sed "s/^opie\-//;s/\-//"`"
 		test "$package_name" != "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts ${package_name2}.ts lib${package_name2}.ts opie-${package_name2}.ts"
 		test "$package_name" = "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts"
-		echo -e "I18N Datafiles: ${I18N_FILES} (auto-detected)\nYou can overide the auto-detection by setting I18N_FILES in your .oe file"
+		printf "I18N Datafiles: ${I18N_FILES} (auto-detected)\nYou can overide the auto-detection by setting I18N_FILES in your .oe file\n"
 	else
 		echo "I18N Datafiles: ${I18N_FILES} (provided by .bb)"
 	fi
 
 	rm -f "${WORKDIR}/FILES.tmp" "${WORKDIR}/PACKAGES.tmp"
 
-	echo -e "\nFILES is set to [${FILES}]\n"
+	printf "\nFILES is set to [${FILES}]\n\n"
 
 	for file in ${I18N_FILES}
 	do
 		echo "Working on [$file]"
 		for ts_file in `ls -1 */*.ts | egrep "/$file"`
 		do
-			echo -e "\tCompiling [$ts_file]"
+			printf "\tCompiling [$ts_file]\n"
 			cd "${WORKDIR}/i18n/`dirname $ts_file`" || die "[${WORKDIR}/i18n/`dirname $ts_file`] not found"
 			opie-lrelease "`basename $ts_file`" || die "lrelease failed! Make sure that <inherit opie_i18n> or <inherit opie> is *below* <DEPENDS =>!"
 
@@ -82,7 +82,7 @@ do_build_opie_i18n () {
 			# to allow packaging as "_" is not allowed in a package name
 			lang="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"`"
 			lang_sane="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"|sed s/\_/\-/`"
-			echo -e "\tPackaging [`basename $ts_file`] for language [$lang]"
+			printf "\tPackaging [`basename $ts_file`] for language [$lang]\n"
 
 			install -d ${D}${palmtopdir}/i18n/$lang
 			install -m 0644 ${WORKDIR}/i18n/$lang/.directory ${D}${palmtopdir}/i18n/$lang/
@@ -93,14 +93,14 @@ do_build_opie_i18n () {
 			# function do_build_opie_i18n_data() which sets the variables FILES_* and
 			# PACKAGES as needed.
 			echo -n "${PN}-${lang_sane} " >> "${WORKDIR}/PACKAGES.tmp"
-			echo -e "${PN}-${lang_sane}#${palmtopdir}/i18n/$lang" >> "${WORKDIR}/FILES.tmp"
+			printf "${PN}-${lang_sane}#${palmtopdir}/i18n/$lang" >> "${WORKDIR}/FILES.tmp\n"
 
 			ts_found_something=1
 		done
 
 		if test "$ts_found_something" != 1
 		then
-			echo -e "\tNo translations found"
+			printf "\tNo translations found\n"
 		else
 			ts_found_something=""
 			ts_found="$ts_found $file"
@@ -109,7 +109,7 @@ do_build_opie_i18n () {
 
 		# Only used for debugging purposes
 		test "${I18N_STATS}" = 1 && cd "${WORKDIR}/i18n"
-		echo -e "Completed [$file]\n\n"
+		printf "Completed [$file]\n\n\n"
 	done
 
 	qt_dirs="apps bin etc lib pics plugins share sounds"
@@ -125,7 +125,7 @@ do_build_opie_i18n () {
 	if (echo "${FILES}" | egrep "${palmtopdir}/? |${palmtopdir}/?$") &>/dev/null
 	then
 		echo "NOTE: FILES was set to ${palmtopdir} which would include the i18n directory"
-		echo -e "\n\nI'll remove ${palmtopdir} from FILES and replace it with all directories"
+		printf "\n\nI'll remove ${palmtopdir} from FILES and replace it with all directories\n"
 		echo "below QtPalmtop, except i18n ($qt_dirs). See classes/opie_i18n.oeclass for details"
 
 		# Removes /opt/QtPalmtop from FILES but keeps /opt/QtPalmtop/$some_dir
@@ -138,7 +138,7 @@ do_build_opie_i18n () {
 	if test -z "${FILES}"
 	then
 		echo "NOTE:"
-		echo -e "Since FILES is empty, i'll add all directories below ${palmtopdir} to it,\nexcluding i18n: ( $qt_dirs )"
+		printf "Since FILES is empty, i'll add all directories below ${palmtopdir} to it,\nexcluding i18n: ( $qt_dirs )\n"
 		echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
 	fi
diff --git a/classes/oplinux-mirrors.bbclass b/classes/oplinux-mirrors.bbclass
index 076cd6f1fe..c76e822ce3 100644
--- a/classes/oplinux-mirrors.bbclass
+++ b/classes/oplinux-mirrors.bbclass
@@ -3,12 +3,12 @@ MIRRORS_append () {
 ftp://.*/.*/    http://digital-opsis.com/oplinux/stable/sources/
-http://.*/.*/   http://digital-opsis.com/oplinux/stable/sources/
+https?://.*/.*/ http://digital-opsis.com/oplinux/stable/sources/
 
 ftp://.*/.*/    http://digital-opsis.com/oplinux/unstable/sources/
-http://.*/.*/   http://digital-opsis.com/oplinux/unstable/sources/
+https?://.*/.*/ http://digital-opsis.com/oplinux/unstable/sources/
 
 ftp://.*/.*/    http://digital-opsis.com/oplinux-uclibc/stable/sources/
-http://.*/.*/   http://digital-opsis.com/oplinux-uclibc/stable/sources/
+https?://.*/.*/ http://digital-opsis.com/oplinux-uclibc/stable/sources/
 
 ftp://.*/.*/    http://digital-opsis.com/oplinux-uclibc/unstable/sources/
-http://.*/.*/   http://digital-opsis.com/oplinux-uclibc/unstable/sources/
+https?://.*/.*/ http://digital-opsis.com/oplinux-uclibc/unstable/sources/
 }
diff --git a/classes/own-mirrors.bbclass b/classes/own-mirrors.bbclass
index 32763ed24f..e8a0f481d7 100644
--- a/classes/own-mirrors.bbclass
+++ b/classes/own-mirrors.bbclass
@@ -1,4 +1,4 @@
 PREMIRRORS() {
-http://.*/.* ${SOURCE_MIRROR_URL}
+https?://.*/.* ${SOURCE_MIRROR_URL}
 ftp://.*/.* ${SOURCE_MIRROR_URL}
 }
diff --git a/classes/package.bbclass b/classes/package.bbclass
index 67aeb33a13..ec8c3d97e2 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -149,6 +149,12 @@ def runstrip(file, d):
         bb.debug(1, "runstrip: skip %s" % file)
         return 0
 
+    # If the file is in a .debug directory it was already stripped,
+    # don't do it again...
+    if os.path.dirname(file).endswith(".debug"):
+        bb.note("Already run strip")
+        return 0
+
     strip = bb.data.getVar("STRIP", d, 1)
     objcopy = bb.data.getVar("OBJCOPY", d, 1)
 
@@ -694,6 +700,16 @@ python package_do_shlibs() {
 				for l in lines:
 					shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
 
+	assumed_libs = bb.data.getVar('ASSUME_SHLIBS', d, 1)
+	if assumed_libs:
+		for e in assumed_libs.split():
+			l, dep_pkg = e.split(":")
+			lib_ver = None
+			dep_pkg = dep_pkg.rsplit("_", 1)
+			if len(dep_pkg) == 2:
+				lib_ver = dep_pkg[1]
+			dep_pkg = dep_pkg[0]
+			shlib_provider[l] = (dep_pkg, lib_ver)
 	for pkg in packages.split():
 		bb.debug(2, "calculating shlib requirements for %s" % pkg)
 
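Each ASSUME_SHLIBS entry is split on ':' into a library name and the package assumed to provide it, and the package part may carry an optional '_version' suffix. A configuration sketch matching that format (library and package names are examples only):

    # declare providers for libraries that no packaged shlibs data covers
    ASSUME_SHLIBS = "libc.so.6:libc6 libEGL.so.1:libegl-drivers_1.0"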
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index 16d6050573..087bbcbfb4 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -323,4 +323,5 @@ python do_package_write_ipk () {
 	bb.build.exec_func("do_package_ipk", d)
 }
 do_package_write_ipk[dirs] = "${D}"
+do_package_write_ipk[depends] = "ipkg-utils-native:do_populate_staging"
 addtask package_write_ipk before do_package_write after do_package
diff --git a/classes/patch.bbclass b/classes/patch.bbclass
index 0cc202820f..6f83d9c88b 100644
--- a/classes/patch.bbclass
+++ b/classes/patch.bbclass
@@ -526,7 +526,7 @@ python patch_do_patch() {
 				bb.note("Patch '%s' applies to earlier revisions" % pname)
 				continue
 
-		bb.note("Applying patch '%s'" % pname)
+		bb.note("Applying patch '%s' (%s)" % (pname, unpacked))
 		try:
 			patchset.Import({"file":unpacked, "remote":url, "strippath": pnum}, True)
 		except:
diff --git a/classes/qmake2.bbclass b/classes/qmake2.bbclass
index d0f59d2cd4..cf3419f824 100644
--- a/classes/qmake2.bbclass
+++ b/classes/qmake2.bbclass
@@ -5,8 +5,7 @@ inherit qmake_base
 
 DEPENDS_prepend = "qmake2-native uicmoc4-native "
 
-export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt4"
-export QMAKESPEC = "${QTDIR}/mkspecs/${TARGET_OS}-oe-g++"
+export QMAKESPEC = "${CROSS_DATADIR}/qt4/mkspecs/${TARGET_OS}-oe-g++"
 export OE_QMAKE_UIC = "${STAGING_BINDIR_NATIVE}/uic4"
 export OE_QMAKE_UIC3 = "${STAGING_BINDIR_NATIVE}/uic34"
 export OE_QMAKE_MOC = "${STAGING_BINDIR_NATIVE}/moc4"
@@ -14,7 +13,7 @@ export OE_QMAKE_RCC = "${STAGING_BINDIR_NATIVE}/rcc4"
 export OE_QMAKE_QMAKE = "${STAGING_BINDIR_NATIVE}/qmake2"
 export OE_QMAKE_LINK = "${CXX}"
 export OE_QMAKE_CXXFLAGS = "${CXXFLAGS}"
-export OE_QMAKE_INCDIR_QT = "${QTDIR}/include"
-export OE_QMAKE_LIBDIR_QT = "${QTDIR}/lib"
+export OE_QMAKE_INCDIR_QT = "${STAGING_INCDIR}/qt4"
+export OE_QMAKE_LIBDIR_QT = "${STAGING_LIBDIR}/qt4"
 export OE_QMAKE_LIBS_QT = "qt"
 export OE_QMAKE_LIBS_X11 = "-lXext -lX11 -lm"
diff --git a/classes/qpf.bbclass b/classes/qpf.bbclass
deleted file mode 100644
index 67761bd4be..0000000000
--- a/classes/qpf.bbclass
+++ /dev/null
@@ -1,32 +0,0 @@
-PACKAGE_ARCH = "all"
-
-do_configure() {
-	:
-}
-
-do_compile() {
-	:
-}
-
-pkg_postinst_fonts() {
-. /etc/profile
-${sbindir}/update-qtfontdir
-}
-
-pkg_postrm_fonts() {
-. /etc/profile
-${sbindir}/update-qtfontdir -f
-}
-
-python populate_packages_prepend() {
-	postinst = bb.data.getVar('pkg_postinst_fonts', d, 1)
-	postrm = bb.data.getVar('pkg_postrm_fonts', d, 1)
-	fontdir = bb.data.getVar('palmtopdir', d, 1) + '/lib/fonts'
-	pkgregex = "^([a-z-]*_[0-9]*).*.qpf$"
-	pkgpattern = bb.data.getVar('QPF_PKGPATTERN', d, 1) or 'qpf-%s'
-	pkgdescription = bb.data.getVar('QPF_DESCRIPTION', d, 1) or 'QPF font %s'
-
-	do_split_packages(d, root=fontdir, file_regex=pkgregex, output_pattern=pkgpattern,
-		description=pkgdescription, postinst=postinst, postrm=postrm, recursive=True, hook=None,
-		extra_depends='qpf-font-common')
-}
diff --git a/classes/qtopia4core.bbclass b/classes/qtopia4core.bbclass
index 86bc7afc2f..fd8939a23f 100644
--- a/classes/qtopia4core.bbclass
+++ b/classes/qtopia4core.bbclass
@@ -4,8 +4,9 @@ inherit qmake2
 #
 # override variables set by qmake-base to compile QtopiaCore apps
 #
-export OE_QMAKE_INCDIR_QT = "${STAGING_INCDIR}/qtopiacore4"
-export OE_QMAKE_LIBDIR_QT = "${STAGING_LIBDIR}/qtopiacore4/"
+export OE_QMAKE_INCDIR_QT = "${STAGING_INCDIR}/qtopia"
+export OE_QMAKE_LIBDIR_QT = "${STAGING_LIBDIR}/qtopia"
 export OE_QMAKE_LIBS_QT = "qt"
 export OE_QMAKE_LIBS_X11 = ""
-EXTRA_QMAKEVARS_POST += "LIBS+=-lQtNetwork "
+export OE_QMAKE_EXTRA_MODULES = "network"
+EXTRA_QMAKEVARS_PRE += " QT_LIBINFIX=E "
diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass
index 935ef6e3f7..c2c4a764e4 100644
--- a/classes/rootfs_deb.bbclass
+++ b/classes/rootfs_deb.bbclass
@@ -10,8 +10,12 @@ fakeroot rootfs_deb_do_rootfs () {
 	mkdir -p ${IMAGE_ROOTFS}/var/dpkg/info
 	mkdir -p ${IMAGE_ROOTFS}/var/dpkg/updates
 
+	mkdir -p ${STAGING_ETCDIR_NATIVE}/apt/
+
 	rm -f ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev
 	rm -f ${STAGING_ETCDIR_NATIVE}/apt/preferences
+	> ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev
+	> ${STAGING_ETCDIR_NATIVE}/apt/preferences
 	> ${IMAGE_ROOTFS}/var/dpkg/status
 	> ${IMAGE_ROOTFS}/var/dpkg/available
 	# > ${STAGING_DIR}/var/dpkg/status
@@ -36,9 +40,9 @@ fakeroot rootfs_deb_do_rootfs () {
 		priority=$(expr $priority + 5)
 	done
 
-	tac ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev > ${STAGING_ETCDIR_NATIVE}/apt/sources.list
+	tac ${STAGING_ETCDIR_NATIVE}/apt/sources.list.rev > ${STAGING_DIR}/etc/apt/sources.list
 
-	cat "${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample" \
+	cat "${STAGING_DIR}/etc/apt/apt.conf.sample" \
 		| sed -e 's#Architecture ".*";#Architecture "${TARGET_ARCH}";#' \
 		> "${STAGING_ETCDIR_NATIVE}/apt/apt-rootfs.conf"
 
@@ -141,7 +145,7 @@ rootfs_deb_log_check() {
 		if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
 		then
 			echo "log_check: There were error messages in the logfile"
-			echo -e "log_check: Matched keyword: [$keyword_die]\n"
+			printf "log_check: Matched keyword: [$keyword_die]\n"
 			echo "$lf_txt" | grep -v log_check | grep -C 5 -i "$keyword_die"
 			echo ""
 			do_exit=1
diff --git a/classes/rootfs_ipk.bbclass b/classes/rootfs_ipk.bbclass
index c6e2099e7c..28ba03d675 100644
--- a/classes/rootfs_ipk.bbclass
+++ b/classes/rootfs_ipk.bbclass
@@ -71,7 +71,7 @@ rootfs_ipk_log_check() {
 		if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
 		then
 			echo "log_check: There were error messages in the logfile"
-			echo -e "log_check: Matched keyword: [$keyword_die]\n"
+			printf "log_check: Matched keyword: [$keyword_die]\n"
 			echo "$lf_txt" | grep -v log_check | grep -i "$keyword_die" -C1
 			echo ""
 			do_exit=1
diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass
index 479abce7fa..9994febf0d 100644
--- a/classes/sanity.bbclass
+++ b/classes/sanity.bbclass
@@ -24,14 +24,6 @@ def check_conf_exists(fn, data):
 			return True
 	return False
 
-def check_app_exists(app, d):
-	from bb import which, data
-
-	app = data.expand(app, d)
-	path = data.getVar('PATH', d)
-	return len(which(path, app)) != 0
-
-
 def check_sanity(e):
 	from bb import note, error, data, __version__
 	from bb.event import Handled, NotHandled, getName
@@ -70,8 +62,9 @@ def check_sanity(e):
 	if "diffstat-native" not in assume_provided:
 		messages = messages + 'Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n'
 
-	# Check that the MACHINE is valid
-	if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
+	# Check that the MACHINE is valid, if it is set
+	if data.getVar('MACHINE', e.data, True):
+		if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
 		messages = messages + 'Please set a valid MACHINE in your local.conf\n'
 
 	# Check that the DISTRO is valid
diff --git a/classes/scons.bbclass b/classes/scons.bbclass
index 534b3bd4c7..6d0a783dc2 100644
--- a/classes/scons.bbclass
+++ b/classes/scons.bbclass
@@ -6,6 +6,7 @@ scons_do_compile() {
 }
 
 scons_do_install() {
+	install -d ${D}${prefix}
 	${STAGING_BINDIR_NATIVE}/scons PREFIX=${D}${prefix} prefix=${D}${prefix} install || \
 	oefatal "scons install execution failed."
 }
diff --git a/classes/sdl.bbclass b/classes/sdl.bbclass
index d478d97f18..23cbf10919 100644
--- a/classes/sdl.bbclass
+++ b/classes/sdl.bbclass
@@ -4,41 +4,41 @@
 
 DEPENDS += "virtual/libsdl libsdl-mixer libsdl-image"
 
-APPDESKTOP ?= "${PN}.desktop"
+APPDESKTOP ?= "${WORKDIR}/${PN}.desktop"
 APPNAME ?= "${PN}"
-APPIMAGE ?= "${PN}.png"
+APPIMAGE ?= "${WORKDIR}/${PN}.png"
 
 sdl_do_sdl_install() {
-	install -d ${D}${palmtopdir}/bin
-	install -d ${D}${palmtopdir}/pics
-	install -d ${D}${palmtopdir}/apps/Games
-	ln -sf ${bindir}/${APPNAME} ${D}${palmtopdir}/bin/${APPNAME}
-	install -m 0644 ${APPIMAGE} ${D}${palmtopdir}/pics/${PN}.png
+	install -d ${D}${datadir}/applications
+	install -d ${D}${datadir}/pixmaps
+
+	install -m 0644 ${APPIMAGE} ${D}${datadir}/pixmaps/${PN}.png
 
 	if [ -e "${APPDESKTOP}" ]
 	then
-		echo ${APPDESKTOP} present, installing to palmtopdir...
-		install -m 0644 ${APPDESKTOP} ${D}${palmtopdir}/apps/Games/${PN}.desktop
+		echo ${APPDESKTOP} present, using it...
+		install -m 0644 ${APPDESKTOP} ${D}${datadir}/applications/
 	else
 		echo ${APPDESKTOP} not present, creating one on-the-fly...
-		cat >${D}${palmtopdir}/apps/Games/${PN}.desktop <<EOF
+		cat >${D}${datadir}/applications/${PN}.desktop <<EOF
 [Desktop Entry]
-Note=Auto Generated... this may be not what you want
+Name=${PN}
 Comment=${DESCRIPTION}
+Note=Auto Generated by OE SDL bbclass
 Exec=${APPNAME}
 Icon=${PN}.png
 Type=Application
-Name=${PN}
+Categories=Games
 EOF
 	fi
 }
 
 EXPORT_FUNCTIONS do_sdl_install
 
-addtask sdl_install after do_compile before do_populate_staging
+addtask sdl_install after do_install before do_package
 
-SECTION = "x11/games"
-SECTION_${PN}-opie = "opie/games"
+#SECTION = "x11/games"
+#SECTION_${PN}-opie = "opie/games"
 
-PACKAGES += "${PN}-opie"
-RDEPENDS_${PN}-opie += "${PN}"
-FILES_${PN}-opie = "${palmtopdir}"
+#PACKAGES += "${PN}-opie"
+#RDEPENDS_${PN}-opie += "${PN}"
+#FILES_${PN}-opie = "${palmtopdir}"
diff --git a/classes/sip.bbclass b/classes/sip.bbclass
index 6f77f460dc..ae186bd04a 100644
--- a/classes/sip.bbclass
+++ b/classes/sip.bbclass
@@ -4,7 +4,7 @@
 # yes, python-sip is actually a build-time dependency, since
 # the recipe installs sip.h
 
-DEPENDS =+ "sip-native python-sip"
+DEPENDS += "sip-native python-sip"
 
 # default stuff, do not uncomment
 # EXTRA_SIPTAGS = "-tWS_X11 -tQt_4_3_0"
diff --git a/classes/sourcepkg.bbclass b/classes/sourcepkg.bbclass
index 390d3684d4..bbc9f187ec 100644
--- a/classes/sourcepkg.bbclass
+++ b/classes/sourcepkg.bbclass
@@ -106,6 +106,6 @@ EXPORT_FUNCTIONS do_create_orig_tgz do_archive_bb do_dumpdata do_create_diff_gz
 
 addtask create_orig_tgz after do_unpack before do_patch
 addtask archive_bb after do_patch before do_dumpdata
-addtask dumpdata after archive_bb before do_create_diff_gz
+addtask dumpdata after do_archive_bb before do_create_diff_gz
 addtask create_diff_gz after do_dump_data before do_configure
diff --git a/classes/xilinx-bsp.bbclass b/classes/xilinx-bsp.bbclass
index fd09946061..f657e5be94 100644
--- a/classes/xilinx-bsp.bbclass
+++ b/classes/xilinx-bsp.bbclass
@@ -34,7 +34,7 @@ fi
 case "${XILINX_BOARD}" in
 	ML403)
 		oenote "ML403 board setup"
-		cp -a ${XILINX_BSP_PATH}/ppc405_0/libsrc/linux_2_6_v1_00_a/linux/arch/ppc/platforms/4xx/xparameters/xparameters_ml40x.h \
+		cp -pPR ${XILINX_BSP_PATH}/ppc405_0/libsrc/linux_2_6_v1_00_a/linux/arch/ppc/platforms/4xx/xparameters/xparameters_ml40x.h \
 		${S}/arch/ppc/platforms/4xx/xparameters/xparameters_ml403.h
 		;;
