author     Koen Kooi <koen@openembedded.org>                                 2005-06-30 08:19:37 +0000
committer  OpenEmbedded Project <openembedded-devel@lists.openembedded.org>  2005-06-30 08:19:37 +0000
commit     c8e5702127e507e82e6f68a4b8c546803accea9d (patch)
tree       00583491f40ecc640f2b28452af995e3a63a09d7 /classes
parent     87ec8ca4d2e2eb4d1c1e1e1a6b46a395d56805b9 (diff)
import clean BK tree at cset 1.3670
Diffstat (limited to 'classes')
-rw-r--r--  classes/.mtn2git_empty                  0
-rw-r--r--  classes/base_srpm.bbclass              20
-rw-r--r--  classes/ccache.inc                     11
-rw-r--r--  classes/cml1.bbclass                    8
-rw-r--r--  classes/cpan.bbclass                   20
-rw-r--r--  classes/debian.bbclass                 93
-rw-r--r--  classes/gconf.bbclass                  57
-rw-r--r--  classes/gettext.bbclass                12
-rw-r--r--  classes/gpe.bbclass                    15
-rw-r--r--  classes/icecc.bbclass                 156
-rw-r--r--  classes/kernel-arch.bbclass            26
-rw-r--r--  classes/lib_package.bbclass             9
-rw-r--r--  classes/mozilla.bbclass                53
-rw-r--r--  classes/multimachine.inc                3
-rw-r--r--  classes/oebuildstamp.bbclass           16
-rw-r--r--  classes/oelint.bbclass                174
-rw-r--r--  classes/pkg_metainfo.bbclass           22
-rw-r--r--  classes/qmake-base.bbclass             42
-rw-r--r--  classes/qmake.bbclass                  57
-rw-r--r--  classes/qt3x11.bbclass                 11
-rw-r--r--  classes/qt4x11.bbclass                 11
-rw-r--r--  classes/rpm_core.bbclass               16
-rw-r--r--  classes/sdk.bbclass                    22
-rw-r--r--  classes/sdl.bbclass                    27
-rw-r--r--  classes/src_distribute_local.bbclass   31
-rw-r--r--  classes/srec.bbclass                   26
-rw-r--r--  classes/tinderclient.bbclass          117
-rw-r--r--  classes/tmake.bbclass                  77
-rw-r--r--  classes/update-alternatives.bbclass    33
29 files changed, 1165 insertions, 0 deletions
diff --git a/classes/.mtn2git_empty b/classes/.mtn2git_empty
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/classes/.mtn2git_empty
diff --git a/classes/base_srpm.bbclass b/classes/base_srpm.bbclass
index e69de29bb2..aea6335278 100644
--- a/classes/base_srpm.bbclass
+++ b/classes/base_srpm.bbclass
@@ -0,0 +1,20 @@
+inherit base package rpm_core
+
+SPECFILE="${RPMBUILDPATH}/SPECS/${PN}.spec"
+
+base_srpm_do_unpack() {
+	test -e ${SRPMFILE} || die "Source rpm \"${SRPMFILE}\" does not exist"
+ if ! test -e ${SPECFILE}; then
+ ${RPM} -i ${SRPMFILE}
+ fi
+ test -e ${SPECFILE} || die "Spec file \"${SPECFILE}\" does not exist"
+ ${RPMBUILD} -bp ${SPECFILE}
+}
+
+base_srpm_do_compile() {
+ ${RPMBUILD} -bc ${SPECFILE}
+}
+
+base_srpm_do_install() {
+ ${RPMBUILD} -bi ${SPECFILE}
+}
diff --git a/classes/ccache.inc b/classes/ccache.inc
index e69de29bb2..5e9356104b 100644
--- a/classes/ccache.inc
+++ b/classes/ccache.inc
@@ -0,0 +1,11 @@
+# Make ccache use a TMPDIR-specific cache directory when building with the
+# cross-compiler, since that cache isn't likely to be useful with any other
+# toolchain than the one we just built, and would otherwise push more useful
+# entries out of the default cache.
+
+CCACHE_DIR_TARGET = "${TMPDIR}/ccache"
+
+python () {
+ if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
+ bb.data.setVar('CCACHE_DIR', '${CCACHE_DIR_TARGET}', d)
+ bb.data.setVarFlag('CCACHE_DIR', 'export', '1', d)
+}
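
As a rough usage sketch (illustration only, not part of this commit, and assuming the openembedded directory is on BBPATH), a build configuration could pull the fragment in like this; with it in place, target recipes get CCACHE_DIR exported as ${TMPDIR}/ccache while native and cross recipes keep ccache's default cache:

    # hypothetical conf/local.conf snippet
    require classes/ccache.inc
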
diff --git a/classes/cml1.bbclass b/classes/cml1.bbclass
index e69de29bb2..79218b4a12 100644
--- a/classes/cml1.bbclass
+++ b/classes/cml1.bbclass
@@ -0,0 +1,8 @@
+cml1_do_configure() {
+ set -e
+ unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
+ oe_runmake oldconfig
+}
+
+EXPORT_FUNCTIONS do_configure
+addtask configure after do_unpack do_patch before do_compile
diff --git a/classes/cpan.bbclass b/classes/cpan.bbclass
index e69de29bb2..853abfd1b3 100644
--- a/classes/cpan.bbclass
+++ b/classes/cpan.bbclass
@@ -0,0 +1,20 @@
+FILES_${PN} += '${libdir}/perl5'
+
+cpan_do_configure () {
+ perl Makefile.PL
+ if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
+ . ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
+ sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
+ mv Makefile.new Makefile
+ fi
+}
+
+cpan_do_compile () {
+ oe_runmake PASTHRU_INC="${CFLAGS}"
+}
+
+cpan_do_install () {
+ oe_runmake install_vendor
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
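
For illustration, a Perl-module recipe using this class might look roughly like the following sketch; the module name, version and SRC_URI are hypothetical, not something this commit defines:

    DESCRIPTION = "Hypothetical CPAN module recipe sketch"
    SECTION = "libs"
    LICENSE = "Perl"
    PV = "1.23"
    SRC_URI = "http://www.cpan.org/modules/by-module/Foo/Foo-Bar-${PV}.tar.gz"
    S = "${WORKDIR}/Foo-Bar-${PV}"

    inherit cpan
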
diff --git a/classes/debian.bbclass b/classes/debian.bbclass
index e69de29bb2..2480ffd664 100644
--- a/classes/debian.bbclass
+++ b/classes/debian.bbclass
@@ -0,0 +1,93 @@
+python debian_package_name_hook () {
+ import glob, copy, stat, errno, re
+
+ workdir = bb.data.getVar('WORKDIR', d, 1)
+ packages = bb.data.getVar('PACKAGES', d, 1)
+
+ def socrunch(s):
+ s = s.lower().replace('_', '-')
+ m = re.match("^(.*)(.)\.so\.(.*)$", s)
+ if m is None:
+ return None
+ if m.group(2) in '0123456789':
+ bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
+ else:
+ bin = m.group(1) + m.group(2) + m.group(3)
+ dev = m.group(1) + m.group(2)
+ return (bin, dev)
+
+ def isexec(path):
+ try:
+ s = os.stat(path)
+ except (os.error, AttributeError):
+ return 0
+ return (s[stat.ST_MODE] & stat.S_IEXEC)
+
+ def auto_libname(packages, orig_pkg):
+ bin_re = re.compile(".*/s?bin$")
+ lib_re = re.compile(".*/lib$")
+ so_re = re.compile("lib.*\.so")
+ sonames = []
+ has_bins = 0
+ has_libs = 0
+ pkg_dir = os.path.join(workdir, "install", orig_pkg)
+ for root, dirs, files in os.walk(pkg_dir):
+ if bin_re.match(root) and files:
+ has_bins = 1
+ if lib_re.match(root) and files:
+ has_libs = 1
+ for f in files:
+ if so_re.match(f):
+ fp = os.path.join(root, f)
+ cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
+ fd = os.popen(cmd)
+ lines = fd.readlines()
+ fd.close()
+ for l in lines:
+ m = re.match("\s+SONAME\s+([^\s]*)", l)
+ if m and not m.group(1) in sonames:
+ sonames.append(m.group(1))
+
+ bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
+ soname = None
+ if len(sonames) == 1:
+ soname = sonames[0]
+ elif len(sonames) > 1:
+ lead = bb.data.getVar('LEAD_SONAME', d, 1)
+ if lead:
+ r = re.compile(lead)
+ filtered = []
+ for s in sonames:
+ if r.match(s):
+ filtered.append(s)
+ if len(filtered) == 1:
+ soname = filtered[0]
+ elif len(filtered) > 1:
+ bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
+ else:
+ bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
+ else:
+ bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))
+
+ if has_libs and not has_bins and soname:
+ soname_result = socrunch(soname)
+ if soname_result:
+ (pkgname, devname) = soname_result
+ for pkg in packages.split():
+ if (bb.data.getVar('PKG_' + pkg, d)):
+ continue
+ if pkg == orig_pkg:
+ newpkg = pkgname
+ else:
+ newpkg = pkg.replace(orig_pkg, devname)
+ if newpkg != pkg:
+ bb.data.setVar('PKG_' + pkg, newpkg, d)
+
+ for pkg in (bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split():
+ auto_libname(packages, pkg)
+}
+
+EXPORT_FUNCTIONS package_name_hook
+
+DEBIAN_NAMES = 1
+
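
As a sketch of the renaming the hook performs (illustration only): for a hypothetical recipe foo whose main package ships only a library with SONAME libfoo.so.1 and no binaries, socrunch() yields ("libfoo1", "libfoo"), and the hook would end up setting roughly:

    # hypothetical outcome for a library-only recipe "foo"
    PKG_foo     = "libfoo1"
    PKG_foo-dev = "libfoo-dev"
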
diff --git a/classes/gconf.bbclass b/classes/gconf.bbclass
index e69de29bb2..b0c5723873 100644
--- a/classes/gconf.bbclass
+++ b/classes/gconf.bbclass
@@ -0,0 +1,57 @@
+gconf_postinst() {
+if [ "$1" = configure ]; then
+ if [ "x$D" != "x" ]; then
+ exit 1
+ fi
+ SCHEMA_LOCATION=/etc/gconf/schemas
+ for SCHEMA in ${SCHEMA_FILES}; do
+ if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
+ HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
+ gconftool-2 \
+ --makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
+ fi
+ done
+fi
+}
+
+gconf_prerm() {
+if [ "$1" = remove ] || [ "$1" = upgrade ]; then
+ SCHEMA_LOCATION=/etc/gconf/schemas
+ for SCHEMA in ${SCHEMA_FILES}; do
+ if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
+ HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
+ gconftool-2 \
+ --makefile-uninstall-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
+ fi
+ done
+fi
+}
+
+python populate_packages_append () {
+ import os.path, re
+ packages = bb.data.getVar('PACKAGES', d, 1).split()
+ workdir = bb.data.getVar('WORKDIR', d, 1)
+
+ for pkg in packages:
+ schema_dir = '%s/install/%s/etc/gconf/schemas' % (workdir, pkg)
+ schemas = []
+ schema_re = re.compile(".*\.schemas$")
+ if os.path.exists(schema_dir):
+ for f in os.listdir(schema_dir):
+ if schema_re.match(f):
+ schemas.append(f)
+ if schemas != []:
+ bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
+ bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
+ postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += bb.data.getVar('gconf_postinst', d, 1)
+ bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
+ if not prerm:
+ prerm = '#!/bin/sh\n'
+ prerm += bb.data.getVar('gconf_prerm', d, 1)
+ bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
+
+}
diff --git a/classes/gettext.bbclass b/classes/gettext.bbclass
index e69de29bb2..3785f5acd3 100644
--- a/classes/gettext.bbclass
+++ b/classes/gettext.bbclass
@@ -0,0 +1,12 @@
+python () {
+ # Remove the NLS bits if USE_NLS is no.
+ if bb.data.getVar('USE_NLS', d, 1) == 'no':
+ cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
+ cfg += " --disable-nls"
+ depends = bb.data.getVar('DEPENDS', d, 1) or ""
+ bb.data.setVar('DEPENDS', oe_filter_out('^(virtual/libiconv|virtual/libintl)$', depends, d), d)
+ bb.data.setVar('EXTRA_OECONF', cfg, d)
+}
+
+DEPENDS =+ "gettext-native"
+EXTRA_OECONF += "--enable-nls"
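
For illustration, a distribution or local configuration that disables NLS would set the following; with the class inherited, the anonymous function above then replaces --enable-nls with --disable-nls and drops virtual/libiconv and virtual/libintl from DEPENDS:

    # hypothetical distro/local configuration sketch
    USE_NLS = "no"
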
diff --git a/classes/gpe.bbclass b/classes/gpe.bbclass
index e69de29bb2..3b0cd0a001 100644
--- a/classes/gpe.bbclass
+++ b/classes/gpe.bbclass
@@ -0,0 +1,15 @@
+DEPENDS_prepend = "coreutils-native virtual/libintl intltool-native "
+SRC_URI = "${GPE_MIRROR}/${PN}-${PV}.tar.gz"
+FILES_${PN} += "${datadir}/gpe ${datadir}/application-registry"
+
+inherit gettext
+
+gpe_do_compile() {
+ oe_runmake PREFIX=${prefix}
+}
+
+gpe_do_install() {
+ oe_runmake PREFIX=${prefix} DESTDIR=${D} install
+}
+
+EXPORT_FUNCTIONS do_compile do_install
diff --git a/classes/icecc.bbclass b/classes/icecc.bbclass
index e69de29bb2..7dfcfc29a4 100644
--- a/classes/icecc.bbclass
+++ b/classes/icecc.bbclass
@@ -0,0 +1,156 @@
+# IceCream distributed compiling support
+#
+# We need to create a tar.bz2 of our toolchain and set
+# ICECC_VERSION, ICECC_CXX and ICECC_CC
+#
+
+def create_env(bb,d):
+ """
+    Create a tar.bz2 of the current toolchain
+ """
+
+    # In case of native-native compilation no environment is needed if the
+    # host prefix is empty (we duplicate the query here for ease)
+ prefix = bb.data.expand('${HOST_PREFIX}', d)
+ if len(prefix) == 0:
+ return ""
+
+ import tarfile
+ import socket
+ import time
+ import os
+ ice_dir = bb.data.expand('${CROSS_DIR}', d)
+ prefix = bb.data.expand('${HOST_PREFIX}' , d)
+ distro = bb.data.expand('${DISTRO}', d)
+ target_sys = bb.data.expand('${TARGET_SYS}', d)
+ #float = bb.data.getVar('${TARGET_FPU}', d)
+ float = "anyfloat"
+ name = socket.gethostname()
+
+ try:
+ os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
+ os.stat(ice_dir + '/' + target_sys + '/bin/g++')
+ except:
+ return ""
+
+ VERSION = '3.4.3'
+ cross_name = prefix + distro + target_sys + float +VERSION+ name
+ tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'
+
+ try:
+ os.stat(tar_file)
+ return tar_file
+ except:
+ try:
+ os.makedirs(ice_dir+'/ice')
+ except:
+ pass
+
+ # FIXME find out the version of the compiler
+ tar = tarfile.open(tar_file, 'w:bz2')
+ tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
+ target_sys + 'cross/lib/ld-linux.so.2')
+ tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
+ target_sys + 'cross/lib/ld-2.3.3.so')
+ tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
+ target_sys + 'cross/lib/libc-2.3.3.so')
+ tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
+ target_sys + 'cross/lib/libc.so.6')
+ tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
+ target_sys + 'cross/usr/bin/gcc')
+ tar.add(ice_dir + '/' + target_sys + '/bin/g++',
+ target_sys + 'cross/usr/bin/g++')
+ tar.add(ice_dir + '/' + target_sys + '/bin/as',
+ target_sys + 'cross/usr/bin/as')
+ tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
+ target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
+ tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
+ target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
+    tar.add(ice_dir + '/libexec/gcc/' + target_sys + '/' + VERSION + '/cc1plus',
+ target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
+ tar.close()
+ return tar_file
+
+
+def create_path(compilers, type, bb, d):
+ """
+ Create Symlinks for the icecc in the staging directory
+ """
+ import os
+
+ staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
+ icecc = bb.data.getVar('ICECC_PATH', d)
+
+ # Create the dir if necessary
+ try:
+ os.stat(staging)
+ except:
+ os.makedirs(staging)
+
+
+ for compiler in compilers:
+ gcc_path = staging + "/" + compiler
+ try:
+ os.stat(gcc_path)
+ except:
+ os.symlink(icecc, gcc_path)
+
+ return staging + ":"
+
+
+def use_icc_version(bb,d):
+    # native-native compilation needs no environment
+ prefix = bb.data.expand('${HOST_PREFIX}', d)
+ if len(prefix) == 0:
+ return "no"
+
+
+ native = bb.data.expand('${PN}', d)
+ blacklist = [ "-cross", "-native" ]
+
+ for black in blacklist:
+ if black in native:
+ return "no"
+
+ return "yes"
+
+def icc_path(bb,d,compile):
+ native = bb.data.expand('${PN}', d)
+    blacklist = [ "uclibc", "glibc", "ncurses" ]
+ for black in blacklist:
+ if black in native:
+ return ""
+
+ if "-native" in native:
+ compile = False
+ if "-cross" in native:
+ compile = False
+
+ prefix = bb.data.expand('${HOST_PREFIX}', d)
+ if compile and len(prefix) != 0:
+ return create_path( [prefix+"gcc", prefix+"g++"], "cross", bb, d )
+ elif not compile or len(prefix) == 0:
+ return create_path( ["gcc", "g++"], "native", bb, d)
+
+
+def icc_version(bb,d):
+ return create_env(bb,d)
+
+
+#
+# set the IceCream environment variables
+do_configure_prepend() {
+ export PATH=${@icc_path(bb,d,False)}$PATH
+ export ICECC_CC="gcc"
+ export ICECC_CXX="g++"
+}
+
+do_compile_prepend() {
+ export PATH=${@icc_path(bb,d,True)}$PATH
+ export ICECC_CC="${HOST_PREFIX}gcc"
+ export ICECC_CXX="${HOST_PREFIX}g++"
+
+ if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
+ export ICECC_VERSION="${@icc_version(bb,d)}"
+ fi
+}
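
A hypothetical configuration sketch for enabling the class (the icecc binary path is an example, not something this commit defines; ICECC_PATH is the variable create_path() reads above):

    # e.g. in conf/local.conf
    INHERIT += "icecc"
    ICECC_PATH = "/usr/bin/icecc"
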
diff --git a/classes/kernel-arch.bbclass b/classes/kernel-arch.bbclass
index e69de29bb2..92a6c982fb 100644
--- a/classes/kernel-arch.bbclass
+++ b/classes/kernel-arch.bbclass
@@ -0,0 +1,26 @@
+#
+# set the ARCH environment variable for kernel compilation (including
+# modules). return value must match one of the architecture directories
+# in the kernel source "arch" directory
+#
+
+valid_archs = "alpha cris ia64 m68knommu ppc sh \
+ sparc64 x86_64 arm h8300 m32r mips \
+ ppc64 sh64 um arm26 i386 m68k \
+ parisc s390 sparc v850"
+
+def map_kernel_arch(a, d):
+ import bb, re
+
+ valid_archs = bb.data.getVar('valid_archs', d, 1).split()
+
+ if re.match('(i.86|athlon)$', a): return 'i386'
+ elif re.match('arm26$', a): return 'arm26'
+ elif re.match('armeb$', a): return 'arm'
+ elif re.match('powerpc$', a): return 'ppc'
+ elif re.match('mipsel$', a): return 'mips'
+ elif a in valid_archs: return a
+ else:
+ bb.error("cannot map '%s' to a linux kernel architecture" % a)
+
+export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"
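
For example (illustrative values), the mapping above resolves as follows; anything outside valid_archs triggers the bb.error() call:

    # TARGET_ARCH = "armeb"   ->  ARCH = "arm"
    # TARGET_ARCH = "i586"    ->  ARCH = "i386"
    # TARGET_ARCH = "mipsel"  ->  ARCH = "mips"
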
diff --git a/classes/lib_package.bbclass b/classes/lib_package.bbclass
index e69de29bb2..5042ed5a95 100644
--- a/classes/lib_package.bbclass
+++ b/classes/lib_package.bbclass
@@ -0,0 +1,9 @@
+PACKAGES = "${PN} ${PN}-dev ${PN}-doc ${PN}-bin"
+
+FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \
+ ${sysconfdir} ${sharedstatedir} ${localstatedir} \
+ /lib/*.so* ${datadir}/${PN} ${libdir}/${PN}"
+FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
+ ${libdir}/*.a ${libdir}/pkgconfig /lib/*.a /lib/*.o \
+ ${datadir}/aclocal ${bindir}/*-config"
+FILES_${PN}-bin = "${bindir} ${sbindir} /bin /sbin"
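
For illustration, a library recipe that also ships command-line tools only needs to inherit the class; with the FILES assignments above, a hypothetical recipe foo then produces foo (libraries), foo-dev, foo-doc and foo-bin (contents of ${bindir}, ${sbindir}, /bin and /sbin):

    # hypothetical recipe sketch
    inherit lib_package
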
diff --git a/classes/mozilla.bbclass b/classes/mozilla.bbclass
index e69de29bb2..629f2531da 100644
--- a/classes/mozilla.bbclass
+++ b/classes/mozilla.bbclass
@@ -0,0 +1,53 @@
+SECTION = "x11/utils"
+DEPENDS += "gnu-config-native virtual/libintl xt libxi \
+ zip-native gtk+ orbit2 libidl-native"
+LICENSE = "MPL NPL"
+SRC_URI += "file://mozconfig"
+
+inherit gettext
+
+EXTRA_OECONF = "--target=${TARGET_SYS} --host=${BUILD_SYS} \
+ --build=${BUILD_SYS} --prefix=${prefix}"
+EXTRA_OEMAKE = "'HOST_LIBIDL_LIBS=${HOST_LIBIDL_LIBS}' \
+ 'HOST_LIBIDL_CFLAGS=${HOST_LIBIDL_CFLAGS}'"
+SELECTED_OPTIMIZATION = "-Os -fsigned-char -fno-strict-aliasing"
+
+export CROSS_COMPILE = "1"
+export MOZCONFIG = "${WORKDIR}/mozconfig"
+export MOZ_OBJDIR = "${S}"
+
+export CONFIGURE_ARGS = "${EXTRA_OECONF}"
+export HOST_LIBIDL_CFLAGS = "`${HOST_LIBIDL_CONFIG} --cflags`"
+export HOST_LIBIDL_LIBS = "`${HOST_LIBIDL_CONFIG} --libs`"
+export HOST_LIBIDL_CONFIG = "PKG_CONFIG_PATH=${STAGING_BINDIR}/../share/pkgconfig pkg-config libIDL-2.0"
+export HOST_CC = "${BUILD_CC}"
+export HOST_CXX = "${BUILD_CXX}"
+export HOST_CFLAGS = "${BUILD_CFLAGS}"
+export HOST_CXXFLAGS = "${BUILD_CXXFLAGS}"
+export HOST_LDFLAGS = "${BUILD_LDFLAGS}"
+export HOST_RANLIB = "${BUILD_RANLIB}"
+export HOST_AR = "${BUILD_AR}"
+
+mozilla_do_configure() {
+ (
+ set -e
+ for cg in `find ${S} -name config.guess`; do
+ install -m 0755 \
+ ${STAGING_BINDIR}/../share/gnu-config/config.guess \
+ ${STAGING_BINDIR}/../share/gnu-config/config.sub \
+ `dirname $cg`/
+ done
+ )
+ oe_runmake -f client.mk ${MOZ_OBJDIR}/Makefile \
+ ${MOZ_OBJDIR}/config.status
+}
+
+mozilla_do_compile() {
+ oe_runmake -f client.mk build_all
+}
+
+mozilla_do_install() {
+ oe_runmake DESTDIR="${D}" destdir="${D}" install
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/classes/multimachine.inc b/classes/multimachine.inc
index e69de29bb2..1f73ffd30f 100644
--- a/classes/multimachine.inc
+++ b/classes/multimachine.inc
@@ -0,0 +1,3 @@
+STAMP = "${TMPDIR}/stamps/${PACKAGE_ARCH}-${HOST_OS}/${PF}"
+WORKDIR = "${TMPDIR}/work/${PACKAGE_ARCH}-${HOST_OS}/${PF}"
+STAGING_KERNEL_DIR = "${STAGING_DIR}/${PACKAGE_ARCH}-${HOST_OS}/kernel"
diff --git a/classes/oebuildstamp.bbclass b/classes/oebuildstamp.bbclass
index e69de29bb2..1de1b95c2e 100644
--- a/classes/oebuildstamp.bbclass
+++ b/classes/oebuildstamp.bbclass
@@ -0,0 +1,16 @@
+#
+# Because base.bbclass sets the variables
+#
+# do_fetch[nostamp] = "1"
+# do_build[nostamp] = "1"
+#
+# oemake calls all of the phases on every build to check whether something
+# new needs to be downloaded. This class unsets the nostamp flag for
+# do_build, which makes a package "finished" once it has been completely
+# built.
+#
+# This means that subsequent builds are faster, but when you change the
+# behaviour of the package, e.g. by adding INHERIT="package_ipk", you won't
+# get the ipk file unless you delete the build stamp manually, or delete all
+# of them with oebuild clean <oe-file>.
+
+do_build[nostamp] = ""
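
A hypothetical sketch of the manual stamp removal mentioned in the comment above, assuming a stamp layout like the one multimachine.inc in this commit sets up (recipe name and path are examples only):

    # force one package to rebuild by removing its do_build stamp
    rm -f tmp/stamps/*/foo-1.0-r0.do_build
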
diff --git a/classes/oelint.bbclass b/classes/oelint.bbclass
index e69de29bb2..baa1c630b4 100644
--- a/classes/oelint.bbclass
+++ b/classes/oelint.bbclass
@@ -0,0 +1,174 @@
+addtask lint before do_fetch
+do_lint[nostamp] = "1"
+python do_lint() {
+ def testVar(var, explain=None):
+ try:
+ s = d[var]
+ return s["content"]
+ except KeyError:
+ bb.error("%s is not set" % var)
+ if explain: bb.note(explain)
+ return None
+
+
+ ##############################
+ # Test that DESCRIPTION exists
+ #
+ testVar("DESCRIPTION")
+
+
+ ##############################
+ # Test that HOMEPAGE exists
+ #
+ s = testVar("HOMEPAGE")
+ if s=="unknown":
+ bb.error("HOMEPAGE is not set")
+ elif not s.startswith("http://"):
+ bb.error("HOMEPAGE doesn't start with http://")
+
+
+
+ ##############################
+ # Test for valid LICENSE
+ #
+ valid_licenses = {
+ "GPL-2" : "GPLv2",
+ "GPL LGPL FDL" : True,
+ "GPL PSF" : True,
+ "GPL/QPL" : True,
+ "GPL" : True,
+ "GPLv2" : True,
+ "IBM" : True,
+ "LGPL GPL" : True,
+ "LGPL" : True,
+ "MIT" : True,
+ "OSL" : True,
+ "Perl" : True,
+ "Public Domain" : True,
+ "QPL" : "GPL/QPL",
+ }
+ s = testVar("LICENSE")
+ if s=="unknown":
+ bb.error("LICENSE is not set")
+ elif s.startswith("Vendor"):
+ pass
+ else:
+ try:
+ newlic = valid_licenses[s]
+ if newlic == False:
+ bb.note("LICENSE '%s' is not recommended" % s)
+ elif newlic != True:
+				bb.note("LICENSE '%s' is not recommended, better use '%s'" % (s, newlic))
+ except:
+ bb.note("LICENSE '%s' is not recommended" % s)
+
+
+ ##############################
+ # Test for valid MAINTAINER
+ #
+ s = testVar("MAINTAINER")
+ if s=="OpenEmbedded Team <oe@handhelds.org>":
+ bb.error("explicit MAINTAINER is missing, using default")
+ elif s and s.find("@") == -1:
+ bb.error("You forgot to put an e-mail address into MAINTAINER")
+
+
+ ##############################
+ # Test for valid SECTION
+ #
+ # if Correct section: True section name is valid
+ # False section name is invalid, no suggestion
+ # string section name is invalid, better name suggested
+ #
+ valid_sections = {
+ # Current Section Correct section
+ "apps" : True,
+ "audio" : True,
+ "base" : True,
+ "console/games" : True,
+ "console/net" : "console/network",
+ "console/network" : True,
+ "console/utils" : True,
+ "devel" : True,
+ "developing" : "devel",
+ "devel/python" : True,
+ "fonts" : True,
+ "games" : True,
+ "games/libs" : True,
+ "gnome/base" : True,
+ "gnome/libs" : True,
+ "gpe" : True,
+ "gpe/libs" : True,
+ "gui" : False,
+ "libc" : "libs",
+ "libs" : True,
+ "libs/net" : True,
+ "multimedia" : True,
+ "net" : "network",
+ "NET" : "network",
+ "network" : True,
+ "opie/applets" : True,
+ "opie/applications" : True,
+ "opie/base" : True,
+ "opie/codecs" : True,
+ "opie/decorations" : True,
+ "opie/fontfactories" : True,
+ "opie/fonts" : True,
+ "opie/games" : True,
+ "opie/help" : True,
+ "opie/inputmethods" : True,
+ "opie/libs" : True,
+ "opie/multimedia" : True,
+ "opie/pim" : True,
+ "opie/setting" : "opie/settings",
+ "opie/settings" : True,
+ "opie/Shell" : False,
+ "opie/styles" : True,
+ "opie/today" : True,
+ "scientific" : True,
+ "utils" : True,
+ "x11" : True,
+ "x11/libs" : True,
+ "x11/wm" : True,
+ }
+ s = testVar("SECTION")
+ if s:
+ try:
+ newsect = valid_sections[s]
+ if newsect == False:
+ bb.note("SECTION '%s' is not recommended" % s)
+ elif newsect != True:
+ bb.note("SECTION '%s' is not recommended, better use '%s'" % (s, newsect))
+ except:
+ bb.note("SECTION '%s' is not recommended" % s)
+
+ if not s.islower():
+ bb.error("SECTION should only use lower case")
+
+
+
+
+ ##############################
+ # Test for valid PRIORITY
+ #
+ valid_priorities = {
+ "standard" : True,
+ "required" : True,
+ "optional" : True,
+ "extra" : True,
+ }
+ s = testVar("PRIORITY")
+ if s:
+ try:
+ newprio = valid_priorities[s]
+ if newprio == False:
+ bb.note("PRIORITY '%s' is not recommended" % s)
+ elif newprio != True:
+ bb.note("PRIORITY '%s' is not recommended, better use '%s'" % (s, newprio))
+ except:
+ bb.note("PRIORITY '%s' is not recommended" % s)
+
+ if not s.islower():
+ bb.error("PRIORITY should only use lower case")
+
+}
diff --git a/classes/pkg_metainfo.bbclass b/classes/pkg_metainfo.bbclass
index e69de29bb2..ac4f73c77b 100644
--- a/classes/pkg_metainfo.bbclass
+++ b/classes/pkg_metainfo.bbclass
@@ -0,0 +1,22 @@
+python do_pkg_write_metainfo () {
+ deploydir = bb.data.getVar('DEPLOY_DIR', d, 1)
+ if not deploydir:
+ bb.error("DEPLOY_DIR not defined, unable to write package info")
+ return
+
+ try:
+ infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
+ except OSError:
+ raise bb.build.FuncFailed("unable to open package-info file for writing.")
+
+ name = bb.data.getVar('PN', d, 1)
+ version = bb.data.getVar('PV', d, 1)
+ desc = bb.data.getVar('DESCRIPTION', d, 1)
+ page = bb.data.getVar('HOMEPAGE', d, 1)
+ lic = bb.data.getVar('LICENSE', d, 1)
+
+ infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
+ infofile.close()
+}
+
+addtask pkg_write_metainfo after do_package before do_build
\ No newline at end of file
diff --git a/classes/qmake-base.bbclass b/classes/qmake-base.bbclass
index e69de29bb2..4a360dae43 100644
--- a/classes/qmake-base.bbclass
+++ b/classes/qmake-base.bbclass
@@ -0,0 +1,42 @@
+DEPENDS_prepend = "qmake-native "
+
+OE_QMAKE_PLATFORM = "${TARGET_OS}-oe-g++"
+QMAKESPEC := "${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}"
+
+# We override this completely to eliminate the -e normally passed in
+EXTRA_OEMAKE = ' MAKEFLAGS= '
+
+export OE_QMAKE_CC="${CC}"
+export OE_QMAKE_CFLAGS="${CFLAGS}"
+export OE_QMAKE_CXX="${CXX}"
+export OE_QMAKE_CXXFLAGS="-fno-exceptions -fno-rtti ${CXXFLAGS}"
+export OE_QMAKE_LDFLAGS="${LDFLAGS}"
+export OE_QMAKE_LINK="${CCLD}"
+export OE_QMAKE_AR="${AR}"
+export OE_QMAKE_STRIP="${STRIP}"
+export OE_QMAKE_UIC="${STAGING_BINDIR}/uic"
+export OE_QMAKE_MOC="${STAGING_BINDIR}/moc"
+export OE_QMAKE_RPATH="-Wl,-rpath-link,"
+
+# default to qte2 via bb.conf, inherit qt3x11 to configure for qt3x11
+export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
+export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
+export OE_QMAKE_LIBS_QT="qte"
+export OE_QMAKE_LIBS_X11=""
+
+oe_qmake_mkspecs () {
+ mkdir -p mkspecs/${OE_QMAKE_PLATFORM}
+ for f in ${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}/*; do
+ if [ -L $f ]; then
+ lnk=`readlink $f`
+ if [ -f mkspecs/${OE_QMAKE_PLATFORM}/$lnk ]; then
+ ln -s $lnk mkspecs/${OE_QMAKE_PLATFORM}/`basename $f`
+ else
+ cp $f mkspecs/${OE_QMAKE_PLATFORM}/
+ fi
+ else
+ cp $f mkspecs/${OE_QMAKE_PLATFORM}/
+ fi
+ done
+}
+
diff --git a/classes/qmake.bbclass b/classes/qmake.bbclass
index e69de29bb2..10aa8c9f7d 100644
--- a/classes/qmake.bbclass
+++ b/classes/qmake.bbclass
@@ -0,0 +1,57 @@
+inherit qmake-base
+
+qmake_do_configure() {
+ case ${QMAKESPEC} in
+ *linux-oe-g++|*linux-uclibc-oe-g++)
+ ;;
+ *-oe-g++)
+ die Unsupported target ${TARGET_OS} for oe-g++ qmake spec
+ ;;
+ *)
+ oenote Searching for qmake spec file
+ paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-${TARGET_ARCH}-g++"
+ paths="${QMAKE_MKSPEC_PATH}/${TARGET_OS}-g++ $paths"
+
+ if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
+ paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-x86-g++ $paths"
+ fi
+ for i in $paths; do
+ if test -e $i; then
+ export QMAKESPEC=$i
+ break
+ fi
+ done
+ ;;
+ esac
+
+ oenote "using qmake spec in ${QMAKESPEC}, using profiles '${QMAKE_PROFILES}'"
+
+ if [ -z "${QMAKE_PROFILES}" ]; then
+ PROFILES="`ls *.pro`"
+ else
+ PROFILES="${QMAKE_PROFILES}"
+ fi
+
+ if [ -z "$PROFILES" ]; then
+ die "QMAKE_PROFILES not set and no profiles found in $PWD"
+ fi
+
+ if [ ! -z "${EXTRA_QMAKEVARS_POST}" ]; then
+ AFTER="-after"
+ QMAKE_VARSUBST_POST="${EXTRA_QMAKEVARS_POST}"
+ oenote "qmake postvar substitution: ${EXTRA_QMAKEVARS_POST}"
+ fi
+
+ if [ ! -z "${EXTRA_QMAKEVARS_PRE}" ]; then
+ QMAKE_VARSUBST_PRE="${EXTRA_QMAKEVARS_PRE}"
+ oenote "qmake prevar substitution: ${EXTRA_QMAKEVARS_PRE}"
+ fi
+
+#oenote "Calling 'qmake -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST'"
+ unset QMAKESPEC || true
+ qmake -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST || die "Error calling qmake on $PROFILES"
+}
+
+EXPORT_FUNCTIONS do_configure
+
+addtask configure after do_unpack do_patch before do_compile
diff --git a/classes/qt3x11.bbclass b/classes/qt3x11.bbclass
index e69de29bb2..2a2f3d5bbd 100644
--- a/classes/qt3x11.bbclass
+++ b/classes/qt3x11.bbclass
@@ -0,0 +1,11 @@
+#
+# override variables set by qmake-base to compile Qt/X11 apps
+#
+export QTDIR="${STAGING_DIR}/${HOST_SYS}/qt3"
+export OE_QMAKE_UIC="${STAGING_BINDIR}/uic3"
+export OE_QMAKE_MOC="${STAGING_BINDIR}/moc3"
+export OE_QMAKE_CXXFLAGS="${CXXFLAGS}"
+export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
+export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
+export OE_QMAKE_LIBS_QT="qt"
+export OE_QMAKE_LIBS_X11="-lXext -lX11 -lm"
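
For illustration, a Qt/X11 application recipe would combine this class with the qmake class so the overrides above take effect; a minimal hypothetical sketch:

    # hypothetical Qt3/X11 application recipe sketch
    inherit qmake qt3x11
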
diff --git a/classes/qt4x11.bbclass b/classes/qt4x11.bbclass
index e69de29bb2..d4ca0073df 100644
--- a/classes/qt4x11.bbclass
+++ b/classes/qt4x11.bbclass
@@ -0,0 +1,11 @@
+#
+# override variables set by qmake-base to compile Qt/X11 apps
+#
+export QTDIR="${STAGING_DIR}/${HOST_SYS}/qt4"
+export OE_QMAKE_UIC="${STAGING_BINDIR}/uic4"
+export OE_QMAKE_MOC="${STAGING_BINDIR}/moc4"
+export OE_QMAKE_CXXFLAGS="${CXXFLAGS}"
+export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
+export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
+export OE_QMAKE_LIBS_QT="qt"
+export OE_QMAKE_LIBS_X11="-lXext -lX11 -lm"
diff --git a/classes/rpm_core.bbclass b/classes/rpm_core.bbclass
index e69de29bb2..f28abbb1c3 100644
--- a/classes/rpm_core.bbclass
+++ b/classes/rpm_core.bbclass
@@ -0,0 +1,16 @@
+RPMBUILDPATH="${WORKDIR}/rpm"
+
+RPMOPTS="--rcfile=${WORKDIR}/rpmrc"
+RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
+RPM="rpm ${RPMOPTS}"
+RPMBUILD="rpmbuild --buildroot ${D} --short-circuit ${RPMOPTS}"
+
+rpm_core_do_preprpm() {
+ mkdir -p ${RPMBUILDPATH}/{SPECS,RPMS/{i386,i586,i686,noarch,ppc,mips,mipsel,arm},SRPMS,SOURCES,BUILD}
+ echo 'macrofiles:/usr/lib/rpm/macros:${WORKDIR}/macros' > ${WORKDIR}/rpmrc
+ echo '%_topdir ${RPMBUILDPATH}' > ${WORKDIR}/macros
+ echo '%_repackage_dir ${WORKDIR}' >> ${WORKDIR}/macros
+}
+
+EXPORT_FUNCTIONS do_preprpm
+addtask preprpm before do_fetch
diff --git a/classes/sdk.bbclass b/classes/sdk.bbclass
index e69de29bb2..bd49c51225 100644
--- a/classes/sdk.bbclass
+++ b/classes/sdk.bbclass
@@ -0,0 +1,22 @@
+# SDK packages are built either explicitly by the user,
+# or indirectly via dependency. No need to be in 'world'.
+EXCLUDE_FROM_WORLD = "1"
+
+SDK_NAME = "${TARGET_ARCH}/oe"
+PACKAGE_ARCH = "${BUILD_ARCH}"
+
+HOST_ARCH = "${BUILD_ARCH}"
+HOST_VENDOR = "${BUILD_VENDOR}"
+HOST_OS = "${BUILD_OS}"
+HOST_PREFIX = "${BUILD_PREFIX}"
+HOST_CC_ARCH = "${BUILD_CC_ARCH}"
+
+export CPPFLAGS = "${BUILD_CPPFLAGS}"
+export CFLAGS = "${BUILD_CFLAGS}"
+export CXXFLAGS = "${BUILD_CXXFLAGS}"
+export LDFLAGS = "${BUILD_LDFLAGS}"
+
+prefix = "/usr/local/${SDK_NAME}"
+exec_prefix = "${prefix}"
+
+FILES_${PN} = "${prefix}"
diff --git a/classes/sdl.bbclass b/classes/sdl.bbclass
index e69de29bb2..541812ed93 100644
--- a/classes/sdl.bbclass
+++ b/classes/sdl.bbclass
@@ -0,0 +1,27 @@
+FILES_${PN} += '${libdir}/perl5'
+
+sdl_do_configure () {
+ if [ -x ${S}/configure ] ; then
+ cfgcmd="${S}/configure \
+ -GL -GLU"
+ oenote "Running $cfgcmd..."
+ $cfgcmd || oefatal "oe_runconf failed"
+ if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
+ . ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
+ sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
+ mv Makefile.new Makefile
+ fi
+ else
+ oefatal "no configure script found"
+ fi
+}
+
+sdl_do_compile () {
+ oe_runmake PASTHRU_INC="${CFLAGS}"
+}
+
+sdl_do_install () {
+ oe_runmake install_vendor
+}
+
+EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/classes/src_distribute_local.bbclass b/classes/src_distribute_local.bbclass
index e69de29bb2..5f0cef5bec 100644
--- a/classes/src_distribute_local.bbclass
+++ b/classes/src_distribute_local.bbclass
@@ -0,0 +1,31 @@
+inherit src_distribute
+
+# SRC_DIST_LOCAL possible values:
+# copy copies the files from ${A} to the distributedir
+# symlink symlinks the files from ${A} to the distributedir
+# move+symlink moves the files into distributedir, and symlinks them back
+SRC_DIST_LOCAL ?= "move+symlink"
+SRC_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/sources"
+SRC_DISTRIBUTECOMMAND () {
+ s="${SRC}"
+ if [ ! -L "$s" ] && (echo "$s"|grep "^${DL_DIR}"); then
+ :
+ else
+ exit 0;
+ fi
+ mkdir -p ${SRC_DISTRIBUTEDIR}
+ case "${SRC_DIST_LOCAL}" in
+ copy)
+ test -e $s.md5 && cp -f $s.md5 ${SRC_DISTRIBUTEDIR}/
+ cp -f $s ${SRC_DISTRIBUTEDIR}/
+ ;;
+ symlink)
+ test -e $s.md5 && ln -sf $s.md5 ${SRC_DISTRIBUTEDIR}/
+ ln -sf $s ${SRC_DISTRIBUTEDIR}/
+ ;;
+ move+symlink)
+ mv $s ${SRC_DISTRIBUTEDIR}/
+ ln -sf ${SRC_DISTRIBUTEDIR}/`basename $s` $s
+ ;;
+ esac
+}
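
A hypothetical configuration sketch for using the class (the directory is an example; SRC_DIST_LOCAL takes one of the values listed in the comment above):

    # e.g. in conf/local.conf
    INHERIT += "src_distribute_local"
    SRC_DIST_LOCAL = "copy"
    SRC_DISTRIBUTEDIR = "/mnt/mirror/oe-sources"
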
diff --git a/classes/srec.bbclass b/classes/srec.bbclass
index e69de29bb2..ae46a407fe 100644
--- a/classes/srec.bbclass
+++ b/classes/srec.bbclass
@@ -0,0 +1,26 @@
+#
+# Creates .srec files from images.
+#
+# Useful for loading with Yamon.
+
+# Define SREC_VMAADDR in your machine.conf.
+
+SREC_CMD = "${TARGET_PREFIX}objcopy -O srec -I binary --adjust-vma ${SREC_VMAADDR} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type}.srec"
+
+# Do not build srec files for these types of images:
+SREC_SKIP = "tar"
+
+do_srec () {
+	if [ "${SREC_VMAADDR}" = "" ] ; then
+		oefatal "Cannot do_srec without SREC_VMAADDR defined."
+ fi
+ for type in ${IMAGE_FSTYPES}; do
+ for skiptype in ${SREC_SKIP}; do
+ if [ $type = $skiptype ] ; then continue 2 ; fi
+ done
+ ${SREC_CMD}
+ done
+ return 0
+}
+
+addtask srec after do_rootfs before do_build
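
As the comment above says, SREC_VMAADDR must come from the machine configuration; a hypothetical machine.conf line (the load address is only an example):

    SREC_VMAADDR = "0x80080000"
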
diff --git a/classes/tinderclient.bbclass b/classes/tinderclient.bbclass
index e69de29bb2..1553d16319 100644
--- a/classes/tinderclient.bbclass
+++ b/classes/tinderclient.bbclass
@@ -0,0 +1,117 @@
+def base_tinder_time():
+ import time
+ return time.strftime('%m/%d/%Y %H:%M:%S', time.localtime())
+
+# Prepare tinderbox mail header
+def base_prepare_mail_header(data, status):
+ import bb
+
+ str = "tinderbox: administrator: %s\n" % bb.data.getVar('TINDER_ADMIN', data, True)
+    str += "tinderbox: starttime: %s\n" % bb.data.getVar('TINDER_START', data, True)
+ str += "tinderbox: buildname: %s\n" % bb.data.getVar('TINDER_BUILD', data, True)
+ str += "tinderbox: errorparser: %s\n" % bb.data.getVar('TINDER_ERROR', data, True)
+ str += "tinderbox: status: %s\n" % status
+ str += "tinderbox: timenow: %s\n" % base_tinder_time()
+ str += "tinderbox: tree: %s\n" % bb.data.getVar('TINDER_TREE', data, True)
+ str += "tinderbox: buildfamily: %s\n" % "unix"
+ str += "tinderbox: END"
+
+ return str
+
+def base_do_tinder_report(event):
+ """
+    Report to the tinderbox. Depending on TINDER_VERBOSE_REPORT we report
+    every single step; at the end (or on failure) we send the
+    tinderclient.log.
+ """
+ from bb.event import getName
+ from bb import data, make, mkdirhier
+ import os, glob
+
+ # variables
+ name = getName(event)
+ log = ""
+ header = ""
+ verbose = data.getVar('TINDER_VERBOSE_REPORT', event.data, True) == "1"
+
+ # Check what we need to do Build* shows we start or are done
+ if name == "BuildStarted":
+ data.setVar('TINDER_START', base_tinder_time(), event.data)
+ header = base_prepare_mail_header(event.data, 'building')
+ # generate
+ for var in os.environ:
+ log += "%s=%s\n" % (var, os.environ[var])
+
+ mkdirhier(data.getVar('TMPDIR', event.data, True))
+ file = open(data.getVar('TINDER_LOG', event.data, True), 'w')
+ file.write(log)
+
+ if not verbose:
+ header = ""
+
+ if name == "PkgFailed" or name == "BuildCompleted":
+ status = 'build_failed'
+ if name == "BuildCompleted":
+ status = "success"
+ header = base_prepare_mail_header(event.data, status)
+ # append the log
+ log_file = data.getVar('TINDER_LOG', event.data, True)
+ file = open(log_file, 'r')
+ for line in file.readlines():
+ log += line
+
+ if verbose and name == "TaskStarted":
+ header = base_prepare_mail_header(event.data, 'building')
+ log = "Task %s started" % event.task
+
+ if verbose and name == "PkgStarted":
+ header = base_prepare_mail_header(event.data, 'building')
+ log = "Package %s started" % data.getVar('P', event.data, True)
+
+ if verbose and name == "PkgSucceeded":
+ header = base_prepare_mail_header(event.data, 'building')
+ log = "Package %s done" % data.getVar('P', event.data, True)
+
+ # Append the Task Log
+ if name == "TaskSucceeded" or name == "TaskFailed":
+ log_file = glob.glob("%s/log.%s.*" % (data.getVar('T', event.data, True), event.task))
+
+ if len(log_file) != 0:
+ to_file = data.getVar('TINDER_LOG', event.data, True)
+ log_txt = open(log_file[0], 'r').readlines()
+ to_file = open(to_file, 'a')
+
+ to_file.writelines(log_txt)
+
+ # append to the log
+ if verbose:
+ header = base_prepare_mail_header(event.data, 'building')
+ for line in log_txt:
+ log += line
+
+ # now mail the log
+ if len(log) == 0 or len(header) == 0:
+ return
+
+ import smtplib
+ from email.MIMEText import MIMEText
+ msg = MIMEText(header +'\n' + log)
+ msg['Subject'] = "Tinder-Client build log"
+ msg['To'] = data.getVar('TINDER_MAILTO',event.data, True)
+ msg['From'] = data.getVar('TINDER_FROM', event.data, True)
+
+
+ s = smtplib.SMTP()
+ s.connect()
+ s.sendmail(data.getVar('TINDER_FROM', event.data, True), [data.getVar('TINDER_MAILTO', event.data, True)], msg.as_string())
+ s.close()
+
+addhandler tinderclient_eventhandler
+python tinderclient_eventhandler() {
+
+ do_tinder_report = data.getVar('TINDER_REPORT', e.data, True)
+ if do_tinder_report and do_tinder_report == "1":
+ base_do_tinder_report(e)
+
+ return NotHandled
+}
diff --git a/classes/tmake.bbclass b/classes/tmake.bbclass
index e69de29bb2..05b82e496d 100644
--- a/classes/tmake.bbclass
+++ b/classes/tmake.bbclass
@@ -0,0 +1,77 @@
+DEPENDS_prepend="tmake "
+
+python tmake_do_createpro() {
+    import glob, os, sys
+ from bb import note
+ out_vartranslate = {
+ "TMAKE_HEADERS": "HEADERS",
+ "TMAKE_INTERFACES": "INTERFACES",
+ "TMAKE_TEMPLATE": "TEMPLATE",
+ "TMAKE_CONFIG": "CONFIG",
+ "TMAKE_DESTDIR": "DESTDIR",
+ "TMAKE_SOURCES": "SOURCES",
+ "TMAKE_DEPENDPATH": "DEPENDPATH",
+ "TMAKE_INCLUDEPATH": "INCLUDEPATH",
+ "TMAKE_TARGET": "TARGET",
+ "TMAKE_LIBS": "LIBS",
+ }
+ s = data.getVar('S', d, 1) or ""
+ os.chdir(s)
+ profiles = (data.getVar('TMAKE_PROFILES', d, 1) or "").split()
+ if not profiles:
+ profiles = ["*.pro"]
+ for pro in profiles:
+ ppro = glob.glob(pro)
+ if ppro:
+ if ppro != [pro]:
+ del profiles[profiles.index(pro)]
+ profiles += ppro
+ continue
+ if ppro[0].find('*'):
+ del profiles[profiles.index(pro)]
+ continue
+ else:
+ del profiles[profiles.index(pro)]
+ if len(profiles) != 0:
+ return
+
+ # output .pro using this metadata store
+ try:
+ from __builtin__ import file
+ profile = file(data.expand('${PN}.pro', d), 'w')
+ except OSError:
+        raise bb.build.FuncFailed("unable to open pro file for writing.")
+
+# fd = sys.__stdout__
+ fd = profile
+ for var in out_vartranslate.keys():
+ val = data.getVar(var, d, 1)
+ if val:
+ fd.write("%s\t: %s\n" % (out_vartranslate[var], val))
+
+# if fd is not sys.__stdout__:
+ fd.close()
+}
+
+tmake_do_configure() {
+ paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-${TARGET_ARCH}-g++ $STAGING_DIR/share/tmake/$OS-g++"
+ if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
+ paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-x86-g++ $paths"
+ fi
+ for i in $paths; do
+ if test -e $i; then
+ export TMAKEPATH=$i
+ break
+ fi
+ done
+
+ if [ -z "${TMAKE_PROFILES}" ]; then
+ TMAKE_PROFILES="`ls *.pro`"
+ fi
+ tmake -o Makefile $TMAKE_PROFILES || die "Error calling tmake on ${TMAKE_PROFILES}"
+}
+
+EXPORT_FUNCTIONS do_configure do_createpro
+
+addtask configure after do_unpack do_patch before do_compile
+addtask createpro before do_configure after do_unpack do_patch
diff --git a/classes/update-alternatives.bbclass b/classes/update-alternatives.bbclass
index e69de29bb2..6b2b547d5f 100644
--- a/classes/update-alternatives.bbclass
+++ b/classes/update-alternatives.bbclass
@@ -0,0 +1,33 @@
+# defaults
+ALTERNATIVE_PRIORITY = "10"
+ALTERNATIVE_LINK = "${bindir}/${ALTERNATIVE_NAME}"
+
+update_alternatives_postinst() {
+update-alternatives --install ${ALTERNATIVE_LINK} ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH} ${ALTERNATIVE_PRIORITY}
+}
+
+update_alternatives_postrm() {
+update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
+}
+
+python __anonymous() {
+ if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
+ raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
+ if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
+ raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d)
+}
+
+python populate_packages_prepend () {
+ pkg = bb.data.getVar('PN', d, 1)
+ bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
+ postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += bb.data.getVar('update_alternatives_postinst', d, 1)
+ bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
+ postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
+ if not postrm:
+ postrm = '#!/bin/sh\n'
+ postrm += bb.data.getVar('update_alternatives_postrm', d, 1)
+ bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
+}
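
For illustration, a recipe providing an alternative for a common command might use the class roughly like this; the name, path and priority are hypothetical (ALTERNATIVE_LINK defaults to ${bindir}/${ALTERNATIVE_NAME} as set above):

    # hypothetical recipe sketch
    inherit update-alternatives
    ALTERNATIVE_NAME = "vi"
    ALTERNATIVE_PATH = "${bindir}/nvi"
    ALTERNATIVE_PRIORITY = "50"
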