| | | |
|---|---|---|
| author | Kristoffer Ericson <kristoffer_e1@hotmail.com> | 2006-10-25 23:27:19 +0000 |
| committer | Kristoffer Ericson <kristoffer_e1@hotmail.com> | 2006-10-25 23:27:19 +0000 |
| commit | e26108affb65e89fcf248a58564b9c25934a04a0 (patch) | |
| tree | 3c27feb6d440ed2df41309dae277afc13427c12f /classes | |
| parent | d7fd0e38cb60cb0ac871723c34616bf542d423cc (diff) | |
| parent | 12f3d309156ac047b0826904ef729b83b997b8b5 (diff) | |

merge of 5d13f311dd93df73db461d6abca7480b39bbecbc and 6bdfd4680fb56e499b8a254034efb752dead5002
Diffstat (limited to 'classes')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | classes/base.bbclass | 42 |
| -rw-r--r-- | classes/debian.bbclass | 2 |
| -rw-r--r-- | classes/gtk-binver.bbclass | 9 |
| -rw-r--r-- | classes/image_ipk.bbclass | 2 |
| -rw-r--r-- | classes/insane.bbclass | 39 |
| -rw-r--r-- | classes/kernel.bbclass | 33 |
| -rw-r--r-- | classes/lib_package.bbclass | 4 |
| -rw-r--r-- | classes/linux-kernel-base.bbclass (renamed from classes/linux_modules.bbclass) | 23 |
| -rw-r--r-- | classes/module_strip.bbclass | 6 |
| -rw-r--r-- | classes/package.bbclass | 443 |
| -rw-r--r-- | classes/package_ipk.bbclass | 4 |
| -rw-r--r-- | classes/package_rpm.bbclass | 2 |
| -rw-r--r-- | classes/package_tar.bbclass | 2 |
| -rw-r--r-- | classes/packaged-staging.bbclass | 4 |
| -rw-r--r-- | classes/rootfs_ipk.bbclass | 6 |
| -rw-r--r-- | classes/sanity.bbclass | 20 |
| -rw-r--r-- | classes/tinderclient.bbclass | 13 |
17 files changed, 386 insertions, 268 deletions
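
Two changes dominate this merge: packaging is split into a metadata pass (do_package) and a separate write pass (do_package_write), and base.bbclass gains a recursive fetch task. A minimal sketch of the fetch task as it lands in base.bbclass — the image name in the comment is only an example:

```
# New in base.bbclass: fetch sources for a recipe and, via recrdeptask,
# for everything in its recursive (runtime) dependency tree. Typical use
# is pre-populating the download directory, e.g.:
#   bitbake -c fetchall <some-image-target>
addtask fetchall
do_fetchall[recrdeptask] = "do_fetch"
python base_do_fetchall() {
    bb.build.exec_task('do_fetch', d)
}
```
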
diff --git a/classes/base.bbclass b/classes/base.bbclass index 0c62568107..eda1b23b04 100644 --- a/classes/base.bbclass +++ b/classes/base.bbclass @@ -382,6 +382,12 @@ python base_do_fetch() { raise bb.build.FuncFailed("Fetch failed: %s" % value) } +addtask fetchall +do_fetchall[recrdeptask] = "do_fetch" +python base_do_fetchall() { + bb.build.exec_task('do_fetch', d) +} + def oe_unpack_file(file, data, url = None): import bb, os if not url: @@ -505,6 +511,9 @@ python base_eventhandler() { monotone_revision = "<unknown>" try: monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip() + if monotone_revision.startswith( "format_version" ): + monotone_revision_words = monotone_revision.split() + monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1] except IOError: pass bb.data.setVar( 'OE_REVISION', monotone_revision, e.data ) @@ -566,7 +575,7 @@ do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${T ${STAGING_DATADIR} \ ${S} ${B}" -addtask populate_staging after do_package +addtask populate_staging after do_package_write python do_populate_staging () { bb.build.exec_func('do_stage', d) @@ -590,9 +599,6 @@ do_build[func] = "1" # Functions that update metadata based on files outputted # during the build process. -SHLIBS = "" -RDEPENDS_prepend = " ${SHLIBS}" - def explode_deps(s): r = [] l = s.split() @@ -610,27 +616,6 @@ def explode_deps(s): r.append(i) return r -python read_shlibdeps () { - packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() - for pkg in packages: - rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "") - shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d) - if os.access(shlibsfile, os.R_OK): - fd = file(shlibsfile) - lines = fd.readlines() - fd.close() - for l in lines: - rdepends.append(l.rstrip()) - pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d) - if os.access(pcfile, os.R_OK): - fd = file(pcfile) - lines = fd.readlines() - fd.close() - for l in lines: - rdepends.append(l.rstrip()) - bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d) -} - def read_pkgdatafile(fn): pkgdata = {} @@ -738,7 +723,7 @@ python () { # Patch handling inherit patch -EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild +EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_stage do_rebuild do_fetchall MIRRORS[func] = "0" MIRRORS () { @@ -782,10 +767,5 @@ ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/ ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/ ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/ ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/ - - - -ftp://.*/.*/ http://www.oesources.org/source/current/ -http://.*/.*/ http://www.oesources.org/source/current/ } diff --git a/classes/debian.bbclass b/classes/debian.bbclass index 698d917b51..7ffa6c1a27 100644 --- a/classes/debian.bbclass +++ b/classes/debian.bbclass @@ -8,7 +8,7 @@ BUILD_ALL_DEPS = "1" # Better expressed as ensure all RDEPENDS package before we package # This means we can't have circular RDEPENDS/RRECOMMENDS -do_package[rdeptask] = "do_package" +do_package_write[rdeptask] = "do_package" python debian_package_name_hook () { import 
glob, copy, stat, errno, re diff --git a/classes/gtk-binver.bbclass b/classes/gtk-binver.bbclass new file mode 100644 index 0000000000..d59145e641 --- /dev/null +++ b/classes/gtk-binver.bbclass @@ -0,0 +1,9 @@ +def gtkbinver_find(d): + import bb + try: + for line in file( "%s/pkgconfig/gtk+-2.0.pc" % bb.data.getVar('STAGING_DATADIR', d, 1) ).readlines(): + if line.startswith( "gtk_binary_version" ): + # bb.note( "gtk_binary_version = '%s'" % line.split("=")[1].strip() ) + return line.split("=")[1].strip() + except OSError: + return "0.0.0" diff --git a/classes/image_ipk.bbclass b/classes/image_ipk.bbclass index 83e9acf315..2beb137aef 100644 --- a/classes/image_ipk.bbclass +++ b/classes/image_ipk.bbclass @@ -2,7 +2,7 @@ inherit rootfs_ipk # We need to recursively follow RDEPENDS and RRECOMMENDS for images BUILD_ALL_DEPS = "1" -do_rootfs[recrdeptask] = "do_package" +do_rootfs[recrdeptask] = "do_package_write" # Images are generally built explicitly, do not need to be part of world. EXCLUDE_FROM_WORLD = "1" diff --git a/classes/insane.bbclass b/classes/insane.bbclass index ead718db7f..062b1505b1 100644 --- a/classes/insane.bbclass +++ b/classes/insane.bbclass @@ -50,11 +50,11 @@ def package_qa_check_devdbg(path, name,d): import bb if not "-dev" in name: if path[-3:] == ".so": - bb.error("QA Issue: non dev package contains .so") + bb.error("QA Issue: non dev package contains .so: %s" % name) if not "-dbg" in name: - if path[-4:] == ".dbg": - bb.error("QA Issue: non debug package contains .dbg file") + if '.debug' in path: + bb.error("QA Issue: non debug package contains .dbg file: %s" % name) def package_qa_check_perm(path,name,d): """ @@ -90,6 +90,37 @@ def package_qa_walk(path, funcs, package,d): func(path, package,d) +def package_qa_check_rdepends(pkg, d): + if not "-dbg" in pkg and not "task-" in pkg and not "-image" in pkg: + # Copied from package_ipk.bbclass + # boiler plate to update the data + localdata = bb.data.createCopy(d) + root = "%s/install/%s" % (workdir, pkg) + + bb.data.setVar('ROOT', '', localdata) + bb.data.setVar('ROOT_%s' % pkg, root, localdata) + pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) + if not pkgname: + pkgname = pkg + bb.data.setVar('PKG', pkgname, localdata) + + overrides = bb.data.getVar('OVERRIDES', localdata) + if not overrides: + raise bb.build.FuncFailed('OVERRIDES not defined') + overrides = bb.data.expand(overrides, localdata) + bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata) + + bb.data.update_data(localdata) + + # Now check the RDEPENDS + rdepends = explode_deps(bb.data.getVar('RDEPENDS', localdata, True) or "") + + + # Now do the sanity check!!! 
+ for rdepend in rdepends: + if "-dbg" in rdepend: + bb.error("QA issue, koen give us a better msg!!!") + # The PACKAGE FUNC to scan each package python do_package_qa () { bb.note("DO PACKAGE QA") @@ -104,6 +135,8 @@ python do_package_qa () { bb.note("Package: %s" % package) path = "%s/install/%s" % (workdir, package) package_qa_walk(path, [package_qa_check_rpath, package_qa_check_devdbg, package_qa_check_perm, package_qa_check_arch], package, d) + package_qa_check_rdepends(package, d) + } diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass index 94e02925d7..3a7fd5b499 100644 --- a/classes/kernel.bbclass +++ b/classes/kernel.bbclass @@ -1,4 +1,4 @@ -inherit module_strip +inherit linux-kernel-base module_strip PROVIDES += "virtual/kernel" DEPENDS += "virtual/${TARGET_PREFIX}depmod-${@get_kernelmajorversion('${PV}')} virtual/${TARGET_PREFIX}gcc${KERNEL_CCSUFFIX} update-modules" @@ -43,37 +43,6 @@ KERNEL_IMAGEDEST = "boot" # export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}" -# parse kernel ABI version out of <linux/version.h> -def get_kernelversion(p): - import re, os - - fn = p + '/include/linux/utsrelease.h' - if not os.path.isfile(fn): - fn = p + '/include/linux/version.h' - - import re - try: - f = open(fn, 'r') - except IOError: - return None - - l = f.readlines() - f.close() - r = re.compile("#define UTS_RELEASE \"(.*)\"") - for s in l: - m = r.match(s) - if m: - return m.group(1) - return None - -def get_kernelmajorversion(p): - import re - r = re.compile("([0-9]+\.[0-9]+).*") - m = r.match(p); - if m: - return m.group(1) - return None - KERNEL_VERSION = "${@get_kernelversion('${S}')}" KERNEL_MAJOR_VERSION = "${@get_kernelmajorversion('${KERNEL_VERSION}')}" diff --git a/classes/lib_package.bbclass b/classes/lib_package.bbclass index e29d2659b0..9a48408527 100644 --- a/classes/lib_package.bbclass +++ b/classes/lib_package.bbclass @@ -1,4 +1,4 @@ -PACKAGES = "${PN} ${PN}-dev ${PN}-doc ${PN}-bin" +PACKAGES += "${PN}-bin" FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \ ${sysconfdir} ${sharedstatedir} ${localstatedir} \ @@ -6,4 +6,4 @@ FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \ FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \ ${libdir}/*.a ${libdir}/pkgconfig /lib/*.a /lib/*.o \ ${datadir}/aclocal ${bindir}/*-config" -FILES_${PN}-bin = "${bindir} ${sbindir} /bin /sbin" +FILES_${PN}-bin = "${bindir}/* ${sbindir}/* /bin/* /sbin/*" diff --git a/classes/linux_modules.bbclass b/classes/linux-kernel-base.bbclass index d5c4e74ca1..e58c228080 100644 --- a/classes/linux_modules.bbclass +++ b/classes/linux-kernel-base.bbclass @@ -1,3 +1,26 @@ +# parse kernel ABI version out of <linux/version.h> +def get_kernelversion(p): + import re, os + + fn = p + '/include/linux/utsrelease.h' + if not os.path.isfile(fn): + fn = p + '/include/linux/version.h' + + import re + try: + f = open(fn, 'r') + except IOError: + return None + + l = f.readlines() + f.close() + r = re.compile("#define UTS_RELEASE \"(.*)\"") + for s in l: + m = r.match(s) + if m: + return m.group(1) + return None + def get_kernelmajorversion(p): import re r = re.compile("([0-9]+\.[0-9]+).*") diff --git a/classes/module_strip.bbclass b/classes/module_strip.bbclass index 116e8b902f..63e6569799 100644 --- a/classes/module_strip.bbclass +++ b/classes/module_strip.bbclass @@ -5,7 +5,11 @@ do_strip_modules () { if test -e ${WORKDIR}/install/$p/lib/modules; then modules="`find ${WORKDIR}/install/$p/lib/modules -name \*${KERNEL_OBJECT_SUFFIX}`" if [ -n "$modules" ]; then - 
${STRIP} -v -g $modules + for module in $modules ; do + if ! [ -d "$module" ] ; then + ${STRIP} -v -g $module + fi + done # NM="${CROSS_DIR}/bin/${HOST_PREFIX}nm" OBJCOPY="${CROSS_DIR}/bin/${HOST_PREFIX}objcopy" strip_module $modules fi fi diff --git a/classes/package.bbclass b/classes/package.bbclass index 27d5a3a685..652b60e226 100644 --- a/classes/package.bbclass +++ b/classes/package.bbclass @@ -1,4 +1,11 @@ +# +# General packaging help functions +# + def legitimize_package_name(s): + """ + Make sure package names are legitimate strings + """ import re def fixutf(m): @@ -12,74 +19,11 @@ def legitimize_package_name(s): # Remaining package name validity fixes return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') -STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps" - -def add_package_mapping (pkg, new_name, d): - import bb, os - - def encode(str): - import codecs - c = codecs.getencoder("string_escape") - return c(str)[0] - - pmap_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1) - - bb.mkdirhier(pmap_dir) - - data_file = os.path.join(pmap_dir, pkg) - - f = open(data_file, 'w') - f.write("%s\n" % encode(new_name)) - f.close() - -def get_package_mapping (pkg, d): - import bb, os - - def decode(str): - import codecs - c = codecs.getdecoder("string_escape") - return c(str)[0] - - data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d) - - if os.access(data_file, os.R_OK): - f = file(data_file, 'r') - lines = f.readlines() - f.close() - for l in lines: - return decode(l).strip() - return pkg - -def runtime_mapping_rename (varname, d): - import bb, os - - #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, 1))) - - new_depends = [] - for depend in explode_deps(bb.data.getVar(varname, d, 1) or ""): - # Have to be careful with any version component of the depend - split_depend = depend.split(' (') - new_depend = get_package_mapping(split_depend[0].strip(), d) - if len(split_depend) > 1: - new_depends.append("%s (%s" % (new_depend, split_depend[1])) - else: - new_depends.append(new_depend) - - bb.data.setVar(varname, " ".join(new_depends) or None, d) - - #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, 1))) - -python package_mapping_rename_hook () { - runtime_mapping_rename("RDEPENDS", d) - runtime_mapping_rename("RRECOMMENDS", d) - runtime_mapping_rename("RSUGGESTS", d) - runtime_mapping_rename("RPROVIDES", d) - runtime_mapping_rename("RREPLACES", d) - runtime_mapping_rename("RCONFLICTS", d) -} - - def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None): + """ + Used in .bb files to split up dynamically generated subpackages of a + given package, usually plugins or modules. + """ import os, os.path, bb dvar = bb.data.getVar('D', d, 1) @@ -107,7 +51,14 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst objs.append(relpath) if extra_depends == None: - extra_depends = packages[0] + # This is *really* broken + mainpkg = packages[0] + # At least try and patch it up I guess... 
+ if mainpkg.find('-dbg'): + mainpkg = mainpkg.replace('-dbg', '') + if mainpkg.find('-dev'): + mainpkg = mainpkg.replace('-dev', '') + extra_depends = mainpkg for o in objs: import re, stat @@ -165,9 +116,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst bb.data.setVar('PACKAGES', ' '.join(packages), d) -# Function to strip a single file, called from RUNSTRIP below -# A working 'file' (one which works on the target architecture) -# is necessary for this stuff to work. PACKAGE_DEPENDS ?= "file-native" DEPENDS_prepend =+ "${PACKAGE_DEPENDS} " # file(1) output to match to consider a file an unstripped executable @@ -176,7 +124,12 @@ FILE_UNSTRIPPED_MATCH ?= "not stripped" IGNORE_STRIP_ERRORS ?= "1" runstrip() { + # Function to strip a single file, called from RUNSTRIP in populate_packages below + # A working 'file' (one which works on the target architecture) + # is necessary for this stuff to work, hence the addition to PACKAGES_DEPENDS + local ro st + st=0 if { file "$1" || { oewarn "file $1: failed (forced strip)" >&2 @@ -218,6 +171,134 @@ runstrip() { return $st } + +# +# Package data handling routines +# + +STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps" + +def add_package_mapping (pkg, new_name, d): + import bb, os + + def encode(str): + import codecs + c = codecs.getencoder("string_escape") + return c(str)[0] + + pmap_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1) + + bb.mkdirhier(pmap_dir) + + data_file = os.path.join(pmap_dir, pkg) + + f = open(data_file, 'w') + f.write("%s\n" % encode(new_name)) + f.close() + +def get_package_mapping (pkg, d): + import bb, os + + def decode(str): + import codecs + c = codecs.getdecoder("string_escape") + return c(str)[0] + + data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d) + + if os.access(data_file, os.R_OK): + f = file(data_file, 'r') + lines = f.readlines() + f.close() + for l in lines: + return decode(l).strip() + return pkg + +def runtime_mapping_rename (varname, d): + import bb, os + + #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, 1))) + + new_depends = [] + for depend in explode_deps(bb.data.getVar(varname, d, 1) or ""): + # Have to be careful with any version component of the depend + split_depend = depend.split(' (') + new_depend = get_package_mapping(split_depend[0].strip(), d) + if len(split_depend) > 1: + new_depends.append("%s (%s" % (new_depend, split_depend[1])) + else: + new_depends.append(new_depend) + + bb.data.setVar(varname, " ".join(new_depends) or None, d) + + #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, 1))) + +# +# Package functions suitable for inclusion in PACKAGEFUNCS +# + +python package_do_split_locales() { + import os + + if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'): + bb.debug(1, "package requested not splitting locales") + return + + packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() + if not packages: + bb.debug(1, "no packages to build; not splitting locales") + return + + datadir = bb.data.getVar('datadir', d, 1) + if not datadir: + bb.note("datadir not defined") + return + + dvar = bb.data.getVar('D', d, 1) + if not dvar: + bb.error("D not defined") + return + + pn = bb.data.getVar('PN', d, 1) + if not pn: + bb.error("PN not defined") + return + + if pn + '-locale' in packages: + packages.remove(pn + '-locale') + + localedir = os.path.join(dvar + datadir, 'locale') + + if not os.path.isdir(localedir): + bb.debug(1, "No locale files in this package") + return + + locales = os.listdir(localedir) + + # This 
is *really* broken + mainpkg = packages[0] + # At least try and patch it up I guess... + if mainpkg.find('-dbg'): + mainpkg = mainpkg.replace('-dbg', '') + if mainpkg.find('-dev'): + mainpkg = mainpkg.replace('-dev', '') + + for l in locales: + ln = legitimize_package_name(l) + pkg = pn + '-locale-' + ln + packages.append(pkg) + bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d) + bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d) + bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d) + bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d) + + bb.data.setVar('PACKAGES', ' '.join(packages), d) + + rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split() + rdep.append('%s-locale*' % pn) + bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) +} + python populate_packages () { import glob, stat, errno, re @@ -275,7 +356,7 @@ python populate_packages () { for root, dirs, files in os.walk(dvar): for f in files: file = os.path.join(root, f) - if not os.path.islink(file) and isexec(file): + if not os.path.islink(file) and not os.path.isdir(file) and isexec(file): stripfunc += "\trunstrip %s || st=1\n" % (file) if not stripfunc == "": from bb import build @@ -390,7 +471,10 @@ python populate_packages () { if found == False: bb.note("%s contains dangling symlink to %s" % (pkg, l)) bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d) +} +populate_packages[dirs] = "${D}" +python emit_pkgdata() { def write_if_exists(f, pkg, var): def encode(str): import codecs @@ -401,17 +485,26 @@ python populate_packages () { if val: f.write('%s_%s: %s\n' % (var, pkg, encode(val))) + packages = bb.data.getVar('PACKAGES', d, 1) + if not packages: + return + data_file = bb.data.expand("${STAGING_DIR}/pkgdata/${PN}", d) f = open(data_file, 'w') f.write("PACKAGES: %s\n" % packages) f.close() - for pkg in package_list: + for pkg in packages.split(): subdata_file = bb.data.expand("${STAGING_DIR}/pkgdata/runtime/%s" % pkg, d) sf = open(subdata_file, 'w') write_if_exists(sf, pkg, 'DESCRIPTION') write_if_exists(sf, pkg, 'RDEPENDS') write_if_exists(sf, pkg, 'RPROVIDES') + write_if_exists(sf, pkg, 'RRECOMMENDS') + write_if_exists(sf, pkg, 'RSUGGESTS') + write_if_exists(sf, pkg, 'RPROVIDES') + write_if_exists(sf, pkg, 'RREPLACES') + write_if_exists(sf, pkg, 'RCONFLICTS') write_if_exists(sf, pkg, 'PKG') write_if_exists(sf, pkg, 'ALLOW_EMPTY') write_if_exists(sf, pkg, 'FILES') @@ -420,8 +513,8 @@ python populate_packages () { write_if_exists(sf, pkg, 'pkg_preinst') write_if_exists(sf, pkg, 'pkg_prerm') sf.close() - bb.build.exec_func("read_subpackage_metadata", d) } +emit_pkgdata[dirs] = "${STAGING_DIR}/pkgdata/runtime" ldconfig_postinst_fragment() { if [ x"$D" = "x" ]; then @@ -429,58 +522,6 @@ if [ x"$D" = "x" ]; then fi } -python package_depchains() { - """ - For a given set of prefix and postfix modifiers, make those packages - RRECOMMENDS on the corresponding packages for its DEPENDS. - - Example: If package A depends upon package B, and A's .bb emits an - A-dev package, this would make A-dev Recommends: B-dev. 
- """ - - packages = bb.data.getVar('PACKAGES', d, 1) - postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split() - prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split() - - def pkg_addrrecs(pkg, base, func, d): - rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + base, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "") - # bb.note('rdepends for %s is %s' % (base, rdepends)) - rreclist = [] - - for depend in rdepends: - split_depend = depend.split(' (') - name = split_depend[0].strip() - func(rreclist, name) - - oldrrec = bb.data.getVar('RRECOMMENDS_%s', d) or '' - bb.data.setVar('RRECOMMENDS_%s' % pkg, oldrrec + ' '.join(rreclist), d) - - def packaged(pkg, d): - return os.access(bb.data.expand('${STAGING_DIR}/pkgdata/runtime/%s.packaged' % pkg, d), os.R_OK) - - for pkg in packages.split(): - for postfix in postfixes: - def func(list, name): - pkg = '%s%s' % (name, postfix) - if packaged(pkg, d): - list.append(pkg) - - base = pkg[:-len(postfix)] - if pkg.endswith(postfix): - pkg_addrrecs(pkg, base, func, d) - continue - - for prefix in prefixes: - def func(list, name): - pkg = '%s%s' % (prefix, name) - if packaged(pkg, d): - list.append(pkg) - - base = pkg[len(prefix):] - if pkg.startswith(prefix): - pkg_addrrecs(pkg, base, func, d) -} - python package_do_shlibs() { import os, re, os.path @@ -730,74 +771,126 @@ python package_do_pkgconfig () { fd.close() } -python package_do_split_locales() { - import os - - if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'): - bb.debug(1, "package requested not splitting locales") - return - +python read_shlibdeps () { packages = (bb.data.getVar('PACKAGES', d, 1) or "").split() - if not packages: - bb.debug(1, "no packages to build; not splitting locales") - return + for pkg in packages: + rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "") + shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d) + if os.access(shlibsfile, os.R_OK): + fd = file(shlibsfile) + lines = fd.readlines() + fd.close() + for l in lines: + rdepends.append(l.rstrip()) + pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d) + if os.access(pcfile, os.R_OK): + fd = file(pcfile) + lines = fd.readlines() + fd.close() + for l in lines: + rdepends.append(l.rstrip()) + bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d) +} - datadir = bb.data.getVar('datadir', d, 1) - if not datadir: - bb.note("datadir not defined") - return +python package_depchains() { + """ + For a given set of prefix and postfix modifiers, make those packages + RRECOMMENDS on the corresponding packages for its DEPENDS. - dvar = bb.data.getVar('D', d, 1) - if not dvar: - bb.error("D not defined") - return + Example: If package A depends upon package B, and A's .bb emits an + A-dev package, this would make A-dev Recommends: B-dev. 
+ """ - pn = bb.data.getVar('PN', d, 1) - if not pn: - bb.error("PN not defined") - return + packages = bb.data.getVar('PACKAGES', d, 1) + postfixes = (bb.data.getVar('DEPCHAIN_POST', d, 1) or '').split() + prefixes = (bb.data.getVar('DEPCHAIN_PRE', d, 1) or '').split() - if pn + '-locale' in packages: - packages.remove(pn + '-locale') + def pkg_addrrecs(pkg, base, func, d): + rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + base, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "") + # bb.note('rdepends for %s is %s' % (base, rdepends)) + rreclist = [] - localedir = os.path.join(dvar + datadir, 'locale') + for depend in rdepends: + split_depend = depend.split(' (') + name = split_depend[0].strip() + func(rreclist, name) - if not os.path.isdir(localedir): - bb.debug(1, "No locale files in this package") - return + bb.data.setVar('RRECOMMENDS_%s' % pkg, ' '.join(rreclist), d) - locales = os.listdir(localedir) + def packaged(pkg, d): + return os.access(bb.data.expand('${STAGING_DIR}/pkgdata/runtime/%s.packaged' % pkg, d), os.R_OK) - mainpkg = packages[0] + for pkg in packages.split(): + for postfix in postfixes: + def func(list, name): + pkg = '%s%s' % (name, postfix) + if packaged(pkg, d): + list.append(pkg) - for l in locales: - ln = legitimize_package_name(l) - pkg = pn + '-locale-' + ln - packages.append(pkg) - bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d) - bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d) - bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d) - bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d) + base = pkg[:-len(postfix)] + if pkg.endswith(postfix): + pkg_addrrecs(pkg, base, func, d) + continue - bb.data.setVar('PACKAGES', ' '.join(packages), d) + for prefix in prefixes: + def func(list, name): + pkg = '%s%s' % (prefix, name) + if packaged(pkg, d): + list.append(pkg) - rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split() - rdep.append('%s-locale*' % pn) - bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) + base = pkg[len(prefix):] + if pkg.startswith(prefix): + pkg_addrrecs(pkg, base, func, d) } + PACKAGEFUNCS ?= "package_do_split_locales \ - populate_packages package_do_shlibs \ - package_do_pkgconfig read_shlibdeps \ - package_depchains" + populate_packages \ + package_do_shlibs \ + package_do_pkgconfig \ + read_shlibdeps \ + package_depchains \ + emit_pkgdata" + python package_do_package () { for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split(): bb.build.exec_func(f, d) } - -do_package[dirs] = "${D}" # shlibs requires any DEPENDS to have already packaged for the *.list files do_package[deptask] = "do_package" -populate_packages[dirs] = "${STAGING_DIR}/pkgdata/runtime ${D}" -EXPORT_FUNCTIONS do_package do_shlibs do_split_locales mapping_rename_hook +do_package[dirs] = "${D}" addtask package before do_build after do_install + + + +PACKAGE_WRITE_FUNCS ?= "read_subpackage_metadata" + +python package_do_package_write () { + for f in (bb.data.getVar('PACKAGE_WRITE_FUNCS', d, 1) or '').split(): + bb.build.exec_func(f, d) +} +do_package_write[dirs] = "${D}" +addtask package_write before do_build after do_package + + +EXPORT_FUNCTIONS do_package do_package_write + + +# +# Helper functions for the package writing classes +# + +python package_mapping_rename_hook () { + """ + Rewrite variables to account for package renaming in things + like debian.bbclass or manual PKG variable name changes + """ + 
runtime_mapping_rename("RDEPENDS", d) + runtime_mapping_rename("RRECOMMENDS", d) + runtime_mapping_rename("RSUGGESTS", d) + runtime_mapping_rename("RPROVIDES", d) + runtime_mapping_rename("RREPLACES", d) + runtime_mapping_rename("RCONFLICTS", d) +} + +EXPORT_FUNCTIONS mapping_rename_hook diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass index 0cb5128e17..47cff1d27c 100644 --- a/classes/package_ipk.bbclass +++ b/classes/package_ipk.bbclass @@ -1,7 +1,7 @@ inherit package DEPENDS_prepend="${@["ipkg-utils-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}" -BOOTSTRAP_EXTRA_RDEPENDS += "ipkg-collateral ipkg ipkg-link" -PACKAGEFUNCS += "do_package_ipk" +BOOTSTRAP_EXTRA_RDEPENDS += "ipkg-collateral ipkg" +PACKAGE_WRITE_FUNCS += "do_package_ipk" python package_ipk_fn () { from bb import data diff --git a/classes/package_rpm.bbclass b/classes/package_rpm.bbclass index c29ab5f423..d5a1c8b379 100644 --- a/classes/package_rpm.bbclass +++ b/classes/package_rpm.bbclass @@ -2,7 +2,7 @@ inherit package inherit rpm_core RPMBUILD="rpmbuild --short-circuit ${RPMOPTS}" -PACKAGEFUNCS += "do_package_rpm" +PACKAGE_WRITE_FUNCS += "do_package_rpm" python write_specfile() { from bb import data, build diff --git a/classes/package_tar.bbclass b/classes/package_tar.bbclass index 63e82f7f39..9217811e38 100644 --- a/classes/package_tar.bbclass +++ b/classes/package_tar.bbclass @@ -1,6 +1,6 @@ inherit package -PACKAGEFUNCS += "do_package_tar" +PACKAGE_WRITE_FUNCS += "do_package_tar" python package_tar_fn () { import os diff --git a/classes/packaged-staging.bbclass b/classes/packaged-staging.bbclass index 8a2a03ca17..f0a721fd4a 100644 --- a/classes/packaged-staging.bbclass +++ b/classes/packaged-staging.bbclass @@ -6,7 +6,7 @@ # INHERIT += "packaged-staging" # # You also need ipkg-cl and ipkg-make-index installed on your host -# put stage-manager and ipkg-build from org.openembedded.packaged-staging/contrib/ in your $PATH +# put ipkg-build from org.openembedded.packaged-staging/contrib/ in your $PATH # BUGS: # * does not distinguish between -native, -cross and other packages @@ -23,6 +23,8 @@ inherit package +DEPENDS = "stagemanager-native" + DEPLOY_DIR_PSTAGE = "${DEPLOY_DIR}/pstage" PSTAGE_BUILD_CMD = "${IPKGBUILDCMD}" diff --git a/classes/rootfs_ipk.bbclass b/classes/rootfs_ipk.bbclass index 25738e8cb1..8cea393b43 100644 --- a/classes/rootfs_ipk.bbclass +++ b/classes/rootfs_ipk.bbclass @@ -7,6 +7,7 @@ DEPENDS_prepend="ipkg-native ipkg-utils-native fakeroot-native " DEPENDS_append=" ${EXTRA_IMAGEDEPENDS}" +RDEPENDS += "ipkg ipkg-collateral" PACKAGES = "" @@ -16,6 +17,8 @@ do_build[nostamp] = 1 IPKG_ARGS = "-f ${T}/ipkg.conf -o ${IMAGE_ROOTFS}" +IPKG_INSTALL += "ipkg ipkg-collateral" + ROOTFS_POSTPROCESS_COMMAND ?= "" PID = "${@os.getpid()}" @@ -30,6 +33,9 @@ real_do_rootfs () { mkdir -p ${IMAGE_ROOTFS}/dev + #work around a build in ipkg-make-index + touch ${DEPLOY_DIR_IPK}/Packages + if [ -z "${DEPLOY_KEEP_PACKAGES}" ]; then touch ${DEPLOY_DIR_IPK}/Packages ipkg-make-index -r ${DEPLOY_DIR_IPK}/Packages -p ${DEPLOY_DIR_IPK}/Packages -l ${DEPLOY_DIR_IPK}/Packages.filelist -m ${DEPLOY_DIR_IPK} diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass index 23a8f656b2..64c1bc0a0c 100644 --- a/classes/sanity.bbclass +++ b/classes/sanity.bbclass @@ -82,23 +82,11 @@ def check_sanity(e): if not check_app_exists('${BUILD_PREFIX}g++', e.data): raise_sanity_error('C++ Host-Compiler is missing, please install one' ) - if not check_app_exists('patch', e.data): - raise_sanity_error('Please install the 
patch utility, preferable GNU patch.') + required_utilities = "patch diffstat texi2html makeinfo cvs svn git bzip2 tar gzip" - if not check_app_exists('diffstat', e.data): - raise_sanity_error('Please install the diffstat utility') - - if not check_app_exists('texi2html', e.data): - raise_sanity_error('Please install the texi2html binary') - - if not check_app_exists('cvs', e.data): - raise_sanity_error('Please install the cvs utility') - - if not check_app_exists('svn', e.data): - raise_sanity_error('Please install the svn utility') - - if not check_app_exists('bzip2', e.data): - raise_sanity_error('Please install the bzip2 utility') + for util in required_utilities.split(): + if not check_app_exists( util, e.data ): + raise_sanity_error( "Please install the %s utility." % util ) oes_bb_conf = data.getVar( 'OES_BITBAKE_CONF', e.data, True ) if not oes_bb_conf: diff --git a/classes/tinderclient.bbclass b/classes/tinderclient.bbclass index d36ef0b343..3f5183cc8f 100644 --- a/classes/tinderclient.bbclass +++ b/classes/tinderclient.bbclass @@ -60,7 +60,18 @@ def tinder_format_http_post(d,status,log): "os" : os.uname()[0], "os_version" : os.uname()[2], "compiler" : "gcc", - "clobber" : data.getVar('TINDER_CLOBBER', d, True) + "clobber" : data.getVar('TINDER_CLOBBER', d, True), + "srcdate" : data.getVar('SRCDATE', d, True), + "PN" : data.getVar('PN', d, True), + "PV" : data.getVar('PV', d, True), + "PR" : data.getVar('PR', d, True), + "FILE" : data.getVar('FILE', d, True) or "N/A", + "TARGETARCH" : data.getVar('TARGET_ARCH', d, True), + "TARGETFPU" : data.getVar('TARGET_FPU', d, True) or "Unknown", + "TARGETOS" : data.getVar('TARGET_OS', d, True) or "Unknown", + "MACHINE" : data.getVar('MACHINE', d, True) or "Unknown", + "DISTRO" : data.getVar('DISTRO', d, True) or "Unknown", + "zecke-rocks" : "sure", } # optionally add the status |
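
On the new classes/gtk-binver.bbclass: file() on a missing gtk+-2.0.pc raises IOError, which the except OSError clause does not catch under the Python 2 interpreters BitBake ran on at the time, so the intended "0.0.0" fallback never fires. A sketch of the helper with that corrected; it keeps the original lookup and falls back to "0.0.0" when the file or the key is missing:

```
def gtkbinver_find(d):
    import bb
    try:
        pc = "%s/pkgconfig/gtk+-2.0.pc" % bb.data.getVar('STAGING_DATADIR', d, 1)
        for line in file(pc).readlines():
            if line.startswith("gtk_binary_version"):
                # e.g. "gtk_binary_version=2.4.0" -> "2.4.0"
                return line.split("=")[1].strip()
    except (IOError, OSError):
        pass
    return "0.0.0"
```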
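
On the classes/insane.bbclass hunk: package_qa_check_rdepends() builds its install root from a workdir variable that is never assigned inside the function (do_package_qa looks up WORKDIR itself), so the new check will fail with a NameError as soon as it runs. A sketch with the missing lookup added and a more descriptive error message, assuming WORKDIR is the intended base as in package_ipk.bbclass, from which the comment says the boilerplate was copied:

```
def package_qa_check_rdepends(pkg, d):
    import bb
    if "-dbg" in pkg or "task-" in pkg or "-image" in pkg:
        return

    # boilerplate copied from package_ipk.bbclass to update the datastore
    localdata = bb.data.createCopy(d)
    workdir = bb.data.getVar('WORKDIR', localdata, 1)
    root = "%s/install/%s" % (workdir, pkg)

    bb.data.setVar('ROOT', '', localdata)
    bb.data.setVar('ROOT_%s' % pkg, root, localdata)
    pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1) or pkg
    bb.data.setVar('PKG', pkgname, localdata)

    overrides = bb.data.getVar('OVERRIDES', localdata)
    if not overrides:
        raise bb.build.FuncFailed('OVERRIDES not defined')
    bb.data.setVar('OVERRIDES', bb.data.expand(overrides, localdata) + ':' + pkg, localdata)
    bb.data.update_data(localdata)

    # flag runtime dependencies on -dbg packages
    for rdepend in explode_deps(bb.data.getVar('RDEPENDS', localdata, True) or ""):
        if "-dbg" in rdepend:
            bb.error("QA issue: %s rdepends on debug package %s" % (pkg, rdepend))
```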
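
The get_kernelversion()/get_kernelmajorversion() helpers move out of kernel.bbclass into the new linux-kernel-base.bbclass (which replaces linux_modules.bbclass), so recipes that only need kernel version parsing no longer have to inherit the full kernel class. A sketch of that kind of reuse; the recipe fragment and the STAGING_KERNEL_DIR path are illustrative assumptions, not part of this commit:

```
# hypothetical out-of-tree module recipe fragment
inherit linux-kernel-base

# parse the ABI version out of the staged kernel headers (path assumed)
KERNEL_VERSION = "${@get_kernelversion('${STAGING_KERNEL_DIR}')}"
KERNEL_MAJOR_VERSION = "${@get_kernelmajorversion('${KERNEL_VERSION}')}"

do_install_append() {
    echo "installing modules built against kernel ${KERNEL_VERSION}"
}
```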
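
On the package.bbclass hunks: both do_split_packages() and package_do_split_locales() guard the '-dbg'/'-dev' clean-up of the main package name with "if mainpkg.find('-dbg'):". str.find() returns -1 (truthy) when the substring is absent and 0 (falsy) when it is the prefix, so the test is effectively inverted; the replace() is a no-op when nothing matches, which is why it still works in practice. The check the "patch it up" comment describes would read:

```
    mainpkg = packages[0]
    # strip -dbg/-dev so extra_depends and locale RDEPENDS point at the
    # runtime package rather than a debug or development subpackage
    if '-dbg' in mainpkg:
        mainpkg = mainpkg.replace('-dbg', '')
    if '-dev' in mainpkg:
        mainpkg = mainpkg.replace('-dev', '')
```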
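
Finally, on the do_package / do_package_write split: package_ipk, package_rpm and package_tar now register their writers in PACKAGE_WRITE_FUNCS instead of PACKAGEFUNCS, and do_populate_staging, image rootfs builds and debian.bbclass's rdeptask now hang off do_package_write. A minimal sketch of what a packaging backend looks like under the new scheme; the class name and do_package_example are made up for illustration:

```
# hypothetical classes/package_example.bbclass
inherit package

# do_package gathers per-package metadata (emit_pkgdata);
# do_package_write later runs everything listed here to produce archives
PACKAGE_WRITE_FUNCS += "do_package_example"

python do_package_example () {
    import bb
    for pkg in (bb.data.getVar('PACKAGES', d, 1) or "").split():
        # a real backend would read ${STAGING_DIR}/pkgdata/runtime/<pkg>
        # here and write out an .example archive
        bb.note("would write package %s" % pkg)
}
```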