author    | OpenEmbedded Project <openembedded-devel@lists.openembedded.org> | 2007-10-05 12:58:57 +0000
committer | OpenEmbedded Project <openembedded-devel@lists.openembedded.org> | 2007-10-05 12:58:57 +0000
commit    | 55d68353a541f363a196d51eac01b6a3ab8c54a4 (patch)
tree      | 041da3bf21685d908c42ea77be5b6be2e87e4d17 /classes
parent    | 94e1ce507e69378472f22faedd6606eb91510e6a (diff)
parent    | bab0c48ac5132ad63b4f3da27a1c39c20b5db86c (diff)
merge of '2409f1286888442abc1534359db7a59eb6d8b5fd' and 'f42403739ba7106ebfe92b3af59bf9c81b54f06d'
Diffstat (limited to 'classes')
-rw-r--r-- | classes/gnome.bbclass       |   2
-rw-r--r-- | classes/insane.bbclass      |   8
-rw-r--r-- | classes/kernel.bbclass      |  16
-rw-r--r-- | classes/mono.bbclass        | 218
-rw-r--r-- | classes/package.bbclass     |  53
-rw-r--r-- | classes/package_deb.bbclass |  21
-rw-r--r-- | classes/package_ipk.bbclass |  10
7 files changed, 286 insertions, 42 deletions
diff --git a/classes/gnome.bbclass b/classes/gnome.bbclass
index 56233578aa..b29d86cefd 100644
--- a/classes/gnome.bbclass
+++ b/classes/gnome.bbclass
@@ -1,6 +1,6 @@
 def gnome_verdir(v):
     import re
-    m = re.match("([0-9]+)\.([0-9]+)\..*", v)
+    m = re.match("^([0-9]+)\.([0-9]+)", v)
     return "%s.%s" % (m.group(1), m.group(2))
 
 SECTION ?= "x11/gnome"
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index d54d6c7b9e..83f8c43bba 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -101,7 +101,7 @@ def package_qa_get_elf(path, bits32):
         def my_assert(expectation, result):
             if not expectation == result:
                 #print "'%x','%x'" % (ord(expectation), ord(result))
-                raise "This does not work as expected"
+                raise Exception("This does not work as expected")
         my_assert = staticmethod(my_assert)
 
         def __init__(self, name):
@@ -124,13 +124,13 @@ def package_qa_get_elf(path, bits32):
 
             self.sex = self.data[ELFFile.EI_DATA]
             if self.sex == chr(ELFFile.ELFDATANONE):
-                raise "Can't be"
+                raise Exception("self.sex == ELFDATANONE")
             elif self.sex == chr(ELFFile.ELFDATA2LSB):
                 self.sex = "<"
             elif self.sex == chr(ELFFile.ELFDATA2MSB):
                 self.sex = ">"
             else:
-                raise "Even more worse"
+                raise Exception("Unknown self.sex")
 
         def osAbi(self):
             return ord(self.data[ELFFile.EI_OSABI])
@@ -376,7 +376,7 @@ def package_qa_check_rdepends(pkg, workdir, d):
 
         bb.data.setVar('ROOT', '', localdata)
         bb.data.setVar('ROOT_%s' % pkg, root, localdata)
-        pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
+        pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, True)
         if not pkgname:
             pkgname = pkg
         bb.data.setVar('PKG', pkgname, localdata)
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index b25275e702..5976f42c71 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -142,6 +142,7 @@ kernel_do_install() {
     install -m 0644 ${KERNEL_OUTPUT} ${D}/${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${KERNEL_VERSION}
     install -m 0644 System.map ${D}/boot/System.map-${KERNEL_VERSION}
     install -m 0644 .config ${D}/boot/config-${KERNEL_VERSION}
+    install -m 0644 vmlinux ${D}/boot/vmlinux-${KERNEL_VERSION}
     install -d ${D}/etc/modutils
     if [ "${KERNEL_MAJOR_VERSION}" = "2.6" ]; then
         install -d ${D}/etc/modprobe.d
@@ -160,6 +161,18 @@ kernel_do_configure() {
     yes '' | oe_runmake oldconfig
 }
 
+do_menuconfig() {
+    export TERMWINDOWTITLE="${PN} Kernel Configuration"
+    export SHELLCMDS="make menuconfig"
+    ${TERMCMDRUN}
+    if [ $? -ne 0 ]; then
+        echo "Fatal: '${TERMCMD}' not found. Check TERMCMD variable."
+        exit 1
+    fi
+}
+do_menuconfig[nostamp] = "1"
+addtask menuconfig after do_patch
+
 pkg_postinst_kernel () {
     cd /${KERNEL_IMAGEDEST}; update-alternatives --install /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE} ${KERNEL_IMAGETYPE} ${KERNEL_IMAGETYPE}-${KERNEL_VERSION} ${KERNEL_PRIORITY} || true
 }
@@ -174,10 +187,11 @@ EXPORT_FUNCTIONS do_compile do_install do_stage do_configure
 
 # kernel-base becomes kernel-${KERNEL_VERSION}
 # kernel-image becomes kernel-image-${KERNEL_VERISON}
-PACKAGES = "kernel kernel-base kernel-image kernel-dev"
+PACKAGES = "kernel kernel-base kernel-image kernel-dev kernel-vmlinux"
 FILES = ""
 FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
 FILES_kernel-dev = "/boot/System.map* /boot/config*"
+FILES_kernel-vmlinux = "/boot/vmlinux*"
 RDEPENDS_kernel = "kernel-base"
 # Allow machines to override this dependency if kernel image files are
 # not wanted in images as standard
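The do_menuconfig task added above runs the kernel's menuconfig UI inside a terminal spawned via TERMCMDRUN. A minimal usage sketch, assuming a terminal command is already configured and using virtual/kernel only as a placeholder target:

```sh
# Hypothetical invocation: run the new task for a kernel recipe (recipe name is an example).
# Requires TERMCMD/TERMCMDRUN to point at a terminal emulator available on the build host.
bitbake -c menuconfig virtual/kernel

# The task is marked [nostamp], so it can be re-run at any time; it aborts with the
# "Fatal: '${TERMCMD}' not found" message from the hunk above if no terminal is configured.
```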
diff --git a/classes/mono.bbclass b/classes/mono.bbclass
new file mode 100644
index 0000000000..c50274ed0c
--- /dev/null
+++ b/classes/mono.bbclass
@@ -0,0 +1,218 @@
+def mono_get_file_table(packageversion, d):
+    # The packageversion is currently ignored, but might be used in the future
+    # if more than one mono version is available and different versions
+    # need to use different tables
+
+    import bb, sys, os, glob, commands
+    curdir = os.path.dirname( bb.data.getVar('FILE', d, 1) )
+    if curdir not in sys.path: sys.path.append( curdir )
+    from mono_files import debian_mono_file_table
+
+    # mono-jay is not being built (for all platforms at least)
+    IGNORE = ("mono-jay", )
+    file_table = [
+        # Standard package
+        {"name": "mono-doc"},
+
+        # Virtual packages
+        {"name": "mono"},
+        {"name": "mono-runtime"},
+
+        # Not provided by Debian:
+        {"name": "libnunit2.2-cil",
+         "patterns": [
+            "/usr/lib/mono/gac/nunit.*/2.2.*",
+            "/usr/lib/mono/1.0/nunit.*.dll",
+            "/usr/lib/pkgconfig/mono-nunit.pc",
+         ],
+         "assemblies": [
+            ("nunit.core", "2.2.0.0"),
+            ("nunit.framework", "2.2.0.0"),
+            ("nunit.util", "2.2.0.0"),
+            ("nunit.mocks", "2.2.8.0"),
+         ],
+        },
+        {"name": "libmono-cecil0.5-cil",
+         "patterns": [
+            "/usr/lib/mono/gac/Mono.Cecil/0.5.*",
+         ],
+         "assemblies": [
+            ("Mono.Cecil", "0.5.*"),
+         ],
+        },
+        {"name": "libmono-db2-1.0-cil",
+         "patterns": [
+            "/usr/lib/mono/gac/IBM.Data.DB2/1.0*",
+            "/usr/lib/mono/1.0/IBM.Data.DB2.dll",
+         ],
+         "assemblies": [
+            ("IBM.Data.DB2", "1.0*"),
+         ],
+        },
+    ] + debian_mono_file_table
+
+    file_table = [e for e in file_table
+                  if not (e.has_key("name") and e["name"] in IGNORE)]
+
+    return file_table
+
+def mono_find_provides_and_requires(files, d):
+    provides = []
+    requires = []
+
+    import bb, os, commands
+
+    pathprefix = "export PATH=%s; export LANG=; export LC_ALL=; " % bb.data.getVar('PATH', d, 1)
+    for filename in files:
+        if not filename.endswith(".dll") and not filename.endswith(".exe"):
+            continue
+        if not os.path.isfile(filename) or os.path.islink(filename):
+            continue
+
+        ## Provides
+        name, version = None, None
+
+        ret, result = commands.getstatusoutput("%smonodis --assembly '%s'" % (pathprefix, filename))
+        if ret:
+            bb.error("raw_provides_and_requires: monodis --assembly '%s' failed, dependency information will be inaccurate" % filename)
+            continue
+        for line in result.splitlines():
+            if not ":" in line: continue
+            key, value = line.split(":", 1)
+            if key.strip() == "Name":
+                name = value.strip()
+            elif key.strip() == "Version":
+                version = value.strip()
+        if name is not None and version is not None:
+            if (name, version) not in provides:
+                provides.append( (name, version) )
+
+        ## Requires
+        name, version = None, None
+        ret, result = commands.getstatusoutput("%smonodis --assemblyref '%s'" % (pathprefix, filename))
+        if ret:
+            bb.error("raw_provides_and_requires: monodis --assemblyref '%s' failed, dependency information will be inaccurate" % filename)
+            continue
+        for line in result.splitlines():
+            if not "=" in line: continue
+            key, value = line.split("=", 1)
+            if ":" in key and key.split(":",1)[1].strip() == "Version":
+                version = value.strip()
+            elif key.strip() == "Name":
+                name = value.strip()
+            if name is not None and version is not None:
+                if (name, version) not in requires:
+                    requires.append( (name, version) )
+                name, version = None, None
+
+    # Remove everything from requires that's already in provides as it's not actually required
+    # to be provided externally
+    requires = [e for e in requires if not e in provides]
+    return provides, requires
+
+python mono_do_clilibs() {
+    import bb, os, re, os.path
+
+    exclude_clilibs = bb.data.getVar('EXCLUDE_FROM_CLILIBS', d, 0)
+    if exclude_clilibs:
+        bb.note("not generating clilibs")
+        return
+
+    lib_re = re.compile("^lib.*\.so")
+    libdir_re = re.compile(".*/lib$")
+
+    packages = bb.data.getVar('PACKAGES', d, 1)
+
+    workdir = bb.data.getVar('WORKDIR', d, 1)
+    if not workdir:
+        bb.error("WORKDIR not defined")
+        return
+
+    staging = bb.data.getVar('STAGING_DIR', d, 1)
+    if not staging:
+        bb.error("STAGING_DIR not defined")
+        return
+
+    pkgdest = bb.data.getVar('PKGDEST', d, 1)
+
+    clilibs_dir = os.path.join(staging, "clilibs")
+    bb.mkdirhier(clilibs_dir)
+
+    provides, requires = {}, {}
+    private_libs = bb.data.getVar('PRIVATE_CLILIBS', d, 1)
+    for pkg in packages.split():
+        bb.debug(2, "calculating clilib provides for %s" % pkg)
+
+        files_to_check = []
+        top = os.path.join(pkgdest, pkg)
+        for root, dirs, files in os.walk(top):
+            for file in files:
+                path = os.path.join(root, file)
+                if file.endswith(".exe") or file.endswith(".dll"):
+                    files_to_check.append( path )
+        provides[pkg], requires[pkg] = mono_find_provides_and_requires(files_to_check, d)
+        clilibs_file = os.path.join(clilibs_dir, pkg + ".list")
+        if os.path.exists(clilibs_file):
+            os.remove(clilibs_file)
+        if len(provides[pkg]) > 0:
+            fd = open(clilibs_file, 'w')
+            for s in provides[pkg]:
+                fd.write(" ".join(s) + '\n')
+            fd.close()
+
+    clilib_provider = {}
+    list_re = re.compile('^(.*)\.list$')
+    for file in os.listdir(clilibs_dir):
+        m = list_re.match(file)
+        if m:
+            dep_pkg = m.group(1)
+            fd = open(os.path.join(clilibs_dir, file))
+            lines = fd.readlines()
+            fd.close()
+            for l in lines:
+                clilib_provider[tuple(l.rstrip().split())] = dep_pkg
+
+    for pkg in packages.split():
+        bb.debug(2, "calculating clilib requirements for %s" % pkg)
+
+        deps = []
+        for n in requires[pkg]:
+            if n in clilib_provider.keys():
+                dep_pkg = clilib_provider[n]
+
+                if dep_pkg == pkg:
+                    continue
+
+                if not dep_pkg in deps:
+                    deps.append(dep_pkg)
+            else:
+                bb.note("Couldn't find CLI library provider for %s" % (n,))
+
+        deps_file = os.path.join(pkgdest, pkg + ".clilibdeps")
+        if os.path.exists(deps_file):
+            os.remove(deps_file)
+        if len(deps) > 0:
+            fd = open(deps_file, 'w')
+            for dep in deps:
+                fd.write(dep + '\n')
+            fd.close()
+}
+
+def mono_after_parse(d):
+    import bb
+    # Insert mono_do_clilibs into PACKAGEFUNCS
+    # Needs to be called after populate_packages, but before read_shlibdeps
+    PACKAGEFUNCS = bb.data.getVar("PACKAGEFUNCS", d, 1)
+    if PACKAGEFUNCS:
+        PACKAGEFUNCS = PACKAGEFUNCS.split()
+        if "read_shlibdeps" in PACKAGEFUNCS:
+            i = PACKAGEFUNCS.index("read_shlibdeps")
+            PACKAGEFUNCS.insert(i, "mono_do_clilibs")
+        elif "populate_packages" in PACKAGEFUNCS:
+            i = PACKAGEFUNCS.index("populate_packages")
+            PACKAGEFUNCS.insert(i+1, "mono_do_clilibs")
+        bb.data.setVar("PACKAGEFUNCS", " ".join(PACKAGEFUNCS), d)
+
+python () {
+    mono_after_parse(d)
+}
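mono_find_provides_and_requires() above derives provides/requires by scanning monodis output for Name/Version pairs. A rough sketch of the calls it issues and the kind of output its parser expects; the assembly name, path, and exact output layout are assumptions, not taken from this commit, and may differ between Mono releases:

```sh
# Provides: "Name:"/"Version:" lines, matching the split(":", 1) parsing above (illustrative output).
monodis --assembly ./nunit.core.dll
#   Assembly Table
#   Name:          nunit.core
#   Version:       2.2.0.0

# Requires: "key=value" style lines such as "1: Version=..." / "Name=...",
# which is what the split("=", 1) parsing above keys off.
monodis --assemblyref ./nunit.core.dll
#   AssemblyRef Table
#   1: Version=1.0.5000.0
#          Name=mscorlib
```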
diff --git a/classes/package.bbclass b/classes/package.bbclass
index 5233fa363c..b114049b8e 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -128,11 +128,12 @@ python () {
             deps += " %s:do_populate_staging" % dep
         bb.data.setVarFlag('do_package', 'depends', deps, d)
 
+        deps = (bb.data.getVarFlag('do_package', 'deptask', d) or "").split()
         # shlibs requires any DEPENDS to have already packaged for the *.list files
-        bb.data.setVarFlag('do_package', 'deptask', 'do_package', d)
+        deps.append("do_package")
+        bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d)
 }
 
-
 def runstrip(file, d):
     # Function to strip a single file, called from populate_packages below
     # A working 'file' (one which works on the target architecture)
@@ -481,6 +482,8 @@ python populate_packages () {
 populate_packages[dirs] = "${D}"
 
 python emit_pkgdata() {
+    from glob import glob
+
     def write_if_exists(f, pkg, var):
         def encode(str):
             import codecs
@@ -492,14 +495,14 @@ python emit_pkgdata() {
         f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
 
     packages = bb.data.getVar('PACKAGES', d, 1)
-    if not packages:
-        return
 
     data_file = bb.data.expand("${PKGDATA_DIR}/${PN}", d)
     f = open(data_file, 'w')
     f.write("PACKAGES: %s\n" % packages)
     f.close()
 
+    workdir = bb.data.getVar('WORKDIR', d, 1)
+
     for pkg in packages.split():
         subdata_file = bb.data.expand("${PKGDATA_DIR}/runtime/%s" % pkg, d)
         sf = open(subdata_file, 'w')
@@ -519,6 +522,13 @@ python emit_pkgdata() {
         write_if_exists(sf, pkg, 'pkg_preinst')
         write_if_exists(sf, pkg, 'pkg_prerm')
         sf.close()
+
+        allow_empty = bb.data.getVar('ALLOW_EMPTY_%s' % pkg, d, 1)
+        root = "%s/install/%s" % (workdir, pkg)
+        os.chdir(root)
+        g = glob('*')
+        if g or allow_empty == "1":
+            file(bb.data.expand('${PKGDATA_DIR}/runtime/%s.packaged' % pkg, d), 'w').close()
 }
 
 emit_pkgdata[dirs] = "${PKGDATA_DIR}/runtime"
@@ -540,9 +550,6 @@ python package_do_shlibs() {
     libdir_re = re.compile(".*/lib$")
 
     packages = bb.data.getVar('PACKAGES', d, 1)
-    if not packages:
-        bb.debug(1, "no packages to build; not calculating shlibs")
-        return
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
@@ -678,9 +685,6 @@ python package_do_pkgconfig () {
     import re, os
 
     packages = bb.data.getVar('PACKAGES', d, 1)
-    if not packages:
-        bb.debug(1, "no packages to build; not calculating pkgconfig dependencies")
-        return
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
@@ -786,23 +790,17 @@ python package_do_pkgconfig () {
 }
 
 python read_shlibdeps () {
-    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
+    packages = bb.data.getVar('PACKAGES', d, 1).split()
     for pkg in packages:
         rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
-        shlibsfile = bb.data.expand("${PKGDEST}/" + pkg + ".shlibdeps", d)
-        if os.access(shlibsfile, os.R_OK):
-            fd = file(shlibsfile)
-            lines = fd.readlines()
-            fd.close()
-            for l in lines:
-                rdepends.append(l.rstrip())
-        pcfile = bb.data.expand("${PKGDEST}/" + pkg + ".pcdeps", d)
-        if os.access(pcfile, os.R_OK):
-            fd = file(pcfile)
-            lines = fd.readlines()
-            fd.close()
-            for l in lines:
-                rdepends.append(l.rstrip())
+        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
+            depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
+            if os.access(depsfile, os.R_OK):
+                fd = file(depsfile)
+                lines = fd.readlines()
+                fd.close()
+                for l in lines:
+                    rdepends.append(l.rstrip())
         bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
 }
 
@@ -896,6 +894,11 @@ PACKAGEFUNCS ?= "package_do_split_locales \
         emit_pkgdata"
 
 python package_do_package () {
+    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
+    if len(packages) < 1:
+        bb.debug(1, "No packages to build, skipping do_package")
+        return
+
     for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
         bb.build.exec_func(f, d)
 }
diff --git a/classes/package_deb.bbclass b/classes/package_deb.bbclass
index 2ab537f174..b85ffe254f 100644
--- a/classes/package_deb.bbclass
+++ b/classes/package_deb.bbclass
@@ -8,6 +8,14 @@ BOOTSTRAP_EXTRA_RDEPENDS += "dpkg"
 DISTRO_EXTRA_RDEPENDS += "dpkg"
 IMAGE_PKGTYPE ?= "deb"
 
+# Map TARGET_ARCH to Debian's ideas about architectures
+DPKG_ARCH ?= "${TARGET_ARCH}"
+DPKG_ARCH_x86 ?= "i386"
+DPKG_ARCH_i486 ?= "i386"
+DPKG_ARCH_i586 ?= "i386"
+DPKG_ARCH_i686 ?= "i386"
+DPKG_ARCH_pentium ?= "i386"
+
 python package_deb_fn () {
     from bb import data
     bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
@@ -38,7 +46,7 @@ python do_package_deb_install () {
         if (exitstatus != 0 ):
             raise bb.build.FuncFailed(output)
 
-    f = open(os.path.join(tmpdir, "stamps", "do_packages"), "w")
+    f = open(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"), "w")
     f.close()
 
     # NOTE: this env stuff is racy at best, we need something more capable
@@ -86,9 +94,9 @@ python do_package_deb () {
         return
 
     tmpdir = bb.data.getVar('TMPDIR', d, 1)
-    # Invalidate the packages file
-    if os.access(os.path.join(tmpdir, "stamps", "do_packages"),os.R_OK):
-        os.unlink(os.path.join(tmpdir, "stamps", "do_packages"))
+
+    if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
+        os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
 
     if packages == []:
         bb.debug(1, "No packages; nothing to do")
@@ -141,6 +149,7 @@ python do_package_deb () {
             note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
             unlockfile(lf)
             continue
+
         controldir = os.path.join(root, 'DEBIAN')
         bb.mkdirhier(controldir)
         os.chmod(controldir, 0755)
@@ -161,7 +170,7 @@ python do_package_deb () {
         fields.append(["Section: %s\n", ['SECTION']])
         fields.append(["Priority: %s\n", ['PRIORITY']])
         fields.append(["Maintainer: %s\n", ['MAINTAINER']])
-        fields.append(["Architecture: %s\n", ['TARGET_ARCH']])
+        fields.append(["Architecture: %s\n", ['DPKG_ARCH']])
         fields.append(["OE: %s\n", ['PN']])
         fields.append(["Homepage: %s\n", ['HOMEPAGE']])
 
@@ -175,7 +184,7 @@ python do_package_deb () {
                 data = bb.data.getVar(i, d, 1)
                 if data is None:
                     raise KeyError(f)
-                if i == 'TARGET_ARCH' and bb.data.getVar('PACKAGE_ARCH', d, 1) == 'all':
+                if i == 'DPKG_ARCH' and bb.data.getVar('PACKAGE_ARCH', d, 1) == 'all':
                     data = 'all'
                 l2.append(data)
             return l2
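Since the Architecture control field is now filled from DPKG_ARCH, the ix86 TARGET_ARCH variants are intended to map onto Debian's i386. One way to spot-check a built package, assuming dpkg-deb is available on the host; the deploy path and file name below are made up for illustration:

```sh
# Print the Architecture control field of a freshly built .deb (path/name are hypothetical).
dpkg-deb -f tmp/deploy/deb/example_1.0-r0_i386.deb Architecture
# Expected: "i386" for i486/i586/i686/pentium targets, "all" when PACKAGE_ARCH is "all".
```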
"stamps", "do_packages"),"w") + f = open(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"),"w") f.close() ret = os.system('ipkg-cl -o %s -f %s update' % (rootfs, conffile)) @@ -143,9 +143,9 @@ python do_package_ipk () { return tmpdir = bb.data.getVar('TMPDIR', d, 1) - # Invalidate the packages file - if os.access(os.path.join(tmpdir, "stamps", "do_packages"),os.R_OK): - os.unlink(os.path.join(tmpdir, "stamps", "do_packages")) + + if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK): + os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN")) if packages == []: bb.debug(1, "No packages; nothing to do") |