Diffstat (limited to 'classes')
-rw-r--r-- | classes/angstrom-mirrors.bbclass | 2
-rw-r--r-- | classes/base.bbclass | 23
-rw-r--r-- | classes/insane.bbclass | 2
-rw-r--r-- | classes/mono.bbclass | 177
-rw-r--r-- | classes/package.bbclass | 22
-rw-r--r-- | classes/rootfs_deb.bbclass | 32
-rw-r--r-- | classes/rootfs_ipk.bbclass | 6
-rw-r--r-- | classes/sdk.bbclass | 3
8 files changed, 225 insertions, 42 deletions
diff --git a/classes/angstrom-mirrors.bbclass b/classes/angstrom-mirrors.bbclass
index cd27a8545b..e44a78c983 100644
--- a/classes/angstrom-mirrors.bbclass
+++ b/classes/angstrom-mirrors.bbclass
@@ -1,4 +1,4 @@
 MIRRORS_append () {
 ftp://.*/.*/ http://www.angstrom-distribution.org/unstable/sources/
-http://.*/.*/ http://www.angstrom-distribution.org/unstable/sources/
+https?$://.*/.*/ http://www.angstrom-distribution.org/unstable/sources/
 }
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 999d409914..2cf205fbe9 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -22,6 +22,7 @@ def base_chk_load_parser(config_path):
 
 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
     import os, bb
+    no_checksum = False
     # Try PN-PV-SRC_URI first and then try PN-SRC_URI
     # we rely on the get method to create errors
     pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
@@ -36,8 +37,7 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
         md5 = parser.get(src_uri, "md5")
         sha256 = parser.get(src_uri, "sha256")
     else:
-        return False
-        #raise Exception("Can not find a section for '%s' '%s' and '%s'" % (pn,pv,src_uri))
+        no_checksum = True
 
     # md5 and sha256 should be valid now
     if not os.path.exists(localpath):
@@ -60,6 +60,19 @@ def base_chk_file(parser, pn, pv, src_uri, localpath, data):
     except OSError:
         raise Exception("Executing shasum failed")
 
+    if no_checksum == True:  # we do not have conf/checksums.ini entry
+        try:
+            file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
+        except:
+            return False
+
+        if not file:
+            raise Exception("Creating checksums.ini failed")
+
+        file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
+        file.close()
+        return False
+
     if not md5 == md5data:
         bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
         raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
@@ -485,11 +498,9 @@ python base_do_fetch() {
             (type,host,path,_,_,_) = bb.decodeurl(url)
             uri = "%s://%s%s" % (type,host,path)
             try:
-                if not base_chk_file(parser, pn, pv,uri, localpath, d):
-                    if type != "file":
+                if type == "http" or type == "https" or type == "ftp" or type == "ftps":
+                    if not base_chk_file(parser, pn, pv,uri, localpath, d):
                         bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
-                    else:
-                        bb.debug("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
             except Exception:
                 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
 }
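Note (base.bbclass): the fallback added above appends a ready-to-paste section to ${TMPDIR}/checksums.ini whenever a SRC_URI has no entry in conf/checksums.ini. A minimal standalone sketch of that entry format, computing the digests with hashlib instead of shelling out to md5sum/shasum as the class does (the URI and path below are made up):

import hashlib

def checksum_ini_entry(src_uri, localpath):
    # Hash the fetched file once for both digests.
    md5, sha256 = hashlib.md5(), hashlib.sha256()
    with open(localpath, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            md5.update(chunk)
            sha256.update(chunk)
    # Same "[SRC_URI]" section layout that base_chk_file() parses and appends.
    return "[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5.hexdigest(), sha256.hexdigest())

# Hypothetical usage:
# print(checksum_ini_entry("http://example.org/foo-1.0.tar.gz", "downloads/foo-1.0.tar.gz"))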
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index 83f8c43bba..3fb2d04a81 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -42,7 +42,7 @@ def package_qa_get_machine_dict():
             "i486": ( 3, 0, 0, True, True),
             "i586": ( 3, 0, 0, True, True),
             "i686": ( 3, 0, 0, True, True),
-            "x64_64": (62, 0, 0, True, False),
+            "x86_64": (62, 0, 0, True, False),
             "ia64": (50, 0, 0, True, False),
             "alpha": (36902, 0, 0, True, False),
             "hppa": (15, 3, 0, False, True),
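Note (insane.bbclass): the change fixes the "x64_64" typo so x86_64 binaries match the QA machine table; the first element of each tuple is the ELF e_machine value the check expects (62 is EM_X86_64). A small sketch, outside the class, of reading that field from a binary (path is hypothetical):

import struct

def elf_machine(path):
    with open(path, "rb") as f:
        header = f.read(20)
    # e_ident[EI_DATA] selects byte order: 1 = little endian, 2 = big endian.
    endian = "<" if header[5:6] == b"\x01" else ">"
    # e_machine is the 16-bit field at offset 18, after e_ident[16] and e_type.
    return struct.unpack(endian + "H", header[18:20])[0]

# elf_machine("/usr/bin/true") returns 62 on a typical x86_64 system.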
diff --git a/classes/mono.bbclass b/classes/mono.bbclass
new file mode 100644
index 0000000000..ccb16d9acc
--- /dev/null
+++ b/classes/mono.bbclass
@@ -0,0 +1,177 @@
+def mono_find_provides_and_requires(files, d):
+    provides = []
+    requires = []
+
+    import bb, os, commands
+
+    pathprefix = "export PATH=%s; export LANG=; export LC_ALL=; " % bb.data.getVar('PATH', d, 1)
+    for filename in files:
+        if not filename.endswith(".dll") and not filename.endswith(".exe"):
+            continue
+        if not os.path.isfile(filename) or os.path.islink(filename):
+            continue
+
+        ## Provides
+        name, version = None, None
+
+        ret, result = commands.getstatusoutput("%smonodis --assembly '%s'" % (pathprefix, filename))
+        if ret:
+            bb.error("raw_provides_and_requires: monodis --assembly '%s' failed, dependency information will be inaccurate" % filename)
+            continue
+        for line in result.splitlines():
+            if not ":" in line: continue
+            key, value = line.split(":", 1)
+            if key.strip() == "Name":
+                name = value.strip()
+            elif key.strip() == "Version":
+                version = value.strip()
+        if name is not None and version is not None:
+            if (name, version) not in provides:
+                provides.append( (name, version) )
+
+        ## Requires
+        name, version = None, None
+        ret, result = commands.getstatusoutput("%smonodis --assemblyref '%s'" % (pathprefix, filename))
+        if ret:
+            bb.error("raw_provides_and_requires: monodis --assemblyref '%s' failed, dependency information will be inaccurate" % filename)
+            continue
+        for line in result.splitlines():
+            if not "=" in line: continue
+            key, value = line.split("=", 1)
+            if ":" in key and key.split(":",1)[1].strip() == "Version":
+                version = value.strip()
+            elif key.strip() == "Name":
+                name = value.strip()
+            if name is not None and version is not None:
+                if (name, version) not in requires:
+                    requires.append( (name, version) )
+                name, version = None, None
+
+    # Remove everything from requires that's already in provides as it's not actually required
+    # to be provided externally
+    requires = [e for e in requires if not e in provides]
+    return provides, requires
+
+python mono_do_clilibs() {
+    import bb, os, re, os.path
+
+    exclude_clilibs = bb.data.getVar('EXCLUDE_FROM_CLILIBS', d, 0)
+    if exclude_clilibs:
+        bb.note("not generating clilibs")
+        return
+
+    lib_re = re.compile("^lib.*\.so")
+    libdir_re = re.compile(".*/lib$")
+
+    packages = bb.data.getVar('PACKAGES', d, 1)
+
+    workdir = bb.data.getVar('WORKDIR', d, 1)
+    if not workdir:
+        bb.error("WORKDIR not defined")
+        return
+
+    staging = bb.data.getVar('STAGING_DIR', d, 1)
+    if not staging:
+        bb.error("STAGING_DIR not defined")
+        return
+
+    pkgdest = bb.data.getVar('PKGDEST', d, 1)
+
+    clilibs_dir = os.path.join(staging, "clilibs")
+    bb.mkdirhier(clilibs_dir)
+
+    provides, requires = {}, {}
+    private_libs = bb.data.getVar('PRIVATE_CLILIBS', d, 1)
+    for pkg in packages.split():
+        bb.debug(2, "calculating clilib provides for %s" % pkg)
+
+        files_to_check = []
+        top = os.path.join(pkgdest, pkg)
+        for root, dirs, files in os.walk(top):
+            for file in files:
+                path = os.path.join(root, file)
+                if file.endswith(".exe") or file.endswith(".dll"):
+                    files_to_check.append( path )
+        provides[pkg], requires[pkg] = mono_find_provides_and_requires(files_to_check, d)
+        clilibs_file = os.path.join(clilibs_dir, pkg + ".list")
+        if os.path.exists(clilibs_file):
+            os.remove(clilibs_file)
+        if len(provides[pkg]) > 0:
+            fd = open(clilibs_file, 'w')
+            for s in provides[pkg]:
+                fd.write(" ".join(s) + '\n')
+            fd.close()
+
+    clilib_provider = {}
+    list_re = re.compile('^(.*)\.list$')
+    for file in os.listdir(clilibs_dir):
+        m = list_re.match(file)
+        if m:
+            dep_pkg = m.group(1)
+            fd = open(os.path.join(clilibs_dir, file))
+            lines = fd.readlines()
+            fd.close()
+            for l in lines:
+                clilib_provider[tuple(l.rstrip().split())] = dep_pkg
+
+    for pkg in packages.split():
+        bb.debug(2, "calculating clilib requirements for %s" % pkg)
+
+        deps = []
+        for n in requires[pkg]:
+            if n in clilib_provider.keys():
+                dep_pkg = clilib_provider[n]
+
+                if dep_pkg == pkg:
+                    continue
+
+                if not dep_pkg in deps:
+                    deps.append(dep_pkg)
+            else:
+                bb.note("Couldn't find CLI library provider for %s" % (n,))
+
+        deps_file = os.path.join(pkgdest, pkg + ".clilibdeps")
+        if os.path.exists(deps_file):
+            os.remove(deps_file)
+        if len(deps) > 0:
+            fd = open(deps_file, 'w')
+            for dep in deps:
+                fd.write(dep + '\n')
+            fd.close()
+}
+
+do_mono_stage() {
+    if [ "${INHIBIT_MONO_STAGE}" = "1" ]
+    then
+        return
+    fi
+
+    for package in ${PACKAGES}; do
+        if [ -d "${PKGDEST}/${package}/${libdir}" ]; then
+            cd "${PKGDEST}/${package}/${libdir}"
+            for file in `find . -iname "*.dll"`; do
+                cp --parent -fpPR "${file}" "${STAGING_LIBDIR}/"
+            done
+        fi
+    done
+}
+addtask mono_stage after do_package before do_populate_staging
+
+def mono_after_parse(d):
+    import bb
+    # Insert mono_do_clilibs into PACKAGEFUNCS
+    # Needs to be called after populate_packages, but before read_shlibdeps
+    PACKAGEFUNCS = bb.data.getVar("PACKAGEFUNCS", d, 1)
+    if PACKAGEFUNCS:
+        PACKAGEFUNCS = PACKAGEFUNCS.split()
+        if "read_shlibdeps" in PACKAGEFUNCS:
+            i = PACKAGEFUNCS.index("read_shlibdeps")
+            PACKAGEFUNCS.insert(i, "mono_do_clilibs")
+        elif "populate_packages" in PACKAGEFUNCS:
+            i = PACKAGEFUNCS.index("populate_packages")
+            PACKAGEFUNCS.insert(i+1, "mono_do_clilibs")
+        bb.data.setVar("PACKAGEFUNCS", " ".join(PACKAGEFUNCS), d)
+
+python () {
+    mono_after_parse(d)
+}
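Note (mono.bbclass): mono_find_provides_and_requires() above shells out to monodis and keys on "Name:"/"Version:" lines in its output. A standalone sketch of just that parsing step, with a made-up output sample (real monodis output has more fields, which the parser ignores):

def parse_assembly_info(output):
    # Pull the assembly (name, version) pair out of "Key: value" lines,
    # mirroring what mono_find_provides_and_requires() does per file.
    name = version = None
    for line in output.splitlines():
        if ":" not in line:
            continue
        key, value = line.split(":", 1)
        if key.strip() == "Name":
            name = value.strip()
        elif key.strip() == "Version":
            version = value.strip()
    return name, version

sample = "Assembly Table\nName:          Mono.Posix\nVersion:       2.0.0.0\n"  # invented sample
assert parse_assembly_info(sample) == ("Mono.Posix", "2.0.0.0")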
-iname "*.dll"`; do + cp --parent -fpPR "${file}" "${STAGING_LIBDIR}/" + done + fi + done +} +addtask mono_stage after do_package before do_populate_staging + +def mono_after_parse(d): + import bb + # Insert mono_do_clilibs into PACKAGEFUNCS + # Needs to be called after populate_packages, but before read_shlibdeps + PACKAGEFUNCS = bb.data.getVar("PACKAGEFUNCS", d, 1) + if PACKAGEFUNCS: + PACKAGEFUNCS = PACKAGEFUNCS.split() + if "read_shlibdeps" in PACKAGEFUNCS: + i = PACKAGEFUNCS.index("read_shlibdeps") + PACKAGEFUNCS.insert(i, "mono_do_clilibs") + elif "populate_packages" in PACKAGEFUNCS: + i = PACKAGEFUNCS.index("populate_packages") + PACKAGEFUNCS.insert(i+1, "mono_do_clilibs") + bb.data.setVar("PACKAGEFUNCS", " ".join(PACKAGEFUNCS), d) + +python () { + mono_after_parse(d) +} diff --git a/classes/package.bbclass b/classes/package.bbclass index 516cae823d..b114049b8e 100644 --- a/classes/package.bbclass +++ b/classes/package.bbclass @@ -793,20 +793,14 @@ python read_shlibdeps () { packages = bb.data.getVar('PACKAGES', d, 1).split() for pkg in packages: rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "") - shlibsfile = bb.data.expand("${PKGDEST}/" + pkg + ".shlibdeps", d) - if os.access(shlibsfile, os.R_OK): - fd = file(shlibsfile) - lines = fd.readlines() - fd.close() - for l in lines: - rdepends.append(l.rstrip()) - pcfile = bb.data.expand("${PKGDEST}/" + pkg + ".pcdeps", d) - if os.access(pcfile, os.R_OK): - fd = file(pcfile) - lines = fd.readlines() - fd.close() - for l in lines: - rdepends.append(l.rstrip()) + for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": + depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) + if os.access(depsfile, os.R_OK): + fd = file(depsfile) + lines = fd.readlines() + fd.close() + for l in lines: + rdepends.append(l.rstrip()) bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d) } diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass index d3e5832251..5571f699e7 100644 --- a/classes/rootfs_deb.bbclass +++ b/classes/rootfs_deb.bbclass @@ -10,8 +10,8 @@ fakeroot rootfs_deb_do_rootfs () { mkdir -p ${IMAGE_ROOTFS}/var/dpkg/info mkdir -p ${IMAGE_ROOTFS}/var/dpkg/updates - rm -f ${STAGING_DIR}/etc/apt/sources.list.rev - rm -f ${STAGING_DIR}/etc/apt/preferences + rm -f ${STAGING_DIR}${sysconfdir}/apt/sources.list.rev + rm -f ${STAGING_DIR}${sysconfdir}/apt/preferences > ${IMAGE_ROOTFS}/var/dpkg/status > ${IMAGE_ROOTFS}/var/dpkg/available # > ${STAGING_DIR}/var/dpkg/status @@ -28,21 +28,21 @@ fakeroot rootfs_deb_do_rootfs () { apt-ftparchive packages . 
diff --git a/classes/rootfs_deb.bbclass b/classes/rootfs_deb.bbclass
index d3e5832251..5571f699e7 100644
--- a/classes/rootfs_deb.bbclass
+++ b/classes/rootfs_deb.bbclass
@@ -10,8 +10,8 @@ fakeroot rootfs_deb_do_rootfs () {
     mkdir -p ${IMAGE_ROOTFS}/var/dpkg/info
     mkdir -p ${IMAGE_ROOTFS}/var/dpkg/updates
 
-    rm -f ${STAGING_DIR}/etc/apt/sources.list.rev
-    rm -f ${STAGING_DIR}/etc/apt/preferences
+    rm -f ${STAGING_DIR}${sysconfdir}/apt/sources.list.rev
+    rm -f ${STAGING_DIR}${sysconfdir}/apt/preferences
     > ${IMAGE_ROOTFS}/var/dpkg/status
     > ${IMAGE_ROOTFS}/var/dpkg/available
 #   > ${STAGING_DIR}/var/dpkg/status
@@ -28,21 +28,21 @@ fakeroot rootfs_deb_do_rootfs () {
         apt-ftparchive packages . | bzip2 > Packages.bz2
         echo "Label: $arch" > Release
 
-        echo "deb file:${DEPLOY_DIR_DEB}/$arch/ ./" >> ${STAGING_DIR}/etc/apt/sources.list.rev
+        echo "deb file:${DEPLOY_DIR_DEB}/$arch/ ./" >> ${STAGING_DIR}${sysconfdir}/apt/sources.list.rev
         (echo "Package: *"
         echo "Pin: release l=$arch"
         echo "Pin-Priority: $((800 + $priority))"
-        echo) >> ${STAGING_DIR}/etc/apt/preferences
+        echo) >> ${STAGING_DIR}${sysconfdir}/apt/preferences
         priority=$(expr $priority + 5)
     done
 
-    tac ${STAGING_DIR}/etc/apt/sources.list.rev > ${STAGING_DIR}/etc/apt/sources.list
+    tac ${STAGING_DIR}${sysconfdir}/apt/sources.list.rev > ${STAGING_DIR}${sysconfdir}/apt/sources.list
 
-    cat "${STAGING_DIR}/etc/apt/apt.conf.sample" \
+    cat "${STAGING_DIR}${sysconfdir}/apt/apt.conf.sample" \
         | sed -e 's#Architecture ".*";#Architecture "${TARGET_ARCH}";#' \
-        > "${STAGING_DIR}/etc/apt/apt-rootfs.conf"
+        > "${STAGING_DIR}${sysconfdir}/apt/apt-rootfs.conf"
 
-    export APT_CONFIG="${STAGING_DIR}/etc/apt/apt-rootfs.conf"
+    export APT_CONFIG="${STAGING_DIR}${sysconfdir}/apt/apt-rootfs.conf"
     export D=${IMAGE_ROOTFS}
     export OFFLINE_ROOT=${IMAGE_ROOTFS}
     export IPKG_OFFLINE_ROOT=${IMAGE_ROOTFS}
@@ -112,17 +112,17 @@ fakeroot rootfs_deb_do_rootfs () {
     if [ -e ${IMAGE_ROOTFS}/usr/dpkg/alternatives ]; then
         rmdir ${IMAGE_ROOTFS}/usr/dpkg/alternatives
     fi
 
-    if [ ! -e ${IMAGE_ROOTFS}/usr/lib/ipkg ] ; then
-        mkdir -p ${IMAGE_ROOTFS}/usr/lib/ipkg
+    if [ ! -e ${IMAGE_ROOTFS}${libdir}/ipkg ] ; then
+        mkdir -p ${IMAGE_ROOTFS}${libdir}/ipkg
     fi
-    if [ ! -e ${IMAGE_ROOTFS}/etc/ipkg ] ; then
-        mkdir -p ${IMAGE_ROOTFS}/etc/ipkg
+    if [ ! -e ${IMAGE_ROOTFS}${sysconfdir}/ipkg ] ; then
+        mkdir -p ${IMAGE_ROOTFS}${sysconfdir}/ipkg
     fi
 
-    ln -sf /usr/lib/ipkg/alternatives ${IMAGE_ROOTFS}/usr/dpkg/alternatives
-    ln -sf /usr/dpkg/info ${IMAGE_ROOTFS}/usr/lib/ipkg/info
-    ln -sf /usr/dpkg/status ${IMAGE_ROOTFS}/usr/lib/ipkg/status
+    ln -sf ${libdir}/ipkg/alternatives ${IMAGE_ROOTFS}/usr/dpkg/alternatives
+    ln -sf /usr/dpkg/info ${IMAGE_ROOTFS}${libdir}/ipkg/info
+    ln -sf /usr/dpkg/status ${IMAGE_ROOTFS}${libdir}/ipkg/status
 
     ${ROOTFS_POSTPROCESS_COMMAND}
 
@@ -150,6 +150,6 @@ rootfs_deb_log_check() {
 }
 
 remove_packaging_data_files() {
-    rm -rf ${IMAGE_ROOTFS}/usr/lib/ipkg/
+    rm -rf ${IMAGE_ROOTFS}${libdir}/ipkg/
     rm -rf ${IMAGE_ROOTFS}/usr/dpkg/
 }
diff --git a/classes/rootfs_ipk.bbclass b/classes/rootfs_ipk.bbclass
index 6babee30a2..c6e2099e7c 100644
--- a/classes/rootfs_ipk.bbclass
+++ b/classes/rootfs_ipk.bbclass
@@ -39,8 +39,8 @@ fakeroot rootfs_ipk_do_rootfs () {
     export D=${IMAGE_ROOTFS}
     export OFFLINE_ROOT=${IMAGE_ROOTFS}
     export IPKG_OFFLINE_ROOT=${IMAGE_ROOTFS}
-    mkdir -p ${IMAGE_ROOTFS}/etc/ipkg/
-    grep "^arch" ${IPKGCONF_TARGET} >${IMAGE_ROOTFS}/etc/ipkg/arch.conf
+    mkdir -p ${IMAGE_ROOTFS}${sysconfdir}/ipkg/
+    grep "^arch" ${IPKGCONF_TARGET} >${IMAGE_ROOTFS}${sysconfdir}/ipkg/arch.conf
 
     for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.preinst; do
         if [ -f $i ] && ! sh $i; then
@@ -82,5 +82,5 @@ rootfs_ipk_log_check() {
 }
 
 remove_packaging_data_files() {
-    rm -rf ${IMAGE_ROOTFS}/usr/lib/ipkg/
+    rm -rf ${IMAGE_ROOTFS}${libdir}/ipkg/
 }
diff --git a/classes/sdk.bbclass b/classes/sdk.bbclass
index 8067798000..441da77bd0 100644
--- a/classes/sdk.bbclass
+++ b/classes/sdk.bbclass
@@ -21,6 +21,7 @@ exec_prefix = "${prefix}"
 base_prefix = "${exec_prefix}"
 
 FILES_${PN} = "${prefix}"
-FILES_${PN}-dbg += "${prefix}/bin/.debug \
+FILES_${PN}-dbg += "${prefix}/.debug \
+                    ${prefix}/bin/.debug \
                     ${prefix}/sbin/.debug \
                    "
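Note (rootfs_deb.bbclass): the per-arch loop in the hunk above pins each package-architecture feed with its own APT priority, offset from 800 and stepped by 5 per feed, so feeds later in the loop get a higher pin priority. A small sketch of the stanzas that loop emits (the arch list and the starting value of $priority are assumptions; only the 800 offset and the +5 step appear in the hunk):

archs = ["all", "armv5te", "qemuarm"]  # hypothetical PACKAGE_ARCHS order
priority = 1                           # assumed starting value
for arch in archs:
    # One "Package: *" stanza per feed, appended to ${STAGING_DIR}${sysconfdir}/apt/preferences.
    print("Package: *\nPin: release l=%s\nPin-Priority: %d\n" % (arch, 800 + priority))
    priority += 5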