author     Ross Burton <ross.burton@intel.com>                  2015-11-04 20:24:02 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2015-11-24 15:55:27 +0000
commit     2d91290ab5608dd1297d1c26ab807fc4574a8a6b (patch)
tree       d9b65cb22d656b4ec65ea8373c7ce5e815930c44 /meta/lib
parent     ab87d3649c39326938d82d623efafb76905f770d (diff)
lib/oe/distro_check: don't set empty proxy keys
If the proxies dictionary has a proxy set to None, urllib will throw an
exception instead of not using a proxy (abridged stack):

 File: '/home/ross/Yocto/poky/meta/lib/oe/distro_check.py', lineno: 43, function: get_links_from_url
 *** 0043:    with create_socket(url,d) as sock:
     0044:        webpage = sock.read()
 File: '/home/ross/Yocto/poky/meta/lib/oe/distro_check.py', lineno: 5, function: create_socket
     0003:def create_socket(url, d):
     0004:    import urllib
 *** 0005:    socket = urllib.urlopen(url, proxies=get_proxies(d))
 File: '/usr/lib/python2.7/urllib.py', lineno: 87, function: urlopen
     0086:    if data is None:
 *** 0087:        return opener.open(url)
 File: '/usr/lib/python2.7/urllib.py', lineno: 203, function: open
     0201:        else:
     0202:            proxy = None
 *** 0203:        name = 'open_' + urltype
     0204:        self.type = urltype
     0205:        name = name.replace('-', '_')
 Exception: TypeError: cannot concatenate 'str' and 'NoneType' objects

Filter out unset values so that the dictionary only has valid
assignments in it.

Signed-off-by: Ross Burton <ross.burton@intel.com>
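The difference between the old and new get_proxies() is easiest to see against a stub datastore. The sketch below is illustrative only: FakeData and its values are assumptions standing in for BitBake's real datastore (d), whose getVar() returns None for variables that are not set, as described above.

# Illustrative sketch only: FakeData stands in for the BitBake datastore 'd';
# its getVar(name, expand) returns None for unset variables, which is the
# situation that triggered the traceback above.
class FakeData:
    def __init__(self, values):
        self.values = values
    def getVar(self, name, expand=True):
        return self.values.get(name)

d = FakeData({'http_proxy': 'http://proxy.example.com:8080/'})
keys = ['http', 'https', 'ftp', 'ftps', 'no', 'all']

# Old behaviour: every key is present, most mapped to None.
old = dict(zip(keys, [d.getVar(key + '_proxy', True) for key in keys]))
# -> {'http': 'http://proxy.example.com:8080/', 'https': None, 'ftp': None, ...}

# New behaviour: unset values are filtered out, leaving only real proxies.
new = {}
for key in keys:
    proxy = d.getVar(key + '_proxy', True)
    if proxy:
        new[key] = proxy
# -> {'http': 'http://proxy.example.com:8080/'}

With the filtered dictionary, urllib either uses the configured proxy for a scheme or, when the key is absent, connects directly instead of tripping over a None value.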
Diffstat (limited to 'meta/lib')
-rw-r--r--  meta/lib/oe/distro_check.py  10
1 files changed, 6 insertions, 4 deletions
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index f92cd2e423..8655a6fc14 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -9,10 +9,12 @@ def create_socket(url, d):
         socket.close()
 
 def get_proxies(d):
-    import os
-    proxykeys = ['http', 'https', 'ftp', 'ftps', 'no', 'all']
-    proxyvalues = map(lambda key: d.getVar(key+'_proxy', True), proxykeys)
-    return dict(zip(proxykeys, proxyvalues))
+    proxies = {}
+    for key in ['http', 'https', 'ftp', 'ftps', 'no', 'all']:
+        proxy = d.getVar(key + '_proxy', True)
+        if proxy:
+            proxies[key] = proxy
+    return proxies
 
 def get_links_from_url(url, d):
     "Return all the href links found on the web location"