From 3069c0b2588c9e88a4fa2fd4d37356410d364410 Mon Sep 17 00:00:00 2001
From: Chris Larson
Date: Thu, 18 Nov 2010 19:51:51 -0700
Subject: Revert "persist_data: cache connection and use cursor"

Caching the database connection can cause serious issues if it results
in multiple processes (e.g. multiple tasks) simultaneously using the
same connection.

This reverts commit 8a6876752b90efd81d92f0947bfc9527d8260969.

(Bitbake rev: 60b9b18eafad5ac46c7cf1048d749d673c2ee0ad)

Signed-off-by: Chris Larson
Signed-off-by: Richard Purdie
---
 bitbake/lib/bb/fetch/__init__.py |  9 ++++-----
 bitbake/lib/bb/persist_data.py   | 11 +++--------
 2 files changed, 7 insertions(+), 13 deletions(-)

diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 668b788698..d8f5f167fc 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -144,14 +144,13 @@ def uri_replace(uri, uri_find, uri_replace, d):
 methods = []
 urldata_cache = {}
 saved_headrevs = {}
-persistent_database_connection = {}
 
 def fetcher_init(d):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
     if srcrev_policy == "cache":
@@ -180,7 +179,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
 
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     data = pd.getKeyValues("BB_URI_HEADREVS")
     data2 = bb.fetch.saved_headrevs
 
@@ -758,7 +757,7 @@ class Fetch(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         rev = pd.getValue("BB_URI_HEADREVS", key)
         if rev != None:
@@ -775,7 +774,7 @@ class Fetch(object):
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         latest_rev = self._build_revision(url, ud, d)
 
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 76bff16658..9558e71283 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -47,10 +47,7 @@ class PersistData:
 
     Why sqlite? It handles all the locking issues for us.
     """
-    def __init__(self, d, persistent_database_connection):
-        if "connection" in persistent_database_connection:
-            self.cursor = persistent_database_connection["connection"].cursor()
-            return
+    def __init__(self, d):
         self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
         if self.cachedir in [None, '']:
             bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
@@ -62,9 +59,7 @@ class PersistData:
         self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
         logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
 
-        connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
-        persistent_database_connection["connection"] = connection
-        self.cursor = persistent_database_connection["connection"].cursor()
+        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
 
     def addDomain(self, domain):
         """
@@ -127,7 +122,7 @@ class PersistData:
         count = 0
         while True:
             try:
-                return self.cursor.execute(*query)
+                return self.connection.execute(*query)
             except sqlite3.OperationalError as e:
                 if 'database is locked' in str(e) and count < 500:
                     count = count + 1
--
cgit v1.2.3