diff options
| author | Richard Purdie <richard@openedhand.com> | 2006-05-09 15:44:08 +0000 |
|---|---|---|
| committer | Richard Purdie <richard@openedhand.com> | 2006-05-09 15:44:08 +0000 |
| commit | 27dba1e6247ae48349aee1bce141a9eefaafaad1 (patch) | |
| tree | 822235005ccbd2707f7874ad680dedc4df36760c /bitbake/lib | |
| parent | ed234aca98d0867c7b32801fc63820b19cf67df9 (diff) | |
| download | openembedded-core-27dba1e6247ae48349aee1bce141a9eefaafaad1.tar.gz openembedded-core-27dba1e6247ae48349aee1bce141a9eefaafaad1.tar.bz2 openembedded-core-27dba1e6247ae48349aee1bce141a9eefaafaad1.zip | |
Update to bitbake 1.4.2 (latest stable branch release). This includes the caching speedups.
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@371 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib')
22 files changed, 2277 insertions, 1528 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py index c6c0beb792..c3e7a16658 100644 --- a/bitbake/lib/bb/__init__.py +++ b/bitbake/lib/bb/__init__.py @@ -23,7 +23,7 @@ this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. """ -__version__ = "1.3.3.4" +__version__ = "1.4.3" __all__ = [ @@ -60,7 +60,9 @@ __all__ = [ "event", "build", "fetch", - "manifest" + "manifest", + "methodpool", + "cache", ] whitespace = '\t\n\x0b\x0c\r ' diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py new file mode 100644 index 0000000000..921a9f7589 --- /dev/null +++ b/bitbake/lib/bb/cache.py @@ -0,0 +1,306 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Event' implementation + +Caching of bitbake variables before task execution + +# Copyright (C) 2006 Richard Purdie + +# but small sections based on code from bin/bitbake: +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2003, 2004 Phil Blundell +# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer +# Copyright (C) 2005 Holger Hans Peter Freyther +# Copyright (C) 2005 ROAD GmbH + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. 
+ +""" + +import os, re +import bb.data +import bb.utils + +try: + import cPickle as pickle +except ImportError: + import pickle + print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." + +# __cache_version__ = "123" +__cache_version__ = "124" # changes the __depends structure + +class Cache: + """ + BitBake Cache implementation + """ + def __init__(self, cooker): + + + self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True) + self.clean = {} + self.depends_cache = {} + self.data = None + self.data_fn = None + + if self.cachedir in [None, '']: + self.has_cache = False + if cooker.cb is not None: + print "NOTE: Not using a cache. Set CACHE = <directory> to enable." + else: + self.has_cache = True + self.cachefile = os.path.join(self.cachedir,"bb_cache.dat") + + if cooker.cb is not None: + print "NOTE: Using cache in '%s'" % self.cachedir + try: + os.stat( self.cachedir ) + except OSError: + bb.mkdirhier( self.cachedir ) + + if self.has_cache and (self.mtime(self.cachefile)): + try: + p = pickle.Unpickler( file(self.cachefile,"rb")) + self.depends_cache, version_data = p.load() + if version_data['CACHE_VER'] != __cache_version__: + raise ValueError, 'Cache Version Mismatch' + if version_data['BITBAKE_VER'] != bb.__version__: + raise ValueError, 'Bitbake Version Mismatch' + except (ValueError, KeyError): + bb.note("Invalid cache found, rebuilding...") + self.depends_cache = {} + + if self.depends_cache: + for fn in self.depends_cache.keys(): + self.clean[fn] = "" + self.cacheValidUpdate(fn) + + def getVar(self, var, fn, exp = 0): + """ + Gets the value of a variable + (similar to getVar in the data class) + + There are two scenarios: + 1. We have cached data - serve from depends_cache[fn] + 2. We're learning what data to cache - serve from data + backend but add a copy of the data to the cache. 
+ """ + + if fn in self.clean: + return self.depends_cache[fn][var] + + if not fn in self.depends_cache: + self.depends_cache[fn] = {} + + if fn != self.data_fn: + # We're trying to access data in the cache which doesn't exist + # yet setData hasn't been called to setup the right access. Very bad. + bb.error("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn)) + + result = bb.data.getVar(var, self.data, exp) + self.depends_cache[fn][var] = result + return result + + def setData(self, fn, data): + """ + Called to prime bb_cache ready to learn which variables to cache. + Will be followed by calls to self.getVar which aren't cached + but can be fulfilled from self.data. + """ + self.data_fn = fn + self.data = data + + # Make sure __depends makes the depends_cache + self.getVar("__depends", fn, True) + self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn) + + def loadDataFull(self, fn, cooker): + """ + Return a complete set of data for fn. + To do this, we need to parse the file. + """ + bb_data, skipped = self.load_bbfile(fn, cooker) + return bb_data + + def loadData(self, fn, cooker): + """ + Load a subset of data for fn. + If the cached data is valid we do nothing, + To do this, we need to parse the file and set the system + to record the variables accessed. + Return the cache status and whether the file was skipped when parsed + """ + if self.cacheValid(fn): + if "SKIPPED" in self.depends_cache[fn]: + return True, True + return True, False + + bb_data, skipped = self.load_bbfile(fn, cooker) + self.setData(fn, bb_data) + return False, skipped + + def cacheValid(self, fn): + """ + Is the cache valid for fn? + Fast version, no timestamps checked. + """ + # Is cache enabled? + if not self.has_cache: + return False + if fn in self.clean: + return True + return False + + def cacheValidUpdate(self, fn): + """ + Is the cache valid for fn? + Make thorough (slower) checks including timestamps. + """ + # Is cache enabled? 
+ if not self.has_cache: + return False + + # Check file still exists + if self.mtime(fn) == 0: + bb.debug(2, "Cache: %s not longer exists" % fn) + self.remove(fn) + return False + + # File isn't in depends_cache + if not fn in self.depends_cache: + bb.debug(2, "Cache: %s is not cached" % fn) + self.remove(fn) + return False + + # Check the file's timestamp + if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True): + bb.debug(2, "Cache: %s changed" % fn) + self.remove(fn) + return False + + # Check dependencies are still valid + depends = self.getVar("__depends", fn, True) + for f,old_mtime in depends: + new_mtime = bb.parse.cached_mtime(f) + if (new_mtime > old_mtime): + bb.debug(2, "Cache: %s's dependency %s changed" % (fn, f)) + self.remove(fn) + return False + + bb.debug(2, "Depends Cache: %s is clean" % fn) + if not fn in self.clean: + self.clean[fn] = "" + + return True + + def skip(self, fn): + """ + Mark a fn as skipped + Called from the parser + """ + if not fn in self.depends_cache: + self.depends_cache[fn] = {} + self.depends_cache[fn]["SKIPPED"] = "1" + + def remove(self, fn): + """ + Remove a fn from the cache + Called from the parser in error cases + """ + bb.debug(1, "Removing %s from cache" % fn) + if fn in self.depends_cache: + del self.depends_cache[fn] + if fn in self.clean: + del self.clean[fn] + + def sync(self): + """ + Save the cache + Called from the parser when complete (or exitting) + """ + + if not self.has_cache: + return + + version_data = {} + version_data['CACHE_VER'] = __cache_version__ + version_data['BITBAKE_VER'] = bb.__version__ + + p = pickle.Pickler(file(self.cachefile, "wb" ), -1 ) + p.dump([self.depends_cache, version_data]) + + def mtime(self, cachefile): + try: + return os.stat(cachefile)[8] + except OSError: + return 0 + + def load_bbfile( self, bbfile , cooker): + """ + Load and parse one .bb build file + Return the data and whether parsing resulted in the file being skipped + """ + + import bb + from bb 
import utils, data, parse, debug, event, fatal + + topdir = data.getVar('TOPDIR', cooker.configuration.data) + if not topdir: + topdir = os.path.abspath(os.getcwd()) + # set topdir to here + data.setVar('TOPDIR', topdir, cooker.configuration) + bbfile = os.path.abspath(bbfile) + bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) + # expand tmpdir to include this topdir + data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data) + # set topdir to location of .bb file + topdir = bbfile_loc + #data.setVar('TOPDIR', topdir, cfg) + # go there + oldpath = os.path.abspath(os.getcwd()) + if self.mtime(topdir): + os.chdir(topdir) + bb_data = data.init_db(cooker.configuration.data) + try: + parse.handle(bbfile, bb_data) # read .bb data + os.chdir(oldpath) + return bb_data, False + except bb.parse.SkipPackage: + os.chdir(oldpath) + return bb_data, True + except: + os.chdir(oldpath) + raise + +def init(cooker): + """ + The Objective: Cache the minimum amount of data possible yet get to the + stage of building packages (i.e. tryBuild) without reparsing any .bb files. + + To do this, we intercept getVar calls and only cache the variables we see + being accessed. We rely on the cache getVar calls being made for all + variables bitbake might need to use to reach this stage. For each cached + file we need to track: + + * Its mtime + * The mtimes of all its dependencies + * Whether it caused a parse.SkipPackage exception + + Files causing parsing errors are evicted from the cache. + + """ + return Cache(cooker) + diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py index 56ee977f66..55d1cc9053 100644 --- a/bitbake/lib/bb/data.py +++ b/bitbake/lib/bb/data.py @@ -7,6 +7,18 @@ BitBake 'Data' implementations Functions for interacting with the data structure used by the BitBake build tools. +The expandData and update_data are the most expensive +operations. 
At night the cookie monster came by and +suggested 'give me cookies on setting the variables and +things will work out'. Taking this suggestion into account +applying the skills from the not yet passed 'Entwurf und +Analyse von Algorithmen' lecture and the cookie +monster seems to be right. We will track setVar more carefully +to have faster update_data and expandKeys operations. + +This is a treade-off between speed and memory again but +the speed is more critical here. + Copyright (C) 2003, 2004 Chris Larson Copyright (C) 2005 Holger Hans Peter Freyther @@ -36,88 +48,15 @@ sys.path.insert(0,path) from bb import note, debug, data_smart _dict_type = data_smart.DataSmart -_dict_p_type = data_smart.DataSmartPackage - -class DataDictFull(dict): - """ - This implements our Package Data Storage Interface. - setDirty is a no op as all items are held in memory - """ - def setDirty(self, bbfile, data): - """ - No-Op we assume data was manipulated as some sort of - reference - """ - if not bbfile in self: - raise Exception("File %s was not in dictionary before" % bbfile) - - self[bbfile] = data - -class DataDictCache: - """ - Databacked Dictionary implementation - """ - def __init__(self, cache_dir, config): - self.cache_dir = cache_dir - self.files = [] - self.dirty = {} - self.config = config - - def has_key(self,key): - return key in self.files - - def keys(self): - return self.files - - def __setitem__(self, key, data): - """ - Add the key to the list of known files and - place the data in the cache? - """ - if key in self.files: - return - - self.files.append(key) - - def __getitem__(self, key): - if not key in self.files: - return None - - # if it was dirty we will - if key in self.dirty: - return self.dirty[key] - - # not cached yet - return _dict_p_type(self.cache_dir, key,False,self.config) - - def setDirty(self, bbfile, data): - """ - Only already added items can be declared dirty!!! 
- """ - - if not bbfile in self.files: - raise Exception("File %s was not in dictionary before" % bbfile) - - self.dirty[bbfile] = data - - def init(): return _dict_type() -def init_db(cache,name,clean,parent = None): - return _dict_p_type(cache,name,clean,parent) - -def init_db_mtime(cache,cache_bbfile): - return _dict_p_type.mtime(cache,cache_bbfile) - -def pkgdata(use_cache, cache, config = None): - """ - Return some sort of dictionary to lookup parsed dictionaires - """ - if use_cache: - return DataDictCache(cache, config) - return DataDictFull() +def init_db(parent = None): + if parent: + return parent.createCopy() + else: + return _dict_type() def createCopy(source): """Link the source set to the destination @@ -273,6 +212,27 @@ def setData(newData, d): """Sets the data object to the supplied value""" d = newData + +## +## Cookie Monsters' query functions +## +def _get_override_vars(d, override): + """ + Internal!!! + + Get the Names of Variables that have a specific + override. This function returns a iterable + Set or an empty list + """ + return [] + +def _get_var_flags_triple(d): + """ + Internal!!! + + """ + return [] + __expand_var_regexp__ = re.compile(r"\${[^{}]+}") __expand_python_regexp__ = re.compile(r"\${@.+?}") @@ -303,43 +263,7 @@ def expand(s, d, varname = None): >>> print expand('${SRC_URI}', d) http://somebug.${TARGET_MOO} """ - def var_sub(match): - key = match.group()[2:-1] - if varname and key: - if varname == key: - raise Exception("variable %s references itself!" 
% varname) - var = getVar(key, d, 1) - if var is not None: - return var - else: - return match.group() - - def python_sub(match): - import bb - code = match.group()[3:-1] - locals()['d'] = d - s = eval(code) - if type(s) == types.IntType: s = str(s) - return s - - if type(s) is not types.StringType: # sanity check - return s - - while s.find('$') != -1: - olds = s - try: - s = __expand_var_regexp__.sub(var_sub, s) - s = __expand_python_regexp__.sub(python_sub, s) - if s == olds: break - if type(s) is not types.StringType: # sanity check - import bb - bb.error('expansion of %s returned non-string %s' % (olds, s)) - except KeyboardInterrupt: - raise - except: - note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) - raise - return s + return d.expand(s, varname) def expandKeys(alterdata, readdata = None): if readdata == None: @@ -356,7 +280,7 @@ def expandKeys(alterdata, readdata = None): # setVarFlags(ekey, copy.copy(getVarFlags(key, readdata)), alterdata) setVar(ekey, val, alterdata) - for i in ('_append', '_prepend', '_delete'): + for i in ('_append', '_prepend'): dest = getVarFlag(ekey, i, alterdata) or [] src = getVarFlag(key, i, readdata) or [] dest.extend(src) @@ -507,67 +431,76 @@ def update_data(d): >>> print getVar('TEST', d) local """ - debug(2, "update_data()") -# can't do delete env[...] while iterating over the dictionary, so remember them - dodel = [] + # now ask the cookie monster for help + #print "Cookie Monster" + #print "Append/Prepend %s" % d._special_values + #print "Overrides %s" % d._seen_overrides + overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or [] - def applyOverrides(var, d): - if not overrides: - debug(1, "OVERRIDES not defined, nothing to do") - return - val = getVar(var, d) - for o in overrides: - if var.endswith("_" + o): - l = len(o)+1 - name = var[:-l] - d[name] = d[var] + # + # Well let us see what breaks here. 
We used to iterate + # over each variable and apply the override and then + # do the line expanding. + # If we have bad luck - which we will have - the keys + # where in some order that is so important for this + # method which we don't have anymore. + # Anyway we will fix that and write test cases this + # time. + + # + # First we apply all overrides + # Then we will handle _append and _prepend + # + + for o in overrides: + # calculate '_'+override + l = len(o)+1 + + # see if one should even try + if not o in d._seen_overrides: + continue - for s in keys(d): - applyOverrides(s, d) - sval = getVar(s, d) or "" - -# Handle line appends: - for (a, o) in getVarFlag(s, '_append', d) or []: - # maybe the OVERRIDE was not yet added so keep the append - if (o and o in overrides) or not o: - delVarFlag(s, '_append', d) - if o: - if not o in overrides: + vars = d._seen_overrides[o] + for var in vars: + name = var[:-l] + try: + d[name] = d[var] + except: + note ("Untracked delVar") + + # now on to the appends and prepends + if '_append' in d._special_values: + appends = d._special_values['_append'] or [] + for append in appends: + for (a, o) in getVarFlag(append, '_append', d) or []: + # maybe the OVERRIDE was not yet added so keep the append + if (o and o in overrides) or not o: + delVarFlag(append, '_append', d) + if o and not o in overrides: continue - sval+=a - setVar(s, sval, d) - -# Handle line prepends - for (a, o) in getVarFlag(s, '_prepend', d) or []: - # maybe the OVERRIDE was not yet added so keep the append - if (o and o in overrides) or not o: - delVarFlag(s, '_prepend', d) - if o: - if not o in overrides: + + sval = getVar(append,d) or "" + sval+=a + setVar(append, sval, d) + + + if '_prepend' in d._special_values: + prepends = d._special_values['_prepend'] or [] + + for prepend in prepends: + for (a, o) in getVarFlag(prepend, '_prepend', d) or []: + # maybe the OVERRIDE was not yet added so keep the prepend + if (o and o in overrides) or not o: + 
delVarFlag(prepend, '_prepend', d) + if o and not o in overrides: continue - sval=a+sval - setVar(s, sval, d) - -# Handle line deletions - name = s + "_delete" - nameval = getVar(name, d) - if nameval: - sval = getVar(s, d) - if sval: - new = '' - pattern = nameval.replace('\n','').strip() - for line in sval.split('\n'): - if line.find(pattern) == -1: - new = new + '\n' + line - setVar(s, new, d) - dodel.append(name) - -# delete all environment vars no longer needed - for s in dodel: - delVar(s, d) + + sval = a + (getVar(prepend,d) or "") + setVar(prepend, sval, d) + def inherits_class(klass, d): val = getVar('__inherit_cache', d) or "" diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py index 52f391dec1..fbd4167fe4 100644 --- a/bitbake/lib/bb/data_smart.py +++ b/bitbake/lib/bb/data_smart.py @@ -8,7 +8,7 @@ BitBake build tools. Copyright (C) 2003, 2004 Chris Larson Copyright (C) 2004, 2005 Seb Frankengul -Copyright (C) 2005 Holger Hans Peter Freyther +Copyright (C) 2005, 2006 Holger Hans Peter Freyther Copyright (C) 2005 Uli Luckas Copyright (C) 2005 ROAD GmbH @@ -29,7 +29,8 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig """ import copy, os, re, sys, time, types -from bb import note, debug, fatal, utils +from bb import note, debug, error, fatal, utils, methodpool +from sets import Set try: import cPickle as pickle @@ -37,9 +38,8 @@ except ImportError: import pickle print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." 
- -__setvar_keyword__ = ["_append","_prepend","_delete"] -__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_delete)(_(?P<add>.*))?') +__setvar_keyword__ = ["_append","_prepend"] +__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') __expand_var_regexp__ = re.compile(r"\${[^{}]+}") __expand_python_regexp__ = re.compile(r"\${@.+?}") @@ -48,6 +48,10 @@ class DataSmart: def __init__(self): self.dict = {} + # cookie monster tribute + self._special_values = {} + self._seen_overrides = {} + def expand(self,s, varname): def var_sub(match): key = match.group()[2:-1] @@ -78,8 +82,7 @@ class DataSmart: s = __expand_python_regexp__.sub(python_sub, s) if s == olds: break if type(s) is not types.StringType: # sanity check - import bb - bb.error('expansion of %s returned non-string %s' % (olds, s)) + error('expansion of %s returned non-string %s' % (olds, s)) except KeyboardInterrupt: raise except: @@ -91,18 +94,6 @@ class DataSmart: if not var in self.dict: self.dict[var] = {} - def pickle_prep(self, cfg): - if "_data" in self.dict: - if self.dict["_data"] == cfg: - self.dict["_data"] = "cfg"; - else: # this is an unknown array for the moment - pass - - def unpickle_prep(self, cfg): - if "_data" in self.dict: - if self.dict["_data"] == "cfg": - self.dict["_data"] = cfg; - def _findVar(self,var): _dest = self.dict @@ -116,14 +107,6 @@ class DataSmart: return _dest[var] return None - def _copyVar(self,var,name): - local_var = self._findVar(var) - if local_var: - self.dict[name] = copy.copy(local_var) - else: - debug(1,"Warning, _copyVar %s to %s, %s does not exists" % (var,name,var)) - - def _makeShadowCopy(self, var): if var in self.dict: return @@ -142,11 +125,20 @@ class DataSmart: keyword = match.group("keyword") override = match.group('add') l = self.getVarFlag(base, keyword) or [] - if override == 'delete': - if l.count([value, None]): - del l[l.index([value, None])] l.append([value, override]) - 
self.setVarFlag(base, match.group("keyword"), l) + self.setVarFlag(base, keyword, l) + + # pay the cookie monster + try: + self._special_values[keyword].add( base ) + except: + self._special_values[keyword] = Set() + self._special_values[keyword].add( base ) + + # SRC_URI_append_simpad is both a flag and a override + #if not override in self._seen_overrides: + # self._seen_overrides[override] = Set() + #self._seen_overrides[override].add( base ) return if not var in self.dict: @@ -155,6 +147,13 @@ class DataSmart: self.delVarFlag(var, 'matchesenv') self.setVarFlag(var, 'export', 1) + # more cookies for the cookie monster + if '_' in var: + override = var[var.rfind('_')+1:] + if not override in self._seen_overrides: + self._seen_overrides[override] = Set() + self._seen_overrides[override].add( var ) + # setting var self.dict[var]["content"] = value @@ -237,6 +236,8 @@ class DataSmart: # we really want this to be a DataSmart... data = DataSmart() data.dict["_data"] = self.dict + data._seen_overrides = copy.deepcopy(self._seen_overrides) + data._special_values = copy.deepcopy(self._special_values) return data @@ -254,98 +255,11 @@ class DataSmart: return keytab.keys() def __getitem__(self,item): - start = self.dict - while start: - if item in start: - return start[item] - elif "_data" in start: - start = start["_data"] - else: - start = None - return None + #print "Warning deprecated" + return self.getVar(item, False) def __setitem__(self,var,data): - self._makeShadowCopy(var) - self.dict[var] = data - - -class DataSmartPackage(DataSmart): - """ - Persistent Data Storage - """ - def sanitize_filename(bbfile): - return bbfile.replace( '/', '_' ) - sanitize_filename = staticmethod(sanitize_filename) + #print "Warning deprecated" + self.setVar(var,data) - def unpickle(self): - """ - Restore the dict from memory - """ - cache_bbfile = self.sanitize_filename(self.bbfile) - p = pickle.Unpickler( file("%s/%s"%(self.cache,cache_bbfile),"rb")) - self.dict = p.load() - 
self.unpickle_prep() - funcstr = self.getVar('__functions__', 0) - if funcstr: - comp = utils.better_compile(funcstr, "<pickled>", self.bbfile) - utils.better_exec(comp, __builtins__, funcstr, self.bbfile) - - def linkDataSet(self): - if not self.parent == None: - # assume parent is a DataSmartInstance - self.dict["_data"] = self.parent.dict - - - def __init__(self,cache,name,clean,parent): - """ - Construct a persistent data instance - """ - #Initialize the dictionary - DataSmart.__init__(self) - - self.cache = cache - self.bbfile = os.path.abspath( name ) - self.parent = parent - - # Either unpickle the data or do copy on write - if clean: - self.linkDataSet() - else: - self.unpickle() - - def commit(self, mtime): - """ - |
