2 # ex:ts=4:sw=4:sts=4:et
3 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
BitBake 'Cache' implementation
7 Caching of bitbake variables before task execution
9 # Copyright (C) 2006 Richard Purdie
11 # but small sections based on code from bin/bitbake:
12 # Copyright (C) 2003, 2004 Chris Larson
13 # Copyright (C) 2003, 2004 Phil Blundell
14 # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
15 # Copyright (C) 2005 Holger Hans Peter Freyther
16 # Copyright (C) 2005 ROAD GmbH
18 This program is free software; you can redistribute it and/or modify it under
19 the terms of the GNU General Public License as published by the Free Software
20 Foundation; either version 2 of the License, or (at your option) any later
23 This program is distributed in the hope that it will be useful, but WITHOUT
24 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
25 FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
27 You should have received a copy of the GNU General Public License along with
28 this program; if not, write to the Free Software Foundation, Inc., 59 Temple
29 Place, Suite 330, Boston, MA 02111-1307 USA.
39 import cPickle as pickle
42 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
44 __cache_version__ = "125"
48 BitBake Cache implementation
50 def __init__(self, cooker):
53 self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
55 self.depends_cache = {}
59 if self.cachedir in [None, '']:
60 self.has_cache = False
61 bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
64 self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
66 bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
68 os.stat( self.cachedir )
70 bb.mkdirhier( self.cachedir )
72 if self.has_cache and (self.mtime(self.cachefile)):
74 p = pickle.Unpickler( file(self.cachefile,"rb"))
75 self.depends_cache, version_data = p.load()
76 if version_data['CACHE_VER'] != __cache_version__:
77 raise ValueError, 'Cache Version Mismatch'
78 if version_data['BITBAKE_VER'] != bb.__version__:
79 raise ValueError, 'Bitbake Version Mismatch'
80 except (ValueError, KeyError):
81 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
82 self.depends_cache = {}
84 if self.depends_cache:
85 for fn in self.depends_cache.keys():
87 self.cacheValidUpdate(fn)
89 def getVar(self, var, fn, exp = 0):
91 Gets the value of a variable
92 (similar to getVar in the data class)
94 There are two scenarios:
95 1. We have cached data - serve from depends_cache[fn]
96 2. We're learning what data to cache - serve from data
97 backend but add a copy of the data to the cache.
101 return self.depends_cache[fn][var]
103 if not fn in self.depends_cache:
104 self.depends_cache[fn] = {}
106 if fn != self.data_fn:
107 # We're trying to access data in the cache which doesn't exist
108 # yet setData hasn't been called to setup the right access. Very bad.
109 bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
111 result = bb.data.getVar(var, self.data, exp)
112 self.depends_cache[fn][var] = result
115 def setData(self, fn, data):
117 Called to prime bb_cache ready to learn which variables to cache.
118 Will be followed by calls to self.getVar which aren't cached
119 but can be fulfilled from self.data.
124 # Make sure __depends makes the depends_cache
125 self.getVar("__depends", fn, True)
126 self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
128 def loadDataFull(self, fn, cooker):
130 Return a complete set of data for fn.
131 To do this, we need to parse the file.
133 bb_data, skipped = self.load_bbfile(fn, cooker.configuration.data)
136 def loadData(self, fn, cooker):
138 Load a subset of data for fn.
139 If the cached data is valid we do nothing,
140 To do this, we need to parse the file and set the system
141 to record the variables accessed.
142 Return the cache status and whether the file was skipped when parsed
144 if self.cacheValid(fn):
145 if "SKIPPED" in self.depends_cache[fn]:
149 bb_data, skipped = self.load_bbfile(fn, cooker.configuration.data)
150 self.setData(fn, bb_data)
151 return False, skipped
153 def cacheValid(self, fn):
155 Is the cache valid for fn?
156 Fast version, no timestamps checked.
159 if not self.has_cache:
165 def cacheValidUpdate(self, fn):
167 Is the cache valid for fn?
168 Make thorough (slower) checks including timestamps.
171 if not self.has_cache:
174 # Check file still exists
175 if self.mtime(fn) == 0:
176 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
180 # File isn't in depends_cache
181 if not fn in self.depends_cache:
182 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
186 # Check the file's timestamp
187 if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
188 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
192 # Check dependencies are still valid
193 depends = self.getVar("__depends", fn, True)
194 for f,old_mtime in depends:
195 new_mtime = bb.parse.cached_mtime(f)
196 if (new_mtime > old_mtime):
197 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
201 bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
202 if not fn in self.clean:
210 Called from the parser
212 if not fn in self.depends_cache:
213 self.depends_cache[fn] = {}
214 self.depends_cache[fn]["SKIPPED"] = "1"
216 def remove(self, fn):
218 Remove a fn from the cache
219 Called from the parser in error cases
221 bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
222 if fn in self.depends_cache:
223 del self.depends_cache[fn]
230 Called from the parser when complete (or exiting)
233 if not self.has_cache:
237 version_data['CACHE_VER'] = __cache_version__
238 version_data['BITBAKE_VER'] = bb.__version__
240 p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
241 p.dump([self.depends_cache, version_data])
243 def mtime(self, cachefile):
245 return os.stat(cachefile)[8]
249 def handle_data(self, file_name, cacheData):
251 Save data we need into the cache
254 pn = self.getVar('PN', file_name, True)
255 pv = self.getVar('PV', file_name, True)
256 pr = self.getVar('PR', file_name, True)
257 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
258 provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split())
259 depends = (self.getVar("DEPENDS", file_name, True) or "").split()
260 packages = (self.getVar('PACKAGES', file_name, True) or "").split()
261 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
262 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
264 cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
265 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
267 # build PackageName to FileName lookup table
268 if pn not in cacheData.pkg_pn:
269 cacheData.pkg_pn[pn] = []
270 cacheData.pkg_pn[pn].append(file_name)
272 cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
274 # build FileName to PackageName lookup table
275 cacheData.pkg_fn[file_name] = pn
276 cacheData.pkg_pvpr[file_name] = (pv,pr)
277 cacheData.pkg_dp[file_name] = dp
279 # Build forward and reverse provider hashes
280 # Forward: virtual -> [filenames]
281 # Reverse: PN -> [virtuals]
282 if pn not in cacheData.pn_provides:
283 cacheData.pn_provides[pn] = Set()
284 cacheData.pn_provides[pn] |= provides
286 for provide in provides:
287 if provide not in cacheData.providers:
288 cacheData.providers[provide] = []
289 cacheData.providers[provide].append(file_name)
291 cacheData.deps[file_name] = Set()
293 cacheData.all_depends.add(dep)
294 cacheData.deps[file_name].add(dep)
296 # Build reverse hash for PACKAGES, so runtime dependencies
297 # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
298 for package in packages:
299 if not package in cacheData.packages:
300 cacheData.packages[package] = []
301 cacheData.packages[package].append(file_name)
302 rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
304 for package in packages_dynamic:
305 if not package in cacheData.packages_dynamic:
306 cacheData.packages_dynamic[package] = []
307 cacheData.packages_dynamic[package].append(file_name)
309 for rprovide in rprovides:
310 if not rprovide in cacheData.rproviders:
311 cacheData.rproviders[rprovide] = []
312 cacheData.rproviders[rprovide].append(file_name)
314 # Build hash of runtime depends and rececommends
316 def add_dep(deplist, deps):
318 if not dep in deplist:
321 if not file_name in cacheData.rundeps:
322 cacheData.rundeps[file_name] = {}
323 if not file_name in cacheData.runrecs:
324 cacheData.runrecs[file_name] = {}
326 for package in packages + [pn]:
327 if not package in cacheData.rundeps[file_name]:
328 cacheData.rundeps[file_name][package] = {}
329 if not package in cacheData.runrecs[file_name]:
330 cacheData.runrecs[file_name][package] = {}
332 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or ""))
333 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or ""))
334 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
335 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
337 # Collect files we may need for possible world-dep
339 if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
340 cacheData.possible_world.append(file_name)
343 def load_bbfile( self, bbfile , config):
345 Load and parse one .bb build file
346 Return the data and whether parsing resulted in the file being skipped
350 from bb import utils, data, parse, debug, event, fatal
352 # expand tmpdir to include this topdir
353 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
354 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
355 oldpath = os.path.abspath(os.getcwd())
356 if self.mtime(bbfile_loc):
358 bb_data = data.init_db(config)
360 bb_data = parse.handle(bbfile, bb_data) # read .bb data
362 return bb_data, False
363 except bb.parse.SkipPackage:
372 The Objective: Cache the minimum amount of data possible yet get to the
373 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
375 To do this, we intercept getVar calls and only cache the variables we see
376 being accessed. We rely on the cache getVar calls being made for all
377 variables bitbake might need to use to reach this stage. For each cached
file we need to track:

* Its own mtime (stored as CACHETIMESTAMP)
* The mtimes of all its dependencies
* Whether it caused a parse.SkipPackage exception
384 Files causing parsing errors are evicted from the cache.
391 #============================================================================#
393 #============================================================================#
396 The data structures we compile from the cached data
401 Direct cache variables
402 (from Cache.handle_data)
407 self.packages_dynamic = {}
408 self.possible_world = []
413 self.pn_provides = {}
414 self.all_depends = Set()
418 self.task_queues = {}
424 Indirect Cache variables
427 self.ignored_dependencies = []
428 self.world_target = Set()
429 self.bbfile_priority = {}
430 self.bbfile_config_priorities = []