# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution
#
# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation
#
# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "150"


def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
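
# For example (hypothetical values):
#   getCacheFile("/home/user/cache", "bb_cache.dat", "a1b2c3")
#   -> "/home/user/cache/bb_cache.dat.a1b2c3"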

# RecipeInfoCommon defines the common metadata-retrieval methods used by
# the caches. CoreRecipeInfo, as well as any extra RecipeInfo classes,
# must inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''

class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))

        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
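
# Extra caches follow the same contract as CoreRecipeInfo. A minimal sketch
# (hypothetical class and variable names; real extra caches live alongside
# this module, e.g. in bb.cache_extra):
#
#   class ExampleExtraInfo(RecipeInfoCommon):
#       __slots__ = ()
#       cachefile = "bb_extracache_example.dat"
#
#       def __init__(self, filename, metadata):
#           self.example = self.getvar('EXAMPLE_VAR', metadata)
#
#       @classmethod
#       def init_cacheData(cls, cachedata):
#           cachedata.example = {}
#
#       def add_cacheData(self, cachedata, fn):
#           cachedata.example[fn] = self.example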

class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # caches_array is the list of RecipeInfoCommon subclasses in use;
        # it decides whether extra cache files need dump/load support.
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First validate the cache using the header of the core cache file
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the total size of all the cache files so progress
        # can be reported against it
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if key in self.depends_cache:
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire progress events on whole-percent boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                previous_progress += current_progress

        # Note: the number of entries in depends_cache corresponds to the
        # number of files parsed; a file may appear in several cache files
        # but still counts as a single entry here.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)
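
    # The on-disk layout written by sync() below: the core cache file starts
    # with two header pickles (__cache_version__ and bb.__version__), and every
    # cache file then holds alternating key/RecipeInfo pickles until EOF.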

    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """
        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]

        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData.  Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            # return the parsed infos together with cached == False so that
            # callers can unpack the result uniformly in both branches
            return cached, self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                # Each entry in fl has the form "<path>:True" or "<path>:False",
                # separated by whitespace
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            for key, info_array in self.depends_cache.items():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """
        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file.
        Returns the parsed datastores, keyed by variant.
        """
        chdir_back = False

        from bb import parse

        # expand tmpdir to include this topdir
        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = config.createCopy()
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not bb_data.getVar('TOPDIR', False):
            chdir_back = True
            bb_data.setVar('TOPDIR', bbfile_loc)
        try:
            if appends:
                bb_data.setVar('__BBAPPEND', " ".join(appends))
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise

def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.
    """
    # Cache() also requires the caches_array that the cooker builds at
    # startup; without it this call would raise a TypeError.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash,
                 cooker.caches_array)

class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            # A missing or unreadable cache file is not an error; start fresh
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data
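
    # Note: cachedata is a list of dicts so that subclasses (for instance the
    # codeparser cache) can return a longer list from create_cachedata();
    # merge_data() below merges each position independently.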

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Write this process's extras to a uniquely named (and uniquely
        # locked) sidecar file; save_merge() folds them back in later
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
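
    # For example, merge_data([{"k": "new"}], [{"k": "old"}]) leaves dest
    # unchanged: entries already present in dest are never overwritten.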

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
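
# Typical MultiProcessCache lifecycle (a sketch; see bb.codeparser for a
# real subclass, which must define cache_file_name and CACHE_VERSION):
#
#   cache = SomeCacheSubclass()
#   cache.init_cache(d)        # parent: load the shared cache under a lock
#   ...                        # workers: record new entries in cachedata_extras
#   cache.save_extras()        # each worker: dump extras to a per-pid file
#   cache.save_merge()         # parent: merge per-pid files back and rewrite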