cooker.py
  1. #!/usr/bin/env python
  2. # ex:ts=4:sw=4:sts=4:et
  3. # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
  4. #
  5. # Copyright (C) 2003, 2004 Chris Larson
  6. # Copyright (C) 2003, 2004 Phil Blundell
  7. # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
  8. # Copyright (C) 2005 Holger Hans Peter Freyther
  9. # Copyright (C) 2005 ROAD GmbH
  10. # Copyright (C) 2006 - 2007 Richard Purdie
  11. #
  12. # This program is free software; you can redistribute it and/or modify
  13. # it under the terms of the GNU General Public License version 2 as
  14. # published by the Free Software Foundation.
  15. #
  16. # This program is distributed in the hope that it will be useful,
  17. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19. # GNU General Public License for more details.
  20. #
  21. # You should have received a copy of the GNU General Public License along
  22. # with this program; if not, write to the Free Software Foundation, Inc.,
  23. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
  24. from __future__ import print_function
  25. import sys, os, glob, os.path, re, time
  26. import atexit
  27. import itertools
  28. import logging
  29. import multiprocessing
  30. import sre_constants
  31. import threading
  32. from cStringIO import StringIO
  33. from contextlib import closing
  34. from functools import wraps
  35. from collections import defaultdict
  36. import bb, bb.exceptions, bb.command
  37. from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
  38. import Queue
  39. import signal
  40. import prserv.serv
# Module-level loggers: one root "BitBake" logger plus per-subsystem child
# loggers so output can be filtered by area (layer collection, build,
# recipe parsing, provider resolution).
logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file matches are found, or when more than one
    match is found (the request was ambiguous).
    """
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build.
    """
class CollectionError(bb.BBHandledException):
    """
    Exception raised when the layer configuration is incorrect.
    """
  58. class state:
  59. initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
  60. class SkippedPackage:
  61. def __init__(self, info = None, reason = None):
  62. self.pn = None
  63. self.skipreason = None
  64. self.provides = None
  65. self.rprovides = None
  66. if info:
  67. self.pn = info.pn
  68. self.skipreason = info.skipreason
  69. self.provides = info.provides
  70. self.rprovides = info.rprovides
  71. elif reason:
  72. self.skipreason = reason
  73. class CookerFeatures(object):
  74. _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
  75. def __init__(self):
  76. self._features=set()
  77. def setFeature(self, f):
  78. # validate we got a request for a feature we support
  79. if f not in CookerFeatures._feature_list:
  80. return
  81. self._features.add(f)
  82. def __contains__(self, f):
  83. return f in self._features
  84. def __iter__(self):
  85. return self._features.__iter__()
  86. def next(self):
  87. return self._features.next()
  88. #============================================================================#
  89. # BBCooker
  90. #============================================================================#
  91. class BBCooker:
  92. """
  93. Manages one bitbake build run
  94. """
  95. def __init__(self, configuration, featureSet = []):
  96. self.recipecache = None
  97. self.skiplist = {}
  98. self.featureset = CookerFeatures()
  99. for f in featureSet:
  100. self.featureset.setFeature(f)
  101. self.configuration = configuration
  102. self.initConfigurationData()
  103. # Take a lock so only one copy of bitbake can run against a given build
  104. # directory at a time
  105. lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
  106. self.lock = bb.utils.lockfile(lockfile, False, False)
  107. if not self.lock:
  108. bb.fatal("Only one copy of bitbake should be run against a build directory")
  109. try:
  110. self.lock.seek(0)
  111. self.lock.truncate()
  112. if len(configuration.interface) >= 2:
  113. self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
  114. self.lock.flush()
  115. except:
  116. pass
  117. # TOSTOP must not be set or our children will hang when they output
  118. fd = sys.stdout.fileno()
  119. if os.isatty(fd):
  120. import termios
  121. tcattr = termios.tcgetattr(fd)
  122. if tcattr[3] & termios.TOSTOP:
  123. buildlog.info("The terminal had the TOSTOP bit set, clearing...")
  124. tcattr[3] = tcattr[3] & ~termios.TOSTOP
  125. termios.tcsetattr(fd, termios.TCSANOW, tcattr)
  126. self.command = bb.command.Command(self)
  127. self.state = state.initial
  128. self.parser = None
  129. signal.signal(signal.SIGTERM, self.sigterm_exception)
  130. def sigterm_exception(self, signum, stackframe):
  131. bb.warn("Cooker recieved SIGTERM, shutting down...")
  132. self.state = state.forceshutdown
  133. def setFeatures(self, features):
  134. # we only accept a new feature set if we're in state initial, so we can reset without problems
  135. if self.state != state.initial:
  136. raise Exception("Illegal state for feature set change")
  137. original_featureset = list(self.featureset)
  138. for feature in features:
  139. self.featureset.setFeature(feature)
  140. bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
  141. if (original_featureset != list(self.featureset)):
  142. self.reset()
    def initConfigurationData(self):
        """Build the recipe-info cache classes, parse the base configuration
        and set up the main and event datastores."""
        self.state = state.initial
        self.caches_array = []

        # Enable variable-history tracking before the base data is parsed so
        # tracking covers the base configuration too.
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries have the form "module.path:ClassName".
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        #
        # Special updated configuration we use for firing events
        #
        self.event_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.event_data)
        bb.parse.init_parser(self.event_data)

        # Tracking was only needed while the base configuration was parsed.
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()
  175. def enableDataTracking(self):
  176. self.configuration.tracking = True
  177. if hasattr(self, "data"):
  178. self.data.enableTracking()
  179. def disableDataTracking(self):
  180. self.configuration.tracking = False
  181. if hasattr(self, "data"):
  182. self.data.disableTracking()
  183. def modifyConfigurationVar(self, var, val, default_file, op):
  184. if op == "append":
  185. self.appendConfigurationVar(var, val, default_file)
  186. elif op == "set":
  187. self.saveConfigurationVar(var, val, default_file, "=")
  188. elif op == "earlyAssign":
  189. self.saveConfigurationVar(var, val, default_file, "?=")
  190. def appendConfigurationVar(self, var, val, default_file):
  191. #add append var operation to the end of default_file
  192. default_file = bb.cookerdata.findConfigFile(default_file, self.data)
  193. total = "#added by hob"
  194. total += "\n%s += \"%s\"\n" % (var, val)
  195. with open(default_file, 'a') as f:
  196. f.write(total)
  197. #add to history
  198. loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
  199. self.data.appendVar(var, val, **loginfo)
  200. def saveConfigurationVar(self, var, val, default_file, op):
  201. replaced = False
  202. #do not save if nothing changed
  203. if str(val) == self.data.getVar(var):
  204. return
  205. conf_files = self.data.varhistory.get_variable_files(var)
  206. #format the value when it is a list
  207. if isinstance(val, list):
  208. listval = ""
  209. for value in val:
  210. listval += "%s " % value
  211. val = listval
  212. topdir = self.data.getVar("TOPDIR")
  213. #comment or replace operations made on var
  214. for conf_file in conf_files:
  215. if topdir in conf_file:
  216. with open(conf_file, 'r') as f:
  217. contents = f.readlines()
  218. lines = self.data.varhistory.get_variable_lines(var, conf_file)
  219. for line in lines:
  220. total = ""
  221. i = 0
  222. for c in contents:
  223. total += c
  224. i = i + 1
  225. if i==int(line):
  226. end_index = len(total)
  227. index = total.rfind(var, 0, end_index)
  228. begin_line = total.count("\n",0,index)
  229. end_line = int(line)
  230. #check if the variable was saved before in the same way
  231. #if true it replace the place where the variable was declared
  232. #else it comments it
  233. if contents[begin_line-1]== "#added by hob\n":
  234. contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
  235. replaced = True
  236. else:
  237. for ii in range(begin_line, end_line):
  238. contents[ii] = "#" + contents[ii]
  239. with open(conf_file, 'w') as f:
  240. f.writelines(contents)
  241. if replaced == False:
  242. #remove var from history
  243. self.data.varhistory.del_var_history(var)
  244. #add var to the end of default_file
  245. default_file = bb.cookerdata.findConfigFile(default_file, self.data)
  246. #add the variable on a single line, to be easy to replace the second time
  247. total = "\n#added by hob"
  248. total += "\n%s %s \"%s\"\n" % (var, op, val)
  249. with open(default_file, 'a') as f:
  250. f.write(total)
  251. #add to history
  252. loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
  253. self.data.setVar(var, val, **loginfo)
    def removeConfigurationVar(self, var):
        """Remove *var* from the configuration files under TOPDIR, from the
        variable history and from the live datastore."""
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR")

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Walk the file to find the character offset of the
                    # recorded line number, then locate the assignment line.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i == int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n", 0, index)

                    #check if the variable was saved before in the same way
                    # If hob wrote it (preceded by its marker), blank both the
                    # marker line and the assignment; otherwise blank only the
                    # assignment line.
                    if contents[begin_line-1] == "#added by hob\n":
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"

                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)

                #remove variable
                self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
  283. def createConfigFile(self, name):
  284. path = os.getcwd()
  285. confpath = os.path.join(path, "conf", name)
  286. open(confpath, 'w').close()
    def parseConfiguration(self):
        """Apply post-base configuration: log verbosity, nice level, a fresh
        recipe cache and the layer/collection setup."""
        # Set log file verbosity
        # NOTE(review): getVar's second argument is normally the expand flag;
        # "0" is a truthy string here, so this expands the value rather than
        # supplying a default — confirm this is intended.
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Drop any previous recipe cache and start afresh.
        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.

        The server/data/abort parameters are part of the idle-callback
        signature; only the command queue is consulted here.
        """
        return self.command.runAsyncCommand()
  309. def showVersions(self):
  310. pkg_pn = self.recipecache.pkg_pn
  311. (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
  312. logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
  313. logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
  314. for p in sorted(pkg_pn):
  315. pref = preferred_versions[p]
  316. latest = latest_versions[p]
  317. prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
  318. lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
  319. if pref == latest:
  320. prefstr = ""
  321. logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
  322. def showEnvironment(self, buildfile = None, pkgs_to_build = []):
  323. """
  324. Show the outer or per-package environment
  325. """
  326. fn = None
  327. envdata = None
  328. if buildfile:
  329. # Parse the configuration here. We need to do it explicitly here since
  330. # this showEnvironment() code path doesn't use the cache
  331. self.parseConfiguration()
  332. fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
  333. fn = self.matchFile(fn)
  334. fn = bb.cache.Cache.realfn2virtual(fn, cls)
  335. elif len(pkgs_to_build) == 1:
  336. ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
  337. if pkgs_to_build[0] in set(ignore.split()):
  338. bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
  339. taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)
  340. targetid = taskdata.getbuild_id(pkgs_to_build[0])
  341. fnid = taskdata.build_targets[targetid][0]
  342. fn = taskdata.fn_index[fnid]
  343. else:
  344. envdata = self.data
  345. if fn:
  346. try:
  347. envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
  348. except Exception as e:
  349. parselog.exception("Unable to read %s", fn)
  350. raise
  351. # Display history
  352. with closing(StringIO()) as env:
  353. self.data.inchistory.emit(env)
  354. logger.plain(env.getvalue())
  355. # emit variables and shell functions
  356. data.update_data(envdata)
  357. with closing(StringIO()) as env:
  358. data.emit_env(env, envdata, True)
  359. logger.plain(env.getvalue())
  360. # emit the metadata which isnt valid shell
  361. data.expandKeys(envdata)
  362. for e in envdata.keys():
  363. if data.getVarFlag( e, 'python', envdata ):
  364. logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            # A "recipe:do_task" target overrides the default task for this entry.
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            runlist.append([k, "do_%s" % ktask])
            # Progress event after each target so a UI can track preparation.
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
  393. def prepareTreeData(self, pkgs_to_build, task):
  394. """
  395. Prepare a runqueue and taskdata object for iteration over pkgs_to_build
  396. """
  397. # We set abort to False here to prevent unbuildable targets raising
  398. # an exception when we're just generating data
  399. taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
  400. return runlist, taskdata
  401. ######## WARNING : this function requires cache_extra to be enabled ########
    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        # Prepare (but do not execute) the runqueue so task dependencies are
        # fully resolved before the tree is built.
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)
    def buildDependTree(self, rq, taskdata):
        """Build a dependency-tree dict from a prepared runqueue/taskdata pair.

        Returns a dict with keys: depends, tdepends, pn, rdepends-pn,
        packages, rdepends-pkg, rrecs-pkg and layer-priorities.
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            # First sighting of this recipe: record its per-pn metadata.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            # Task-level edges: "pn.task" -> "deppn.deptask".
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))

            # Recipe-level information is gathered only once per file.
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
  478. ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            # First sighting of this recipe: record its per-pn metadata.
            # NOTE(review): rdepends/rrecs are only bound inside this branch
            # but used below in the seen_fnids branch — looks like it relies
            # on both branches firing together on the first sighting; confirm.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                # Map each build-time dependency to the pn of its selected
                # provider, falling back to the raw item name.
                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                # Same mapping for runtime dependencies.
                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        # Broadcast the graph so UIs can render it.
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
  552. def generateDotGraphFiles(self, pkgs_to_build, task):
  553. """
  554. Create a task dependency graph of pkgs_to_build.
  555. Save the result to a set of .dot files.
  556. """
  557. depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
  558. # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
  559. depends_file = file('pn-depends.dot', 'w' )
  560. buildlist_file = file('pn-buildlist', 'w' )
  561. print("digraph depends {", file=depends_file)
  562. for pn in depgraph["pn"]:
  563. fn = depgraph["pn"][pn]["filename"]
  564. version = depgraph["pn"][pn]["version"]
  565. print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
  566. print("%s" % pn, file=buildlist_file)
  567. buildlist_file.close()
  568. logger.info("PN build list saved to 'pn-buildlist'")
  569. for pn in depgraph["depends"]:
  570. for depend in depgraph["depends"][pn]:
  571. print('"%s" -> "%s"' % (pn, depend), file=depends_file)
  572. for pn in depgraph["rdepends-pn"]:
  573. for rdepend in depgraph["rdepends-pn"][pn]:
  574. print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
  575. print("}", file=depends_file)
  576. logger.info("PN dependencies saved to 'pn-depends.dot'")
  577. depends_file = file('package-depends.dot', 'w' )
  578. print("digraph depends {", file=depends_file)
  579. for package in depgraph["packages"]:
  580. pn = depgraph["packages"][package]["pn"]
  581. fn = depgraph["packages"][package]["filename"]
  582. version = depgraph["packages"][package]["version"]
  583. if package == pn:
  584. print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
  585. else:
  586. print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
  587. for depend in depgraph["depends"][pn]:
  588. print('"%s" -> "%s"' % (package, depend), file=depends_file)
  589. for package in depgraph["rdepends-pkg"]:
  590. for rdepend in depgraph["rdepends-pkg"][package]:
  591. print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
  592. for package in depgraph["rrecs-pkg"]:
  593. for rdepend in depgraph["rrecs-pkg"][package]:
  594. print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
  595. print("}", file=depends_file)
  596. logger.info("Package dependencies saved to 'package-depends.dot'")
  597. tdepends_file = file('task-depends.dot', 'w' )
  598. print("digraph depends {", file=tdepends_file)
  599. for task in depgraph["tdepends"]:
  600. (pn, taskname) = task.rsplit(".", 1)
  601. fn = depgraph["pn"][pn]["filename"]
  602. version = depgraph["pn"][pn]["version"]
  603. print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
  604. for dep in depgraph["tdepends"][task]:
  605. print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
  606. print("}", file=tdepends_file)
  607. logger.info("Task dependencies saved to 'task-depends.dot'")
  608. def show_appends_with_no_recipes( self ):
  609. appends_without_recipes = [self.collection.appendlist[recipe]
  610. for recipe in self.collection.appendlist
  611. if recipe not in self.collection.appliedappendlist]
  612. if appends_without_recipes:
  613. appendlines = (' %s' % append
  614. for appends in appends_without_recipes
  615. for append in appends)
  616. msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
  617. warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
  618. self.data, False) or "no"
  619. if warn_only.lower() in ("1", "yes", "true"):
  620. bb.warn(msg)
  621. else:
  622. bb.fatal(msg)
  623. def handlePrefProviders(self):
  624. localdata = data.createCopy(self.data)
  625. bb.data.update_data(localdata)
  626. bb.data.expandKeys(localdata)
  627. # Handle PREFERRED_PROVIDERS
  628. for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
  629. try:
  630. (providee, provider) = p.split(':')
  631. except:
  632. providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
  633. continue
  634. if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
  635. providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
  636. self.recipecache.preferred[providee] = provider
  637. def findCoreBaseFiles(self, subdir, configfile):
  638. corebase = self.data.getVar('COREBASE', True) or ""
  639. paths = []
  640. for root, dirs, files in os.walk(corebase + '/' + subdir):
  641. for d in dirs:
  642. configfilepath = os.path.join(root, d, configfile)
  643. if os.path.exists(configfilepath):
  644. paths.append(os.path.join(root, d))
  645. if paths:
  646. bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
def findConfigFilePath(self, configfile):
    """
    Find the location on disk of configfile and if it exists and was parsed by BitBake
    emit the ConfigFilePathFound event with the path to the file.
    """
    path = bb.cookerdata.findConfigFile(configfile, self.data)
    if not path:
        return

    # Generate a list of parsed configuration files by searching the files
    # listed in the __depends and __base_depends variables with a .conf suffix.
    conffiles = []
    dep_files = self.data.getVar('__base_depends') or []
    dep_files = dep_files + (self.data.getVar('__depends') or [])
    for f in dep_files:
        # f is indexable; f[0] is presumably the file path portion of a
        # (path, mtime)-style dependency record -- confirm against setter
        if f[0].endswith(".conf"):
            conffiles.append(f[0])

    # rpartition splits on the *last* "conf/" occurrence, so when present
    # conf == "conf/" and match becomes "conf/<name>" (else just the name)
    _, conf, conffile = path.rpartition("conf/")
    match = os.path.join(conf, conffile)

    # Try and find matches for conf/conffilename.conf as we don't always
    # have the full path to the file.
    for cfg in conffiles:
        if cfg.endswith(match):
            bb.event.fire(bb.event.ConfigFilePathFound(path),
                          self.data)
            break
  672. def findFilesMatchingInDir(self, filepattern, directory):
  673. """
  674. Searches for files matching the regex 'pattern' which are children of
  675. 'directory' in each BBPATH. i.e. to find all rootfs package classes available
  676. to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
  677. or to find all machine configuration files one could call:
  678. findFilesMatchingInDir(self, 'conf/machines', 'conf')
  679. """
  680. import re
  681. matches = []
  682. p = re.compile(re.escape(filepattern))
  683. bbpaths = self.data.getVar('BBPATH', True).split(':')
  684. for path in bbpaths:
  685. dirpath = os.path.join(path, directory)
  686. if os.path.exists(dirpath):
  687. for root, dirs, files in os.walk(dirpath):
  688. for f in files:
  689. if p.search(f):
  690. matches.append(f)
  691. if matches:
  692. bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
  693. def findConfigFiles(self, varname):
  694. """
  695. Find config files which are appropriate values for varname.
  696. i.e. MACHINE, DISTRO
  697. """
  698. possible = []
  699. var = varname.lower()
  700. data = self.data
  701. # iterate configs
  702. bbpaths = data.getVar('BBPATH', True).split(':')
  703. for path in bbpaths:
  704. confpath = os.path.join(path, "conf", var)
  705. if os.path.exists(confpath):
  706. for root, dirs, files in os.walk(confpath):
  707. # get all child files, these are appropriate values
  708. for f in files:
  709. val, sep, end = f.rpartition('.')
  710. if end == 'conf':
  711. possible.append(val)
  712. if possible:
  713. bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
  714. def findInheritsClass(self, klass):
  715. """
  716. Find all recipes which inherit the specified class
  717. """
  718. pkg_list = []
  719. for pfn in self.recipecache.pkg_fn:
  720. inherits = self.recipecache.inherits.get(pfn, None)
  721. if inherits and inherits.count(klass) > 0:
  722. pkg_list.append(self.recipecache.pkg_fn[pfn])
  723. return pkg_list
  724. def generateTargetsTree(self, klass=None, pkgs=[]):
  725. """
  726. Generate a dependency tree of buildable targets
  727. Generate an event with the result
  728. """
  729. # if the caller hasn't specified a pkgs list default to universe
  730. if not len(pkgs):
  731. pkgs = ['universe']
  732. # if inherited_class passed ensure all recipes which inherit the
  733. # specified class are included in pkgs
  734. if klass:
  735. extra_pkgs = self.findInheritsClass(klass)
  736. pkgs = pkgs + extra_pkgs
  737. # generate a dependency tree for all our packages
  738. tree = self.generatePkgDepTreeData(pkgs, 'build')
  739. bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
def buildWorldTargetList(self):
    """
    Build package list for "bitbake world"
    """
    parselog.debug(1, "collating packages for \"world\"")
    for f in self.recipecache.possible_world:
        # a recipe is "terminal" (part of world) unless it provides a
        # virtual/ target or something else also provides one of its provides
        terminal = True
        pn = self.recipecache.pkg_fn[f]

        for p in self.recipecache.pn_provides[pn]:
            if p.startswith('virtual/'):
                parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                terminal = False
                break
            for pf in self.recipecache.providers[p]:
                if self.recipecache.pkg_fn[pf] != pn:
                    parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                    terminal = False
                    break
            # NOTE(review): the inner break only exits the providers loop;
            # the outer pn_provides loop keeps iterating even after terminal
            # has been cleared -- confirm that is intended
        if terminal:
            self.recipecache.world_target.add(pn)
  760. def interactiveMode( self ):
  761. """Drop off into a shell"""
  762. try:
  763. from bb import shell
  764. except ImportError:
  765. parselog.exception("Interactive mode not available")
  766. sys.exit(1)
  767. else:
  768. shell.start( self )
def handleCollections(self, collections):
    """Handle collections"""
    # Parse the BBFILE_COLLECTIONS value: for each layer read its explicit
    # BBFILE_PRIORITY, LAYERDEPENDS (with optional ':version' suffixes) and
    # BBFILE_PATTERN, compute implicit priorities from the dependency graph,
    # and store (collection, pattern, compiled regex, priority) tuples in
    # recipecache.bbfile_config_priorities.  Raises CollectionError after
    # reporting all configuration errors found.
    errors = False
    self.recipecache.bbfile_config_priorities = []
    if collections:
        collection_priorities = {}
        collection_depends = {}
        collection_list = collections.split()
        min_prio = 0
        for c in collection_list:
            # Get collection priority if defined explicitly
            priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
            if priority:
                try:
                    prio = int(priority)
                except ValueError:
                    parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                    errors = True
                # NOTE(review): if int() failed above, 'prio' is unbound (or
                # stale from a previous iteration) at this point -- confirm
                if min_prio == 0 or prio < min_prio:
                    min_prio = prio
                collection_priorities[c] = prio
            else:
                # None marks "priority to be derived from dependencies below"
                collection_priorities[c] = None

            # Check dependencies and store information for priority calculation
            deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
            if deps:
                depnamelist = []
                deplist = deps.split()
                for dep in deplist:
                    # each dep may carry a required version: "name:version"
                    depsplit = dep.split(':')
                    if len(depsplit) > 1:
                        try:
                            depver = int(depsplit[1])
                        except ValueError:
                            parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                            errors = True
                            continue
                    else:
                        depver = None
                    dep = depsplit[0]
                    depnamelist.append(dep)

                    if dep in collection_list:
                        if depver:
                            layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                            if layerver:
                                try:
                                    lver = int(layerver)
                                except ValueError:
                                    parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                    errors = True
                                    continue
                                if lver != depver:
                                    parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                                    errors = True
                            else:
                                parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                errors = True
                    else:
                        parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                        errors = True
                collection_depends[c] = depnamelist
            else:
                collection_depends[c] = []

        # Recursively work out collection priorities based on dependencies
        def calc_layer_priority(collection):
            # layers without an explicit priority get max(dep priorities) + 1
            # NOTE(review): no cycle detection -- a dependency loop between
            # layers would recurse indefinitely; confirm inputs are acyclic
            if not collection_priorities[collection]:
                max_depprio = min_prio
                for dep in collection_depends[collection]:
                    calc_layer_priority(dep)
                    depprio = collection_priorities[dep]
                    if depprio > max_depprio:
                        max_depprio = depprio
                max_depprio += 1
                parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                collection_priorities[collection] = max_depprio

        # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
        for c in collection_list:
            calc_layer_priority(c)
            regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
            if regex == None:
                parselog.error("BBFILE_PATTERN_%s not defined" % c)
                errors = True
                continue
            try:
                cre = re.compile(regex)
            except re.error:
                parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                errors = True
                continue
            self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
    if errors:
        # We've already printed the actual error(s)
        raise CollectionError("Errors during parsing layer configuration")
  862. def buildSetVars(self):
  863. """
  864. Setup any variables needed before starting a build
  865. """
  866. if not self.data.getVar("BUILDNAME"):
  867. self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
  868. self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
def matchFiles(self, bf):
    """
    Find the .bb files which match the expression in 'buildfile'.
    """
    # anchor absolute and parent-relative inputs; other strings are later
    # treated as regular expressions against the collected file list
    if bf.startswith("/") or bf.startswith("../"):
        bf = os.path.abspath(bf)

    # NOTE: side effect -- this replaces self.collection with a fresh
    # collection built from the current bbfile_config_priorities
    self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
    filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
    try:
        # EAFP: if bf names a real file, return it directly
        os.stat(bf)
        bf = os.path.abspath(bf)
        return [bf]
    except OSError:
        # not an existing path: interpret bf as a regex over collected files
        regexp = re.compile(bf)
        matches = []
        for f in filelist:
            if regexp.search(f) and os.path.isfile(f):
                matches.append(f)
        return matches
  888. def matchFile(self, buildfile):
  889. """
  890. Find the .bb file which matches the expression in 'buildfile'.
  891. Raise an error if multiple files
  892. """
  893. matches = self.matchFiles(buildfile)
  894. if len(matches) != 1:
  895. if matches:
  896. msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
  897. if matches:
  898. for f in matches:
  899. msg += "\n %s" % f
  900. parselog.error(msg)
  901. else:
  902. parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
  903. raise NoSpecificMatch
  904. return matches[0]
def buildFile(self, buildfile, task):
    """
    Build the file matching regexp buildfile
    """
    # Too many people use -b because they think it's how you normally
    # specify a target to be built, so show a warning
    bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

    # Parse the configuration here. We need to do it explicitly here since
    # buildFile() doesn't use the cache
    self.parseConfiguration()

    # If we are told to do the None task then query the default task
    if (task == None):
        task = self.configuration.cmd

    # strip any virtual: prefix before matching, then restore it below
    fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
    fn = self.matchFile(fn)

    self.buildSetVars()

    infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                 self.data,
                                 self.caches_array)
    infos = dict(infos)

    fn = bb.cache.Cache.realfn2virtual(fn, cls)
    try:
        info_array = infos[fn]
    except KeyError:
        bb.fatal("%s does not exist" % fn)

    if info_array[0].skipped:
        bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

    self.recipecache.add_from_recipeinfo(fn, info_array)

    # Tweak some variables
    item = info_array[0].pn
    self.recipecache.ignored_dependencies = set()
    self.recipecache.bbfile_priority[fn] = 1

    # Remove external dependencies
    self.recipecache.task_deps[fn]['depends'] = {}
    self.recipecache.deps[fn] = []
    self.recipecache.rundeps[fn] = []
    self.recipecache.runrecs[fn] = []

    # Invalidate task for target if force mode active
    if self.configuration.force:
        logger.verbose("Invalidate task %s, %s", task, fn)
        bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)

    # Setup taskdata structure
    taskdata = bb.taskdata.TaskData(self.configuration.abort)
    taskdata.add_provider(self.data, self.recipecache, item)

    buildname = self.data.getVar("BUILDNAME")
    bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

    # Execute the runqueue
    runlist = [[item, "do_%s" % task]]

    rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

    def buildFileIdle(server, rq, abort):
        # Idle-loop callback: drives the runqueue one step per invocation.
        # Returning False ends the async command, True asks to be called
        # again, any other value is passed back as a retry delay.
        msg = None
        if abort or self.state == state.forceshutdown:
            rq.finish_runqueue(True)
            msg = "Forced shutdown"
        elif self.state == state.shutdown:
            rq.finish_runqueue(False)
            msg = "Stopped build"
        failures = 0
        try:
            retval = rq.execute_runqueue()
        except runqueue.TaskFailure as exc:
            failures += len(exc.args)
            retval = False
        except SystemExit as exc:
            self.command.finishAsyncCommand()
            return False

        if not retval:
            bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
            self.command.finishAsyncCommand(msg)
            return False
        if retval is True:
            return True
        return retval

    self.configuration.server_register_idlecallback(buildFileIdle, rq)
def buildTargets(self, targets, task):
    """
    Attempt to build the targets specified
    """
    def buildTargetsIdle(server, rq, abort):
        # Idle-loop callback: drives the runqueue one step per invocation.
        # Returning False ends the async command, True asks to be called
        # again, any other value is passed back as a retry delay.
        msg = None
        if abort or self.state == state.forceshutdown:
            rq.finish_runqueue(True)
            msg = "Forced shutdown"
        elif self.state == state.shutdown:
            rq.finish_runqueue(False)
            msg = "Stopped build"
        failures = 0
        try:
            retval = rq.execute_runqueue()
        except runqueue.TaskFailure as exc:
            failures += len(exc.args)
            retval = False
        except SystemExit as exc:
            self.command.finishAsyncCommand()
            return False

        if not retval:
            bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
            self.command.finishAsyncCommand(msg)
            return False
        if retval is True:
            return True
        return retval

    self.buildSetVars()

    # buildname is captured by the closure above before the build starts
    taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

    buildname = self.data.getVar("BUILDNAME")
    bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

    rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
    if 'universe' in targets:
        rq.rqdata.warn_multi_bb = True

    self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
def getAllKeysWithFlags(self, flaglist):
    """
    Dump every datastore variable into a plain dict: expanded value,
    variable history, and each flag named in flaglist.  Internal
    variables (names starting with "__") and nested datastores are
    skipped.
    """
    dump = {}
    for k in self.data.keys():
        try:
            v = self.data.getVar(k, True)
            if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                dump[k] = {
                    'v' : v ,
                    'history' : self.data.varhistory.variable(k),
                }
                for d in flaglist:
                    dump[k][d] = self.data.getVarFlag(k, d)
        except Exception as e:
            # best-effort: a variable whose expansion fails is skipped and
            # the error only printed to stdout, not logged or re-raised
            print(e)
    return dump
def generateNewImage(self, image, base_image, package_queue, timestamp, description):
    '''
    Create a new image with a "require"/"inherit" base_image statement
    '''
    import re
    if timestamp:
        # timestamped output file: <image>-YYYYmmdd-HHMMSS.bb
        image_name = os.path.splitext(image)[0]
        timestr = time.strftime("-%Y%m%d-%H%M%S")
        dest = image_name + str(timestr) + ".bb"
    else:
        if not image.endswith(".bb"):
            dest = image + ".bb"
        else:
            dest = image

    basename = False
    if base_image:
        # scan the base image recipe: remember its first line (a require
        # line, presumably -- confirm with callers) and whether it sets
        # IMAGE_BASENAME anywhere
        with open(base_image, 'r') as f:
            require_line = f.readline()
            p = re.compile("IMAGE_BASENAME *=")
            for line in f:
                if p.search(line):
                    basename = True

    with open(dest, "w") as imagefile:
        if base_image is None:
            imagefile.write("inherit core-image\n")
        else:
            topdir = self.data.getVar("TOPDIR")
            # if the base image lives inside TOPDIR, chain to what *it*
            # requires instead of requiring a build-tree path
            if topdir in base_image:
                base_image = require_line.split()[1]
            imagefile.write("require " + base_image + "\n")
        image_install = "IMAGE_INSTALL = \""
        for package in package_queue:
            image_install += str(package) + " "
        image_install += "\"\n"
        imagefile.write(image_install)

        description_var = "DESCRIPTION = \"" + description + "\"\n"
        imagefile.write(description_var)

        if basename:
            # If this is overwritten in a inherited image, reset it to default
            image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
            imagefile.write(image_basename)

    self.state = state.initial

    if timestamp:
        return timestr
# This is called for all async commands when self.state != running
def updateCache(self):
    """
    Incrementally parse recipes into the cache.  Returns True while
    parsing is still in progress, None once the cache is up to date,
    and raises BBHandledException on shutdown or parse errors.
    """
    if self.state == state.running:
        return

    if self.state in (state.shutdown, state.forceshutdown):
        # stop any in-flight parser before propagating the shutdown
        if hasattr(self.parser, 'shutdown'):
            self.parser.shutdown(clean=False, force = True)
        raise bb.BBHandledException()

    if self.state != state.parsing:
        # first call of a parse cycle: (re)load configuration and kick off
        # a CookerParser over the collected recipe files
        self.parseConfiguration ()
        if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
            bb.event.fire(bb.event.SanityCheck(False), self.data)

        ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
        self.recipecache.ignored_dependencies = set(ignore.split())

        for dep in self.configuration.extra_assume_provided:
            self.recipecache.ignored_dependencies.add(dep)

        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
        (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

        # preserve the configuration-time dependency list before recipes add theirs
        self.data.renameVar("__depends", "__base_depends")

        self.parser = CookerParser(self, filelist, masked)
        self.state = state.parsing

    if not self.parser.parse_next():
        collectlog.debug(1, "parsing complete")
        if self.parser.error:
            raise bb.BBHandledException()
        self.show_appends_with_no_recipes()
        self.handlePrefProviders()
        self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
        self.state = state.running
        return None

    return True
  1105. def checkPackages(self, pkgs_to_build):
  1106. # Return a copy, don't modify the original
  1107. pkgs_to_build = pkgs_to_build[:]
  1108. if len(pkgs_to_build) == 0:
  1109. raise NothingToBuild
  1110. ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
  1111. for pkg in pkgs_to_build:
  1112. if pkg in ignore:
  1113. parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
  1114. if 'world' in pkgs_to_build:
  1115. self.buildWorldTargetList()
  1116. pkgs_to_build.remove('world')
  1117. for t in self.recipecache.world_target:
  1118. pkgs_to_build.append(t)
  1119. if 'universe' in pkgs_to_build:
  1120. parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
  1121. parselog.debug(1, "collating packages for \"universe\"")
  1122. pkgs_to_build.remove('universe')
  1123. for t in self.recipecache.universe_target:
  1124. pkgs_to_build.append(t)
  1125. return pkgs_to_build
def pre_serve(self):
    """
    One-off setup before the server starts serving: start the PR service.
    On PR service configuration errors, fire CookerExit and flag the
    error state instead of raising.
    """
    # Empty the environment. The environment will be populated as
    # necessary from the data store.
    #bb.utils.empty_environment()
    try:
        self.prhost = prserv.serv.auto_start(self.data)
    except prserv.serv.PRServiceConfigError:
        bb.event.fire(CookerExit(), self.event_data)
        self.state = state.error
    return
def post_serve(self):
    """Shut the PR service down, then tell clients the cooker is exiting."""
    prserv.serv.auto_shutdown(self.data)
    bb.event.fire(CookerExit(), self.event_data)
  1139. def shutdown(self, force = False):
  1140. if force:
  1141. self.state = state.forceshutdown
  1142. else:
  1143. self.state = state.shutdown
def finishcommand(self):
    # An async command has completed; drop back to the initial/idle state.
    self.state = state.initial
def reset(self):
    """Reset cooker state by re-initialising the configuration data."""
    self.initConfigurationData()
  1148. def server_main(cooker, func, *args):
  1149. cooker.pre_serve()
  1150. if cooker.configuration.profile:
  1151. try:
  1152. import cProfile as profile
  1153. except:
  1154. import profile
  1155. prof = profile.Profile()
  1156. ret = profile.Profile.runcall(prof, func, *args)
  1157. prof.dump_stats("profile.log")
  1158. bb.utils.process_profilelog("profile.log")
  1159. print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
  1160. else:
  1161. ret = func(*args)
  1162. cooker.post_serve()
  1163. return ret
  1164. class CookerExit(bb.event.Event):
  1165. """
  1166. Notify clients of the Cooker shutdown
  1167. """
  1168. def __init__(self):
  1169. bb.event.Event.__init__(self)
class CookerCollectFiles(object):
    """
    Collects the .bb and .bbappend files BitBake should parse, applies
    BBMASK filtering and BBFILE_PRIORITY/BBFILE_PATTERN based priorities,
    and tracks which .bbappend files were actually applied.
    """
    def __init__(self, priorities):
        # recipe basename -> list of .bbappend paths targeting it
        self.appendlist = {}
        # (recipe basename, bbappend path) tuples in collection order
        self.bbappends = []
        # append basenames that were applied to a parsed recipe
        self.appliedappendlist = []
        # (collection, pattern, compiled regex, priority) tuples
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority( self, filename, matched = None ):
        # Return the priority of the first configured pattern matching
        # filename (0 when none match); optionally record the matching
        # regex in 'matched' for the unmatched-pattern warning later.
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched != None:
                    if not regex in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            # prune version-control metadata directories from the walk
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]
        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                # directory entries are walked recursively
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                # other entries may be globs; an existing plain file whose
                # glob expansion is empty is kept as-is
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                # an invalid BBMASK disables masking entirely for this run
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))
            if not base in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
                self.appendlist[base].append(f)

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # exact basename match, or wildcard append ('%') prefix match
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                self.appliedappendlist.append(bbappend)
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns):
        # Map each (possibly virtual) recipe filename to its collection
        # priority, warning about BBFILE_PATTERN entries that matched
        # neither a recipe nor a bbappend.
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

        def findmatch(regex):
            # True when the pattern matches at least one collected bbappend
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
  1307. class ParsingFailure(Exception):
  1308. def __init__(self, realexception, recipe):
  1309. self.realexception = realexception
  1310. self.recipe = recipe
  1311. Exception.__init__(self, realexception, recipe)
class Feeder(multiprocessing.Process):
    """
    Process that feeds parse jobs from a shared job list into the
    to_parsers queue, stopping when a message arrives on the quit queue
    or the job list is exhausted.
    """
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                # any quit message stops feeding; 'cancel' additionally
                # detaches the queue's background thread so exit isn't
                # blocked on unflushed items
                if quit == 'cancel':
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                # parsers are busy: push the job back and retry next loop
                self.jobs.insert(0, job)
                continue
class Parser(multiprocessing.Process):
    """
    Worker process that pulls parse jobs off a queue, parses each recipe,
    and pushes the result (or the exception raised) onto a result queue.
    """
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        # optional callable run once inside the child before parsing
        self.init = init
        multiprocessing.Process.__init__(self)
        # snapshot context and event handlers so every parse can be reset
        # to a clean copy of the state captured at fork time
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # run realrun() directly, or under the profiler when requested
        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)
            bb.utils.process_profilelog(logfile)
            print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))

    def realrun(self):
        if self.init:
            self.init()

        # results that could not be delivered yet (result queue full)
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                # any message on the quit queue stops this worker
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                # None is the sentinel for "no more jobs"
                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            # NOTE(review): self.cfg is not assigned in __init__; it is
            # presumably attached externally before parse() runs -- confirm
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
class CookerParser(object):
    """Drive the parsing of a list of recipe files.

    Files with a valid cache entry are loaded from bb_cache; the remainder
    are distributed to a Feeder process plus a pool of Parser worker
    processes.  The cooker calls parse_next() repeatedly to consume one
    result at a time.
    """
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        # Worker count: BB_NUMBER_PARSE_THREADS if set, else one per CPU.
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        # Split the file list into cache hits and files needing a real parse.
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a ParseProgress event roughly every 1% of parsed files.
        # NOTE(review): relies on integer division (Python 2 semantics here,
        # given the .next()/Queue usage elsewhere) — confirm before porting.
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        """Start the feeder and worker processes if any files need parsing."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                # Runs inside each worker process: give it the parsed config
                # and register exit hooks that save the codeparser/fetcher
                # caches when the worker shuts down.
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.processes.append(parser)

            # Consume cached results first, then results from the workers.
            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        """Stop feeder and workers; idempotent.

        clean=True fires ParseCompleted and lets everything drain; on a
        clean=False (error) shutdown the queues are cancelled so exit is
        not blocked on unflushed data.  force=True terminates workers that
        do not join within 0.1s.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One None sentinel per worker tells each Parser to finish.
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the recipe cache back in the background; the Finalize hook
        # makes interpreter exit wait for the sync thread.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)

    def load_cached(self):
        """Yield (parsed, infos) for every file satisfied from the cache."""
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        """Yield worker results until 'toparse' files have been handled.

        Exceptions shipped back by workers are re-raised here so
        parse_next() can report them.
        """
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        """Consume one result; return False when parsing is finished or failed.

        Any parse error increments self.error, logs it, and triggers an
        unclean shutdown.  Successful results update the statistics and are
        added to the recipe cache.
        """
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                # Worker-annotated exception: report with its recorded traceback.
                logger.error('Unable to parse %s', value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                        parsed=parsed)
        return True

    def reparse(self, filename):
        """Force a fresh parse of one file and add its recipes to the cache."""
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)