#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
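    # Note: the tuple-unpack below gives each lifecycle state a small integer
    # id in declaration order (initial=0 ... error=6); get_name() reverse-maps
    # an id back to its name.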
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)

class SkippedPackage:
    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason

class CookerFeatures(object):
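    # The chained assignment below both binds the three feature-id class
    # attributes (HOB_EXTRA_CACHES=0, BASEDATASTORE_TRACKING=1,
    # SEND_SANITYEVENTS=2) and records them in _feature_list, which
    # setFeature() uses to validate requests.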
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []
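        # file_inited stays None until a BuildStarted (or CookerExit) event
        # arrives; until then send() buffers events in event_queue.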

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({"allvariables": self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#

class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if self.state not in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):
        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
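            # Advertise the server address in every datastore (including the
            # pristine original) so resets and all multiconfigs reuse the same
            # local hash equivalence server.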
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)
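            # Only the approved (whitelisted) environment variables are diffed
            # below; any addition, deletion or change among them marks the
            # configuration dirty and triggers a reparse via self.reset().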
            for k in bb.utils.approved_variables():
                if k in environment and k not in self.configuration.env:
                    logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                    self.configuration.env[k] = environment[k]
                    clean = False
                if k in self.configuration.env and k not in environment:
                    logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                    del self.configuration.env[k]
                    clean = False
                if k not in self.configuration.env and k not in environment:
                    continue
                if environment[k] != self.configuration.env[k]:
                    logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                    self.configuration.env[k] = environment[k]
                    clean = False

            # Now update all the variables not in the datastore to match
            self.configuration.env = environment

        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """
        return self.command.runAsyncCommand()

    def showVersions(self):
        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
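        # mc_base() extracts the multiconfig name from a bare 'mc:NAME' target;
        # it returns None for plain targets and full 'mc:NAME:recipe' forms.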
        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1, "Target list: %s" % (str(fulltargetlist)))

        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
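            # Iterate to a fixed point: resolving providers for one multiconfig
            # dependency can introduce further, indirect mcdepends.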
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)

        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
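        # The resulting dict maps names to build-time dependencies ("depends",
        # "tdepends"), runtime information ("rdepends-*", "rrecs-pkg",
        # "packages"), per-recipe metadata ("pn"), a provider map and the
        # configured layer priorities.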
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):
        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except ValueError:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start(self)

    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1, 'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                        if min_prio == 0 or prio < min_prio:
                            min_prio = prio
                        collection_priorities[c] = prio
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        # Fall back to a dependency-derived priority so 'prio'
                        # is never used unbound
                        collection_priorities[c] = None
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3, "Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3, "Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3, "Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3, "Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                    errors = False
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        t = time.gmtime()
        for mc in self.databuilder.mcdata:
            ds = self.databuilder.mcdata[mc]
            if not ds.getVar("BUILDNAME", False):
                ds.setVar("BUILDNAME", "${DATE}${TIME}")
            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
            ds.setVar("DATE", time.strftime('%Y%m%d', t))
            ds.setVar("TIME", time.strftime('%H%M%S', t))

    def reset_mtime_caches(self):
        """
        Reset mtime caches - this is particularly important when memory resident as something
        which is cached may well have changed since the last invocation (e.g. a
        file associated with a recipe might have been modified by the user).
        """
        build.reset_cache()
        bb.fetch._checksum_cache.mtime_cache.clear()
        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
        if siggen_cache:
            bb.parse.siggen.checksum_cache.mtime_cache.clear()

    def matchFiles(self, bf, mc=''):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)

        self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
        filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
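        # If 'bf' names an existing file, use it directly; otherwise treat it
        # as a regular expression and search the collected recipe file list.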
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile, mc=''):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files are found.
        """
        matches = self.matchFiles(buildfile, mc)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)

    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile
        """
        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn, mc)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)

        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
        infos = dict(infos)
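
        # Parse results are keyed by virtual filename, e.g. a class extension
        # such as "virtual:native:/path/to/foo.bb"; a plain recipe keeps its
        # real path as the key.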
        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        if quietlog:
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False

            if not retval:
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                self.command.finishAsyncCommand(msg)
                # We trashed self.recipecaches above
                self.parsecache_valid = False
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False
            if retval is True:
                return True
            return retval
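
        # Hand buildFileIdle to the server's idle loop. As used here, the
        # handler appears to follow the usual idle-callback convention: the
        # server keeps polling while it returns True, treats a number as a
        # retry delay, and drops it once it returns False (an assumption based
        # on how rq.execute_runqueue()'s retval is propagated above).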
        self.idleCallBackRegister(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task
        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
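        # e.g. a bare target "busybox" with the default task becomes
        # "busybox:do_build", while targets already carrying a ":<task>"
        # suffix pass through unchanged.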

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets always look like <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)

    def getAllKeysWithFlags(self, flaglist):
        dump = {}
        for k in self.data.keys():
            try:
                expand = True
                flags = self.data.getVarFlags(k)
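                # Python functions are dumped unexpanded, presumably because
                # their bodies can contain "${"-style text that is not a
                # variable reference, so expansion could fail or mangle the
                # dumped value (assumption; the original carries no rationale).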
                if flags and "func" in flags and "python" in flags:
                    expand = False
                v = self.data.getVar(k, expand)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v': str(v),
                        'history': self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        if flags and d in flags:
                            dump[k][d] = flags[d]
                        else:
                            dump[k][d] = None
            except Exception as e:
                print(e)
        return dump

    def updateCacheSync(self):
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.handlePRServ()

    # This is called for all async commands when self.state != running
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force=True)
                self.parser.final_cleanup()
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            bb.parse.siggen.reset(self.data)
            self.parseConfiguration()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collections = {}

            mcfilelist = {}
            total_masked = 0
            searchdirs = set()
            for mc in self.multiconfigs:
                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])

                mcfilelist[mc] = filelist
                total_masked += masked
                searchdirs |= set(search)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, mcfilelist, total_masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True

    def checkPackages(self, pkgs_to_build, task=None):
        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
        for pkg in pkgs_to_build.copy():
            if pkg in ignore:
                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
            if pkg.startswith("multiconfig:"):
                pkgs_to_build.remove(pkg)
                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))

        if 'world' in pkgs_to_build:
            pkgs_to_build.remove('world')
            for mc in self.multiconfigs:
                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                for t in self.recipecaches[mc].world_target:
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for mc in self.multiconfigs:
                for t in self.recipecaches[mc].universe_target:
                    if task:
                        foundtask = False
                        for provider_fn in self.recipecaches[mc].providers[t]:
                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                                foundtask = True
                                break
                        if not foundtask:
                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                            continue
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        return pkgs_to_build

    def pre_serve(self):
        return

    def post_serve(self):
        self.shutdown(force=True)
        prserv.serv.auto_shutdown()
        if self.hashserv:
            self.hashserv.process.terminate()
            self.hashserv.process.join()
        if hasattr(self, "data"):
            bb.event.fire(CookerExit(), self.data)

    def shutdown(self, force=False):
        if force:
            self.state = state.forceshutdown
        else:
            self.state = state.shutdown

        if self.parser:
            self.parser.shutdown(clean=not force, force=force)
            self.parser.final_cleanup()

    def finishcommand(self):
        self.state = state.initial

    def reset(self):
        self.initConfigurationData()
        self.handlePRServ()

    def clientComplete(self):
        """Called when the client is done using the server"""
        self.finishcommand()
        self.extraconfigdata = {}
        self.command.reset()
        if hasattr(self, "data"):
            self.databuilder.reset()
            self.data = self.databuilder.data
        self.parsecache_valid = False
        self.baseconfig_valid = False

class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)

class CookerCollectFiles(object):
    def __init__(self, priorities, mc=''):
        self.mc = mc
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest. This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
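
        # e.g. with hypothetical patterns "^/layers/meta-outer/" and
        # "^/layers/meta-outer/meta-inner/", the longer inner pattern must be
        # tried first, otherwise every file in the nested layer would take
        # the outer layer's priority.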

    def calc_bbfile_priority(self, filename):
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                return pri, regex
        return 0, None

    def get_bbfiles(self):
        """Get list of default .bb files by scanning the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() needs a tuple of suffixes; a list raises TypeError
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar("BBFILES") or "").split()

        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
        config.setVar("BBFILES_PRIORITIZED", " ".join(files))
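
        # Lowest priority sorts first; the overlay detection at the end of
        # this method walks reversed(bbfiles), so the highest-priority file
        # for a given recipe name is seen first and wins.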

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
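        # '%' acts as a wildcard for the tail of the recipe name, e.g. a
        # hypothetical foo_%.bbappend applies to foo_1.2.bb and foo_1.3.bb.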
        for b in self.bbappends:
            (bbappend, filename) = b
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return tuple(filelist)

    def collection_priorities(self, pkgfns, fns, d):
        # Return the priorities of the entries in pkgfns
        # Also check that all the regexes in self.bbfile_config_priorities are used
        # (but to do that we need to ensure skipped recipes aren't counted, nor
        # collections in BBFILE_PATTERN_IGNORE_EMPTY)

        priorities = {}
        seen = set()
        matched = set()

        matched_regex = set()
        unmatched_regex = set()
        for _, _, regex, _ in self.bbfile_config_priorities:
            unmatched_regex.add(regex)

        # Calculate priorities for each file
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p], regex = self.calc_bbfile_priority(realfn)
            if regex in unmatched_regex:
                matched_regex.add(regex)
                unmatched_regex.remove(regex)
            seen.add(realfn)
            if regex:
                matched.add(realfn)

        if unmatched_regex:
            # Account for bbappend files
            for b in self.bbappends:
                (bbfile, append) = b
                seen.add(append)

            # Account for skipped recipes
            seen.update(fns)

            seen.difference_update(matched)

            def already_matched(fn):
                for regex in matched_regex:
                    if regex.match(fn):
                        return True
                return False

            for unmatch in unmatched_regex.copy():
                for fn in seen:
                    if unmatch.match(fn):
                        # If the bbappend or file was already matched by another regex, skip it
                        # e.g. for a layer within a layer, the outer regex could match, the inner
                        # regex may match nothing and we should warn about that
                        if already_matched(fn):
                            continue
                        unmatched_regex.remove(unmatch)
                        break

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched_regex:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
                                                                                             collection, pattern))

        return priorities

class ParsingFailure(Exception):
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        Exception.__init__(self, realexception, recipe)

class Parser(multiprocessing.Process):
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        if self.init:
            self.init()

        pending = []
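
        # Main loop: keep pulling jobs until told to quit. If the results
        # queue is full, the result is parked in 'pending' and retried on the
        # next iteration, so this process never blocks forever on a put().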
        while True:
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                self.results.close()
                self.results.join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.pop()
                except IndexError:
                    self.results.close()
                    self.results.join_thread()
                    break
                result = self.parse(*job)
                # Clear the siggen cache after parsing to control memory usage, it's huge
                bb.parse.siggen.postparsing_clean_cache()
            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, mc, cache, filename, appends):
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, mc, cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and a worker process doesn't just exit on its own in response to
        # a SystemExit event, for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
        finally:
            bb.event.LogHandler.filter = origfilter

class CookerParser(object):
    def __init__(self, cooker, mcfilelist, masked):
        self.mcfilelist = mcfilelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0

        self.current = 0
        self.process_names = []

        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = set()
        self.willparse = set()
        for mc in self.cooker.multiconfigs:
            for filename in self.mcfilelist[mc]:
                appends = self.cooker.collections[mc].get_file_appends(filename)
                if not self.bb_caches[mc].cacheValid(filename, appends):
                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
                else:
                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))

        self.total = len(self.fromcache) + len(self.willparse)
        self.toparse = len(self.willparse)
        self.progress_chunk = int(max(self.toparse / 100, 1))

        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                     multiprocessing.cpu_count()), self.toparse)

        self.start()
        self.haveshutdown = False
        self.syncthread = None

    def start(self):
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)

            def init():
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
                signal.signal(signal.SIGHUP, signal.SIG_DFL)
                signal.signal(signal.SIGINT, signal.SIG_IGN)
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()

            def chunkify(lst, n):
                return [lst[i::n] for i in range(n)]
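
            # e.g. chunkify([1, 2, 3, 4, 5], 2) -> [[1, 3, 5], [2, 4]]: a
            # round-robin split so each parser process gets a similar mix of
            # files.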
            self.jobs = chunkify(list(self.willparse), self.num_processes)

            for i in range(0, self.num_processes):
                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())
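
            # Cached entries stream out first; once exhausted, parse_generator
            # yields live results drained from the workers' result queue.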

    def shutdown(self, clean=True, force=False):
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)

        for process in self.processes:
            self.parser_quit.put(None)

        # Clean up the queue before calling process.join(), otherwise there
        # might be deadlocks.
        while True:
            try:
                self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()

        self.parser_quit.close()
        # Allow data left in the cancel queue to be discarded
        self.parser_quit.cancel_join_thread()

        def sync_caches():
            for c in self.bb_caches.values():
                c.sync()

        sync = threading.Thread(target=sync_caches, name="SyncThread")
        self.syncthread = sync
        sync.start()

        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout=pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def final_cleanup(self):
        if self.syncthread:
            self.syncthread.join()

    def load_cached(self):
        for mc, cache, filename, appends in self.fromcache:
            cached, infos = cache.load(filename, appends)
            yield not cached, mc, infos

    def parse_generator(self):
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        result = []
        parsed = None
        try:
            parsed, mc, result = next(self.results)
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher=self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
        to_reparse = set()
        for mc in self.cooker.multiconfigs:
            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))

        for mc, filename, appends in to_reparse:
            infos = self.bb_caches[mc].parse(filename, appends)
            for vfn, info_array in infos:
                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)