# sstatesig.py - OpenEmbedded signature generator glue for bitbake's siggen
import os

import bb.siggen
import oe
  3. def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
  4. # Return True if we should keep the dependency, False to drop it
  5. def isNative(x):
  6. return x.endswith("-native")
  7. def isCross(x):
  8. return "-cross-" in x
  9. def isNativeSDK(x):
  10. return x.startswith("nativesdk-")
  11. def isKernel(fn):
  12. inherits = " ".join(dataCache.inherits[fn])
  13. return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
  14. def isPackageGroup(fn):
  15. inherits = " ".join(dataCache.inherits[fn])
  16. return "/packagegroup.bbclass" in inherits
  17. def isAllArch(fn):
  18. inherits = " ".join(dataCache.inherits[fn])
  19. return "/allarch.bbclass" in inherits
  20. def isImage(fn):
  21. return "/image.bbclass" in " ".join(dataCache.inherits[fn])
  22. # (Almost) always include our own inter-task dependencies.
  23. # The exception is the special do_kernel_configme->do_unpack_and_patch
  24. # dependency from archiver.bbclass.
  25. if recipename == depname:
  26. if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"):
  27. return False
  28. return True
  29. # Exclude well defined recipe->dependency
  30. if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
  31. return False
  32. # Check for special wildcard
  33. if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
  34. return False
  35. # Don't change native/cross/nativesdk recipe dependencies any further
  36. if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
  37. return True
  38. # Only target packages beyond here
  39. # allarch packagegroups are assumed to have well behaved names which don't change between architecures/tunes
  40. if isPackageGroup(fn) and isAllArch(fn) and not isNative(depname):
  41. return False
  42. # Exclude well defined machine specific configurations which don't change ABI
  43. if depname in siggen.abisaferecipes and not isImage(fn):
  44. return False
  45. # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
  46. # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
  47. # is machine specific.
  48. # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
  49. # and we reccomend a kernel-module, we exclude the dependency.
  50. depfn = dep.rsplit(".", 1)[0]
  51. if dataCache and isKernel(depfn) and not isKernel(fn):
  52. for pkg in dataCache.runrecs[fn]:
  53. if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
  54. return False
  55. # Default to keep dependencies
  56. return True
  57. def sstate_lockedsigs(d):
  58. sigs = {}
  59. types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
  60. for t in types:
  61. siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
  62. lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
  63. for ls in lockedsigs:
  64. pn, task, h = ls.split(":", 2)
  65. if pn not in sigs:
  66. sigs[pn] = {}
  67. sigs[pn][task] = [h, siggen_lockedsigs_var]
  68. return sigs
  69. class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
  70. name = "OEBasic"
  71. def init_rundepcheck(self, data):
  72. self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
  73. self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
  74. pass
  75. def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
  76. return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
  77. class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
  78. name = "OEBasicHash"
  79. def init_rundepcheck(self, data):
  80. self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
  81. self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
  82. self.lockedsigs = sstate_lockedsigs(data)
  83. self.lockedhashes = {}
  84. self.lockedpnmap = {}
  85. self.lockedhashfn = {}
  86. self.machine = data.getVar("MACHINE")
  87. self.mismatch_msgs = []
  88. self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
  89. "").split()
  90. self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
  91. pass
  92. def tasks_resolved(self, virtmap, virtpnmap, dataCache):
  93. # Translate virtual/xxx entries to PN values
  94. newabisafe = []
  95. for a in self.abisaferecipes:
  96. if a in virtpnmap:
  97. newabisafe.append(virtpnmap[a])
  98. else:
  99. newabisafe.append(a)
  100. self.abisaferecipes = newabisafe
  101. newsafedeps = []
  102. for a in self.saferecipedeps:
  103. a1, a2 = a.split("->")
  104. if a1 in virtpnmap:
  105. a1 = virtpnmap[a1]
  106. if a2 in virtpnmap:
  107. a2 = virtpnmap[a2]
  108. newsafedeps.append(a1 + "->" + a2)
  109. self.saferecipedeps = newsafedeps
  110. def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
  111. return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
  112. def get_taskdata(self):
  113. data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
  114. return (data, self.lockedpnmap, self.lockedhashfn)
  115. def set_taskdata(self, data):
  116. coredata, self.lockedpnmap, self.lockedhashfn = data
  117. super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)
  118. def dump_sigs(self, dataCache, options):
  119. sigfile = os.getcwd() + "/locked-sigs.inc"
  120. bb.plain("Writing locked sigs to %s" % sigfile)
  121. self.dump_lockedsigs(sigfile)
  122. return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
  123. def get_taskhash(self, fn, task, deps, dataCache):
  124. h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)
  125. recipename = dataCache.pkg_fn[fn]
  126. self.lockedpnmap[fn] = recipename
  127. self.lockedhashfn[fn] = dataCache.hashfn[fn]
  128. unlocked = False
  129. if recipename in self.unlockedrecipes:
  130. unlocked = True
  131. else:
  132. def get_mc(tid):
  133. tid = tid.rsplit('.', 1)[0]
  134. if tid.startswith('multiconfig:'):
  135. elems = tid.split(':')
  136. return elems[1]
  137. def recipename_from_dep(dep):
  138. # The dep entry will look something like
  139. # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
  140. # ...
  141. fn = dep.rsplit('.', 1)[0]
  142. return dataCache.pkg_fn[fn]
  143. mc = get_mc(fn)
  144. # If any unlocked recipe is in the direct dependencies then the
  145. # current recipe should be unlocked as well.
  146. depnames = [ recipename_from_dep(x) for x in deps if mc == get_mc(x)]
  147. if any(x in y for y in depnames for x in self.unlockedrecipes):
  148. self.unlockedrecipes[recipename] = ''
  149. unlocked = True
  150. if not unlocked and recipename in self.lockedsigs:
  151. if task in self.lockedsigs[recipename]:
  152. k = fn + "." + task
  153. h_locked = self.lockedsigs[recipename][task][0]
  154. var = self.lockedsigs[recipename][task][1]
  155. self.lockedhashes[k] = h_locked
  156. self.taskhash[k] = h_locked
  157. #bb.warn("Using %s %s %s" % (recipename, task, h))
  158. if h != h_locked:
  159. self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
  160. % (recipename, task, h, h_locked, var))
  161. return h_locked
  162. #bb.warn("%s %s %s" % (recipename, task, h))
  163. return h
  164. def dump_sigtask(self, fn, task, stampbase, runtime):
  165. k = fn + "." + task
  166. if k in self.lockedhashes:
  167. return
  168. super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)
  169. def dump_lockedsigs(self, sigfile, taskfilter=None):
  170. types = {}
  171. for k in self.runtaskdeps:
  172. if taskfilter:
  173. if not k in taskfilter:
  174. continue
  175. fn = k.rsplit(".",1)[0]
  176. t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
  177. t = 't-' + t.replace('_', '-')
  178. if t not in types:
  179. types[t] = []
  180. types[t].append(k)
  181. with open(sigfile, "w") as f:
  182. l = sorted(types)
  183. for t in l:
  184. f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
  185. types[t].sort()
  186. sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
  187. for k in sortedk:
  188. fn = k.rsplit(".",1)[0]
  189. task = k.rsplit(".",1)[1]
  190. if k not in self.taskhash:
  191. continue
  192. f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
  193. f.write(' "\n')
  194. f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
  195. def dump_siglist(self, sigfile):
  196. with open(sigfile, "w") as f:
  197. tasks = []
  198. for taskitem in self.taskhash:
  199. (fn, task) = taskitem.rsplit(".", 1)
  200. pn = self.lockedpnmap[fn]
  201. tasks.append((pn, task, fn, self.taskhash[taskitem]))
  202. for (pn, task, fn, taskhash) in sorted(tasks):
  203. f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))
  204. def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
  205. warn_msgs = []
  206. error_msgs = []
  207. sstate_missing_msgs = []
  208. for task in range(len(sq_fn)):
  209. if task not in ret:
  210. for pn in self.lockedsigs:
  211. if sq_hash[task] in iter(self.lockedsigs[pn].values()):
  212. if sq_task[task] == 'do_shared_workdir':
  213. continue
  214. sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
  215. % (pn, sq_task[task], sq_hash[task]))
  216. checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
  217. if checklevel == 'warn':
  218. warn_msgs += self.mismatch_msgs
  219. elif checklevel == 'error':
  220. error_msgs += self.mismatch_msgs
  221. checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
  222. if checklevel == 'warn':
  223. warn_msgs += sstate_missing_msgs
  224. elif checklevel == 'error':
  225. error_msgs += sstate_missing_msgs
  226. if warn_msgs:
  227. bb.warn("\n".join(warn_msgs))
  228. if error_msgs:
  229. bb.fatal("\n".join(error_msgs))
  230. # Insert these classes into siggen's namespace so it can see and select them
  231. bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
  232. bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
  233. def find_siginfo(pn, taskname, taskhashlist, d):
  234. """ Find signature data files for comparison purposes """
  235. import fnmatch
  236. import glob
  237. if not taskname:
  238. # We have to derive pn and taskname
  239. key = pn
  240. splitit = key.split('.bb.')
  241. taskname = splitit[1]
  242. pn = os.path.basename(splitit[0]).split('_')[0]
  243. if key.startswith('virtual:native:'):
  244. pn = pn + '-native'
  245. hashfiles = {}
  246. filedates = {}
  247. def get_hashval(siginfo):
  248. if siginfo.endswith('.siginfo'):
  249. return siginfo.rpartition(':')[2].partition('_')[0]
  250. else:
  251. return siginfo.rpartition('.')[2]
  252. # First search in stamps dir
  253. localdata = d.createCopy()
  254. localdata.setVar('MULTIMACH_TARGET_SYS', '*')
  255. localdata.setVar('PN', pn)
  256. localdata.setVar('PV', '*')
  257. localdata.setVar('PR', '*')
  258. localdata.setVar('EXTENDPE', '')
  259. stamp = localdata.getVar('STAMP')
  260. if pn.startswith("gcc-source"):
  261. # gcc-source shared workdir is a special case :(
  262. stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
  263. filespec = '%s.%s.sigdata.*' % (stamp, taskname)
  264. foundall = False
  265. import glob
  266. for fullpath in glob.glob(filespec):
  267. match = False
  268. if taskhashlist:
  269. for taskhash in taskhashlist:
  270. if fullpath.endswith('.%s' % taskhash):
  271. hashfiles[taskhash] = fullpath
  272. if len(hashfiles) == len(taskhashlist):
  273. foundall = True
  274. break
  275. else:
  276. try:
  277. filedates[fullpath] = os.stat(fullpath).st_mtime
  278. except OSError:
  279. continue
  280. hashval = get_hashval(fullpath)
  281. hashfiles[hashval] = fullpath
  282. if not taskhashlist or (len(filedates) < 2 and not foundall):
  283. # That didn't work, look in sstate-cache
  284. hashes = taskhashlist or ['?' * 32]
  285. localdata = bb.data.createCopy(d)
  286. for hashval in hashes:
  287. localdata.setVar('PACKAGE_ARCH', '*')
  288. localdata.setVar('TARGET_VENDOR', '*')
  289. localdata.setVar('TARGET_OS', '*')
  290. localdata.setVar('PN', pn)
  291. localdata.setVar('PV', '*')
  292. localdata.setVar('PR', '*')
  293. localdata.setVar('BB_TASKHASH', hashval)
  294. swspec = localdata.getVar('SSTATE_SWSPEC')
  295. if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
  296. localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
  297. elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
  298. localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
  299. sstatename = taskname[3:]
  300. filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
  301. matchedfiles = glob.glob(filespec)
  302. for fullpath in matchedfiles:
  303. actual_hashval = get_hashval(fullpath)
  304. if actual_hashval in hashfiles:
  305. continue
  306. hashfiles[hashval] = fullpath
  307. if not taskhashlist:
  308. try:
  309. filedates[fullpath] = os.stat(fullpath).st_mtime
  310. except:
  311. continue
  312. if taskhashlist:
  313. return hashfiles
  314. else:
  315. return filedates
  316. bb.siggen.find_siginfo = find_siginfo
  317. def sstate_get_manifest_filename(task, d):
  318. """
  319. Return the sstate manifest file path for a particular task.
  320. Also returns the datastore that can be used to query related variables.
  321. """
  322. d2 = d.createCopy()
  323. extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
  324. if extrainf:
  325. d2.setVar("SSTATE_MANMACH", extrainf)
  326. return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
  327. def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
  328. d2 = d
  329. variant = ''
  330. curr_variant = ''
  331. if d.getVar("BBEXTENDCURR") == "multilib":
  332. curr_variant = d.getVar("BBEXTENDVARIANT")
  333. if "virtclass-multilib" not in d.getVar("OVERRIDES"):
  334. curr_variant = "invalid"
  335. if taskdata2.startswith("virtual:multilib"):
  336. variant = taskdata2.split(":")[2]
  337. if curr_variant != variant:
  338. if variant not in multilibcache:
  339. multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
  340. d2 = multilibcache[variant]
  341. if taskdata.endswith("-native"):
  342. pkgarchs = ["${BUILD_ARCH}"]
  343. elif taskdata.startswith("nativesdk-"):
  344. pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
  345. elif "-cross-canadian" in taskdata:
  346. pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
  347. elif "-cross-" in taskdata:
  348. pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
  349. elif "-crosssdk" in taskdata:
  350. pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
  351. else:
  352. pkgarchs = ['${MACHINE_ARCH}']
  353. pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
  354. pkgarchs.append('allarch')
  355. pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
  356. for pkgarch in pkgarchs:
  357. manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
  358. if os.path.exists(manifest):
  359. return manifest, d2
  360. bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
  361. return None, d2