#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import bb.siggen
import bb.runqueue
import oe
import netrc

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(mc, fn):
        inherits = " ".join(dataCaches[mc].inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(mc, fn):
        return "/image.bbclass" in " ".join(dataCaches[mc].inherits[fn])

    depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
    mc, _ = bb.runqueue.split_mc(fn)

    # We can skip the rm_work task signature to avoid running the task
    # when we remove some tasks from the dependency chain,
    # e.g. INHERIT:remove = "create-spdx" would otherwise trigger do_rm_work
    if task == "do_rm_work":
        return False

    # (Almost) always include our own inter-task dependencies (unless it comes
    # from a mcdepends). The exception is the special
    # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
    if recipename == depname and depmc == mc:
        if task == "do_kernel_configme" and deptaskname == "do_unpack_and_patch":
            return False
        return True

    # Exclude well defined recipe->dependency
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for special wildcard
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(mc, fn) and isAllArch(mc, fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(mc, fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    if dataCaches and isKernel(depmc, depmcfn) and not isKernel(mc, fn):
        for pkg in dataCaches[mc].runrecs[fn]:
            if " ".join(dataCaches[mc].runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keep dependencies
    return True
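
# Illustrative configuration consumed by the filter above (hypothetical
# values; in practice these are set by sstate.bbclass and distro config):
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS = "somerecipe->somedep"   # drop this exact edge
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += "*->quilt-native"      # drop the dep from any recipe
#   SIGGEN_EXCLUDERECIPES_ABISAFE = "linux-libc-headers"      # ABI-safe, excluded for non-images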

def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
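
# The locked-sigs format parsed above, as written by dump_lockedsigs() below
# (hash values abbreviated for illustration):
#   SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"
#   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
#       zlib:do_configure:1a2b...c3d4 \
#       "
# which parses into sigs["zlib"]["do_configure"] == ["1a2b...c3d4", "SIGGEN_LOCKEDSIGS_t-core2-64"]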

def lockedsigs_unihashmap(d):
    unihashmap = {}
    data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
    for entry in data:
        pn, task, taskhash, unihash = entry.split(":")
        unihashmap[(pn, task)] = (taskhash, unihash)
    return unihashmap
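
# SIGGEN_UNIHASHMAP records taskhash -> unihash mappings alongside the locked
# sigs, one "pn:task:taskhash:unihash" word per entry (see dump_lockedsigs()
# below for the writer side).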

class SignatureGeneratorOEBasicHashMixIn(object):
    supports_multiconfig_datacaches = True

    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.unihashmap = lockedsigs_unihashmap(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.mismatch_number = 0
        self.lockedsigs_msgs = ""
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split()
        self.unlockedrecipes = {k: "" for k in self.unlockedrecipes}
        self._internal = False

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)

    def get_taskdata(self):
        return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()

    def set_taskdata(self, data):
        self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
        super().set_taskdata(data[3:])

    def dump_sigs(self, dataCache, options):
        if 'lockedsigs' in options:
            sigfile = os.getcwd() + "/locked-sigs.inc"
            bb.plain("Writing locked sigs to %s" % sigfile)
            self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def get_taskhash(self, tid, deps, dataCaches):
        if tid in self.lockedhashes:
            if self.lockedhashes[tid]:
                return self.lockedhashes[tid]
            else:
                return super().get_taskhash(tid, deps, dataCaches)

        h = super().get_taskhash(tid, deps, dataCaches)

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        recipename = dataCaches[mc].pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCaches[mc].hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def recipename_from_dep(dep):
                (depmc, _, _, depfn) = bb.runqueue.split_tid_mcfn(dep)
                return dataCaches[depmc].pkg_fn[depfn]

            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[tid] = h_locked
                self._internal = True
                unihash = self.get_unihash(tid)
                self._internal = False
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked and h_locked != unihash:
                    self.mismatch_number += 1
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                              % (recipename, task, h, h_locked, var))

                return h_locked

        self.lockedhashes[tid] = False
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h
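
    # Note: when a tid is locked, the locked hash also takes over stamp file
    # naming and unihash lookups below, so a locked task only reruns when the
    # locked signature itself changes.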

    def get_stampfile_hash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return self.lockedhashes[tid]
        return super().get_stampfile_hash(tid)

    def get_cached_unihash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
            return self.lockedhashes[tid]

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
        recipename = self.lockedpnmap[fn]

        if (recipename, task) in self.unihashmap:
            taskhash, unihash = self.unihashmap[(recipename, task)]
            if taskhash == self.taskhash[tid]:
                return unihash

        return super().get_cached_unihash(tid)

    def dump_sigtask(self, fn, task, stampbase, runtime):
        tid = fn + ":" + task
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        unihashmap = {}
        for tid in self.runtaskdeps:
            # Bitbake changed this to a tuple in newer versions
            if isinstance(tid, tuple):
                tid = tid[1]
            if taskfilter:
                if not tid in taskfilter:
                    continue
            (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(tid)

            taskhash = self.taskhash[tid]
            unihash = self.get_unihash(tid)
            if taskhash != unihash:
                unihashmap[tid] = "    " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
                for tid in sortedtid:
                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
                    if tid not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
            f.write('SIGGEN_UNIHASHMAP += "\\\n')
            sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
            for tid in sortedtid:
                f.write(unihashmap[tid] + " \\\n")
            f.write('    "\n')
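
    # A locked-sigs.inc produced above looks roughly like (hashes abbreviated):
    #   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
    #       zlib:do_fetch:3f7a... \
    #       "
    #   SIGGEN_LOCKEDSIGS_TYPES:qemux86-64 = "t-core2-64"
    #   SIGGEN_UNIHASHMAP += "\
    #       zlib:do_fetch:3f7a...:9c1e... \
    #       "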

    def dump_siglist(self, sigfile, path_prefix_strip=None):
        def strip_fn(fn):
            nonlocal path_prefix_strip
            if not path_prefix_strip:
                return fn
            fn_exp = fn.split(":")
            if fn_exp[-1].startswith(path_prefix_strip):
                fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
            return ":".join(fn_exp)

        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(":", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, sq_data, missed, found, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []
        info_msgs = None

        if self.lockedsigs:
            if len(self.lockedsigs) > 10:
                self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number)
            else:
                self.lockedsigs_msgs = "The following recipes have locked tasks:"
                for pn in self.lockedsigs:
                    self.lockedsigs_msgs += " %s" % (pn)

        for tid in sq_data['hash']:
            if tid not in found:
                for pn in self.lockedsigs:
                    taskname = bb.runqueue.taskname_from_tid(tid)
                    # lockedsigs values are [hash, varname] pairs, so compare
                    # against the hash element
                    if sq_data['hash'][tid] in (v[0] for v in self.lockedsigs[pn].values()):
                        if taskname == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                                   % (pn, taskname, sq_data['hash'][tid]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'info':
            info_msgs = self.lockedsigs_msgs
        if checklevel == 'warn' or checklevel == 'info':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if info_msgs:
            bb.note(info_msgs)
        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))
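
# Concrete generators: OEBasicHash composes the mixin above with bitbake's
# basic hashing; OEEquivHash additionally layers bitbake's unihash (hash
# equivalence) support on top.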

class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"

class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        if not self.server:
            bb.fatal("OEEquivHash requires BB_HASHSERVE to be set")
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        if not self.method:
            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
        self.username = data.getVar("BB_HASHSERVE_USERNAME")
        self.password = data.getVar("BB_HASHSERVE_PASSWORD")
        if not self.username or not self.password:
            try:
                n = netrc.netrc()
                auth = n.authenticators(self.server)
                if auth is not None:
                    self.username, _, self.password = auth
            except FileNotFoundError:
                pass
            except netrc.NetrcParseError as e:
                bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
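
# BitBake picks one of these generators by name, e.g. (the OE-Core default):
#   BB_SIGNATURE_HANDLER = "OEEquivHash"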

def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import fnmatch
    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        if key.startswith("mc:"):
            # mc:<mc>:<pn>:<task>
            _, _, pn, taskname = key.split(':', 3)
        else:
            # <pn>:<task>
            pn, taskname = key.split(':', 1)

    hashfiles = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    def get_time(fullpath):
        # NFS can end up in a weird state where the file exists but has no stat info.
        # If that happens, we assume it doesn't actually exist and show a warning
        try:
            return os.stat(fullpath).st_mtime
        except FileNotFoundError:
            bb.warn("Could not obtain mtime for {}".format(fullpath))
            return None

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
    elif pn.startswith("llvm-project-source"):
        # llvm-project-source shared workdir is also a special case :*(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False

    bb.debug(1, "Calling glob.glob on {}".format(filespec))
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    mtime = get_time(fullpath)
                    if mtime:
                        hashfiles[taskhash] = {'path': fullpath, 'sstate': False, 'time': mtime}
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            hashval = get_hashval(fullpath)
            mtime = get_time(fullpath)
            if mtime:
                hashfiles[hashval] = {'path': fullpath, 'sstate': False, 'time': mtime}

    if not taskhashlist or (len(hashfiles) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            # gcc-source is a special case, same as with local stamps above
            if pn.startswith("gcc-source"):
                localdata.setVar('PN', "gcc")
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            localdata.setVar('SSTATE_CURRTASK', taskname[3:])
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')

            bb.debug(1, "Calling glob.glob on {}".format(filespec))
            matchedfiles = glob.glob(filespec)

            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                mtime = get_time(fullpath)
                if mtime:
                    hashfiles[actual_hashval] = {'path': fullpath, 'sstate': True, 'time': mtime}

    return hashfiles

bb.siggen.find_siginfo = find_siginfo
bb.siggen.find_siginfo_version = 2
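
# find_siginfo() returns one record per discovered hash, of the form
#   {hashval: {'path': <sigdata/siginfo file>, 'sstate': <bool>, 'time': <mtime>}}
# where 'sstate' distinguishes sstate-cache hits from stamp-directory hits;
# consumers such as bitbake-diffsigs reach it via bb.siggen.find_siginfo.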

def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
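
# Example (hypothetical values): sstate_get_manifest_filename("populate_sysroot", d)
# would expand to something like
#   ${SSTATE_MANIFESTS}/manifest-${SSTATE_MANMACH}-${PN}.populate_sysroot
# via SSTATE_MANFILEPREFIX, honouring any stamp-extra-info override above.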

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}", "${BUILD_ARCH}_${ORIGNATIVELSBSTRING}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    searched_manifests = []

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
        searched_manifests.append(manifest)
    bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n    %s"
             % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)), "\n    ".join(searched_manifests)))
    return None, d2
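
# A usage sketch (argument shapes assumed from the parsing above):
#   manifest, d2 = find_sstate_manifest("zlib", "virtual:multilib:lib32:/path/to/zlib.bb",
#                                       "populate_sysroot", d, {})
# taskdata2 only needs the "virtual:multilib:<variant>:" prefix to select a variant.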

def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp
    import re
    import fnmatch

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    corebase = d.getVar("COREBASE")
    tmpdir = d.getVar("TMPDIR")
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
    if "package_write_" in task or task == "package_qa":
        include_owners = False
    include_timestamps = False
    include_root = True
    if task == "package":
        include_timestamps = True
        include_root = False
    source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
    hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
    extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")

    filemaps = {}
    for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
        entry = m.split(":")
        if len(entry) != 3 or entry[0] != task:
            continue
        filemaps.setdefault(entry[1], [])
        filemaps[entry[1]].append(entry[2])
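
    # SSTATE_HASHEQUIV_FILEMAP words are "task:file-glob:replacement"; for
    # matching files the named substrings (or "regex-" prefixed patterns) are
    # stripped from the content before hashing, e.g. (hypothetical):
    #   SSTATE_HASHEQUIV_FILEMAP = "populate_sysroot:*/postinst-useradd*:${TMPDIR}"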

    try:
        os.chdir(path)
        basepath = os.path.normpath(path)

        update_hash("OEOuthashBasic\n")
        if hash_version:
            update_hash(hash_version + "\n")

        if extra_sigdata:
            update_hash(extra_sigdata + "\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                if include_owners:
                    # Group/other permissions are only relevant in pseudo context
                    add_perm(stat.S_IRGRP, 'r')
                    add_perm(stat.S_IWGRP, 'w')
                    if stat.S_ISGID & s.st_mode:
                        add_perm(stat.S_IXGRP, 's', 'S')
                    else:
                        add_perm(stat.S_IXGRP, 'x')

                    add_perm(stat.S_IROTH, 'r')
                    add_perm(stat.S_IWOTH, 'w')
                    if stat.S_ISVTX & s.st_mode:
                        update_hash('t')
                    else:
                        add_perm(stat.S_IXOTH, 'x')

                    try:
                        update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                        update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                    except KeyError as e:
                        msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
                               "any user/group on target. This may be due to host contamination." %
                               (e, os.path.abspath(path), s.st_uid, s.st_gid))
                        raise Exception(msg).with_traceback(e.__traceback__)

                if include_timestamps:
                    # Need to clamp to SOURCE_DATE_EPOCH
                    if s.st_mtime > source_date_epoch:
                        update_hash(" %10d" % source_date_epoch)
                    else:
                        update_hash(" %10d" % s.st_mtime)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                filterfile = False
                for entry in filemaps:
                    if fnmatch.fnmatch(path, entry):
                        filterfile = True

                update_hash(" ")
                if stat.S_ISREG(s.st_mode) and not filterfile:
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents
                    if filterfile:
                        # Need to ignore paths in crossscripts and postinst-useradd files.
                        with open(path, 'rb') as d:
                            chunk = d.read()
                            chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
                            for entry in filemaps:
                                if not fnmatch.fnmatch(path, entry):
                                    continue
                                for r in filemaps[entry]:
                                    if r.startswith("regex-"):
                                        chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
                                    else:
                                        chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
                            fh.update(chunk)
                    else:
                        with open(path, 'rb') as d:
                            for chunk in iter(lambda: d.read(4096), b""):
                                fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            if include_root or root != ".":
                process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
            for dir in dirs:
                if os.path.islink(os.path.join(root, dir)):
                    process(os.path.join(root, dir))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()
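
# Each process() call above contributes one ls-style record to the output
# hash, roughly:
#   <type><perms> [owner group] [mtime] <dev> <size> <sha256-of-contents> <path> [-> target]
# with the owner and timestamp fields gated by include_owners/include_timestamps.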