#
# SPDX-License-Identifier: GPL-2.0-only
#
import os

import bb.siggen
import oe

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(fn):
        return "/image.bbclass" in " ".join(dataCache.inherits[fn])

    # (Almost) always include our own inter-task dependencies.
    # The exception is the special do_kernel_configme->do_unpack_and_patch
    # dependency from archiver.bbclass.
    if recipename == depname:
        if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"):
            return False
        return True

    # Exclude well defined recipe->dependency
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for special wildcard
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here
    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(fn) and isAllArch(fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    depfn = dep.rsplit(":", 1)[0]
    if dataCache and isKernel(depfn) and not isKernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keep dependencies
    return True
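
# Illustrative sketch (recipe names hypothetical): given a configuration such
# as
#
#   SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS = "foo->bar *->quilt-native"
#
# sstate_rundepfilter() would drop foo's dependency on bar, and drop every
# recipe's dependency on quilt-native via the wildcard form, while all other
# dependencies still go through the remaining checks above.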

def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
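
# A minimal sketch of the input this parses (recipe name and hash are
# hypothetical):
#
#   SIGGEN_LOCKEDSIGS_TYPES = "t-core2-64"
#   SIGGEN_LOCKEDSIGS_t-core2-64 = "zlib:do_configure:aa41..."
#
# which yields:
#
#   sigs = {"zlib": {"do_configure": ["aa41...", "SIGGEN_LOCKEDSIGS_t-core2-64"]}}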

class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

class SignatureGeneratorOEBasicHashMixIn(object):
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                "").split()
        self.unlockedrecipes = {k: "" for k in self.unlockedrecipes}
        self.buildarch = data.getVar('BUILD_ARCH')
        self._internal = False
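
    # self.unlockedrecipes is kept as a dict rather than a list for fast
    # membership tests; the values are unused. An entry such as
    # SIGGEN_UNLOCKED_RECIPES = "linux-yocto" (hypothetical) exempts that
    # recipe, and anything that depends on it, from the locked signature
    # checks in get_taskhash() below.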

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

    def get_taskdata(self):
        return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()

    def set_taskdata(self, data):
        self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
        super().set_taskdata(data[3:])

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
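
    # dump_sigs() is invoked when bitbake is asked to dump signatures (the -S
    # command-line option), so a locked-sigs.inc is written into the current
    # directory as a side effect of that dump.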

    def prep_taskhash(self, tid, deps, dataCache):
        super().prep_taskhash(tid, deps, dataCache)
        if hasattr(self, "extramethod"):
            (_, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
            inherits = " ".join(dataCache.inherits[fn])
            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
                self.extramethod[tid] = ":" + self.buildarch
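
    # self.lockedhashes[tid] acts as a cache with a sentinel: it holds the
    # locked hash for tasks pinned via SIGGEN_LOCKEDSIGS_*, or False for tasks
    # that have been checked and found not to be locked.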

    def get_taskhash(self, tid, deps, dataCache):
        if tid in self.lockedhashes:
            if self.lockedhashes[tid]:
                return self.lockedhashes[tid]
            else:
                return super().get_taskhash(tid, deps, dataCache)

        # get_taskhash will call get_unihash internally in the parent class, we
        # need to disable our filter of it whilst this runs else
        # incorrect hashes can be calculated.
        self._internal = True
        h = super().get_taskhash(tid, deps, dataCache)
        self._internal = False

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        recipename = dataCache.pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCache.hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def recipename_from_dep(dep):
                fn = bb.runqueue.fn_from_tid(dep)
                return dataCache.pkg_fn[fn]

            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[tid] = h_locked
                self._internal = True
                unihash = self.get_unihash(tid)
                self._internal = False
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked and h_locked != unihash:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                          % (recipename, task, h, h_locked, var))

                return h_locked

        self.lockedhashes[tid] = False
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def get_stampfile_hash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return self.lockedhashes[tid]
        return super().get_stampfile_hash(tid)

    def get_unihash(self, tid):
        if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
            return self.lockedhashes[tid]
        return super().get_unihash(tid)

    def dump_sigtask(self, fn, task, stampbase, runtime):
        tid = fn + ":" + task
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        for tid in self.runtaskdeps:
            if taskfilter:
                if tid not in taskfilter:
                    continue
            fn = bb.runqueue.fn_from_tid(tid)
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(tid)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
                for tid in sortedtid:
                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
                    if tid not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
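
    # For illustration, the generated locked-sigs.inc has roughly this shape
    # (names and hash shortened/hypothetical):
    #
    #   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
    #       zlib:do_configure:aa41... \
    #       "
    #   SIGGEN_LOCKEDSIGS_TYPES_qemux86-64 = "t-core2-64"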

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(":", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                # One entry per task: "<pn>:<task> <recipe filename> <taskhash>"
                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, sq_data, missed, found, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for tid in sq_data['hash']:
            if tid not in found:
                for pn in self.lockedsigs:
                    taskname = bb.runqueue.taskname_from_tid(tid)
                    # lockedsigs values are [hash, varname] pairs, so compare
                    # against the hash element
                    if sq_data['hash'][tid] in (v[0] for v in self.lockedsigs[pn].values()):
                        if taskname == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                               % (pn, taskname, sq_data['hash'][tid]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))

class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"

class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        if not self.server:
            bb.fatal("OEEquivHash requires BB_HASHSERVE to be set")
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        if not self.method:
            bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash

def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb:')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]
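
    # Example filenames (paths hypothetical) and the hash value extracted:
    #
    #   stamps:  .../zlib/1.2.11-r0/do_configure.sigdata.aa41...   -> 'aa41...'
    #   sstate:  .../sstate:zlib:...:aa41..._configure.tgz.siginfo -> 'aa41...'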

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[actual_hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo

def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
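
# For illustration, with the usual OE-Core settings the returned path for task
# "populate_sysroot" expands to something like (path hypothetical):
#
#   ${TMPDIR}/sstate-control/manifest-<machine>-<pn>.populate_sysroot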

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2
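
# Sketch of the lookup above (filenames hypothetical): for taskdata "zlib" and
# taskname "populate_sysroot" on a target recipe, the candidate manifests are
# tried most-specific first, e.g.
#
#   ${SSTATE_MANIFESTS}/manifest-${MACHINE_ARCH}-zlib.populate_sysroot
#
# followed by each PACKAGE_EXTRA_ARCHS entry, then "allarch" and the
# cross-canadian arch as fallbacks.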

def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
    if "package_write_" in task or task == "package_qa":
        include_owners = False
    include_timestamps = False
    if task == "package":
        include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")
        if extra_content:
            update_hash(extra_content + "\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    try:
                        update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                        update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                    except KeyError:
                        bb.warn("KeyError in %s" % path)
                        raise

                if include_timestamps:
                    update_hash(" %10d" % s.st_mtime)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents
                    with open(path, 'rb') as fobj:
                        for chunk in iter(lambda: fobj.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()