#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import errno
import fnmatch
import itertools
import os
import shlex
import re
import glob
import stat
import mmap
import subprocess
import shutil

import oe.cachedpath

def runstrip(file, elftype, strip, extra_strip_sections=''):
    # Function to strip a single file, called from split_and_strip_files below
    # A working 'file' (one which works on the target architecture)
    #
    # The elftype is a bit pattern (explained in is_elf below) to tell
    # us what type of file we're processing...
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    stripcmd = [strip]
    skip_strip = False
    # kernel module
    if elftype & 16:
        if is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            skip_strip = True
        else:
            stripcmd.extend(["--strip-debug", "--remove-section=.comment",
                "--remove-section=.note", "--preserve-dates"])
    # .so and shared library
    elif ".so" in file and elftype & 8:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"])
    # shared or executable:
    elif elftype & 8 or elftype & 4:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
        if extra_strip_sections != '':
            for section in extra_strip_sections.split():
                stripcmd.extend(["--remove-section=" + section])

    stripcmd.append(file)
    bb.debug(1, "runstrip: %s" % stripcmd)

    if not skip_strip:
        output = subprocess.check_output(stripcmd, stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

# Detect .ko module by searching for "vermagic=" string
def is_kernel_module(path):
    with open(path) as f:
        return mmap.mmap(f.fileno(), 0, prot=mmap.PROT_READ).find(b"vermagic=") >= 0

# Detect if .ko module is signed
def is_kernel_module_signed(path):
    with open(path, "rb") as f:
        f.seek(-28, 2)
        module_tail = f.read()
        return "Module signature appended" in "".join(chr(c) for c in bytearray(module_tail))

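# Note (illustrative, not in the original source): the 28-byte tail read above
# corresponds to the length of the magic string "~Module signature appended~\n"
# that the kernel appends to signed modules.
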
# Return type (bits):
# 0 - not elf
# 1 - ELF
# 2 - stripped
# 4 - executable
# 8 - shared library
# 16 - kernel module
def is_elf(path):
    exec_type = 0
    result = subprocess.check_output(["file", "-b", path], stderr=subprocess.STDOUT).decode("utf-8")

    if "ELF" in result:
        exec_type |= 1
        if "not stripped" not in result:
            exec_type |= 2
        if "executable" in result:
            exec_type |= 4
        if "shared" in result:
            exec_type |= 8
        if "relocatable" in result:
            if path.endswith(".ko") and path.find("/lib/modules/") != -1 and is_kernel_module(path):
                exec_type |= 16
    return (path, exec_type)

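# Worked example (illustration only, not part of the original source): for an
# unstripped shared library, "file -b" reports something like
# "ELF 64-bit LSB shared object ... not stripped", so is_elf() returns
# exec_type = 1 | 8 = 9; a stripped executable yields 1 | 2 | 4 = 7.
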
def is_static_lib(path):
    if path.endswith('.a') and not os.path.islink(path):
        with open(path, 'rb') as fh:
            # The magic must include the first slash to avoid
            # matching golang static libraries
            magic = b'!<arch>\x0a/'
            start = fh.read(len(magic))
            return start == magic
    return False

def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
    """
    Strip executable code (like executables, shared libraries) _in_place_
    - Based on sysroot_strip in staging.bbclass
    :param dstdir: directory in which to strip files
    :param strip_cmd: Strip command (usually ${STRIP})
    :param libdir: ${libdir} - strip .so files in this directory
    :param base_libdir: ${base_libdir} - strip .so files in this directory
    :param max_process: number of stripping processes started in parallel
    :param qa_already_stripped: Set to True if 'already-stripped' in ${INSANE_SKIP}
    This is for proper logging and messages only.
    """
    import stat, errno, oe.path, oe.utils

    elffiles = {}
    inodes = {}
    libdir = os.path.abspath(dstdir + os.sep + libdir)
    base_libdir = os.path.abspath(dstdir + os.sep + base_libdir)
    exec_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH

    #
    # First lets figure out all of the files we may have to process
    #
    checkelf = []
    inodecache = {}
    for root, dirs, files in os.walk(dstdir):
        for f in files:
            file = os.path.join(root, f)

            try:
                ltarget = oe.path.realpath(file, dstdir, False)
                s = os.lstat(ltarget)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                # Skip broken symlinks
                continue
            if not s:
                continue
            # Check if it's an executable
            if s[stat.ST_MODE] & exec_mask \
                    or ((file.startswith(libdir) or file.startswith(base_libdir)) and ".so" in f) \
                    or file.endswith('.ko'):
                # If it's a symlink, and points to an ELF file, we capture the readlink target
                if os.path.islink(file):
                    continue

                # It's a file (or hardlink), not a link
                # ...but is it ELF, and is it already stripped?
                checkelf.append(file)
                inodecache[file] = s.st_ino

    results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
    for (file, elf_file) in results:
        #elf_file = is_elf(file)
        if elf_file & 1:
            if elf_file & 2:
                if qa_already_stripped:
                    bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dstdir):], pn))
                else:
                    bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dstdir):], pn))
                continue

            if inodecache[file] in inodes:
                os.unlink(file)
                os.link(inodes[inodecache[file]], file)
            else:
                # break hardlinks so that we do not strip the original.
                inodes[inodecache[file]] = file
                bb.utils.break_hardlinks(file)
                elffiles[file] = elf_file

    #
    # Now strip them (in parallel)
    #
    sfiles = []
    for file in elffiles:
        elf_file = int(elffiles[file])
        sfiles.append((file, elf_file, strip_cmd))

    oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)

TRANSLATE = (
    ("@", "@at@"),
    (" ", "@space@"),
    ("\t", "@tab@"),
    ("[", "@openbrace@"),
    ("]", "@closebrace@"),
    ("_", "@underscore@"),
    (":", "@colon@"),
)

def file_translate(file):
    ft = file
    for s, replace in TRANSLATE:
        ft = ft.replace(s, replace)
    return ft

def file_reverse_translate(file):
    ft = file
    for s, replace in reversed(TRANSLATE):
        ft = ft.replace(replace, s)
    return ft

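# Worked example (illustration only, not from the original source):
# file_translate("/etc/my config_[v1]") returns
# "/etc/my@space@config@underscore@@openbrace@v1@closebrace@";
# file_reverse_translate() applies the substitutions in reverse order to
# recover the original path.
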
def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest):
    import re, subprocess, shlex

    provides = {}
    requires = {}

    file_re = re.compile(r'\s+\d+\s(.*)')
    dep_re = re.compile(r'\s+(\S)\s+(.*)')
    r = re.compile(r'[<>=]+\s+\S*')

    def process_deps(pipe, pkg, pkgdest, provides, requires):
        file = None
        for line in pipe.split("\n"):

            m = file_re.match(line)
            if m:
                file = m.group(1)
                file = file.replace(pkgdest + "/" + pkg, "")
                file = file_translate(file)
                continue

            m = dep_re.match(line)
            if not m or not file:
                continue

            type, dep = m.groups()

            if type == 'R':
                i = requires
            elif type == 'P':
                i = provides
            else:
                continue

            if dep.startswith("python("):
                continue

            # Ignore all perl(VMS::...) and perl(Mac::...) dependencies. These
            # are typically used conditionally from the Perl code, but are
            # generated as unconditional dependencies.
            if dep.startswith('perl(VMS::') or dep.startswith('perl(Mac::'):
                continue

            # Ignore perl dependencies on .pl files.
            if dep.startswith('perl(') and dep.endswith('.pl)'):
                continue

            # Remove perl versions and perl module versions since they typically
            # do not make sense when used as package versions.
            if dep.startswith('perl') and r.search(dep):
                dep = dep.split()[0]

            # Put parentheses around any version specifications.
            dep = r.sub(r'(\g<0>)', dep)

            if file not in i:
                i[file] = []
            i[file].append(dep)

        return provides, requires

    output = subprocess.check_output(shlex.split(rpmdeps) + pkgfiles, stderr=subprocess.STDOUT).decode("utf-8")
    provides, requires = process_deps(output, pkg, pkgdest, provides, requires)

    return (pkg, provides, requires)

def read_shlib_providers(d):
    import re

    shlib_provider = {}
    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    list_re = re.compile(r'^(.*)\.list$')
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        bb.debug(2, "Reading shlib providers in %s" % (dir))
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                try:
                    fd = open(os.path.join(dir, file))
                except IOError:
                    # During a build unrelated shlib files may be deleted, so
                    # handle files disappearing between the listdirs and open.
                    continue
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    s = l.strip().split(":")
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
    return shlib_provider

# We generate a master list of directories to process, we start by
# seeding this list with reasonable defaults, then load from
# the fs-perms.txt files
def fixup_perms(d):
    import pwd, grp

    cpath = oe.cachedpath.CachedPath()
    dvar = d.getVar('PKGD')

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
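    # Illustrative examples in the format documented above (hedged examples,
    # not copied from any particular fs-perms.txt shipped with the metadata):
    #    /usr/src    0755 root root false - - -
    #    /var/run    link /run
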
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode, 8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)

# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f, recursive=True)
        if globbed:
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f, x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths

# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)

    conffiles = d.getVar('CONFFILES:%s' % pkg)
    if conffiles == None:
        conffiles = d.getVar('CONFFILES')
    if conffiles == None:
        conffiles = ""
    conffiles = conffiles.split()

    conf_orig_list = files_from_filevars(conffiles)[0]

    # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
    conf_list = []
    for f in conf_orig_list:
        if os.path.isdir(f):
            continue
        if os.path.islink(f):
            continue
        if not os.path.exists(f):
            continue
        conf_list.append(f)

    # Remove the leading './'
    for i in range(0, len(conf_list)):
        conf_list[i] = conf_list[i][1:]

    os.chdir(cwd)
    return sorted(conf_list)

def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """

    def fixutf(m):
        cp = m.group(1)
        if cp:
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

def split_locales(d):
    cpath = oe.cachedpath.CachedPath()
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    try:
        locale_index = packages.index(pn + '-locale')
        packages.pop(locale_index)
    except ValueError:
        locale_index = len(packages)

    lic = d.getVar("LICENSE:" + pn + "-locale")

    localepaths = []
    locales = set()
    for localepath in (d.getVar('LOCALE_PATHS') or "").split():
        localedir = dvar + localepath
        if not cpath.isdir(localedir):
            bb.debug(1, 'No locale files in %s' % localepath)
            continue

        localepaths.append(localepath)
        with os.scandir(localedir) as it:
            for entry in it:
                if entry.is_dir():
                    locales.add(entry.name)

    if len(locales) == 0:
        bb.debug(1, "No locale files in this package")
        return

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.insert(locale_index, pkg)
        locale_index += 1
        files = []
        for localepath in localepaths:
            files.append(os.path.join(localepath, l))
        d.setVar('FILES:' + pkg, " ".join(files))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if lic:
            d.setVar('LICENSE:' + pkg, lic)
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))

def package_debug_vars(d):
    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "",
        }
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }

    return debug_vars

def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    debugfiles = {}

    for line in dwarfsrcfiles_output.splitlines():
        if line.startswith("\t"):
            debugfiles[os.path.normpath(line.split()[0])] = ""

    return debugfiles.keys()

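# Note (assumption about dwarfsrcfiles' usual output, not stated in the
# original source): dwarfsrcfiles prints each referenced source file on a
# tab-indented line below its compilation unit, which is why only lines
# starting with "\t" are collected above.
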
def source_info(file, d, fatal=True):
    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)

    return list(debugsources)

def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)

def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike the function above, there is no way to split a static library into
    # two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)

def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading "  [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])

def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    cpath = oe.cachedpath.CachedPath()

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-ffile-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass

            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if that's the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar, debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if it's empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)

def process_split_and_strip_files(d):
    cpath = oe.cachedpath.CachedPath()

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        checkstatic = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue

                if oe.package.is_static_lib(file):
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkstatic[file] = (file, file_reference)
                    continue

                # Check if it's an executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                    # Modified the file so clear the cache
                    cpath.updatecache(file)

        # Do the same hardlink processing as above, but for static libraries
        results = list(checkstatic.keys())

        # As above, sort the results.
        results.sort(key=lambda x: x[0])

        for file in results:
            # Use a reference of device ID and inode number to identify files
            file_reference = checkstatic[file][1]
            if file_reference in inodes:
                os.unlink(file)
                os.link(inodes[file_reference][0], file)
                inodes[file_reference].append(file)
            else:
                inodes[file_reference] = [file]
                # break hardlink
                bb.utils.break_hardlinks(file)
                staticlibs.append(file)
                # Modified the file so clear the cache
                cpath.updatecache(file)

    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file, source_info(file, d)) )

        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                if os.access(ftarget, os.R_OK):
                    bb.utils.mkdirhier(os.path.dirname(fpath))
                    # Only one hardlink of separated debug info file in each directory
                    if not os.access(fpath, os.R_OK):
                        #bb.note("Link %s -> %s" % (fpath, ftarget))
                        os.link(ftarget, fpath)
                elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                    deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
                    fpath = dvar + deststatic
                    ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
                    if os.access(ftarget, os.R_OK):
                        bb.utils.mkdirhier(os.path.dirname(fpath))
                        # Only one hardlink of separated debug info file in each directory
                        if not os.access(fpath, os.R_OK):
                            #bb.note("Link %s -> %s" % (fpath, ftarget))
                            os.link(ftarget, fpath)
                else:
                    bb.note("Unable to find inode link target %s" % (target))

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

        # Build "minidebuginfo" and reinject it back into the stripped binaries
        if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
            oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                         extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)

  1124. def populate_packages(d):
  1125. cpath = oe.cachedpath.CachedPath()
  1126. workdir = d.getVar('WORKDIR')
  1127. outdir = d.getVar('DEPLOY_DIR')
  1128. dvar = d.getVar('PKGD')
  1129. packages = d.getVar('PACKAGES').split()
  1130. pn = d.getVar('PN')
  1131. bb.utils.mkdirhier(outdir)
  1132. os.chdir(dvar)
  1133. autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
  1134. split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
  1135. # If debug-with-srcpkg mode is enabled then add the source package if it
  1136. # doesn't exist and add the source file contents to the source package.
  1137. if split_source_package:
  1138. src_package_name = ('%s-src' % d.getVar('PN'))
  1139. if not src_package_name in packages:
  1140. packages.append(src_package_name)
  1141. d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
  1142. # Sanity check PACKAGES for duplicates
  1143. # Sanity should be moved to sanity.bbclass once we have the infrastructure
  1144. package_dict = {}
  1145. for i, pkg in enumerate(packages):
  1146. if pkg in package_dict:
  1147. msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
  1148. oe.qa.handle_error("packages-list", msg, d)
  1149. # Ensure the source package gets the chance to pick up the source files
  1150. # before the debug package by ordering it first in PACKAGES. Whether it
  1151. # actually picks up any source files is controlled by
  1152. # PACKAGE_DEBUG_SPLIT_STYLE.
  1153. elif pkg.endswith("-src"):
  1154. package_dict[pkg] = (10, i)
  1155. elif autodebug and pkg.endswith("-dbg"):
  1156. package_dict[pkg] = (30, i)
  1157. else:
  1158. package_dict[pkg] = (50, i)
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = oe.package.files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root, file)
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root, file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages)
    for pkg in packages:
        if pkg in skipped_pkgs:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg])
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)

def process_fixsymlinks(pkgfiles, d):
    cpath = oe.cachedpath.CachedPath()
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))

def process_filedeps(pkgfiles, d):
    """
    Collect perfile run-time dependency metadata
    Output:
      FILERPROVIDESFLIST:pkg - list of all files w/ deps
      FILERPROVIDES:filepath:pkg - per file dep
      FILERDEPENDSFLIST:pkg - list of all files w/ deps
      FILERDEPENDS:filepath:pkg - per file dep
    """
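    # Illustrative example (hypothetical values, not produced verbatim by this
    # code): a package "foo" whose /usr/bin/foo requires libc.so.6 could end
    # up with roughly:
    #   FILERDEPENDS:/usr/bin/foo:foo = "libc.so.6"
    #   FILERDEPENDSFLIST:foo = "/usr/bin/foo"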
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    def chunks(files, n):
        return [files[i:i+n] for i in range(0, len(files), n)]
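    # e.g. chunks([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]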

    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
            continue
        for files in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, files, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)

    provides_files = {}
    requires_files = {}

    for result in processed:
        (pkg, provides, requires) = result

        if pkg not in provides_files:
            provides_files[pkg] = []
        if pkg not in requires_files:
            requires_files[pkg] = []

        for file in sorted(provides):
            provides_files[pkg].append(file)
            key = "FILERPROVIDES:" + file + ":" + pkg
            d.appendVar(key, " " + " ".join(provides[file]))

        for file in sorted(requires):
            requires_files[pkg].append(file)
            key = "FILERDEPENDS:" + file + ":" + pkg
            d.appendVar(key, " " + " ".join(requires[file]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))

def process_shlibs(pkgfiles, d):
    cpath = oe.cachedpath.CachedPath()
    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')
    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        needs_ldconfig = False
        needed = set()
        sonames = set()
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
        if libdir_re.match(os.path.dirname(file)):
            needs_ldconfig = True
        return (needs_ldconfig, needed, sonames)

    def darwin_so(file, needed, sonames, pkgver):
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos
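        # e.g. get_combinations("libfoo.1.2") -> ["libfoo", "libfoo.1",
        # "libfoo.1.2", "libfoo.1.2"]; duplicates are harmless since sonames
        # is a set.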

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".", 1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, pkgver):
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV:' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        linuxlist = []
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if hostos.startswith("darwin"):
                darwin_so(file, needed, sonames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                needs_ldconfig = needs_ldconfig or ldconfig

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)

        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)

        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    assumed_libs = d.getVar('ASSUME_SHLIBS')
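    # ASSUME_SHLIBS entries have the form <shlib>:<dep-package>[_<version>],
    # e.g. (illustrative) ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation".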
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')

def process_pkgconfig(pkgfiles, d):
    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')
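    # A .pc file mixes variable assignments and fields, e.g. (illustrative):
    #   prefix=/usr
    #   Requires: glib-2.0 >= 2.40
    # var_re catches the former, field_re the latter.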

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if m:
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(os.path.basename(name))
                if not os.access(file, os.R_OK):
                    continue
                with open(file, 'r') as f:
                    lines = f.readlines()
                for l in lines:
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = pd.expand(m.group(2))
                        if hdr == 'Requires' or hdr == 'Requires.private':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
                        continue
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            with open(pkgs_file, 'w') as f:
                for p in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                with open(os.path.join(dir, file)) as fd:
                    lines = fd.readlines()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')

def read_libdep_files(d):
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps

def process_depchains(pkgfiles, d):
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for their RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, it will only use the RDEPENDS of the single parent
    package.
    """
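    # The modifiers come from DEPCHAIN_POST / DEPCHAIN_PRE; in OE-Core the
    # usual defaults are DEPCHAIN_POST = "-dev -dbg" and an empty
    # DEPCHAIN_PRE (illustrative, set by the distro/bitbake configuration).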
    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)

    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[len(prefix):], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)