package.bbclass 83 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202
  1. #
  2. # Packaging process
  3. #
  4. # Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
  5. # Taking D and splitting it up into the packages listed in PACKAGES, placing the
  6. # resulting output in PKGDEST.
  7. #
  8. # There are the following default steps but PACKAGEFUNCS can be extended:
  9. #
  10. # a) package_get_auto_pr - get PRAUTO from remote PR service
  11. #
  12. # b) perform_packagecopy - Copy D into PKGD
  13. #
  14. # c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
  15. #
  16. # d) split_and_strip_files - split the files into runtime and debug and strip them.
  17. # Debug files include debug info split, and associated sources that end up in -dbg packages
  18. #
  19. # e) fixup_perms - Fix up permissions in the package before we split it.
  20. #
  21. # f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
  22. # Also triggers the binary stripping code to put files in -dbg packages.
  23. #
  24. # g) package_do_filedeps - Collect perfile run-time dependency metadata
  25. # The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
  26. # a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
  27. #
  28. # h) package_do_shlibs - Look at the shared libraries generated and automatically add any
  29. # dependencies found. Also stores the package name so anyone else using this library
  30. # knows which package to depend on.
  31. #
  32. # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
  33. #
  34. # j) read_shlibdeps - Reads the stored shlibs information into the metadata
  35. #
  36. # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
  37. #
  38. # l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
  39. # packaging steps
  40. inherit packagedata
  41. inherit chrpath
  42. # Need the package_qa_handle_error() in insane.bbclass
  43. inherit insane
  44. PKGD = "${WORKDIR}/package"
  45. PKGDEST = "${WORKDIR}/packages-split"
  46. LOCALE_SECTION ?= ''
  47. ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
  48. # rpm is used for the per-file dependency identification
  49. PACKAGE_DEPENDS += "rpm-native"
  50. # If your postinstall can execute at rootfs creation time rather than on
  51. # target but depends on a native/cross tool in order to execute, you need to
  52. # list that tool in PACKAGE_WRITE_DEPENDS. Target package dependencies belong
  53. # in the package dependencies as normal, this is just for native/cross support
  54. # tools at rootfs build time.
  55. PACKAGE_WRITE_DEPS ??= ""
  56. def legitimize_package_name(s):
  57. """
  58. Make sure package names are legitimate strings
  59. """
  60. import re
  61. def fixutf(m):
  62. cp = m.group(1)
  63. if cp:
  64. return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
  65. # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
  66. s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
  67. # Remaining package name validity fixes
  68. return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items. Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items. Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of package names created by this call.
    """

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # In a multilib build, both the generated package names and any extra
    # dependencies must carry the multilib prefix.
    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)

    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    # Script bodies are supplied headerless; prepend the shell shebang.
    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'

    # Collect candidate objects: direct children of root, or (recursive)
    # every file below root as a path relative to root.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends == None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # Regular files only by default; symlinks/directories only when
        # explicitly allowed by the caller.
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            # First file seen for this package: seed FILES with it plus any
            # auxiliary file patterns requested by the caller.
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        # Only provide defaults; a recipe-set DESCRIPTION/SUMMARY wins.
        if not d.getVar('DESCRIPTION_' + pkg):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return list(split_packages)
  211. PACKAGE_DEPENDS += "file-native"
python () {
    # Anonymous function run at parse time: make do_package depend on the
    # sysroots of everything in PACKAGE_DEPENDS, unless packaging is disabled
    # for this recipe (PACKAGES explicitly set empty).
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand a list of FILES-style entries (globs, absolute paths) into the
    concrete files/dirs/symlinks present under the current working directory.
    Returns (files, symlink_paths) where symlink_paths records entries that
    were found to live under a directory symlink.
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a './'-relative path so glob() matches
        # against the current working directory (the package root).
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                # Replace the entry with the symlink itself; the real file
                # is reachable through it.
                files[ind] = parent
                f = parent
                break

        # A directory that is not itself a symlink also pulls in its
        # immediate children so they are packaged with it.
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
  260. # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
  261. def get_conffiles(pkg, d):
  262. pkgdest = d.getVar('PKGDEST')
  263. root = os.path.join(pkgdest, pkg)
  264. cwd = os.getcwd()
  265. os.chdir(root)
  266. conffiles = d.getVar('CONFFILES_%s' % pkg);
  267. if conffiles == None:
  268. conffiles = d.getVar('CONFFILES')
  269. if conffiles == None:
  270. conffiles = ""
  271. conffiles = conffiles.split()
  272. conf_orig_list = files_from_filevars(conffiles)[0]
  273. # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
  274. conf_list = []
  275. for f in conf_orig_list:
  276. if os.path.isdir(f):
  277. continue
  278. if os.path.islink(f):
  279. continue
  280. if not os.path.exists(f):
  281. continue
  282. conf_list.append(f)
  283. # Remove the leading './'
  284. for i in range(0, len(conf_list)):
  285. conf_list[i] = conf_list[i][1:]
  286. os.chdir(cwd)
  287. return conf_list
  288. def checkbuildpath(file, d):
  289. tmpdir = d.getVar('TMPDIR')
  290. with open(file) as f:
  291. file_content = f.read()
  292. if tmpdir in file_content:
  293. return True
  294. return False
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources
    #
    # Returns 1 for skipped files (kernel modules), 0 on success; failures of
    # the external tools abort the build via bb.fatal().

    import stat

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/debugedit")

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Temporarily make the file readable/writable so debugedit and objcopy can
    # modify it in place; original permissions are restored at the end.
    # NOTE(review): the condition reads "not W_OK or R_OK" — presumably
    # "not W_OK or not R_OK" was intended; this matches the code as found,
    # confirm against upstream before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -i -l '%s' '%s'" % (debugedit, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Split the debug information out into the separate debug file.
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        # NOTE(review): splitdebuginfo() expands ${STAGING_LIBDIR_NATIVE}/rpm/debugedit
        # (no /bin/) — confirm which path matches the rpm-native layout in use.
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR")
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # If build path exists in sourcefile, it means toolchain did not use
        # -fdebug-prefix-map to compile
        if checkbuildpath(sourcefile, d):
            localsrc_prefix = workparentdir + "/"
        else:
            localsrc_prefix = "/usr/src/debug/"

        # Record which directories we had to create so empty ones can be
        # removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        # Ignore files from the recipe sysroots (target and native)
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        # Remove prefix in the source paths
        processdebugsrc += "sed 's#%s##g' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
  387. #
  388. # Package data handling routines
  389. #
  390. def get_package_mapping (pkg, basepkg, d):
  391. import oe.packagedata
  392. data = oe.packagedata.read_subpkgdata(pkg, d)
  393. key = "PKG_%s" % pkg
  394. if key in data:
  395. # Have to avoid undoing the write_extra_pkgs(global_variants...)
  396. if bb.data.inherits_class('allarch', d) and data[key] == basepkg:
  397. return pkg
  398. return data[key]
  399. return pkg
  400. def get_package_additional_metadata (pkg_type, d):
  401. base_key = "PACKAGE_ADD_METADATA"
  402. for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
  403. if d.getVar(key, False) is None:
  404. continue
  405. d.setVarFlag(key, "type", "list")
  406. if d.getVarFlag(key, "separator") is None:
  407. d.setVarFlag(key, "separator", "\\n")
  408. metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
  409. return "\n".join(metadata_fields).strip()
  410. def runtime_mapping_rename (varname, pkg, d):
  411. #bb.note("%s before: %s" % (varname, d.getVar(varname)))
  412. new_depends = {}
  413. deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
  414. for depend in deps:
  415. new_depend = get_package_mapping(depend, pkg, d)
  416. new_depends[new_depend] = deps[depend]
  417. d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
  418. #bb.note("%s after: %s" % (varname, d.getVar(varname)))
  419. #
  420. # Package functions suitable for inclusion in PACKAGEFUNCS
  421. #
python package_get_auto_pr() {
    # Obtain PRAUTO: either from a lockdown-exported file, or by querying the
    # remote PR service. Also resolves any AUTOINC placeholder in PKGV.
    import oe.prservice
    import re

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = d.getVar("BB_TASKHASH")

    # Lockdown mode: PR values come from a previously exported file, with no
    # network access to the PR service.
    if d.getVar('PRSERV_LOCKDOWN'):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        # Reuse an already-open PR service connection when available.
        conn = d.getVar("__PRSERV_CONN")
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))

            auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
  464. LOCALEBASEPN ??= "${PN}"
python package_do_split_locales() {
    # Split files under ${datadir}/locale into per-locale packages named
    # <LOCALEBASEPN>-locale-<lang>, appending them to PACKAGES and updating
    # FILES/RRECOMMENDS/RPROVIDES/SUMMARY/DESCRIPTION for each.
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all locale package is replaced by the per-locale ones.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
python perform_packagecopy () {
    # Copy the installed tree D into PKGD, the working area that the rest of
    # the packaging pipeline mutates.
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
  523. perform_packagecopy[cleandirs] = "${PKGD}"
  524. perform_packagecopy[dirs] = "${PKGD}"
  525. # We generate a master list of directories to process, we start by
  526. # seeding this list with reasonable defaults, then load from
  527. # the fs-perms.txt files
python fixup_perms () {
    # Normalize permissions, ownership and directory links in ${PKGD}
    # according to the built-in defaults and the fs-perms.txt config files.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        # "-" (or empty) means "leave the mode alone" -> None
        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES')
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt' if oe.types.boolean(d.getVar('VOLATILE_LOG_DIR', True)) else 'files/fs-perms-persistent-log.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str

    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # a later entry for the same path supersedes an earlier
                    # one of the other kind (link vs perms)
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        # walk == 'true' applies dir/file modes recursively below the path
        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
python split_and_strip_files () {
    # Split debug info out of ELF binaries in ${PKGD} (layout controlled by
    # PACKAGE_DEBUG_SPLIT_STYLE) and then strip the binaries, honouring
    # INHIBIT_PACKAGE_STRIP / INHIBIT_PACKAGE_DEBUG_SPLIT.
    import stat, errno

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = ""
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
        # ".debug" style, with sources kept for a separate -src package
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    bb.utils.remove(sourcefile)

    # Return type (bits):
    #   0 - not elf
    #   1 - ELF
    #   2 - stripped
    #   4 - executable
    #   8 - shared library
    #  16 - kernel module
    def isELF(path):
        type = 0
        ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))

        if ret:
            msg = "split_and_strip_files: 'file %s' failed" % path
            package_qa_handle_error("split-strip", msg, d)
            return type

        # Not stripped
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    kernmods = []
    # maps "st_dev_st_ino" -> list of paths sharing that inode (hardlinks)
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check it's an executable (or a library by path/name)
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)):
                    # If it's a symlink, and points to an ELF file, we capture the readlink target
                    if cpath.islink(file):
                        target = os.readlink(file)
                        if isELF(ltarget):
                            #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                            symlinks[file] = target
                        continue

                    # It's a file (or hardlink), not a link
                    # ...but is it ELF, and is it already stripped?
                    elf_file = isELF(file)
                    if elf_file & 1:
                        if elf_file & 2:
                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                                bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                            else:
                                msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                                package_qa_handle_error("already-stripped", msg, d)
                            continue

                        # At this point we have an unstripped elf file. We need to:
                        #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                        #  b) Only strip any hardlinked file once (no races)
                        #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                        # Use a reference of device ID and inode number to identify files
                        file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                        if file_reference in inodes:
                            os.unlink(file)
                            os.link(inodes[file_reference][0], file)
                            inodes[file_reference].append(file)
                        else:
                            inodes[file_reference] = [file]
                            # break hardlink
                            bb.utils.copyfile(file, file)
                            elffiles[file] = elf_file
                        # Modified the file so clear the cache
                        cpath.updatecache(file)

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        for file in elffiles:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest

            # Split the file...
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Split %s -> %s" % (file, fpath))
            # Only store off the hard link reference if we successfully split!
            splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                target = inodes[ref][0][len(dvar):]
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                #bb.note("Link %s -> %s" % (fpath, ftarget))
                os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            sfiles.append((f, 16, strip))

        # strip in parallel; each entry is (path, elf type bits, strip tool)
        oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)

    #
    # End of strip
    #
    os.chdir(oldcwd)
}
python populate_packages () {
    # Distribute the files in ${PKGD} into per-package trees under
    # ${PKGDEST}/<pkg> according to the FILES_<pkg> variables, then report
    # anything installed but not shipped by any package.
    import glob, re

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES')
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then the src package is added
    # into the package list and the source directory as its main content
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        packages += (' ' + src_package_name)
        d.setVar('FILES_%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_list = []

    for pkg in packages.split():
        if pkg in package_list:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            package_qa_handle_error("packages-list", msg, d)
        # If debug-with-srcpkg mode is enabled then the src package will have
        # priority over dbg package when assigning the files.
        # This allows src package to include source files and remove them from dbg.
        elif split_source_package and pkg.endswith("-src"):
            package_list.insert(0, pkg)
        elif autodebug and pkg.endswith("-dbg") and not split_source_package:
            package_list.insert(0, pkg)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))
    pkgdest = d.getVar('PKGDEST')

    # files already claimed by an earlier package in package_list
    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    # collect all .debug paths once; -dbg packages get them appended below
    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in package_list:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES_%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            package_qa_handle_error("files-invalid", msg, d)
            filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            # create one directory level in the package tree, copying the
            # mode/ownership from the corresponding PKGD directory
            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            # regular files are hardlinked into the package tree; symlinks
            # are copied so the link itself is reproduced
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle LICENSE_EXCLUSION
    package_list = []
    for pkg in packages.split():
        if d.getVar('LICENSE_EXCLUSION-' + pkg):
            msg = "%s has an incompatible license. Excluding from packaging." % pkg
            package_qa_handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    # Anything left in PKGD that no package claimed is a QA issue
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            package_qa_handle_error("installed-vs-shipped", msg, d)
}
# run with cwd set to the install tree
populate_packages[dirs] = "${D}"
python package_fixsymlinks () {
    # Find symlinks in each package that point at files shipped by another
    # package, and add the providing package to RDEPENDS so the target is
    # present at runtime. Unresolvable links are only noted.
    import errno
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # a link within the same package needs no dependency
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
  1115. python package_package_name_hook() {
  1116. """
  1117. A package_name_hook function can be used to rewrite the package names by
  1118. changing PKG. For an example, see debian.bbclass.
  1119. """
  1120. pass
  1121. }
EXPORT_FUNCTIONS package_name_hook

# Working area where per-package metadata (pkgdata) is emitted
PKGDESTWORK = "${WORKDIR}/pkgdata"
python emit_pkgdata() {
    # Write per-recipe and per-package metadata files into ${PKGDESTWORK}
    # (pkgdata), including runtime variables, file lists/sizes, and the
    # symlink farms used for reverse and rprovides lookups.
    from glob import glob
    import json

    # Emit "VAR_pkg: value" if the package-specific variable is set,
    # falling back to the unsuffixed variable. Returns the value written.
    def write_if_exists(f, pkg, var):
        def encode(str):
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(str)[0].decode("latin1")

        val = d.getVar('%s_%s' % (var, pkg))
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG_%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        seen = set()
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            files[os.sep + relpth] = fstat.st_size
            # count each inode only once so hardlinks don't inflate PKGSIZE
            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        rprov = write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'CONFFILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for rprovides lookup
        if rprov:
            for p in rprov.strip().split():
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"

# Shell fragment appended to postinst scripts: refresh the ld.so cache,
# but only when running on the target itself ($D empty) and ldconfig exists.
ldconfig_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
	fi
}
# rpmdeps tool used to extract per-file provides/requires information
RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps"

# Collect perfile run-time dependency metadata
# Output:
#  FILERPROVIDESFLIST_pkg - list of all files w/ deps
#  FILERPROVIDES_filepath_pkg - per file dep
#
#  FILERDEPENDSFLIST_pkg - list of all files w/ deps
#  FILERDEPENDS_filepath_pkg - per file dep
python package_do_filedeps() {
    # Run rpmdeps over each package's files (in parallel, in chunks) and
    # record per-file provides/requires in FILERPROVIDES_*/FILERDEPENDS_*
    # variables, plus the FILER*FLIST_<pkg> index variables.
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    def chunks(files, n):
        return [files[i:i+n] for i in range(0, len(files), n)]

    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
            continue
        # skip package types whose file deps are not meaningful
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
            continue
        for files in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, files, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)

    provides_files = {}
    requires_files = {}

    for result in processed:
        (pkg, provides, requires) = result

        if pkg not in provides_files:
            provides_files[pkg] = []
        if pkg not in requires_files:
            requires_files[pkg] = []

        for file in provides:
            provides_files[pkg].append(file)
            key = "FILERPROVIDES_" + file + "_" + pkg
            d.setVar(key, " ".join(provides[file]))

        for file in requires:
            requires_files[pkg].append(file)
            key = "FILERDEPENDS_" + file + "_" + pkg
            d.setVar(key, " ".join(requires[file]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
}
# Global (read) and per-recipe work (write) locations for shlibs data
SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
python package_do_shlibs() {
    # Scan every file in every split package for shared libraries, recording
    # what each package PROVIDES (SONAMEs, written to SHLIBSWORKDIR/<pkg>.list)
    # and what it NEEDS (DT_NEEDED entries, written to PKGDEST/<pkg>.shlibdeps
    # as resolved package dependencies).
    import re, pipes
    import subprocess as sub

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^.*\.so")
    libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')
    targetos = d.getVar('TARGET_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    def linux_so(file, needed, sonames, renames, pkgver):
        # Parse ELF metadata with objdump -p. Returns True when the library
        # sits in a standard library dir (libdir_re) and therefore needs an
        # ldconfig call in the package postinst.
        # NOTE(review): reads 'pkg', 'private_libs' and 'snap_symlinks' from
        # the enclosing per-package loop via closure.
        needs_ldconfig = False
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = []
        for l in lines:
            m = re.match("\s+RPATH\s+([^\s]*)", l)
            if m:
                # Expand $ORIGIN against the file's install dir so the RPATH
                # entries can later be used as provider search paths.
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = list(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # NOTE(review): needed[pkg] holds (name, file, rpath) tuples,
                # so this bare-name membership test never matches and does not
                # actually deduplicate — confirm whether that is intended.
                if dep not in needed[pkg]:
                    needed[pkg].append((dep, file, rpath))
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or this_soname not in private_libs:
                        sonames.append(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return needs_ldconfig

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Mach-O variant: use otool to discover rpaths (-l) and linked
        # libraries (-L) for .dylib/.so files.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.append(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            # NOTE(review): Popen without universal_newlines yields bytes, so
            # out.split("\n") assumes str output (mingw_dll below decodes
            # explicitly) — confirm against the Python version in use.
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].append((name, file, []))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        # PE/COFF variant: every .dll is assumed to be provided by the
        # package; imports are scraped from objdump -p output.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.append((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = sub.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout = sub.PIPE, stderr= sub.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer("DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].append((dllname, file, []))

    # Optionally rename library files to match their SONAME.
    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    use_ldconfig = bb.utils.contains('DISTRO_FEATURES', 'ldconfig', True, False, d)

    needed = {}
    # Providers already registered by previously-built recipes.
    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: collect provides/needs per package.
    for pkg in packages.split():
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        # Per-package version override falls back to PV_<pkg>, then PKGV.
        pkgver = d.getVar('PKGV_' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = []
        sonames = list()
        renames = list()
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if targetos == "darwin" or targetos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif targetos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                ldconfig = linux_so(file, needed, sonames, renames, pkgver)
                needs_ldconfig = needs_ldconfig or ldconfig
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
            pkgfiles[pkg].remove(old)

        # Record this package's provided SONAMEs for other recipes to use.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                    (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                    if old_pkg != pkg:
                        bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                if s[0] not in shlib_provider:
                    shlib_provider[s[0]] = {}
                shlib_provider[s[0]][s[1]] = (pkg, pkgver)
            fd.close()
        if needs_ldconfig and use_ldconfig:
            # Append the ldconfig fragment to any existing postinst.
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst_%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    bb.utils.unlockfile(lf)

    # ASSUME_SHLIBS lets a recipe declare providers that were not scanned,
    # in the form soname:pkg[_version].
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each needed library to a providing package.
    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and n[0] in private_libs:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_path = []
                for k in shlib_provider[n[0]].keys():
                    shlib_provider_path.append(k)
                match = None
                # Search order: the binary's own RPATHs, then every directory
                # a provider was registered for, then the standard lib dirs.
                for p in n[2] + shlib_provider_path + libsearchpath:
                    if p in shlib_provider[n[0]]:
                        match = p
                        break
                if match:
                    (dep_pkg, ver_needed) = shlib_provider[n[0]][match]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    # Don't depend on ourselves.
                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        # Rewrite <pkg>.shlibdeps from scratch for this build.
        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
python package_do_pkgconfig () {
    # Record which pkg-config modules (.pc files) each package provides
    # (SHLIBSWORKDIR/<pkg>.pclist) and resolve each package's pkg-config
    # 'Requires:' entries to providing packages (PKGDEST/<pkg>.pcdeps).
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    # pkgconfig_provided: pkg -> [module names]; pkgconfig_needed: pkg ->
    # [module names from 'Requires:' fields].
    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                # Use a throwaway datastore so ${var} references inside the
                # .pc file expand against that file's own variable lines.
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = pd.expand(m.group(2))
                        if hdr == 'Requires':
                            # 'Requires:' may be comma- or space-separated.
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # Publish this recipe's provided modules for other recipes to consume.
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Merge in providers registered by previously-built recipes.
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve each needed module to a providing package.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
  1613. def read_libdep_files(d):
  1614. pkglibdeps = {}
  1615. packages = d.getVar('PACKAGES').split()
  1616. for pkg in packages:
  1617. pkglibdeps[pkg] = {}
  1618. for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
  1619. depsfile = d.expand("${PKGDEST}/" + pkg + extension)
  1620. if os.access(depsfile, os.R_OK):
  1621. fd = open(depsfile)
  1622. lines = fd.readlines()
  1623. fd.close()
  1624. for l in lines:
  1625. l.rstrip()
  1626. deps = bb.utils.explode_dep_versions2(l)
  1627. for dep in deps:
  1628. if not dep in pkglibdeps[pkg]:
  1629. pkglibdeps[pkg][dep] = deps[dep]
  1630. return pkglibdeps
  1631. python read_shlibdeps () {
  1632. pkglibdeps = read_libdep_files(d)
  1633. packages = d.getVar('PACKAGES').split()
  1634. for pkg in packages:
  1635. rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
  1636. for dep in pkglibdeps[pkg]:
  1637. # Add the dep if it's not already there, or if no comparison is set
  1638. if dep not in rdepends:
  1639. rdepends[dep] = []
  1640. for v in pkglibdeps[pkg][dep]:
  1641. if v not in rdepends[dep]:
  1642. rdepends[dep].append(v)
  1643. d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
  1644. }
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Add a suffixed/prefixed recommendation for every build-time DEPENDS
        # entry (native/cross/virtual providers are skipped).
        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in depends:
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Normalise to the base package name before applying the modifier.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # As pkg_adddeprrecs, but driven by runtime RDEPENDS entries
        # (virtual-locale- providers are skipped).
        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    # All build-time dependencies of the recipe.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of runtime dependencies across every split package.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages carrying it:
    # pkgs[modifier][pkg] = (base package name, name-builder function)
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): slicing from the END (pkg[:-len(prefix)])
                # looks wrong for a *prefix*; stripping a prefix would be
                # pkg[len(prefix):]. Confirm before changing — the base value
                # is only used for the multi-package RDEPENDS lookup below.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                # Single modified package: recommend against ALL rdepends.
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # Several modified packages: only use the parent's RDEPENDS.
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
  1746. # Since bitbake can't determine which variables are accessed during package
  1747. # iteration, we need to list them here:
  1748. PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS"
  1749. def gen_packagevar(d):
  1750. ret = []
  1751. pkgs = (d.getVar("PACKAGES") or "").split()
  1752. vars = (d.getVar("PACKAGEVARS") or "").split()
  1753. for p in pkgs:
  1754. for v in vars:
  1755. ret.append(v + "_" + p)
  1756. # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
  1757. # affected recipes.
  1758. ret.append('LICENSE_EXCLUSION-%s' % p)
  1759. return " ".join(ret)
  1760. PACKAGE_PREPROCESS_FUNCS ?= ""
  1761. # Functions for setting up PKGD
  1762. PACKAGEBUILDPKGD ?= " \
  1763. perform_packagecopy \
  1764. ${PACKAGE_PREPROCESS_FUNCS} \
  1765. split_and_strip_files \
  1766. fixup_perms \
  1767. "
  1768. # Functions which split PKGD up into separate packages
  1769. PACKAGESPLITFUNCS ?= " \
  1770. package_do_split_locales \
  1771. populate_packages"
  1772. # Functions which process metadata based on split packages
  1773. PACKAGEFUNCS += " \
  1774. package_fixsymlinks \
  1775. package_name_hook \
  1776. package_do_filedeps \
  1777. package_do_shlibs \
  1778. package_do_pkgconfig \
  1779. read_shlibdeps \
  1780. package_depchains \
  1781. emit_pkgdata"
python do_package () {
    # Main packaging task: sanity-check the setup, populate PKGD from D,
    # split PKGD into per-package trees under PKGDEST, then run the
    # metadata-processing PACKAGEFUNCS over the result.

    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh path cache: the previous functions may have changed the tree.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    # QA_SANE is cleared by fatal QA errors raised in the functions above.
    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")
}
  1848. do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
  1849. do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
  1850. addtask package after do_install
  1851. PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
  1852. SSTATETASKS += "do_package"
  1853. do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
  1854. do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
  1855. do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
  1856. do_package_setscene[dirs] = "${STAGING_DIR}"
python do_package_setscene () {
    # Restore do_package output from shared state instead of running the task.
    sstate_setscene(d)
}
  1860. addtask do_package_setscene
do_packagedata () {
	# Intentionally empty shell task: the actual work is done by the sstate
	# machinery via the sstate-inputdirs/outputdirs flags declared below,
	# which publish PKGDESTWORK into PKGDATA_DIR.
	:
}
  1864. addtask packagedata before do_build after do_package
  1865. SSTATETASKS += "do_packagedata"
  1866. do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
  1867. do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
  1868. do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
  1869. do_packagedata[stamp-extra-info] = "${MACHINE}"
python do_packagedata_setscene () {
    # Restore do_packagedata output from shared state instead of running it.
    sstate_setscene(d)
}
  1873. addtask do_packagedata_setscene
  1874. #
  1875. # Helper functions for the package writing classes
  1876. #
  1877. def mapping_rename_hook(d):
  1878. """
  1879. Rewrite variables to account for package renaming in things
  1880. like debian.bbclass or manual PKG variable name changes
  1881. """
  1882. pkg = d.getVar("PKG")
  1883. runtime_mapping_rename("RDEPENDS", pkg, d)
  1884. runtime_mapping_rename("RRECOMMENDS", pkg, d)
  1885. runtime_mapping_rename("RSUGGESTS", pkg, d)