base.bbclass

BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

PACKAGECONFIG_CONFARGS ??= ""
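
# oe_import below makes the python modules listed in OE_IMPORTS usable from
# inline python expressions in the metadata (e.g. ${@oe.utils.conditional(...)})
# by prepending each layer's lib/ directory to sys.path and injecting the
# imported top-level modules into the metadata's execution context.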
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            imported = __import__(toimport)
            inject(toimport.split(".", 1)[0], imported)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"
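
# Identify the host distribution (used for NATIVELSBSTRING below);
# LSB_DISTRO_ADJUST may name a function used to post-process the identifier.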
def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}
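
# oe_runmake runs ${MAKE} with EXTRA_OEMAKE plus any caller-supplied
# arguments, logging the command first and aborting the task on failure,
# e.g. a recipe's do_install might run: oe_runmake DESTDIR=${D} install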
oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
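
# Default build-time dependencies: the cross toolchain and C library.
# Recipes that must not depend on these (e.g. the toolchain itself) set
# INHIBIT_DEFAULT_DEPS = "1".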
def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"
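
# Collect the license files named in LIC_FILES_CHKSUM, each annotated with
# whether it currently exists; do_fetch lists these as file-checksums so it
# reruns whenever a license file changes. Paths under TMPDIR/S/B/WORKDIR are
# skipped since those are already covered by SRC_URI tracking.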
def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
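
# Populate dest (normally ${HOSTTOOLS_DIR}) with symlinks to the host tools
# named in toolsvar, resolving each against the PATH captured in BB_ORIGENV;
# missing tools are fatal for the HOSTTOOLS list but not HOSTTOOLS_NONFATAL.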
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
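
# do_unpack cleans ${S} before extracting, unless ${S} is ${WORKDIR} itself,
# in which case only ${S}/patches is cleaned to avoid wiping the work area.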
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
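
# Produce the 'layer = "branch:revision"' lines shown in the build banner;
# the loop below blanks out the branch:revision part of consecutive layers
# that share it, so the banner stays compact.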
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
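
# Central event handler for the base class. Roughly: at ConfigParsed it
# records the native distro string and BitBake version; at ConfigParsed or
# BuildStarted it (re)creates ${HOSTTOOLS_DIR}; at MultiConfigParsed it merges
# SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS across configurations; at BuildStarted it
# prints the build configuration banner; and at RecipeParsed it skips
# non-preferred providers of virtual/* targets.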
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if the bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
        # skip parsing for all the other providers, which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
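
# CONFIGURESTAMPFILE records the do_configure task hash; if it no longer
# matches on the next configure, base_do_configure first cleans stale build
# output (skipping "make clean" when CLEANBROKEN = "1" marks it unreliable).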
CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}
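
# Build the PKGTRIPLETS/PKGMLTRIPLETS lists of "<arch><vendor>-<os>" triplets
# from PACKAGE_ARCHS, TARGET_VENDOR and TARGET_OS, repeating the computation
# under each MULTILIB_VARIANTS override for the multilib list.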
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
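
# Anonymous python run for every recipe at parse time: it backfills features,
# expands PACKAGECONFIG, enforces LICENSE/COMPATIBLE_MACHINE/COMPATIBLE_HOST
# constraints, wires up fakeroot and fetcher dependencies, and finally
# adjusts PACKAGE_ARCH for machine-specific content.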
python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    if d.getVar("WORKDIR") != d.getVar("S"):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
    if d.getVar("WORKDIR") != d.getVar("B"):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
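    #
    # For example, a hypothetical recipe might define:
    #   PACKAGECONFIG ??= "gnutls"
    #   PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls,gnutls"
    # With 'gnutls' enabled, --with-gnutls is appended to
    # PACKAGECONFIG_CONFARGS and gnutls to DEPENDS and RDEPENDS_${PN};
    # with it disabled, only --without-gnutls is appended.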
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)
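
    # Every recipe must set LICENSE explicitly; when the license class is
    # inherited, LICENSE_FLAGS entries are also checked against
    # LICENSE_FLAGS_WHITELIST and unmatched recipes are skipped.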
    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}'. Which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}. Which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if pn in whitelist:
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                bb.note("Including %s as buildable despite it having an incompatible license because it has been whitelisted" % pn)
            else:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = {}
                unskipped_pkgs = []
                for pkg in pkgs:
                    incompatible_lic = incompatible_license(d, bad_licenses, pkg)
                    if incompatible_lic:
                        skipped_pkgs[pkg] = incompatible_lic
                    else:
                        unskipped_pkgs.append(pkg)
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                        d.setVar('LICENSE_EXCLUSION-' + pkg, ' '.join(skipped_pkgs[pkg]))
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "Including the package %s" % pkg)
                else:
                    incompatible_lic = incompatible_license(d, bad_licenses)
                    for pkg in skipped_pkgs:
                        incompatible_lic += skipped_pkgs[pkg]
                    incompatible_lic = sorted(list(set(incompatible_lic)))

                    if incompatible_lic:
                        bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                        raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
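
    # The scheme checks above flag SCM fetchers that need a source revision;
    # for those, SRCPV is derived from SRCREV so the package version can
    # track the revision being built.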
    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}
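
# Maintenance tasks: do_cleansstate removes this recipe's shared state
# objects, and do_cleanall additionally asks the fetcher to delete its
# downloaded sources.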
addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}

addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"

EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package