- #
- # Records history of build output in order to detect regressions
- #
- # Based in part on testlab.bbclass and packagehistory.bbclass
- #
- # Copyright (C) 2011-2016 Intel Corporation
- # Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
- #
- # SPDX-License-Identifier: MIT
- #
- IMAGE_CLASSES += "image-artifact-names"
- BUILDHISTORY_FEATURES ?= "image package sdk"
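- # Note: a "task" feature is also recognised further down (by
- # buildhistory_emit_outputsigs and buildhistory_write_sigs); it is not in the
- # default set above and, when added, records per-task output signatures.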
- BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
- BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}"
- BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
- # Setting this to non-empty will remove the old content of the buildhistory as part of
- # the current bitbake invocation and replace it with information about what was built
- # during the build.
- #
- # This is meant to be used in continuous integration (CI) systems when invoking bitbake
- # for full world builds. The effect in that case is that information about packages
- # that no longer get built also gets removed from the buildhistory, which is not
- # the case otherwise.
- #
- # The advantage over manually cleaning the buildhistory outside of bitbake is that
- # the "version-going-backwards" check still works. When relying on that, be careful
- # about failed world builds: they will lead to incomplete information in the
- # buildhistory because information about packages that could not be built will
- # also get removed. A CI system should handle that by discarding the buildhistory
- # of failed builds.
- #
- # The expected usage is via auto.conf, but passing via the command line also works
- # with: BB_ENV_PASSTHROUGH_ADDITIONS=BUILDHISTORY_RESET BUILDHISTORY_RESET=1
- BUILDHISTORY_RESET ?= ""
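- # Illustrative sketch only (not active configuration): a CI setup might set
- # the reset flag from conf/auto.conf, e.g.
- #   BUILDHISTORY_RESET = "1"
- # or, as noted above, enable it for a single full build from the command line:
- #   BB_ENV_PASSTHROUGH_ADDITIONS=BUILDHISTORY_RESET BUILDHISTORY_RESET=1 bitbake world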
- BUILDHISTORY_OLD_DIR = "${BUILDHISTORY_DIR}/${@ "old" if "${BUILDHISTORY_RESET}" else ""}"
- BUILDHISTORY_OLD_DIR_PACKAGE = "${BUILDHISTORY_OLD_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
- BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}${SDK_EXT}/${IMAGE_BASENAME}"
- BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
- BUILDHISTORY_SDK_FILES ?= "conf/local.conf conf/bblayers.conf conf/auto.conf conf/locked-sigs.inc conf/devtool.conf"
- BUILDHISTORY_COMMIT ?= "1"
- BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
- BUILDHISTORY_PUSH_REPO ?= ""
- BUILDHISTORY_TAG ?= "build"
- BUILDHISTORY_PATH_PREFIX_STRIP ?= ""
- # We want to avoid influencing the task signatures, so we use vardepsexclude
- do_populate_sysroot[postfuncs] += "buildhistory_emit_sysroot"
- do_populate_sysroot_setscene[postfuncs] += "buildhistory_emit_sysroot"
- do_populate_sysroot[vardepsexclude] += "buildhistory_emit_sysroot"
- do_package[postfuncs] += "buildhistory_list_pkg_files"
- do_package_setscene[postfuncs] += "buildhistory_list_pkg_files"
- do_package[vardepsexclude] += "buildhistory_list_pkg_files"
- do_packagedata[postfuncs] += "buildhistory_emit_pkghistory"
- do_packagedata_setscene[postfuncs] += "buildhistory_emit_pkghistory"
- do_packagedata[vardepsexclude] += "buildhistory_emit_pkghistory"
- # Similarly for our function that gets the output signatures
- SSTATEPOSTUNPACKFUNCS:append = " buildhistory_emit_outputsigs"
- sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs"
- SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs"
- # All items except those listed here will be removed from a recipe's
- # build history directory by buildhistory_emit_pkghistory(). This is
- # necessary because some of these items (package directories, files that
- # we no longer emit) might be obsolete.
- #
- # When extending build history, derive your class from buildhistory.bbclass
- # and extend this list here with the additional files created by the derived
- # class.
- BUILDHISTORY_PRESERVE = "latest latest_srcrev sysroot"
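- # Minimal sketch of such a derived class (the extra file name is made up for
- # illustration):
- #   inherit buildhistory
- #   BUILDHISTORY_PRESERVE += "latest_extrainfo"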
- PATCH_GIT_USER_EMAIL ?= "buildhistory@oe"
- PATCH_GIT_USER_NAME ?= "OpenEmbedded"
- #
- # Write out the contents of the sysroot
- #
- buildhistory_emit_sysroot() {
- mkdir --parents ${BUILDHISTORY_DIR_PACKAGE}
- case ${CLASSOVERRIDE} in
- class-native|class-cross|class-crosssdk)
- BASE=${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}
- ;;
- *)
- BASE=${SYSROOT_DESTDIR}
- ;;
- esac
- buildhistory_list_files_no_owners $BASE ${BUILDHISTORY_DIR_PACKAGE}/sysroot
- }
- #
- # Write out metadata about this package for comparison when writing future packages
- #
- python buildhistory_emit_pkghistory() {
- import re
- import json
- import shlex
- import errno
- import shutil
- if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
- return 0
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
- oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')
- class RecipeInfo:
- def __init__(self, name):
- self.name = name
- self.pe = "0"
- self.pv = "0"
- self.pr = "r0"
- self.depends = ""
- self.packages = ""
- self.srcrev = ""
- self.layer = ""
- self.license = ""
- self.config = ""
- self.src_uri = ""
- class PackageInfo:
- def __init__(self, name):
- self.name = name
- self.pe = "0"
- self.pv = "0"
- self.pr = "r0"
- # pkg/pkge/pkgv/pkgr should be empty because we want to be able to default them
- self.pkg = ""
- self.pkge = ""
- self.pkgv = ""
- self.pkgr = ""
- self.size = 0
- self.depends = ""
- self.rprovides = ""
- self.rdepends = ""
- self.rrecommends = ""
- self.rsuggests = ""
- self.rreplaces = ""
- self.rconflicts = ""
- self.files = ""
- self.filelist = ""
- # Variables that need to be written to their own separate file
- self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'])
- # Should check PACKAGES here to see if anything was removed
- def readPackageInfo(pkg, histfile):
- pkginfo = PackageInfo(pkg)
- with open(histfile, "r") as f:
- for line in f:
- lns = line.split('=', 1)
- name = lns[0].strip()
- value = lns[1].strip(" \t\r\n").strip('"')
- if name == "PE":
- pkginfo.pe = value
- elif name == "PV":
- pkginfo.pv = value
- elif name == "PR":
- pkginfo.pr = value
- elif name == "PKG":
- pkginfo.pkg = value
- elif name == "PKGE":
- pkginfo.pkge = value
- elif name == "PKGV":
- pkginfo.pkgv = value
- elif name == "PKGR":
- pkginfo.pkgr = value
- elif name == "RPROVIDES":
- pkginfo.rprovides = value
- elif name == "RDEPENDS":
- pkginfo.rdepends = value
- elif name == "RRECOMMENDS":
- pkginfo.rrecommends = value
- elif name == "RSUGGESTS":
- pkginfo.rsuggests = value
- elif name == "RREPLACES":
- pkginfo.rreplaces = value
- elif name == "RCONFLICTS":
- pkginfo.rconflicts = value
- elif name == "PKGSIZE":
- pkginfo.size = int(value)
- elif name == "FILES":
- pkginfo.files = value
- elif name == "FILELIST":
- pkginfo.filelist = value
- # Apply defaults
- if not pkginfo.pkg:
- pkginfo.pkg = pkginfo.name
- if not pkginfo.pkge:
- pkginfo.pkge = pkginfo.pe
- if not pkginfo.pkgv:
- pkginfo.pkgv = pkginfo.pv
- if not pkginfo.pkgr:
- pkginfo.pkgr = pkginfo.pr
- return pkginfo
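- # For reference, the "latest" files parsed above are the ones written by
- # write_pkghistory() below; they contain plain assignments, e.g. (values
- # purely illustrative):
- #   PV = 1.2.3
- #   PR = r0
- #   RDEPENDS = libc6 (>= 2.35)
- #   PKGSIZE = 123456
- #   FILELIST = /usr/bin/foo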
- def getlastpkgversion(pkg):
- try:
- histfile = os.path.join(oldpkghistdir, pkg, "latest")
- return readPackageInfo(pkg, histfile)
- except EnvironmentError:
- return None
- def sortpkglist(string):
- pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+[^)]+\))?', string, 0)
- pkglist = [p.group(0) for p in pkgiter]
- pkglist.sort()
- return ' '.join(pkglist)
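- # e.g. sortpkglist('util-linux coreutils (>= 9.0)') keeps each version
- # constraint attached to its package and returns 'coreutils (>= 9.0) util-linux'.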
- def sortlist(string):
- items = string.split(' ')
- items.sort()
- return ' '.join(items)
- def preservebuildhistoryfiles(pkg, preserve):
- if os.path.exists(os.path.join(oldpkghistdir, pkg)):
- listofobjs = os.listdir(os.path.join(oldpkghistdir, pkg))
- for obj in listofobjs:
- if obj not in preserve:
- continue
- try:
- bb.utils.mkdirhier(os.path.join(pkghistdir, pkg))
- shutil.copyfile(os.path.join(oldpkghistdir, pkg, obj), os.path.join(pkghistdir, pkg, obj))
- except OSError as e:
- bb.note("Unable to copy file. %s" % e)
- pn = d.getVar('PN')
- pe = d.getVar('PE') or "0"
- pv = d.getVar('PV')
- pr = d.getVar('PR')
- layer = bb.utils.get_file_layer(d.getVar('FILE'), d)
- license = d.getVar('LICENSE')
- pkgdata_dir = d.getVar('PKGDATA_DIR')
- packages = ""
- try:
- with open(os.path.join(pkgdata_dir, pn)) as f:
- for line in f.readlines():
- if line.startswith('PACKAGES: '):
- packages = oe.utils.squashspaces(line.split(': ', 1)[1])
- break
- except IOError as e:
- if e.errno == errno.ENOENT:
- # Probably a -cross recipe, just ignore
- return 0
- else:
- raise
- packagelist = packages.split()
- preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
- if not os.path.exists(pkghistdir):
- bb.utils.mkdirhier(pkghistdir)
- else:
- # We need to make sure that all files kept in
- # buildhistory/old are restored successfully
- # otherwise the next block of code won't have files to
- # check and purge
- if d.getVar("BUILDHISTORY_RESET"):
- for pkg in packagelist:
- preservebuildhistoryfiles(pkg, preserve)
- # Remove files for packages that no longer exist
- for item in os.listdir(pkghistdir):
- if item not in preserve:
- if item not in packagelist:
- itempath = os.path.join(pkghistdir, item)
- if os.path.isdir(itempath):
- for subfile in os.listdir(itempath):
- os.unlink(os.path.join(itempath, subfile))
- os.rmdir(itempath)
- else:
- os.unlink(itempath)
- rcpinfo = RecipeInfo(pn)
- rcpinfo.pe = pe
- rcpinfo.pv = pv
- rcpinfo.pr = pr
- rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
- rcpinfo.packages = packages
- rcpinfo.layer = layer
- rcpinfo.license = license
- rcpinfo.config = sortlist(oe.utils.squashspaces(d.getVar('PACKAGECONFIG') or ""))
- rcpinfo.src_uri = oe.utils.squashspaces(d.getVar('SRC_URI') or "")
- write_recipehistory(rcpinfo, d)
- bb.build.exec_func("read_subpackage_metadata", d)
- for pkg in packagelist:
- localdata = d.createCopy()
- localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg)
- pkge = localdata.getVar("PKGE") or '0'
- pkgv = localdata.getVar("PKGV")
- pkgr = localdata.getVar("PKGR")
- #
- # Find out what the last version was
- # Make sure the version did not decrease
- #
- lastversion = getlastpkgversion(pkg)
- if lastversion:
- last_pkge = lastversion.pkge
- last_pkgv = lastversion.pkgv
- last_pkgr = lastversion.pkgr
- r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr))
- if r < 0:
- msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
- oe.qa.handle_error("version-going-backwards", msg, d)
- pkginfo = PackageInfo(pkg)
- # Apparently the version can be different on a per-package basis (see Python)
- pkginfo.pe = localdata.getVar("PE") or '0'
- pkginfo.pv = localdata.getVar("PV")
- pkginfo.pr = localdata.getVar("PR")
- pkginfo.pkg = localdata.getVar("PKG")
- pkginfo.pkge = pkge
- pkginfo.pkgv = pkgv
- pkginfo.pkgr = pkgr
- pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(localdata.getVar("RPROVIDES") or ""))
- pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RDEPENDS") or ""))
- pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RRECOMMENDS") or ""))
- pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(localdata.getVar("RSUGGESTS") or ""))
- pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(localdata.getVar("RREPLACES") or ""))
- pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(localdata.getVar("RCONFLICTS") or ""))
- pkginfo.files = oe.utils.squashspaces(localdata.getVar("FILES") or "")
- for filevar in pkginfo.filevars:
- pkginfo.filevars[filevar] = localdata.getVar(filevar) or ""
- # Gather information about packaged files
- val = localdata.getVar('FILES_INFO') or ''
- dictval = json.loads(val)
- filelist = list(dictval.keys())
- filelist.sort()
- pkginfo.filelist = " ".join([shlex.quote(x) for x in filelist])
- pkginfo.size = int(localdata.getVar('PKGSIZE') or '0')
- write_pkghistory(pkginfo, d)
- oe.qa.exit_if_errors(d)
- }
- python buildhistory_emit_outputsigs() {
- if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
- return
- import hashlib
- taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task', 'output')
- bb.utils.mkdirhier(taskoutdir)
- currenttask = d.getVar('BB_CURRENTTASK')
- pn = d.getVar('PN')
- taskfile = os.path.join(taskoutdir, '%s.%s' % (pn, currenttask))
- cwd = os.getcwd()
- filesigs = {}
- for root, _, files in os.walk(cwd):
- for fname in files:
- if fname == 'fixmepath':
- continue
- fullpath = os.path.join(root, fname)
- try:
- if os.path.islink(fullpath):
- sha256 = hashlib.sha256(os.readlink(fullpath).encode('utf-8')).hexdigest()
- elif os.path.isfile(fullpath):
- sha256 = bb.utils.sha256_file(fullpath)
- else:
- continue
- except OSError:
- bb.warn('buildhistory: unable to read %s to get output signature' % fullpath)
- continue
- filesigs[os.path.relpath(fullpath, cwd)] = sha256
- with open(taskfile, 'w') as f:
- for fpath, fsig in sorted(filesigs.items(), key=lambda item: item[0]):
- f.write('%s %s\n' % (fpath, fsig))
- }
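- # The file written above (task/output/<PN>.<task>) holds one line per output
- # file in the form "<relative path> <sha256>", sorted by path; symlinks are
- # hashed by their link target rather than their contents.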
- def write_recipehistory(rcpinfo, d):
- bb.debug(2, "Writing recipe history")
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
- infofile = os.path.join(pkghistdir, "latest")
- with open(infofile, "w") as f:
- if rcpinfo.pe != "0":
- f.write(u"PE = %s\n" % rcpinfo.pe)
- f.write(u"PV = %s\n" % rcpinfo.pv)
- f.write(u"PR = %s\n" % rcpinfo.pr)
- f.write(u"DEPENDS = %s\n" % rcpinfo.depends)
- f.write(u"PACKAGES = %s\n" % rcpinfo.packages)
- f.write(u"LAYER = %s\n" % rcpinfo.layer)
- f.write(u"LICENSE = %s\n" % rcpinfo.license)
- f.write(u"CONFIG = %s\n" % rcpinfo.config)
- f.write(u"SRC_URI = %s\n" % rcpinfo.src_uri)
- write_latest_srcrev(d, pkghistdir)
- def write_pkghistory(pkginfo, d):
- bb.debug(2, "Writing package history for package %s" % pkginfo.name)
- pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
- pkgpath = os.path.join(pkghistdir, pkginfo.name)
- if not os.path.exists(pkgpath):
- bb.utils.mkdirhier(pkgpath)
- infofile = os.path.join(pkgpath, "latest")
- with open(infofile, "w") as f:
- if pkginfo.pe != "0":
- f.write(u"PE = %s\n" % pkginfo.pe)
- f.write(u"PV = %s\n" % pkginfo.pv)
- f.write(u"PR = %s\n" % pkginfo.pr)
- if pkginfo.pkg != pkginfo.name:
- f.write(u"PKG = %s\n" % pkginfo.pkg)
- if pkginfo.pkge != pkginfo.pe:
- f.write(u"PKGE = %s\n" % pkginfo.pkge)
- if pkginfo.pkgv != pkginfo.pv:
- f.write(u"PKGV = %s\n" % pkginfo.pkgv)
- if pkginfo.pkgr != pkginfo.pr:
- f.write(u"PKGR = %s\n" % pkginfo.pkgr)
- f.write(u"RPROVIDES = %s\n" % pkginfo.rprovides)
- f.write(u"RDEPENDS = %s\n" % pkginfo.rdepends)
- f.write(u"RRECOMMENDS = %s\n" % pkginfo.rrecommends)
- if pkginfo.rsuggests:
- f.write(u"RSUGGESTS = %s\n" % pkginfo.rsuggests)
- if pkginfo.rreplaces:
- f.write(u"RREPLACES = %s\n" % pkginfo.rreplaces)
- if pkginfo.rconflicts:
- f.write(u"RCONFLICTS = %s\n" % pkginfo.rconflicts)
- f.write(u"PKGSIZE = %d\n" % pkginfo.size)
- f.write(u"FILES = %s\n" % pkginfo.files)
- f.write(u"FILELIST = %s\n" % pkginfo.filelist)
- for filevar in pkginfo.filevars:
- filevarpath = os.path.join(pkgpath, "latest.%s" % filevar)
- val = pkginfo.filevars[filevar]
- if val:
- with open(filevarpath, "w") as f:
- f.write(val)
- else:
- if os.path.exists(filevarpath):
- os.unlink(filevarpath)
- #
- # rootfs_type can be: image, sdk_target, sdk_host
- #
- def buildhistory_list_installed(d, rootfs_type="image"):
- from oe.rootfs import image_list_installed_packages
- from oe.sdk import sdk_list_installed_packages
- from oe.utils import format_pkg_list
- process_list = [('file', 'bh_installed_pkgs_%s.txt' % os.getpid()),\
- ('deps', 'bh_installed_pkgs_deps_%s.txt' % os.getpid())]
- if rootfs_type == "image":
- pkgs = image_list_installed_packages(d)
- else:
- pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")
- if rootfs_type == "sdk_host":
- pkgdata_dir = d.getVar('PKGDATA_DIR_SDK')
- else:
- pkgdata_dir = d.getVar('PKGDATA_DIR')
- for output_type, output_file in process_list:
- output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)
- with open(output_file_full, 'w') as output:
- output.write(format_pkg_list(pkgs, output_type, pkgdata_dir))
- python buildhistory_list_installed_image() {
- buildhistory_list_installed(d)
- }
- python buildhistory_list_installed_sdk_target() {
- buildhistory_list_installed(d, "sdk_target")
- }
- python buildhistory_list_installed_sdk_host() {
- buildhistory_list_installed(d, "sdk_host")
- }
- buildhistory_get_installed() {
- mkdir -p $1
- # Get list of installed packages
- pkgcache="$1/installed-packages.tmp"
- cat ${WORKDIR}/bh_installed_pkgs_${PID}.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs_${PID}.txt
- cat $pkgcache | awk '{ print $1 }' > $1/installed-package-names.txt
- if [ -s $pkgcache ] ; then
- cat $pkgcache | awk '{ print $2 }' | xargs -n1 basename > $1/installed-packages.txt
- else
- printf "" > $1/installed-packages.txt
- fi
- # Produce dependency graph
- # First, quote each name to handle characters that cause issues for dot
- sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt > $1/depends.tmp &&
- rm ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt
- # Remove lines with rpmlib(...) and config(...) dependencies, change the
- # delimiter from pipe to "->", set the style for recommend lines and
- # turn versioned dependencies into edge labels.
- sed -i -e '/rpmlib(/d' \
- -e '/config(/d' \
- -e 's:|: -> :' \
- -e 's:"\[REC\]":[style=dotted]:' \
- -e 's:"\([<>=]\+\)" "\([^"]*\)":[label="\1 \2"]:' \
- -e 's:"\([*]\+\)" "\([^"]*\)":[label="\2"]:' \
- -e 's:"\[RPROVIDES\]":[style=dashed]:' \
- $1/depends.tmp
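- # For illustration (values are made up), a quoted line in depends.tmp such as
- #   "busybox"|"libc6" ">=" "2.35"
- # is rewritten by the sed above into a labelled graph edge:
- #   "busybox" -> "libc6" [label=">= 2.35"]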
- # Add header, sorted and de-duped contents and footer and then delete the temp file
- printf "digraph depends {\n node [shape=plaintext]\n" > $1/depends.dot
- cat $1/depends.tmp | sort -u >> $1/depends.dot
- echo "}" >> $1/depends.dot
- rm $1/depends.tmp
- # Set correct pkgdatadir
- pkgdatadir=${PKGDATA_DIR}
- if [ "$2" = "sdk" ] && [ "$3" = "host" ] ; then
- pkgdatadir="${PKGDATA_DIR_SDK}"
- fi
- # Produce installed package sizes list
- oe-pkgdata-util -p $pkgdatadir read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp
- cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB\t" $1}' | sort -n -r > $1/installed-package-sizes.txt
- rm $1/installed-package-sizes.tmp
- # Produce package info: runtime_name, buildtime_name, recipe, version, size
- oe-pkgdata-util -p $pkgdatadir read-value "PACKAGE,PN,PV,PKGSIZE" -n -f $pkgcache > $1/installed-package-info.tmp
- cat $1/installed-package-info.tmp | sort -n -r -k 5 > $1/installed-package-info.txt
- rm $1/installed-package-info.tmp
- # We're now done with the cache, delete it
- rm $pkgcache
- if [ "$2" != "sdk" ] ; then
- # Produce some cut-down graphs (for readability)
- grep -v kernel-image $1/depends.dot | grep -v kernel-3 | grep -v kernel-4 > $1/depends-nokernel.dot
- grep -v libc6 $1/depends-nokernel.dot | grep -v libgcc > $1/depends-nokernel-nolibc.dot
- grep -v update- $1/depends-nokernel-nolibc.dot > $1/depends-nokernel-nolibc-noupdate.dot
- grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
- fi
- # Add complementary package information
- if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
- cp ${WORKDIR}/complementary_pkgs.txt $1
- fi
- }
- buildhistory_get_image_installed() {
- # Anything requiring the use of the packaging system should be done in here
- # in case the packaging files are going to be removed for this image
- if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
- return
- fi
- buildhistory_get_installed ${BUILDHISTORY_DIR_IMAGE}
- }
- buildhistory_get_sdk_installed() {
- # Anything requiring the use of the packaging system should be done in here
- # in case the packaging files are going to be removed for this SDK
- if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
- return
- fi
- buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk $1
- }
- buildhistory_get_sdk_installed_host() {
- buildhistory_get_sdk_installed host
- }
- buildhistory_get_sdk_installed_target() {
- buildhistory_get_sdk_installed target
- }
- buildhistory_list_files() {
- # List the files in the specified directory, but exclude date/time etc.
- # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
- ( cd $1
- find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
- if [ "$3" = "fakeroot" ] ; then
- eval ${FAKEROOTENV} ${FAKEROOTCMD} $find_cmd
- else
- eval $find_cmd
- fi | sort -k5 | sed 's/ * -> $//' > $2 )
- }
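- # Output lines from the find above look roughly like (illustrative):
- #   -rwxr-xr-x root       root            543276 ./bin/busybox.nosuid
- #   lrwxrwxrwx root       root                19 ./bin/busybox -> busybox.nosuid
- # i.e. mode, owner, group, size and path, with the " -> target" suffix kept
- # only for symlinks, sorted by path.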
- buildhistory_list_files_no_owners() {
- # List the files in the specified directory, but exclude date/time etc.
- # Also don't output the ownership data, but instead output just - - so
- # that the same parsing code as for _list_files works.
- # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
- ( cd $1
- find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"'
- if [ "$3" = "fakeroot" ] ; then
- eval ${FAKEROOTENV} ${FAKEROOTCMD} "$find_cmd"
- else
- eval "$find_cmd"
- fi | sort -k5 | sed 's/ * -> $//' > $2 )
- }
- buildhistory_list_pkg_files() {
- if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'package', '1', '0', d)}" = "0" ] ; then
- return
- fi
- # Create individual files-in-package for each recipe's package
- pkgdirlist=$(find ${PKGDEST}/* -maxdepth 0 -type d)
- for pkgdir in $pkgdirlist; do
- pkgname=$(basename $pkgdir)
- outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
- outfile="$outfolder/files-in-package.txt"
- mkdir -p $outfolder
- buildhistory_list_files $pkgdir $outfile fakeroot
- done
- }
- buildhistory_get_imageinfo() {
- if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
- return
- fi
- mkdir -p ${BUILDHISTORY_DIR_IMAGE}
- buildhistory_list_files ${IMAGE_ROOTFS} ${BUILDHISTORY_DIR_IMAGE}/files-in-image.txt
- # Collect files requested in BUILDHISTORY_IMAGE_FILES
- rm -rf ${BUILDHISTORY_DIR_IMAGE}/image-files
- for f in ${BUILDHISTORY_IMAGE_FILES}; do
- if [ -f ${IMAGE_ROOTFS}/$f ] ; then
- mkdir -p ${BUILDHISTORY_DIR_IMAGE}/image-files/`dirname $f`
- cp ${IMAGE_ROOTFS}/$f ${BUILDHISTORY_DIR_IMAGE}/image-files/$f
- fi
- done
- # Record some machine-readable meta-information about the image
- printf "" > ${BUILDHISTORY_DIR_IMAGE}/image-info.txt
- cat >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt <<END
- ${@buildhistory_get_imagevars(d)}
- END
- imagesize=`du -ks ${IMAGE_ROOTFS} | awk '{ print $1 }'`
- echo "IMAGESIZE = $imagesize" >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt
- # Add some configuration information
- echo "${MACHINE}: ${IMAGE_BASENAME} configured for ${DISTRO} ${DISTRO_VERSION}" > ${BUILDHISTORY_DIR_IMAGE}/build-id.txt
- cat >> ${BUILDHISTORY_DIR_IMAGE}/build-id.txt <<END
- ${@buildhistory_get_build_id(d)}
- END
- }
- buildhistory_get_sdkinfo() {
- if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
- return
- fi
- buildhistory_list_files ${SDK_OUTPUT} ${BUILDHISTORY_DIR_SDK}/files-in-sdk.txt
- # Collect files requested in BUILDHISTORY_SDK_FILES
- rm -rf ${BUILDHISTORY_DIR_SDK}/sdk-files
- for f in ${BUILDHISTORY_SDK_FILES}; do
- if [ -f ${SDK_OUTPUT}/${SDKPATH}/$f ] ; then
- mkdir -p ${BUILDHISTORY_DIR_SDK}/sdk-files/`dirname $f`
- cp ${SDK_OUTPUT}/${SDKPATH}/$f ${BUILDHISTORY_DIR_SDK}/sdk-files/$f
- fi
- done
- # Record some machine-readable meta-information about the SDK
- printf "" > ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
- cat >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt <<END
- ${@buildhistory_get_sdkvars(d)}
- END
- sdksize=`du -ks ${SDK_OUTPUT} | awk '{ print $1 }'`
- echo "SDKSIZE = $sdksize" >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
- }
- python buildhistory_get_extra_sdkinfo() {
- import operator
- from oe.sdk import get_extra_sdkinfo
- sstate_dir = d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')
- extra_info = get_extra_sdkinfo(sstate_dir)
- if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext' and \
- "sdk" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
- with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-package-sizes.txt'), 'w') as f:
- filesizes_sorted = sorted(extra_info['filesizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
- for fn, size in filesizes_sorted:
- f.write('%10d KiB %s\n' % (size, fn))
- with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-task-sizes.txt'), 'w') as f:
- tasksizes_sorted = sorted(extra_info['tasksizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
- for task, size in tasksizes_sorted:
- f.write('%10d KiB %s\n' % (size, task))
- }
- # By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
- # unneeded packages but before the removal of packaging files
- ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image"
- ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed"
- ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image| buildhistory_get_image_installed"
- ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed"
- IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo"
- IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo"
- IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo"
- # We want these to be the last run so that we get called after complementary package installation
- POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target"
- POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target"
- POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target| buildhistory_get_sdk_installed_target"
- POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target"
- POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host"
- POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host"
- POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host| buildhistory_get_sdk_installed_host"
- POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host"
- SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
- SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
- SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
- python buildhistory_write_sigs() {
- if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
- return
- # Create sigs file
- if hasattr(bb.parse.siggen, 'dump_siglist'):
- taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task')
- bb.utils.mkdirhier(taskoutdir)
- bb.parse.siggen.dump_siglist(os.path.join(taskoutdir, 'tasksigs.txt'), d.getVar("BUILDHISTORY_PATH_PREFIX_STRIP"))
- }
- def buildhistory_get_build_id(d):
- if d.getVar('BB_WORKERCONTEXT') != '1':
- return ""
- localdata = bb.data.createCopy(d)
- statuslines = []
- for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
- g = globals()
- if func not in g:
- bb.warn("Build configuration function '%s' does not exist" % func)
- else:
- flines = g[func](localdata)
- if flines:
- statuslines.extend(flines)
- statusheader = d.getVar('BUILDCFG_HEADER')
- return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
- def buildhistory_get_metadata_revs(d):
- # We want an easily machine-readable format here
- revisions = oe.buildcfg.get_layer_revisions(d)
- metadata_revs = ["%-17s = %s:%s%s" % (r[1], r[2], r[3], r[4]) for r in revisions]
- return '\n'.join(metadata_revs)
- def outputvars(vars, listvars, d):
- vars = vars.split()
- listvars = listvars.split()
- ret = ""
- for var in vars:
- value = d.getVar(var) or ""
- if var in listvars:
- # Squash out spaces
- value = oe.utils.squashspaces(value)
- ret += "%s = %s\n" % (var, value)
- return ret.rstrip('\n')
- def buildhistory_get_imagevars(d):
- if d.getVar('BB_WORKERCONTEXT') != '1':
- return ""
- imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
- listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
- return outputvars(imagevars, listvars, d)
- def buildhistory_get_sdkvars(d):
- if d.getVar('BB_WORKERCONTEXT') != '1':
- return ""
- sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES TOOLCHAIN_HOST_TASK TOOLCHAIN_TARGET_TASK BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
- if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
- # Extensible SDK uses some additional variables
- sdkvars += " ESDK_LOCALCONF_ALLOW ESDK_LOCALCONF_REMOVE ESDK_CLASS_INHERIT_DISABLE SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
- listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE ESDK_LOCALCONF_ALLOW ESDK_LOCALCONF_REMOVE ESDK_CLASS_INHERIT_DISABLE"
- return outputvars(sdkvars, listvars, d)
- def buildhistory_get_cmdline(d):
- argv = d.getVar('BB_CMDLINE', False)
- if argv:
- if argv[0].endswith('bin/bitbake'):
- bincmd = 'bitbake'
- else:
- bincmd = argv[0]
- return '%s %s' % (bincmd, ' '.join(argv[1:]))
- return ''
- buildhistory_single_commit() {
- if [ "$3" = "" ] ; then
- commitopts="${BUILDHISTORY_DIR}/ --allow-empty"
- shortlogprefix="No changes: "
- else
- commitopts=""
- shortlogprefix=""
- fi
- if [ "${BUILDHISTORY_BUILD_FAILURES}" = "0" ] ; then
- result="succeeded"
- else
- result="failed"
- fi
- case ${BUILDHISTORY_BUILD_INTERRUPTED} in
- 1)
- result="$result (interrupted)"
- ;;
- 2)
- result="$result (force interrupted)"
- ;;
- esac
- commitmsgfile=`mktemp`
- cat > $commitmsgfile << END
- ${shortlogprefix}Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $2
- cmd: $1
- result: $result
- metadata revisions:
- END
- cat ${BUILDHISTORY_DIR}/metadata-revs >> $commitmsgfile
- git commit $commitopts -F $commitmsgfile --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
- rm $commitmsgfile
- }
- buildhistory_commit() {
- if [ ! -d ${BUILDHISTORY_DIR} ] ; then
- # The code above that creates this dir never executed, so there can't be anything to commit
- return
- fi
- # Create a machine-readable list of metadata revisions for each layer
- cat > ${BUILDHISTORY_DIR}/metadata-revs <<END
- ${@buildhistory_get_metadata_revs(d)}
- END
- ( cd ${BUILDHISTORY_DIR}/
- # Initialise the repo if necessary
- if [ ! -e .git ] ; then
- git init -q
- else
- git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
- git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
- git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
- fi
- check_git_config
- # Check if there are new/changed files to commit (other than metadata-revs)
- repostatus=`git status --porcelain | grep -v " metadata-revs$"`
- HOSTNAME=`hostname 2>/dev/null || echo unknown`
- CMDLINE="${@buildhistory_get_cmdline(d)}"
- if [ "$repostatus" != "" ] ; then
- git add -A .
- # Porcelain output looks like "?? packages/foo/bar"
- # Ensure we commit metadata-revs with the first commit
- buildhistory_single_commit "$CMDLINE" "$HOSTNAME" dummy
- else
- buildhistory_single_commit "$CMDLINE" "$HOSTNAME"
- fi
- if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
- git push -q ${BUILDHISTORY_PUSH_REPO}
- fi) || true
- }
- python buildhistory_eventhandler() {
- if (e.data.getVar('BUILDHISTORY_FEATURES') or "").strip():
- reset = e.data.getVar("BUILDHISTORY_RESET")
- olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
- if isinstance(e, bb.event.BuildStarted):
- if reset:
- import shutil
- # Clean up after a potentially interrupted build.
- if os.path.isdir(olddir):
- shutil.rmtree(olddir)
- rootdir = e.data.getVar("BUILDHISTORY_DIR")
- bb.utils.mkdirhier(rootdir)
- entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
- bb.utils.mkdirhier(olddir)
- for entry in entries:
- bb.utils.rename(os.path.join(rootdir, entry),
- os.path.join(olddir, entry))
- elif isinstance(e, bb.event.BuildCompleted):
- if reset:
- import shutil
- shutil.rmtree(olddir)
- if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
- bb.note("Writing buildhistory")
- bb.build.exec_func("buildhistory_write_sigs", d)
- import time
- start=time.time()
- localdata = bb.data.createCopy(e.data)
- localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
- interrupted = getattr(e, '_interrupted', 0)
- localdata.setVar('BUILDHISTORY_BUILD_INTERRUPTED', str(interrupted))
- bb.build.exec_func("buildhistory_commit", localdata)
- stop=time.time()
- bb.note("Writing buildhistory took: %s seconds" % round(stop-start))
- else:
- bb.note("No commit since BUILDHISTORY_COMMIT != '1'")
- }
- addhandler buildhistory_eventhandler
- buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted bb.event.BuildStarted"
- # FIXME this ought to be moved into the fetcher
- def _get_srcrev_values(d):
- """
- Return the version strings for the current recipe
- """
- scms = []
- fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
- urldata = fetcher.ud
- for u in urldata:
- if urldata[u].method.supports_srcrev():
- scms.append(u)
- dict_srcrevs = {}
- dict_tag_srcrevs = {}
- for scm in scms:
- ud = urldata[scm]
- autoinc, rev = ud.method.sortable_revision(ud, d, ud.name)
- dict_srcrevs[ud.name] = rev
- if 'tag' in ud.parm:
- tag = ud.parm['tag']
- key = ud.name+'_'+tag
- dict_tag_srcrevs[key] = rev
- return (dict_srcrevs, dict_tag_srcrevs)
- do_fetch[postfuncs] += "write_srcrev"
- do_fetch[vardepsexclude] += "write_srcrev"
- python write_srcrev() {
- write_latest_srcrev(d, d.getVar('BUILDHISTORY_DIR_PACKAGE'))
- }
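- # write_latest_srcrev() below records the resolved revisions in a
- # latest_srcrev file with lines such as (revision and values are placeholders):
- #   # SRCREV = "${AUTOREV}"
- #   SRCREV = "1234567890abcdef1234567890abcdef12345678"
- # plus "# tag_<name> = ..." lines for any SCM entries fetched by tag.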
- def write_latest_srcrev(d, pkghistdir):
- srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')
- srcrevs, tag_srcrevs = _get_srcrev_values(d)
- if srcrevs:
- if not os.path.exists(pkghistdir):
- bb.utils.mkdirhier(pkghistdir)
- old_tag_srcrevs = {}
- if os.path.exists(srcrevfile):
- with open(srcrevfile) as f:
- for line in f:
- if line.startswith('# tag_'):
- key, value = line.split("=", 1)
- key = key.replace('# tag_', '').strip()
- value = value.replace('"', '').strip()
- old_tag_srcrevs[key] = value
- with open(srcrevfile, 'w') as f:
- for name, srcrev in sorted(srcrevs.items()):
- suffix = "_" + name
- if name == "default":
- suffix = ""
- orig_srcrev = d.getVar('SRCREV%s' % suffix, False)
- if orig_srcrev:
- f.write('# SRCREV%s = "%s"\n' % (suffix, orig_srcrev))
- f.write('SRCREV%s = "%s"\n' % (suffix, srcrev))
- for name, srcrev in sorted(tag_srcrevs.items()):
- f.write('# tag_%s = "%s"\n' % (name, srcrev))
- if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
- pkg = d.getVar('PN')
- bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
- else:
- if os.path.exists(srcrevfile):
- os.remove(srcrevfile)
- do_testimage[postfuncs] += "write_ptest_result"
- do_testimage[vardepsexclude] += "write_ptest_result"
- python write_ptest_result() {
- write_latest_ptest_result(d, d.getVar('BUILDHISTORY_DIR'))
- }
- def write_latest_ptest_result(d, histdir):
- import glob
- import subprocess
- test_log_dir = d.getVar('TEST_LOG_DIR')
- input_ptest = os.path.join(test_log_dir, 'ptest_log')
- output_ptest = os.path.join(histdir, 'ptest')
- if os.path.exists(input_ptest):
- try:
- # Lock it to avoid race issue
- lock = bb.utils.lockfile(output_ptest + "/ptest.lock")
- bb.utils.mkdirhier(output_ptest)
- oe.path.copytree(input_ptest, output_ptest)
- # Sort test result
- for result in glob.glob('%s/pass.fail.*' % output_ptest):
- bb.debug(1, 'Processing %s' % result)
- cmd = ['sort', result, '-o', result]
- bb.debug(1, 'Running %s' % cmd)
- ret = subprocess.call(cmd)
- if ret != 0:
- bb.error('Failed to run %s!' % cmd)
- finally:
- bb.utils.unlockfile(lock)