bitbake: checksum/fetch2: Switch from persist_data to a standard cache file

The sqlite connection handling is causing problems with python 3.13. The
connection can be closed at garbage collection time, which causes warnings;
these can appear at 'random' points, corrupting output and causing odd
failures in different tinfoil tools and other tests.

Using sqlite as an IPC mechanism was never a great idea, so drop that
usage entirely and just use the standard cache mechanism we already have
for other situations.

(Bitbake rev: fdc55bb649cb77456d0ac48a9600ef289a52af18)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
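
For context, Python 3.13's sqlite3 module emits a ResourceWarning when a
connection is finalized by the garbage collector rather than closed
explicitly. A minimal standalone sketch of the failure mode (not bitbake
code, just an illustration):

    import gc
    import sqlite3
    import warnings

    warnings.simplefilter("always", ResourceWarning)

    def leak_connection():
        # The connection is never closed explicitly, mirroring how
        # persist_data could leave connections for the garbage collector.
        conn = sqlite3.connect(":memory:")
        conn.execute("CREATE TABLE revs (k TEXT, v TEXT)")

    leak_connection()
    gc.collect()  # on Python 3.13 an "unclosed database" ResourceWarning
                  # surfaces here, i.e. at an arbitrary point in the output

Because the warning fires whenever the collector happens to run, it can
interleave with tool output that tests parse, which is exactly the
breakage described above.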
4 changed files with 58 additions and 15 deletions:

  1. bitbake/lib/bb/cache.py (+10 -0)
  2. bitbake/lib/bb/checksum.py (+25 -0)
  3. bitbake/lib/bb/cookerdata.py (+3 -2)
  4. bitbake/lib/bb/fetch2/__init__.py (+20 -13)

+ 10 - 0
bitbake/lib/bb/cache.py

@@ -847,6 +847,16 @@ class MultiProcessCache(object):
         data = [{}]
         return data
 
+    def clear_cache(self):
+        if not self.cachefile:
+            bb.fatal("Can't clear invalid cachefile")
+
+        self.cachedata = self.create_cachedata()
+        self.cachedata_extras = self.create_cachedata()
+        with bb.utils.fileslocked([self.cachefile + ".lock"]):
+            bb.utils.remove(self.cachefile)
+            bb.utils.remove(self.cachefile + "-*")
+
     def save_extras(self):
         if not self.cachefile:
             return
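
The new clear_cache() resets the in-memory state and deletes the cache
files while holding the lock file, so a concurrent writer cannot recreate
them mid-clear; bb.utils.remove() appears to glob-expand the "-*" pattern,
which catches the per-process extras files. A self-contained sketch of the
same idea (stand-in names, not the bitbake API):

    import fcntl
    import glob
    import os
    from contextlib import contextmanager

    @contextmanager
    def locked(lockfile):
        # stand-in for bb.utils.fileslocked(): hold an exclusive flock
        fd = os.open(lockfile, os.O_CREAT | os.O_RDWR)
        try:
            fcntl.flock(fd, fcntl.LOCK_EX)
            yield
        finally:
            os.close(fd)  # closing the fd releases the lock

    def clear_cache_files(cachefile):
        # remove the merged cache plus any per-process "<cachefile>-*" extras
        with locked(cachefile + ".lock"):
            for name in glob.glob(cachefile) + glob.glob(cachefile + "-*"):
                os.remove(name)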

+ 25 - 0
bitbake/lib/bb/checksum.py

@@ -142,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
 
         checksums.sort(key=operator.itemgetter(1))
         return checksums
+
+class RevisionsCache(MultiProcessCache):
+    cache_file_name = "local_srcrevisions.dat"
+    CACHE_VERSION = 1
+
+    def __init__(self):
+        MultiProcessCache.__init__(self)
+
+    def get_revs(self):
+        return self.cachedata[0]
+
+    def get_rev(self, k):
+        if k in self.cachedata_extras[0]:
+            return self.cachedata_extras[0][k]
+        if k in self.cachedata[0]:
+            return self.cachedata[0][k]
+        return None
+
+    def set_rev(self, k, v):
+        self.cachedata[0][k] = v
+        self.cachedata_extras[0][k] = v
+
+    def merge_data(self, source, dest):
+        for h in source[0]:
+            dest[0][h] = source[0][h]
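
RevisionsCache keeps its mapping in slot 0 of the MultiProcessCache data
list; set_rev() writes to both cachedata and cachedata_extras so the value
is visible locally and also lands in this process's extras file for the
later merge. A hypothetical usage sketch (the cache directory and key/value
are made up; init_cache(), save_extras() and save_merge() are inherited
from MultiProcessCache):

    import bb.checksum

    cache = bb.checksum.RevisionsCache()
    cache.init_cache("/tmp/bb-cachedir")   # stand-in for BB_CACHEDIR

    key = "git://example.com/repo.git;branch=main"   # illustrative key
    if cache.get_rev(key) is None:   # extras are consulted before main data
        cache.set_rev(key, "0123456789abcdef")       # placeholder revision

    cache.save_extras()  # parser process: write this process's additions
    cache.save_merge()   # server: fold all per-process extras into the file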

+ 3 - 2
bitbake/lib/bb/cookerdata.py

@@ -1,3 +1,4 @@
+
 #
 # Copyright (C) 2003, 2004  Chris Larson
 # Copyright (C) 2003, 2004  Phil Blundell
@@ -267,8 +268,8 @@ class CookerDataBuilder(object):
         try:
             self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
 
-            if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker:
-                bb.fetch.fetcher_init(self.data)
+            servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker
+            bb.fetch.fetcher_init(self.data, servercontext)
             bb.parse.init_parser(self.data)
 
             bb.event.fire(bb.event.ConfigParsed(), self.data)

+ 20 - 13
bitbake/lib/bb/fetch2/__init__.py

@@ -23,13 +23,14 @@ import collections
 import subprocess
 import pickle
 import errno
-import bb.persist_data, bb.utils
+import bb.utils
 import bb.checksum
 import bb.process
 import bb.event
 
 __version__ = "2"
 _checksum_cache = bb.checksum.FileChecksumCache()
+_revisions_cache = bb.checksum.RevisionsCache()
 
 logger = logging.getLogger("BitBake.Fetcher")
 
@@ -493,18 +494,23 @@ methods = []
 urldata_cache = {}
 saved_headrevs = {}
 
-def fetcher_init(d):
+def fetcher_init(d, servercontext=True):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
 
-    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+    _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
+
+    if not servercontext:
+        return
+
     try:
         # fetcher_init is called multiple times, so make sure we only save the
         # revs the first time it is called.
         if not bb.fetch2.saved_headrevs:
-            bb.fetch2.saved_headrevs = dict(revs)
+            bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
     except:
         pass
 
@@ -514,11 +520,10 @@ def fetcher_init(d):
         logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
         logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs.clear()
+        _revisions_cache.clear_cache()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
 
-    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
@@ -526,9 +531,11 @@ def fetcher_init(d):
 
 def fetcher_parse_save():
     _checksum_cache.save_extras()
+    _revisions_cache.save_extras()
 
 def fetcher_parse_done():
     _checksum_cache.save_merge()
+    _revisions_cache.save_merge()
 
 def fetcher_compare_revisions(d):
     """
@@ -536,7 +543,7 @@ def fetcher_compare_revisions(d):
     when bitbake was started and return true if they have changed.
     """
 
-    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
+    headrevs = _revisions_cache.get_revs()
     return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
@@ -1662,13 +1669,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+
+        rev = _revisions_cache.get_rev(key)
+        if rev is None:
+            rev = self._latest_revision(ud, d, name)
+            _revisions_cache.set_rev(key, rev)
+        return rev
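
This replaces persist_data's write-through sqlite store with a plain
get-or-compute lookup: a miss computes the revision once and records it,
and the value reaches other processes only via the save_extras()/
save_merge() cycle rather than immediately through a shared database. The
shape of the pattern, with a hypothetical compute() standing in for
_latest_revision():

    def lookup_with_cache(cache, key, compute):
        # get-or-compute: only run the expensive lookup on a cache miss
        rev = cache.get_rev(key)
        if rev is None:
            rev = compute(key)       # e.g. query the upstream for HEAD
            cache.set_rev(key, rev)
        return rev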
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)