sstatesig/populate_sdk_ext: Improve unihash cache handling

Copying in the bb_unihashes cache file was at best a hack and creates a number of
challenges. One is staying in sync with bitbake since it may not have saved the
most recent version of the file. A second is a determinism problem since there may
be more entries in the file than the SDK should have had access to.

To improve the situation, add code to write the data into the locked-sigs.inc file
such that even when locked-sigs aren't used, the right hash mappings are injected
by the get_cached_unihash call.

The functions in copy_buildsystem need to be updated to preserve data they're not
editing.

(From OE-Core rev: 11373def3171e75b3b74ef694da213dd21f3064c)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2024-05-23 17:17:30 +01:00
parent 56c578f664
commit 652e3028d9
3 changed files with 40 additions and 11 deletions

View File

@ -413,10 +413,6 @@ def write_local_conf(d, baseoutpath, derivative, core_meta_subdir, uninative_che
if os.path.exists(builddir + dest_stub):
shutil.copyfile(builddir + dest_stub, baseoutpath + dest_stub)
cachedir = os.path.join(baseoutpath, 'cache')
bb.utils.mkdirhier(cachedir)
bb.parse.siggen.copy_unitaskhashes(cachedir)
# If PR Service is in use, we need to export this as well
bb.note('Do we have a pr database?')
if d.getVar("PRSERV_HOST"):
@ -507,10 +503,6 @@ def prepare_locked_cache(d, baseoutpath, derivative, conf_initpath):
else:
tasklistfn = None
cachedir = os.path.join(baseoutpath, 'cache')
bb.utils.mkdirhier(cachedir)
bb.parse.siggen.copy_unitaskhashes(cachedir)
# Add packagedata if enabled
if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'

View File

@ -193,13 +193,17 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, p
else:
f.write(line)
invalue = False
elif line.startswith('SIGGEN_LOCKEDSIGS'):
elif line.startswith('SIGGEN_LOCKEDSIGS_t'):
invalue = True
f.write(line)
else:
invalue = False
f.write(line)
def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_output, copy_output=None):
merged = {}
arch_order = []
otherdata = []
with open(lockedsigs_main, 'r') as f:
invalue = None
for line in f:
@ -212,6 +216,9 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
invalue = line[18:].split('=', 1)[0].rstrip()
merged[invalue] = []
arch_order.append(invalue)
else:
invalue = None
otherdata.append(line)
with open(lockedsigs_extra, 'r') as f:
invalue = None
@ -246,6 +253,7 @@ def merge_lockedsigs(copy_tasks, lockedsigs_main, lockedsigs_extra, merged_outpu
f.write(' "\n')
fulltypes.append(typename)
f.write('SIGGEN_LOCKEDSIGS_TYPES = "%s"\n' % ' '.join(fulltypes))
f.write('\n' + ''.join(otherdata))
if copy_output:
write_sigs_file(copy_output, list(tocopy.keys()), tocopy)

View File

@ -93,6 +93,14 @@ def sstate_lockedsigs(d):
sigs[pn][task] = [h, siggen_lockedsigs_var]
return sigs
def lockedsigs_unihashmap(d):
    """Parse the SIGGEN_UNIHASHMAP variable into a lookup table.

    Each whitespace-separated entry has the form ``pn:task:taskhash:unihash``.
    Returns a dict mapping ``(pn, task)`` -> ``(taskhash, unihash)``; empty
    when the variable is unset.
    """
    raw = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
    return {
        (pn, task): (taskhash, unihash)
        for pn, task, taskhash, unihash in (entry.split(":") for entry in raw)
    }
class SignatureGeneratorOEBasicHashMixIn(object):
supports_multiconfig_datacaches = True
@ -100,6 +108,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
self.lockedsigs = sstate_lockedsigs(data)
self.unihashmap = lockedsigs_unihashmap(data)
self.lockedhashes = {}
self.lockedpnmap = {}
self.lockedhashfn = {}
@ -209,6 +218,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
def get_cached_unihash(self, tid):
    """Return a cached unihash for *tid* when one is known locally.

    Order of precedence: a locked hash recorded in self.lockedhashes
    (unless we are in an _internal signature computation), then an entry
    from the SIGGEN_UNIHASHMAP-derived table — but only when its recorded
    taskhash still matches the current one — and finally whatever the
    parent class resolves.
    """
    locked = self.lockedhashes.get(tid)
    if locked and not self._internal:
        return locked

    (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
    recipename = self.lockedpnmap[fn]
    mapping = self.unihashmap.get((recipename, task))
    if mapping is not None:
        taskhash, unihash = mapping
        # Only honour the mapping if the taskhash hasn't changed since it
        # was recorded; otherwise the unihash may be stale.
        if taskhash == self.taskhash[tid]:
            return unihash

    return super().get_cached_unihash(tid)
def dump_sigtask(self, fn, task, stampbase, runtime):
@ -219,6 +237,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
def dump_lockedsigs(self, sigfile, taskfilter=None):
types = {}
unihashmap = {}
for tid in self.runtaskdeps:
# Bitbake changed this to a tuple in newer versions
if isinstance(tid, tuple):
@ -226,13 +245,18 @@ class SignatureGeneratorOEBasicHashMixIn(object):
if taskfilter:
if not tid in taskfilter:
continue
fn = bb.runqueue.fn_from_tid(tid)
(_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
t = 't-' + t.replace('_', '-')
if t not in types:
types[t] = []
types[t].append(tid)
taskhash = self.taskhash[tid]
unihash = self.get_unihash(tid)
if taskhash != unihash:
unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
with open(sigfile, "w") as f:
l = sorted(types)
for t in l:
@ -245,7 +269,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
continue
f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
f.write(' "\n')
f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
f.write('SIGGEN_UNIHASHMAP += "\\\n')
sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
for tid in sortedtid:
f.write(unihashmap[tid] + " \\\n")
f.write(' "\n')
def dump_siglist(self, sigfile, path_prefix_strip=None):
def strip_fn(fn):