binman: Skip processing "hash" subnodes of FIT subsections

Binman's FIT entry type can have image subentries with "hash" subnodes that are intended to be processed by mkimage, not binman. However, the Entry class and any subclass that reuses its implementation tries to process these unconditionally. This can lead to an error when boards specify hash algorithms that binman doesn't support but mkimage does.

Let entries skip processing these "hash" subnodes based on an instance variable, and set this instance variable for FIT subsections. Also re-enable processing of calculated and missing properties of FIT entries, which was disabled to mitigate this issue.

Signed-off-by: Alper Nebi Yasak <alpernebiyasak@gmail.com>
Reviewed-by: Simon Glass <sjg@chromium.org>
Commit ee813c86f9 (parent 17a0dc6abf)
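As an aside, the following is a minimal, self-contained Python sketch (not binman code; FakeEntry, hash_props and the sha1 default are invented for illustration) of the pattern the diff below introduces: hash handling is gated on an update_hash flag, and the FIT entry clears that flag on its subsections so that mkimage, rather than binman, fills in their "hash" nodes.

import hashlib

class FakeEntry:
    """Stand-in for binman's Entry class, only to show the new flag."""
    def __init__(self, data):
        self.data = data
        self.hash_props = {'algo': 'sha1'}  # mimics the "hash" subnode
        self.update_hash = True             # default, as in Entry.__init__()

    def SetUpdateHash(self, update_hash):
        """Mirrors Entry.SetUpdateHash(): opt out of hash processing."""
        self.update_hash = update_hash

    def SetCalculatedProperties(self):
        # Equivalent of the new 'if self.update_hash:' guard around
        # state.CheckSetHashValue() in the entry.py hunks below.
        if self.update_hash:
            self.hash_props['value'] = hashlib.sha1(self.data).hexdigest()

sub = FakeEntry(b'kernel data')
sub.SetUpdateHash(False)                # what Entry_fit now does per subentry
sub.SetCalculatedProperties()
assert 'value' not in sub.hash_props    # left for mkimage to compute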
tools/binman/entry.py

@@ -78,6 +78,8 @@ class Entry(object):
         external: True if this entry contains an external binary blob
         bintools: Bintools used by this entry (only populated for Image)
         missing_bintools: List of missing bintools for this entry
+        update_hash: True if this entry's "hash" subnode should be
+            updated with a hash of the entry contents
     """
     def __init__(self, section, etype, node, name_prefix=''):
         # Put this here to allow entry-docs and help to work without libfdt
@@ -111,6 +113,7 @@ class Entry(object):
         self.allow_fake = False
         self.bintools = {}
         self.missing_bintools = []
+        self.update_hash = True

     @staticmethod
     def FindEntryClass(etype, expanded):
@@ -315,9 +318,11 @@ class Entry(object):

         if self.compress != 'none':
             state.AddZeroProp(self._node, 'uncomp-size')
-        err = state.CheckAddHashProp(self._node)
-        if err:
-            self.Raise(err)
+
+        if self.update_hash:
+            err = state.CheckAddHashProp(self._node)
+            if err:
+                self.Raise(err)

     def SetCalculatedProperties(self):
         """Set the value of device-tree properties calculated by binman"""
@@ -333,7 +338,9 @@ class Entry(object):
                 state.SetInt(self._node, 'orig-size', self.orig_size, True)
         if self.uncomp_size is not None:
             state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
-        state.CheckSetHashValue(self._node, self.GetData)
+
+        if self.update_hash:
+            state.CheckSetHashValue(self._node, self.GetData)

     def ProcessFdt(self, fdt):
         """Allow entries to adjust the device tree
@@ -1108,3 +1115,11 @@ features to produce new behaviours.
         btool = bintool.Bintool.create(name)
         tools[name] = btool
         return btool
+
+    def SetUpdateHash(self, update_hash):
+        """Set whether this entry's "hash" subnode should be updated
+
+        Args:
+            update_hash: True if hash should be updated, False if not
+        """
+        self.update_hash = update_hash
tools/binman/etype/fit.py

@@ -186,6 +186,8 @@ class Entry_fit(Entry_section):
                     # 'data' property later.
                     entry = Entry.Create(self.section, node, etype='section')
                     entry.ReadNode()
+                    # The hash subnodes here are for mkimage, not binman.
+                    entry.SetUpdateHash(False)
                     self._entries[rel_path] = entry

             for subnode in node.subnodes:
@@ -294,13 +296,3 @@ class Entry_fit(Entry_section):
     def AddBintools(self, tools):
         super().AddBintools(tools)
         self.mkimage = self.AddBintool(tools, 'mkimage')
-
-    def AddMissingProperties(self, have_image_pos):
-        # We don't want to interfere with any hash properties in the FIT, so
-        # disable this for now.
-        pass
-
-    def SetCalculatedProperties(self):
-        # We don't want to interfere with any hash properties in the FIT, so
-        # disable this for now.
-        pass
tools/binman/ftest.py

@@ -5160,5 +5160,29 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
         self.assertRegex(err,
                          "Image 'main-section'.*missing bintools.*: futility")

+    def testFitSubentryHashSubnode(self):
+        """Test an image with a FIT inside"""
+        data, _, _, out_dtb_name = self._DoReadFileDtb(
+            '221_fit_subentry_hash.dts', use_real_dtb=True, update_dtb=True)
+
+        mkimage_dtb = fdt.Fdt.FromData(data)
+        mkimage_dtb.Scan()
+        binman_dtb = fdt.Fdt(out_dtb_name)
+        binman_dtb.Scan()
+
+        # Check that binman didn't add hash values
+        fnode = binman_dtb.GetNode('/binman/fit/images/kernel/hash')
+        self.assertNotIn('value', fnode.props)
+
+        fnode = binman_dtb.GetNode('/binman/fit/images/fdt-1/hash')
+        self.assertNotIn('value', fnode.props)
+
+        # Check that mkimage added hash values
+        fnode = mkimage_dtb.GetNode('/images/kernel/hash')
+        self.assertIn('value', fnode.props)
+
+        fnode = mkimage_dtb.GetNode('/images/fdt-1/hash')
+        self.assertIn('value', fnode.props)
+
 if __name__ == "__main__":
     unittest.main()
tools/binman/test/221_fit_subentry_hash.dts (new file, 52 lines)

@@ -0,0 +1,52 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+    #address-cells = <1>;
+    #size-cells = <1>;
+
+    binman {
+        fit {
+            description = "test-desc";
+            #address-cells = <1>;
+
+            images {
+                kernel {
+                    description = "Vanilla Linux kernel";
+                    type = "kernel";
+                    arch = "ppc";
+                    os = "linux";
+                    compression = "gzip";
+                    load = <00000000>;
+                    entry = <00000000>;
+                    hash {
+                        algo = "sha1";
+                    };
+                    u-boot {
+                    };
+                };
+                fdt-1 {
+                    description = "Flattened Device Tree blob";
+                    type = "flat_dt";
+                    arch = "ppc";
+                    compression = "none";
+                    hash {
+                        algo = "crc32";
+                    };
+                    u-boot-spl-dtb {
+                    };
+                };
+            };
+
+            configurations {
+                default = "conf-1";
+                conf-1 {
+                    description = "Boot Linux kernel with FDT blob";
+                    kernel = "kernel";
+                    fdt = "fdt-1";
+                };
+            };
+        };
+    };
+};