moveconfig fix
binman support for listing files with generated entries
-----BEGIN PGP SIGNATURE-----

iQFFBAABCgAvFiEEslwAIq+Gp8wWVbYnfxc6PpAIreYFAmH3WqwRHHNqZ0BjaHJv
bWl1bS5vcmcACgkQfxc6PpAIreYzPAf/bRBppRDMhjGP5DCCLmF3WwqeLPVBVI42
O5vjC1fNChpEADiV6HFt6Ply+lpWe/BL5/BxHJ8NE9yDUeJOQlBO3wjbNHWKmEW7
h54HXEGK+zfZD/Bourxn45BVOdGt8PV21ABZcQ9lQsbt20z0sZ7iDNTQjjbHO9Iq
oDo69C06UynWweCG6ZfJky3Hnn07t2PtbyINlVCiKPc01/KFFMfJteQfR2onUgwj
9ZzEG9PUCmAvwuDLYqVhNehv1C08rZ9qV4SxXW3xJpEEsqaAgATm/L/jTOIu3PqR
jm6PKVU14SD+qe9mp9gHM4n8VRTS2Brb4dlBxbYyUaCXoeOrhqxJMg==
=zbnI
-----END PGP SIGNATURE-----

Merge tag 'dm-pull-30jan22' of https://source.denx.de/u-boot/custodians/u-boot-dm

moveconfig fix
binman support for listing files with generated entries
commit 1047af5c65
tools/binman/etype/fit.py

@@ -194,7 +194,7 @@ class Entry_fit(Entry):
                 # the FIT (e.g. "/images/kernel/u-boot"), so don't call
                 # fsw.add_node() or _AddNode() for it.
                 pass
-            elif subnode.name.startswith('@'):
+            elif self.GetImage().generate and subnode.name.startswith('@'):
                 if self._fdts:
                     # Generate nodes for each FDT
                     for seq, fdt_fname in enumerate(self._fdts):
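The new self.GetImage().generate check means generator subnodes (names starting with '@', such as the @fdt-SEQ node used by the test added below) are only expanded when binman is building an image from its description. When an existing image is read back from a file for listing, the flag is False and the nodes are skipped. A minimal sketch of the idea, using stand-in node objects rather than binman's real Entry/Fdt classes:

    # Sketch only: illustrates the generate guard, not binman's actual node walk.
    class Node:
        def __init__(self, name, subnodes=()):
            self.name = name
            self.subnodes = list(subnodes)

    def walk(node, generate):
        """Yield subnode names, expanding '@' generator nodes only if requested."""
        for subnode in node.subnodes:
            if subnode.name.startswith('@'):
                if not generate:
                    continue      # reading back an existing image: ignore it
                yield 'expanded ' + subnode.name
            else:
                yield subnode.name

    images = Node('images', [Node('@fdt-SEQ'), Node('kernel')])
    print(list(walk(images, generate=True)))    # ['expanded @fdt-SEQ', 'kernel']
    print(list(walk(images, generate=False)))   # ['kernel']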
tools/binman/ftest.py

@@ -5100,6 +5100,24 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
         self.assertIn('Documentation is missing for modules: mkimage',
                       str(e.exception))
 
+    def testListWithGenNode(self):
+        """Check handling of an FDT map when the section cannot be found"""
+        entry_args = {
+            'of-list': 'test-fdt1 test-fdt2',
+        }
+        data = self._DoReadFileDtb(
+            '219_fit_gennode.dts',
+            entry_args=entry_args,
+            use_real_dtb=True,
+            extra_indirs=[os.path.join(self._indir, TEST_FDT_SUBDIR)])
+
+        try:
+            tmpdir, updated_fname = self._SetupImageInTmpdir()
+            with test_util.capture_sys_output() as (stdout, stderr):
+                self._RunBinman('ls', '-i', updated_fname)
+        finally:
+            shutil.rmtree(tmpdir)
+
+
 if __name__ == "__main__":
     unittest.main()
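The test builds an image from 219_fit_gennode.dts (added below), then checks that 'binman ls -i' can list the finished image even though its embedded description still contains a generator node. Outside the test harness the same listing could be done roughly like this (assuming binman is on PATH and image.bin was built from such a description; the filename here is illustrative):

    # Rough equivalent of the listing step, outside the test harness.
    import subprocess

    result = subprocess.run(['binman', 'ls', '-i', 'image.bin'],
                            capture_output=True, text=True, check=True)
    print(result.stdout)   # table of entries read back from the image's fdtmap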
tools/binman/image.py

@@ -67,9 +67,13 @@ class Image(section.Entry_section):
         does not exist in binman. This is useful if an image was created by
         binman a newer version of binman but we want to list it in an older
         version which does not support all the entry types.
+        generate: If true, generator nodes are processed. If false they are
+            ignored which is useful when an existing image is read back from a
+            file.
     """
     def __init__(self, name, node, copy_to_orig=True, test=False,
-                 ignore_missing=False, use_expanded=False, missing_etype=False):
+                 ignore_missing=False, use_expanded=False, missing_etype=False,
+                 generate=True):
         super().__init__(None, 'section', node, test=test)
         self.copy_to_orig = copy_to_orig
         self.name = 'main-section'
@@ -83,6 +87,7 @@ class Image(section.Entry_section):
         self.use_expanded = use_expanded
         self.test_section_timeout = False
         self.bintools = {}
+        self.generate = generate
         if not test:
             self.ReadNode()
 
@@ -131,7 +136,7 @@ class Image(section.Entry_section):
         # Return an Image with the associated nodes
         root = dtb.GetRoot()
         image = Image('image', root, copy_to_orig=False, ignore_missing=True,
-                      missing_etype=True)
+                      missing_etype=True, generate=False)
 
         image.image_node = fdt_util.GetString(root, 'image-node', 'image')
         image.fdtmap_dtb = dtb
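The flag defaults to True for normal builds and is switched off in the path that reconstructs an Image from an existing file's fdtmap (the third hunk above). The design is a simple default-on feature flag threaded through the constructor; a simplified stand-in for the pattern, not binman's actual Image API:

    # Simplified stand-in for the pattern above; not binman's actual Image class.
    class FakeImage:
        def __init__(self, name, generate=True):
            self.name = name
            self.generate = generate   # process generator nodes when building

        @classmethod
        def from_file(cls, fname):
            # An image read back from a file was already generated, so any
            # generator nodes left in its embedded description must be ignored.
            return cls(fname, generate=False)

    built = FakeImage('image')               # generate == True
    listed = FakeImage.from_file('image.bin')
    assert built.generate and not listed.generate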
tools/binman/test/219_fit_gennode.dts (new file, 26 lines)

@@ -0,0 +1,26 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+	#address-cells = <1>;
+	#size-cells = <1>;
+
+	binman {
+		fit {
+			description = "test-desc";
+			#address-cells = <1>;
+			fit,fdt-list = "of-list";
+
+			images {
+				@fdt-SEQ {
+					description = "fdt-NAME.dtb";
+					type = "flat_dt";
+					compression = "none";
+				};
+			};
+		};
+		fdtmap {
+		};
+	};
+};
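The @fdt-SEQ node is a template that binman expands once per devicetree named by the of-list entry argument (wired up through fit,fdt-list), substituting SEQ with a sequence number and NAME with the devicetree name. A rough sketch of that substitution, simplified to plain dictionaries rather than binman's node handling, and assuming sequence numbers start at 1:

    # Rough sketch of how an '@fdt-SEQ' template might be expanded; the real
    # logic lives in binman's fit entry type and operates on devicetree nodes.
    of_list = 'test-fdt1 test-fdt2'.split()

    template = {'name': '@fdt-SEQ', 'description': 'fdt-NAME.dtb'}
    nodes = []
    for seq, fdt_name in enumerate(of_list, start=1):
        nodes.append({
            'name': 'fdt-%d' % seq,
            'description': template['description'].replace('NAME', fdt_name),
        })
    # [{'name': 'fdt-1', 'description': 'test-fdt1.dtb'},
    #  {'name': 'fdt-2', 'description': 'test-fdt2.dtb'}]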
tools/moveconfig.py

@@ -205,12 +205,12 @@ def show_diff(alines, blines, file_path, color_enabled):
                                 tofile=os.path.join('b', file_path))
 
     for line in diff:
-        if line[0] == '-' and line[1] != '-':
-            print(color_text(color_enabled, COLOR_RED, line), end=' ')
-        elif line[0] == '+' and line[1] != '+':
-            print(color_text(color_enabled, COLOR_GREEN, line), end=' ')
+        if line.startswith('-') and not line.startswith('--'):
+            print(color_text(color_enabled, COLOR_RED, line))
+        elif line.startswith('+') and not line.startswith('++'):
+            print(color_text(color_enabled, COLOR_GREEN, line))
         else:
-            print(line, end=' ')
+            print(line)
 
 def extend_matched_lines(lines, matched, pre_patterns, post_patterns,
                          extend_pre, extend_post):
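The startswith() checks avoid indexing into diff lines that may be shorter than two characters, and dropping end=' ' (likely a 2to3 leftover from the old "print line," form) matches the move to lines that no longer carry a trailing newline. A self-contained sketch of the fixed printing logic; moveconfig's color_text()/COLOR_* helpers are replaced by plain text tags here:

    # Self-contained sketch of the fixed diff printing; not the moveconfig code.
    import difflib

    old = ['CONFIG_FOO=y', 'CONFIG_BAR=y']
    new = ['CONFIG_FOO=y', 'CONFIG_BAZ=y']

    # lineterm='' because the input lines carry no trailing newline.
    for line in difflib.unified_diff(old, new, fromfile='a/defconfig',
                                     tofile='b/defconfig', lineterm=''):
        if line.startswith('-') and not line.startswith('--'):
            print('[red]   ' + line)
        elif line.startswith('+') and not line.startswith('++'):
            print('[green] ' + line)
        else:
            print(line)        # header and context lines print unchanged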
@@ -368,7 +368,7 @@ def cleanup_one_header(header_path, patterns, args):
 
     matched = []
     for i, line in enumerate(lines):
-        if i - 1 in matched and lines[i - 1][-2:] == '\\\n':
+        if i - 1 in matched and lines[i - 1].endswith('\\'):
             matched.append(i)
             continue
         for pattern in patterns:
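The old check compared the last two characters against a backslash plus newline, which only works while lines keep their trailing '\n'; with newline-stripped lines, endswith('\\') is the equivalent test for a line continuation. A tiny demonstration:

    # With the trailing newline stripped, the old two-character check no longer
    # detects a continuation backslash; endswith() does.
    line = '#define CONFIG_FOO(x) \\'
    print(line[-2:] == '\\\n')     # False: there is no newline to match
    print(line.endswith('\\'))     # True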
@@ -380,9 +380,9 @@ def cleanup_one_header(header_path, patterns, args):
             return
 
     # remove empty #ifdef ... #endif, successive blank lines
-    pattern_if = re.compile(r'#\s*if(def|ndef)?\W')   # #if, #ifdef, #ifndef
-    pattern_elif = re.compile(r'#\s*el(if|se)\W')     # #elif, #else
-    pattern_endif = re.compile(r'#\s*endif\W')        # #endif
+    pattern_if = re.compile(r'#\s*if(def|ndef)?\b')   # #if, #ifdef, #ifndef
+    pattern_elif = re.compile(r'#\s*el(if|se)\b')     # #elif, #else
+    pattern_endif = re.compile(r'#\s*endif\b')        # #endif
     pattern_blank = re.compile(r'^\s*$')              # empty line
 
     while True:
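\W requires an actual non-word character after the directive, so a directive that ends the line (for example a bare "#endif" once the newline has been stripped) would no longer match; \b also matches at the end of the string, so the patterns keep working on newline-free lines. A quick check of the difference:

    # \W needs a following non-word character; \b also matches at end of string.
    import re

    line = '#endif'   # trailing newline already stripped by read_file()
    print(bool(re.match(r'#\s*endif\W', line)))   # False
    print(bool(re.match(r'#\s*endif\b', line)))   # True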
@@ -424,8 +424,8 @@ def cleanup_headers(configs, args):
 
     patterns = []
     for config in configs:
-        patterns.append(re.compile(r'#\s*define\s+%s\W' % config))
-        patterns.append(re.compile(r'#\s*undef\s+%s\W' % config))
+        patterns.append(re.compile(r'#\s*define\s+%s\b' % config))
+        patterns.append(re.compile(r'#\s*undef\s+%s\b' % config))
 
     for dir in 'include', 'arch', 'board':
         for (dirpath, dirnames, filenames) in os.walk(dir):
@@ -451,7 +451,7 @@ def cleanup_one_extra_option(defconfig_path, configs, args):
     """
 
     start = 'CONFIG_SYS_EXTRA_OPTIONS="'
-    end = '"\n'
+    end = '"'
 
     lines = read_file(defconfig_path)
 
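With read_file() returning lines without their trailing newline, the closing marker is just the quote character, and the value between the markers can be sliced out the same way as before. A minimal sketch of that extraction; the slicing line is illustrative, not a quote of moveconfig:

    # Illustrative only: extracting the quoted value from a newline-free line.
    start = 'CONFIG_SYS_EXTRA_OPTIONS="'
    end = '"'

    line = 'CONFIG_SYS_EXTRA_OPTIONS="BOOTDELAY=3,QUIET"'
    if line.startswith(start) and line.endswith(end):
        options = line[len(start):-len(end)]
        print(options)   # BOOTDELAY=3,QUIET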
@@ -812,7 +812,7 @@ class KconfigParser:
         for (action, value) in self.results:
             if action != ACTION_MOVE:
                 continue
-            if not value + '\n' in defconfig_lines:
+            if not value in defconfig_lines:
                 log += color_text(self.args.color, COLOR_YELLOW,
                                   "'%s' was removed by savedefconfig.\n" %
                                   value)
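Same newline cleanup: defconfig_lines now holds lines without their trailing '\n', so the moved option can be looked up directly instead of appending a newline first. A small sketch with a hypothetical stand-in for moveconfig's read_file() helper:

    # Hypothetical stand-in for moveconfig's read_file(): returns lines without
    # their trailing newline, so membership tests need no appended '\n'.
    def read_lines(text):
        return text.splitlines()

    defconfig_lines = read_lines('CONFIG_FOO=y\nCONFIG_BAR=y\n')
    value = 'CONFIG_FOO=y'
    print(value + '\n' in defconfig_lines)   # False with newline-free lines
    print(value in defconfig_lines)          # True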