binman fixes for various things

binman clean-up of compression and addition of utilities
 -----BEGIN PGP SIGNATURE-----
 
 iQFFBAABCgAvFiEEslwAIq+Gp8wWVbYnfxc6PpAIreYFAmMBd6MRHHNqZ0BjaHJv
 bWl1bS5vcmcACgkQfxc6PpAIreau9wf/YQD9GsZh1HrgG5EGfaMN+NV4rGkSywZX
 8AxKCscTiLVtCzUlYOEZ3mFRpli9RJ5H9eWvwcOx8GyyHkvsIdGlzdvG4PZSLd/T
 bOqqtkAZxEDi0ZzQAd0bqKQ9D2ujPbatmElhBNMxyMap+Jkww7LDJvIpCKXSsGEN
 881a+CafAQfYICw9TtlVLzn+WirLiIXtWMxmCuZH3hrxFVjU6T1vxoyMPIBIEzkt
 SMAxRRvzjLwmYEajlDlAFwykcHJCdGbCyyoBQg1OYEIl/XZYyEN2IzdiiJFzAxn+
 QA4ctAQk4gASnZfoSzTWgbeyLZqqYy8j0+z+wZaF/+dp/veZWQBLlg==
 =Kn1x
 -----END PGP SIGNATURE-----

Merge tag 'dm-pull-20aug22' of https://source.denx.de/u-boot/custodians/u-boot-dm

binman fixes for various things
binman clean-up of compression and addition of utilities
This commit is contained in:
Tom Rini 2022-08-22 12:41:07 -04:00
commit 850ac7ceb7
40 changed files with 1099 additions and 203 deletions

View File

@ -147,15 +147,9 @@ ulong bootstage_add_record(enum bootstage_id id, const char *name,
return mark;
}
ulong bootstage_mark(enum bootstage_id id)
ulong bootstage_error_name(enum bootstage_id id, const char *name)
{
return bootstage_add_record(id, NULL, 0, timer_get_boot_us());
}
ulong bootstage_error(enum bootstage_id id)
{
return bootstage_add_record(id, NULL, BOOTSTAGEF_ERROR,
return bootstage_add_record(id, name, BOOTSTAGEF_ERROR,
timer_get_boot_us());
}

View File

@ -268,12 +268,27 @@ ulong bootstage_add_record(enum bootstage_id id, const char *name,
/**
* Mark a time stamp for the current boot stage.
*/
ulong bootstage_mark(enum bootstage_id id);
ulong bootstage_error(enum bootstage_id id);
#define bootstage_mark(id) bootstage_mark_name(id, __func__)
#define bootstage_error(id) bootstage_error_name(id, __func__)
/**
* bootstage_mark_name - record bootstage with passing id and name
* @id: Bootstage id to record this timestamp against
* @name: Textual name to display for this id in the report
*
* Return: recorded time stamp
*/
ulong bootstage_mark_name(enum bootstage_id id, const char *name);
/**
* bootstage_error_name - record bootstage error with passing id and name
* @id: Bootstage id to record this timestamp against
* @name: Textual name to display for this id in the report
*
* Return: recorded time stamp
*/
ulong bootstage_error_name(enum bootstage_id id, const char *name);
/**
* Mark a time stamp in the given function and line number
*

View File

@ -130,18 +130,6 @@ int _log(enum log_category_t cat, enum log_level_t level, const char *file,
int line, const char *func, const char *fmt, ...)
__attribute__ ((format (__printf__, 6, 7)));
static inline int _log_nop(enum log_category_t cat, enum log_level_t level,
const char *file, int line, const char *func,
const char *fmt, ...)
__attribute__ ((format (__printf__, 6, 7)));
static inline int _log_nop(enum log_category_t cat, enum log_level_t level,
const char *file, int line, const char *func,
const char *fmt, ...)
{
return 0;
}
/**
* _log_buffer - Internal function to print data buffer in hex and ascii form
*
@ -240,12 +228,6 @@ int _log_buffer(enum log_category_t cat, enum log_level_t level,
})
#endif
#define log_nop(_cat, _level, _fmt, _args...) ({ \
int _l = _level; \
_log_nop((enum log_category_t)(_cat), _l, __FILE__, __LINE__, \
__func__, pr_fmt(_fmt), ##_args); \
})
#ifdef DEBUG
#define _DEBUG 1
#else

View File

@ -1684,8 +1684,6 @@ Some ideas:
- Figure out how to make Fdt support changing the node order, so that
Node.AddSubnode() can support adding a node before another, existing node.
Perhaps it should completely regenerate the flat tree?
- Put faked files into a separate subdir and remove them on start-up, to avoid
seeing them as 'real' files on a subsequent run
--
Simon Glass <sjg@chromium.org>

View File

@ -1,5 +1,7 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright 2022 Google LLC
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Base class for all bintools
@ -71,17 +73,25 @@ class Bintool:
# interested in the type.
module_name = btype.replace('-', '_')
module = modules.get(module_name)
class_name = f'Bintool{module_name}'
# Import the module if we have not already done so
if not module:
try:
module = importlib.import_module('binman.btool.' + module_name)
except ImportError as exc:
return module_name, exc
try:
# Deal with classes which must be renamed due to conflicts
# with Python libraries
class_name = f'Bintoolbtool_{module_name}'
module = importlib.import_module('binman.btool.btool_' +
module_name)
except ImportError:
return module_name, exc
modules[module_name] = module
# Look up the expected class name
return getattr(module, 'Bintool%s' % module_name)
return getattr(module, class_name)
@staticmethod
def create(name):
@ -464,3 +474,104 @@ binaries. It is fairly easy to create new bintools. Just add a new file to the
str: Version string for this bintool
"""
return 'unknown'
class BintoolPacker(Bintool):
    """Tool which compresses / decompresses entry contents

    This is a bintool base class for compression / decompression packers

    Properties:
        name: Name of packer tool
        compression: Compression type (COMPRESS_...), value of 'name' property
            if none
        compress_args: List of positional args provided to tool for compress,
            ['--compress'] if none
        decompress_args: List of positional args provided to tool for
            decompress, ['--decompress'] if none
        fetch_package: Name of the package to install using apt, value of
            'name' property if none
        version_regex: Regular expression to extract the version from tool
            version output, '(v[0-9.]+)' if none
    """
    def __init__(self, name, compression=None, compress_args=None,
                 decompress_args=None, fetch_package=None,
                 version_regex=r'(v[0-9.]+)'):
        desc = '%s compression' % (compression if compression else name)
        super().__init__(name, desc)
        if compress_args is None:
            compress_args = ['--compress']
        self.compress_args = compress_args
        if decompress_args is None:
            decompress_args = ['--decompress']
        self.decompress_args = decompress_args
        if fetch_package is None:
            fetch_package = name
        self.fetch_package = fetch_package
        self.version_regex = version_regex

    def compress(self, indata):
        """Compress data

        Args:
            indata (bytes): Data to compress

        Returns:
            bytes: Compressed data
        """
        # Write the data to a temporary file so the tool can read it, then
        # capture the compressed result from the tool's stdout
        with tempfile.NamedTemporaryFile(prefix='comp.tmp',
                                         dir=tools.get_output_dir()) as tmp:
            tools.write_file(tmp.name, indata)
            args = self.compress_args + ['--stdout', tmp.name]
            return self.run_cmd(*args, binary=True)

    def decompress(self, indata):
        """Decompress data

        Args:
            indata (bytes): Data to decompress

        Returns:
            bytes: Decompressed data
        """
        # Same approach as compress(): temp file in, stdout out
        with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
                                         dir=tools.get_output_dir()) as inf:
            tools.write_file(inf.name, indata)
            args = self.decompress_args + ['--stdout', inf.name]
            return self.run_cmd(*args, binary=True)

    def fetch(self, method):
        """Fetch handler

        This installs the tool's package (self.fetch_package) using the apt
        utility.

        Args:
            method (FETCH_...): Method to use

        Returns:
            True if the file was fetched and now installed, None if a method
            other than FETCH_BIN was requested

        Raises:
            ValueError: Fetching could not be completed
        """
        if method != FETCH_BIN:
            return None
        return self.apt_install(self.fetch_package)

    def version(self):
        """Version handler

        Runs the tool with -V and extracts the version from its stdout (or
        stderr, if stdout is empty) using self.version_regex.

        Returns:
            str: Version number
        """
        import re
        result = self.run_cmd_result('-V')
        out = result.stdout.strip()
        if not out:
            out = result.stderr.strip()
        if not out:
            return super().version()
        # If the regex does not match, fall back to the whole output line
        m_version = re.search(self.version_regex, out)
        return m_version.group(1) if m_version else out

View File

@ -0,0 +1,31 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Bintool implementation for gzip
gzip allows compression and decompression of files.
Documentation is available via::
man gzip
"""
from binman import bintool
# pylint: disable=C0103
class Bintoolbtool_gzip(bintool.BintoolPacker):
    """Compression/decompression using the gzip algorithm

    This bintool supports running `gzip` to compress and decompress data, as
    used by binman.

    It is also possible to fetch the tool, which uses `apt` to install it.

    Documentation is available via::

        man gzip
    """
    def __init__(self, name):
        # gzip compresses by default, so no explicit compress flag is passed
        super().__init__(name, compress_args=[],
                         version_regex=r'gzip ([0-9.]+)')

View File

@ -0,0 +1,30 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Bintool implementation for bzip2
bzip2 allows compression and decompression of files.
Documentation is available via::
man bzip2
"""
from binman import bintool
# pylint: disable=C0103
class Bintoolbzip2(bintool.BintoolPacker):
    """Compression/decompression using the bzip2 algorithm

    This bintool supports running `bzip2` to compress and decompress data, as
    used by binman.

    It is also possible to fetch the tool, which uses `apt` to install it.

    Documentation is available via::

        man bzip2
    """
    def __init__(self, name):
        # bzip2 reports its version on a line like 'bzip2 ... Version 1.0.8'
        super().__init__(name, version_regex=r'bzip2.*Version ([0-9.]+)')

View File

@ -0,0 +1,30 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Bintool implementation for lzop
lzop allows compression and decompression of files.
Documentation is available via::
man lzop
"""
from binman import bintool
# pylint: disable=C0103
class Bintoollzop(bintool.BintoolPacker):
    """Compression/decompression using the lzo algorithm

    This bintool supports running `lzop` to compress and decompress data, as
    used by binman.

    It is also possible to fetch the tool, which uses `apt` to install it.

    Documentation is available via::

        man lzop
    """
    def __init__(self, name):
        # The compression type is 'lzo' although the tool is called lzop;
        # lzop compresses by default, so no compress flag is passed
        super().__init__(name, 'lzo', compress_args=[])

31
tools/binman/btool/xz.py Normal file
View File

@ -0,0 +1,31 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Bintool implementation for xz
xz allows compression and decompression of files.
Documentation is available via::
man xz
"""
from binman import bintool
# pylint: disable=C0103
class Bintoolxz(bintool.BintoolPacker):
    """Compression/decompression using the xz algorithm

    This bintool supports running `xz` to compress and decompress data, as
    used by binman.

    It is also possible to fetch the tool, which uses `apt` to install it.

    Documentation is available via::

        man xz
    """
    def __init__(self, name):
        # The apt package providing xz is called 'xz-utils', not 'xz'
        super().__init__(name, fetch_package='xz-utils',
                         version_regex=r'xz \(XZ Utils\) ([0-9.]+)')

View File

@ -0,0 +1,30 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (C) 2022 Weidmüller Interface GmbH & Co. KG
# Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
#
"""Bintool implementation for zstd
zstd allows compression and decompression of files.
Documentation is available via::
man zstd
"""
from binman import bintool
# pylint: disable=C0103
class Bintoolzstd(bintool.BintoolPacker):
    """Compression/decompression using the zstd algorithm

    This bintool supports running `zstd` to compress and decompress data, as
    used by binman.

    It is also possible to fetch the tool, which uses `apt` to install it.

    Documentation is available via::

        man zstd
    """
    def __init__(self, name):
        # All BintoolPacker defaults apply (--compress/--decompress flags,
        # apt package and version regex derived from the name)
        super().__init__(name)

View File

@ -20,7 +20,7 @@ import io
import struct
import sys
from binman import comp_util
from binman import bintool
from binman import elf
from patman import command
from patman import tools
@ -236,14 +236,18 @@ class CbfsFile(object):
self.data_len = len(data)
self.erase_byte = None
self.size = None
if self.compress == COMPRESS_LZ4:
self.comp_bintool = bintool.Bintool.create('lz4')
elif self.compress == COMPRESS_LZMA:
self.comp_bintool = bintool.Bintool.create('lzma_alone')
else:
self.comp_bintool = None
def decompress(self):
"""Handle decompressing data if necessary"""
indata = self.data
if self.compress == COMPRESS_LZ4:
data = comp_util.decompress(indata, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
data = comp_util.decompress(indata, 'lzma', with_header=False)
if self.comp_bintool:
data = self.comp_bintool.decompress(indata)
else:
data = indata
self.memlen = len(data)
@ -361,10 +365,8 @@ class CbfsFile(object):
data = elf_data.data
elif self.ftype == TYPE_RAW:
orig_data = data
if self.compress == COMPRESS_LZ4:
data = comp_util.compress(orig_data, 'lz4', with_header=False)
elif self.compress == COMPRESS_LZMA:
data = comp_util.compress(orig_data, 'lzma', with_header=False)
if self.comp_bintool:
data = self.comp_bintool.compress(orig_data)
self.memlen = len(orig_data)
self.data_len = len(data)
attr = struct.pack(ATTR_COMPRESSION_FORMAT,

View File

@ -19,7 +19,6 @@ import unittest
from binman import bintool
from binman import cbfs_util
from binman.cbfs_util import CbfsWriter
from binman import comp_util
from binman import elf
from patman import test_util
from patman import tools
@ -50,7 +49,8 @@ class TestCbfs(unittest.TestCase):
cls.cbfstool = bintool.Bintool.create('cbfstool')
cls.have_cbfstool = cls.cbfstool.is_present()
cls.have_lz4 = comp_util.HAVE_LZ4
lz4 = bintool.Bintool.create('lz4')
cls.have_lz4 = lz4.is_present()
@classmethod
def tearDownClass(cls):

View File

@ -1,76 +0,0 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright 2022 Google LLC
#
"""Utilities to compress and decompress data"""
import struct
import tempfile
from binman import bintool
from patman import tools
LZ4 = bintool.Bintool.create('lz4')
HAVE_LZ4 = LZ4.is_present()
LZMA_ALONE = bintool.Bintool.create('lzma_alone')
HAVE_LZMA_ALONE = LZMA_ALONE.is_present()
def compress(indata, algo, with_header=True):
"""Compress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
provided by xz.
This requires 'lz4' and 'lzma_alone' tools. It also requires an output
directory to be previously set up, by calling PrepareOutputDir().
Args:
indata (bytes): Input data to compress
algo (str): Algorithm to use ('none', 'lz4' or 'lzma')
Returns:
bytes: Compressed data
"""
if algo == 'none':
return indata
if algo == 'lz4':
data = LZ4.compress(indata)
# cbfstool uses a very old version of lzma
elif algo == 'lzma':
data = LZMA_ALONE.compress(indata)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
if with_header:
hdr = struct.pack('<I', len(data))
data = hdr + data
return data
def decompress(indata, algo, with_header=True):
"""Decompress some data using a given algorithm
Note that for lzma this uses an old version of the algorithm, not that
provided by xz.
This requires 'lz4' and 'lzma_alone' tools. It also requires an output
directory to be previously set up, by calling PrepareOutputDir().
Args:
indata (bytes): Input data to decompress
algo (str): Algorithm to use ('none', 'lz4' or 'lzma')
Returns:
(bytes) Compressed data
"""
if algo == 'none':
return indata
if with_header:
data_len = struct.unpack('<I', indata[:4])[0]
indata = indata[4:4 + data_len]
if algo == 'lz4':
data = LZ4.decompress(indata)
elif algo == 'lzma':
data = LZMA_ALONE.decompress(indata)
else:
raise ValueError("Unknown algorithm '%s'" % algo)
return data

View File

@ -16,8 +16,9 @@ from patman import tools
from binman import bintool
from binman import cbfs_util
from binman import elf
from patman import command
from binman import elf
from binman import entry
from patman import tout
# These are imported if needed since they import libfdt
@ -215,6 +216,7 @@ def ReadEntry(image_fname, entry_path, decomp=True):
from binman.image import Image
image = Image.FromFile(image_fname)
image.CollectBintools()
entry = image.FindEntryPath(entry_path)
return entry.ReadData(decomp)
@ -251,6 +253,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
List of EntryInfo records that were written
"""
image = Image.FromFile(image_fname)
image.CollectBintools()
if alt_format == 'list':
ShowAltFormats(image)
@ -370,6 +373,7 @@ def WriteEntry(image_fname, entry_path, data, do_compress=True,
"""
tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
image = Image.FromFile(image_fname)
image.CollectBintools()
entry = image.FindEntryPath(entry_path)
WriteEntryToImage(image, entry, data, do_compress=do_compress,
allow_resize=allow_resize, write_map=write_map)
@ -507,8 +511,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
# without changing the device-tree size, thus ensuring that our
# entry offsets remain the same.
for image in images.values():
image.CollectBintools()
image.gen_entries()
image.CollectBintools()
if update_fdt:
image.AddMissingProperties(True)
image.ProcessFdt(dtb)
@ -717,6 +721,13 @@ def Binman(args):
bintool.Bintool.set_missing_list(
args.force_missing_bintools.split(',') if
args.force_missing_bintools else None)
# Create the directory here instead of Entry.check_fake_fname()
# since that is called from a threaded context so different threads
# may race to create the directory
if args.fake_ext_blobs:
entry.Entry.create_fake_dir()
for image in images.values():
invalid |= ProcessImage(image, args.update_fdt, args.map,
allow_missing=args.allow_missing,

View File

@ -122,6 +122,8 @@ class TestElf(unittest.TestCase):
def testOutsideFile(self):
"""Test a symbol which extends outside the entry area is detected"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
entry = FakeEntry(10)
section = FakeSection()
elf_fname = self.ElfTestFile('u_boot_binman_syms')
@ -147,6 +149,8 @@ class TestElf(unittest.TestCase):
Only 32 and 64 bits are supported, since we need to store an offset
into the image.
"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
entry = FakeEntry(10)
section = FakeSection()
elf_fname =self.ElfTestFile('u_boot_binman_syms_size')
@ -161,6 +165,8 @@ class TestElf(unittest.TestCase):
This should produce -1 values for all three symbols, taking up the
first 16 bytes of the image.
"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
entry = FakeEntry(28)
section = FakeSection(sym_value=None)
elf_fname = self.ElfTestFile('u_boot_binman_syms')
@ -172,6 +178,8 @@ class TestElf(unittest.TestCase):
def testDebug(self):
"""Check that enabling debug in the elf module produced debug output"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
try:
tout.init(tout.DEBUG)
entry = FakeEntry(24)
@ -281,6 +289,8 @@ class TestElf(unittest.TestCase):
def test_read_segments_bad_data(self):
"""Test for read_loadable_segments() with an invalid ELF file"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
with self.assertRaises(ValueError) as e:
elf.read_loadable_segments(tools.get_bytes(100, 100))
@ -288,6 +298,8 @@ class TestElf(unittest.TestCase):
def test_get_file_offset(self):
"""Test GetFileOffset() gives the correct file offset for a symbol"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
fname = self.ElfTestFile('embed_data')
syms = elf.GetSymbols(fname, ['embed'])
addr = syms['embed'].address
@ -314,6 +326,8 @@ class TestElf(unittest.TestCase):
def test_get_symbol_from_address(self):
"""Test GetSymbolFromAddress()"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
fname = self.ElfTestFile('elf_sections')
sym_name = 'calculate'
syms = elf.GetSymbols(fname, [sym_name])

View File

@ -216,6 +216,9 @@ This is a blob containing a device tree. The contents of the blob are
obtained from the list of available device-tree files, managed by the
'state' module.
Additional Properties / Entry arguments:
- prepend: Header type to use:
length: 32-bit length header
.. _etype_blob_ext:
@ -447,6 +450,9 @@ listed entries are combined to form this entry. This serves as a useful
base class for entry types which need to process data from elsewhere in
the image, not necessarily child entries.
The entries can generally be anywhere in the same image, even if they are in
a different section from this entry.
.. _etype_cros_ec_rw:
@ -1166,11 +1172,12 @@ Entry: mkimage: Binary produced by mkimage
------------------------------------------
Properties / Entry arguments:
- datafile: Filename for -d argument
- args: Other arguments to pass
- args: Arguments to pass
- data-to-imagename: Indicates that the -d data should be passed in as
the image name also (-n)
The data passed to mkimage is collected from subnodes of the mkimage node,
e.g.::
The data passed to mkimage via the -d flag is collected from subnodes of the
mkimage node, e.g.::
mkimage {
args = "-n test -T imximage";
@ -1179,9 +1186,24 @@ e.g.::
};
};
This calls mkimage to create an imximage with u-boot-spl.bin as the input
file. The output from mkimage then becomes part of the image produced by
binman.
This calls mkimage to create an imximage with `u-boot-spl.bin` as the data
file, with mkimage being called like this::
mkimage -d <data_file> -n test -T imximage <output_file>
The output from mkimage then becomes part of the image produced by
binman. If you need to put multiple things in the data file, you can use
a section, or just multiple subnodes like this::
mkimage {
args = "-n test -T imximage";
u-boot-spl {
};
u-boot-tpl {
};
};
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
@ -1193,7 +1215,38 @@ this example which also produces four arguments::
};
};
If you need to pass the input data in with the -n argument as well, then use
the 'data-to-imagename' property::
mkimage {
args = "-T imximage";
data-to-imagename;
u-boot-spl {
};
};
That will pass the data to mkimage both as the data file (with -d) and as
the image name (with -n).
If you need to pass different data in with -n, then use an imagename subnode::
mkimage {
args = "-T imximage";
imagename {
blob {
filename = "spl/u-boot-spl.cfgout"
};
};
u-boot-spl {
};
};
This will pass in u-boot-spl as the input data and the .cfgout file as the
-n data.
.. _etype_opensbi:
@ -1231,7 +1284,8 @@ Entry: pre-load: Pre load image header
--------------------------------------
Properties / Entry arguments:
- pre-load-key-path: Path of the directory that store key (provided by the environment variable PRE_LOAD_KEY_PATH)
- pre-load-key-path: Path of the directory that store key (provided by
the environment variable PRE_LOAD_KEY_PATH)
- content: List of phandles to entries to sign
- algo-name: Hash and signature algo to use for the signature
- padding-name: Name of the padding (pkcs-1.5 or pss)

View File

@ -9,9 +9,9 @@ import importlib
import os
import pathlib
import sys
import time
from binman import bintool
from binman import comp_util
from dtoc import fdt_util
from patman import tools
from patman.tools import to_hex, to_hex_size
@ -82,7 +82,13 @@ class Entry(object):
missing_bintools: List of missing bintools for this entry
update_hash: True if this entry's "hash" subnode should be
updated with a hash of the entry contents
comp_bintool: Bintools used for compress and decompress data
fake_fname: Fake filename, if one was created, else None
required_props (dict of str): Properties which must be present. This can
be added to by subclasses
"""
fake_dir = None
def __init__(self, section, etype, node, name_prefix=''):
# Put this here to allow entry-docs and help to work without libfdt
global state
@ -116,6 +122,9 @@ class Entry(object):
self.bintools = {}
self.missing_bintools = []
self.update_hash = True
self.fake_fname = None
self.required_props = []
self.comp_bintool = None
@staticmethod
def FindEntryClass(etype, expanded):
@ -233,6 +242,7 @@ class Entry(object):
This reads all the fields we recognise from the node, ready for use.
"""
self.ensure_props()
if 'pos' in self._node.props:
self.Raise("Please use 'offset' instead of 'pos'")
if 'expand-size' in self._node.props:
@ -670,6 +680,7 @@ class Entry(object):
self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
self.image_pos)
# pylint: disable=assignment-from-none
def GetEntries(self):
"""Return a list of entries contained by this entry
@ -679,6 +690,28 @@ class Entry(object):
"""
return None
    def FindEntryByNode(self, find_node):
        """Find a node in an entry, searching all subentries

        This does a recursive search through the entries returned by
        GetEntries().

        Args:
            find_node (fdt.Node): Node to find

        Returns:
            Entry: entry, if found, else None
        """
        entries = self.GetEntries()
        if entries:
            for entry in entries.values():
                # Check each child directly, then recurse into its children
                if entry._node == find_node:
                    return entry
                found = entry.FindEntryByNode(find_node)
                if found:
                    return found

        return None
def GetArg(self, name, datatype=str):
"""Get the value of an entry argument or device-tree-node property
@ -1014,12 +1047,14 @@ features to produce new behaviours.
bool: True if the blob was faked, False if not
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
outfname = tools.get_output_filename(os.path.basename(fname))
with open(outfname, "wb") as out:
out.truncate(size)
if not self.fake_fname:
outfname = os.path.join(self.fake_dir, os.path.basename(fname))
with open(outfname, "wb") as out:
out.truncate(size)
tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
self.fake_fname = outfname
self.faked = True
tout.info(f"Entry '{self._node.path}': Faked file '{outfname}'")
return outfname, True
return self.fake_fname, True
return fname, False
def CheckFakedBlobs(self, faked_blobs_list):
@ -1047,7 +1082,8 @@ features to produce new behaviours.
Args:
bintool (Bintool): Bintool that was missing
"""
self.missing_bintools.append(bintool)
if bintool not in self.missing_bintools:
self.missing_bintools.append(bintool)
def check_missing_bintools(self, missing_list):
"""Check if any entries in this section have missing bintools
@ -1057,7 +1093,10 @@ features to produce new behaviours.
Args:
missing_list: List of Bintool objects to be added to
"""
missing_list += self.missing_bintools
for bintool in self.missing_bintools:
if bintool not in missing_list:
missing_list.append(bintool)
def GetHelpTags(self):
"""Get the tags use for missing-blob help
@ -1074,12 +1113,39 @@ features to produce new behaviours.
indata: Data to compress
Returns:
Compressed data (first word is the compressed size)
Compressed data
"""
self.uncomp_data = indata
if self.compress != 'none':
self.uncomp_size = len(indata)
data = comp_util.compress(indata, self.compress)
if self.comp_bintool.is_present():
data = self.comp_bintool.compress(indata)
else:
self.record_missing_bintool(self.comp_bintool)
data = tools.get_bytes(0, 1024)
else:
data = indata
return data
def DecompressData(self, indata):
"""Decompress data according to the entry's compression method
Args:
indata: Data to decompress
Returns:
Decompressed data
"""
if self.compress != 'none':
if self.comp_bintool.is_present():
data = self.comp_bintool.decompress(indata)
self.uncomp_size = len(data)
else:
self.record_missing_bintool(self.comp_bintool)
data = tools.get_bytes(0, 1024)
else:
data = indata
self.uncomp_data = data
return data
@classmethod
@ -1119,8 +1185,18 @@ features to produce new behaviours.
Args:
btools (dict of Bintool):
Raise:
ValueError if compression algorithm is not supported
"""
pass
algo = self.compress
if algo != 'none':
algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
if algo not in algos:
raise ValueError("Unknown algorithm '%s'" % algo)
names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
name = names.get(self.compress, self.compress)
self.comp_bintool = self.AddBintool(btools, name)
@classmethod
def AddBintool(self, tools, name):
@ -1169,3 +1245,27 @@ features to produce new behaviours.
fname = tools.get_output_filename(f'{prefix}.{uniq}')
tools.write_file(fname, data)
return data, fname, uniq
    @classmethod
    def create_fake_dir(cls):
        """Create the directory used to hold fake blobs

        Records the directory path in cls.fake_dir, creates it if it does
        not already exist and reports its location via tout.notice().
        """
        cls.fake_dir = tools.get_output_filename('binman-fake')
        if not os.path.exists(cls.fake_dir):
            os.mkdir(cls.fake_dir)
        tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
    def ensure_props(self):
        """Raise an exception if required properties are missing

        Checks the entry's node for each property named in
        self.required_props (which subclasses can extend).

        Raises:
            ValueError: Through self.Raise(), if any property is missing
        """
        not_present = []
        for prop in self.required_props:
            if not prop in self._node.props:
                not_present.append(prop)
        if not_present:
            self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")

View File

@ -105,6 +105,15 @@ class TestEntry(unittest.TestCase):
self.assertTrue(isinstance(ent, Entry_blob))
self.assertEquals('missing', ent.etype)
    def testDecompressData(self):
        """Test the DecompressData() method of the base class"""
        base = entry.Entry.Create(None, self.GetNode(), 'blob-dtb')
        base.compress = 'lz4'
        bintools = {}
        # The '_testing' bintool is expected to be absent, so both calls
        # record a missing bintool and return the 1KB zero-byte placeholder
        base.comp_bintool = base.AddBintool(bintools, '_testing')
        self.assertEquals(tools.get_bytes(0, 1024), base.CompressData(b'abc'))
        self.assertEquals(tools.get_bytes(0, 1024), base.DecompressData(b'abc'))
if __name__ == "__main__":
unittest.main()

View File

@ -7,6 +7,8 @@
from binman.entry import Entry
from binman.etype.blob import Entry_blob
from dtoc import fdt_util
import struct
# This is imported if needed
state = None
@ -17,6 +19,9 @@ class Entry_blob_dtb(Entry_blob):
This is a blob containing a device tree. The contents of the blob are
obtained from the list of available device-tree files, managed by the
'state' module.
Additional attributes:
prepend: Header used (e.g. 'length')
"""
def __init__(self, section, etype, node):
# Put this here to allow entry-docs and help to work without libfdt
@ -24,6 +29,14 @@ class Entry_blob_dtb(Entry_blob):
from binman import state
super().__init__(section, etype, node)
self.prepend = None
def ReadNode(self):
super().ReadNode()
self.prepend = fdt_util.GetString(self._node, 'prepend')
if self.prepend and self.prepend not in ['length']:
self.Raise("Invalid prepend in '%s': '%s'" %
(self._node.name, self.prepend))
def ObtainContents(self):
"""Get the device-tree from the list held by the 'state' module"""
@ -34,6 +47,10 @@ class Entry_blob_dtb(Entry_blob):
def ProcessContents(self):
"""Re-read the DTB contents so that we get any calculated properties"""
_, indata = state.GetFdtContents(self.GetFdtEtype())
if self.compress == 'zstd' and self.prepend != 'length':
self.Raise('The zstd compression requires a length header')
data = self.CompressData(indata)
return self.ProcessContentsUpdate(data)
@ -58,3 +75,17 @@ class Entry_blob_dtb(Entry_blob):
# will still return the old contents
state.UpdateFdtContents(self.GetFdtEtype(), data)
return ok
    def CompressData(self, indata):
        """Compress the data, optionally prepending a length header

        If the 'prepend' property is 'length', a 32-bit little-endian word
        holding the compressed-data length is placed before the data.
        """
        data = super().CompressData(indata)
        if self.prepend == 'length':
            hdr = struct.pack('<I', len(data))
            data = hdr + data
        return data
    def DecompressData(self, indata):
        """Decompress the data, stripping any 32-bit length header first"""
        if self.prepend == 'length':
            # First word holds the length of the compressed payload
            data_len = struct.unpack('<I', indata[:4])[0]
            indata = indata[4:4 + data_len]
        data = super().DecompressData(indata)
        return data

View File

@ -296,3 +296,8 @@ class Entry_cbfs(Entry):
# so that child.data is used to pack into the FIP.
self.ObtainContents(skip_entry=child)
return True
    def AddBintools(self, btools):
        """Collect bintools for this entry and for all its subentries"""
        super().AddBintools(btools)
        # CBFS manages its own subentries, so propagate the collection to them
        for entry in self._entries.values():
            entry.AddBintools(btools)

View File

@ -21,6 +21,9 @@ class Entry_collection(Entry):
listed entries are combined to form this entry. This serves as a useful
base class for entry types which need to process data from elsewhere in
the image, not necessarily child entries.
The entries can generally be anywhere in the same image, even if they are in
a different section from this entry.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)

View File

@ -23,11 +23,10 @@ class Entry_fill(Entry):
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
self.required_props = ['size']
def ReadNode(self):
super().ReadNode()
if self.size is None:
self.Raise("'fill' entry must have a size property")
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)
def ObtainContents(self):

View File

@ -100,4 +100,5 @@ class Entry_gbb(Entry):
return True
def AddBintools(self, btools):
    """Register futility as the tool used to build this entry."""
    super().AddBintools(btools)
    self.futility = self.AddBintool(btools, 'futility')

View File

@ -144,4 +144,5 @@ class Entry_intel_ifwi(Entry_blob_ext):
entry.WriteSymbols(self)
def AddBintools(self, btools):
    """Register ifwitool as the tool used to build this entry."""
    super().AddBintools(btools)
    self.ifwitool = self.AddBintool(btools, 'ifwitool')

View File

@ -15,11 +15,12 @@ class Entry_mkimage(Entry):
"""Binary produced by mkimage
Properties / Entry arguments:
- datafile: Filename for -d argument
- args: Other arguments to pass
- args: Arguments to pass
- data-to-imagename: Indicates that the -d data should be passed in as
the image name also (-n)
The data passed to mkimage is collected from subnodes of the mkimage node,
e.g.::
The data passed to mkimage via the -d flag is collected from subnodes of the
mkimage node, e.g.::
mkimage {
args = "-n test -T imximage";
@ -28,9 +29,27 @@ class Entry_mkimage(Entry):
};
};
This calls mkimage to create an imximage with u-boot-spl.bin as the input
file. The output from mkimage then becomes part of the image produced by
binman.
This calls mkimage to create an imximage with `u-boot-spl.bin` as the data
file, with mkimage being called like this::
mkimage -d <data_file> -n test -T imximage <output_file>
The output from mkimage then becomes part of the image produced by
binman. If you need to put multiple things in the data file, you can use
a section, or just multiple subnodes like this::
mkimage {
args = "-n test -T imximage";
u-boot-spl {
};
u-boot-tpl {
};
};
Note that binman places the contents (here SPL and TPL) into a single file
and passes that to mkimage using the -d option.
To use CONFIG options in the arguments, use a string list instead, as in
this example which also produces four arguments::
@ -42,23 +61,85 @@ class Entry_mkimage(Entry):
};
};
If you need to pass the input data in with the -n argument as well, then use
the 'data-to-imagename' property::
mkimage {
args = "-T imximage";
data-to-imagename;
u-boot-spl {
};
};
That will pass the data to mkimage both as the data file (with -d) and as
the image name (with -n). In both cases, a filename is passed as the
argument, with the actual data being in that file.
If need to pass different data in with -n, then use an `imagename` subnode::
mkimage {
args = "-T imximage";
imagename {
blob {
filename = "spl/u-boot-spl.cfgout"
};
};
u-boot-spl {
};
};
This will pass in u-boot-spl as the input data and the .cfgout file as the
-n data.
"""
def __init__(self, section, etype, node):
super().__init__(section, etype, node)
self._args = fdt_util.GetArgs(self._node, 'args')
self._mkimage_entries = OrderedDict()
self._imagename = None
self.align_default = None
def ReadNode(self):
    """Read the mkimage arguments and properties, then scan subnodes.

    Raises an error (via Raise()) if both an 'imagename' subnode and the
    'data-to-imagename' property are present, since they are mutually
    exclusive ways of supplying the -n argument.
    """
    super().ReadNode()
    self._args = fdt_util.GetArgs(self._node, 'args')
    self._data_to_imagename = fdt_util.GetBool(
        self._node, 'data-to-imagename')
    if self._data_to_imagename and self._node.FindNode('imagename'):
        self.Raise('Cannot use both imagename node and data-to-imagename')
    self.ReadEntries()
def ReadEntries(self):
    """Create an entry for each subnode of the mkimage node.

    A subnode named 'imagename' is held separately, since its data is
    passed to mkimage via -n rather than -d.
    """
    for subnode in self._node.subnodes:
        new_entry = Entry.Create(self, subnode)
        new_entry.ReadNode()
        if new_entry.name == 'imagename':
            self._imagename = new_entry
        else:
            self._mkimage_entries[new_entry.name] = new_entry
def ObtainContents(self):
# Use a non-zero size for any fake files to keep mkimage happy
# Note that testMkimageImagename() relies on this 'mkimage' parameter
data, input_fname, uniq = self.collect_contents_to_file(
self._mkimage_entries.values(), 'mkimage', 1024)
if data is None:
return False
if self._imagename:
image_data, imagename_fname, _ = self.collect_contents_to_file(
[self._imagename], 'mkimage-n', 1024)
if image_data is None:
return False
output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
if self.mkimage.run_cmd('-d', input_fname, *self._args,
output_fname) is not None:
args = ['-d', input_fname]
if self._data_to_imagename:
args += ['-n', input_fname]
elif self._imagename:
args += ['-n', imagename_fname]
args += self._args + [output_fname]
if self.mkimage.run_cmd(*args) is not None:
self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use the input data as the output
@ -67,12 +148,12 @@ class Entry_mkimage(Entry):
return True
def ReadEntries(self):
"""Read the subnodes to find out what should go in this image"""
for node in self._node.subnodes:
entry = Entry.Create(self, node)
entry.ReadNode()
self._mkimage_entries[entry.name] = entry
def GetEntries(self):
    """Return this entry's subentries, including any imagename entry.

    Returns:
        OrderedDict: copy of the entries, so that callers cannot disturb
            the internal state; the imagename entry (if present) comes
            last under the key 'imagename'
    """
    result = OrderedDict(self._mkimage_entries)
    if self._imagename:
        result['imagename'] = self._imagename
    return result
def SetAllowMissing(self, allow_missing):
"""Set whether a section allows missing external blobs
@ -83,6 +164,8 @@ class Entry_mkimage(Entry):
self.allow_missing = allow_missing
for entry in self._mkimage_entries.values():
entry.SetAllowMissing(allow_missing)
if self._imagename:
self._imagename.SetAllowMissing(allow_missing)
def SetAllowFakeBlob(self, allow_fake):
"""Set whether the sub nodes allows to create a fake blob
@ -92,6 +175,8 @@ class Entry_mkimage(Entry):
"""
for entry in self._mkimage_entries.values():
entry.SetAllowFakeBlob(allow_fake)
if self._imagename:
self._imagename.SetAllowFakeBlob(allow_fake)
def CheckFakedBlobs(self, faked_blobs_list):
"""Check if any entries in this section have faked external blobs
@ -103,6 +188,9 @@ class Entry_mkimage(Entry):
"""
for entry in self._mkimage_entries.values():
entry.CheckFakedBlobs(faked_blobs_list)
if self._imagename:
self._imagename.CheckFakedBlobs(faked_blobs_list)
def AddBintools(self, btools):
    """Register mkimage as the tool used to build this entry."""
    super().AddBintools(btools)
    self.mkimage = self.AddBintool(btools, 'mkimage')

View File

@ -37,7 +37,8 @@ class Entry_pre_load(Entry_collection):
"""Pre load image header
Properties / Entry arguments:
- pre-load-key-path: Path of the directory that store key (provided by the environment variable PRE_LOAD_KEY_PATH)
- pre-load-key-path: Path of the directory that stores the key (provided
  by the environment variable PRE_LOAD_KEY_PATH)
- content: List of phandles to entries to sign
- algo-name: Hash and signature algo to use for the signature
- padding-name: Name of the padding (pkcs-1.5 or pss)

View File

@ -13,7 +13,6 @@ import concurrent.futures
import re
import sys
from binman import comp_util
from binman.entry import Entry
from binman import state
from dtoc import fdt_util
@ -506,10 +505,10 @@ class Entry_section(Entry):
node = self._node.GetFdt().LookupPhandle(phandle)
if not node:
source_entry.Raise("Cannot find node for phandle %d" % phandle)
for entry in self._entries.values():
if entry._node == node:
return entry.GetData(required)
source_entry.Raise("Cannot find entry for node '%s'" % node.name)
entry = self.FindEntryByNode(node)
if not entry:
source_entry.Raise("Cannot find entry for node '%s'" % node.name)
return entry.GetData(required)
def LookupSymbol(self, sym_name, optional, msg, base_addr, entries=None):
"""Look up a symbol in an ELF file
@ -777,7 +776,7 @@ class Entry_section(Entry):
data = parent_data[offset:offset + child.size]
if decomp:
indata = data
data = comp_util.decompress(indata, child.compress)
data = child.DecompressData(indata)
if child.uncomp_size:
tout.info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
(child.GetPath(), len(indata), child.compress,
@ -899,5 +898,6 @@ class Entry_section(Entry):
entry.CheckAltFormats(alt_formats)
def AddBintools(self, btools):
    """Collect bintools for this section and all its child entries."""
    super().AddBintools(btools)
    for entry in self._entries.values():
        entry.AddBintools(btools)

View File

@ -98,4 +98,5 @@ class Entry_vblock(Entry_collection):
return self.ProcessContentsUpdate(data)
def AddBintools(self, btools):
    """Register futility as the tool used to build the vblock."""
    super().AddBintools(btools)
    self.futility = self.AddBintool(btools, 'futility')

View File

@ -23,7 +23,6 @@ import urllib.error
from binman import bintool
from binman import cbfs_util
from binman import cmdline
from binman import comp_util
from binman import control
from binman import elf
from binman import elf_test
@ -107,6 +106,8 @@ BASE_DTB_PROPS = ['offset', 'size', 'image-pos']
# Extra properties expected to be in the device tree when allow-repack is used
REPACK_DTB_PROPS = ['orig-offset', 'orig-size']
# Supported compression bintools
COMP_BINTOOLS = ['bzip2', 'gzip', 'lz4', 'lzma_alone', 'lzop', 'xz', 'zstd']
class TestFunctional(unittest.TestCase):
"""Functional tests for binman
@ -212,7 +213,9 @@ class TestFunctional(unittest.TestCase):
TestFunctional._MakeInputFile('tee.elf',
tools.read_file(cls.ElfTestFile('elf_sections')))
cls.have_lz4 = comp_util.HAVE_LZ4
cls.comp_bintools = {}
for name in COMP_BINTOOLS:
cls.comp_bintools[name] = bintool.Bintool.create(name)
@classmethod
def tearDownClass(cls):
@ -242,9 +245,13 @@ class TestFunctional(unittest.TestCase):
cls.toolpath = toolpath
cls.verbosity = verbosity
def _CheckBintool(self, bintool):
if not bintool.is_present():
self.skipTest('%s not available' % bintool.name)
def _CheckLz4(self):
if not self.have_lz4:
self.skipTest('lz4 --no-frame-crc not available')
bintool = self.comp_bintools['lz4']
self._CheckBintool(bintool)
def _CleanupOutputDir(self):
"""Remove the temporary output directory"""
@ -1693,7 +1700,7 @@ class TestFunctional(unittest.TestCase):
"""Test for an fill entry type with no size"""
with self.assertRaises(ValueError) as e:
self._DoReadFile('070_fill_no_size.dts')
self.assertIn("'fill' entry must have a size property",
self.assertIn("'fill' entry is missing properties: size",
str(e.exception))
def _HandleGbbCommand(self, pipe_list):
@ -1967,7 +1974,8 @@ class TestFunctional(unittest.TestCase):
self._ResetDtbs()
def _decompress(self, data):
return comp_util.decompress(data, 'lz4')
bintool = self.comp_bintools['lz4']
return bintool.decompress(data)
def testCompress(self):
"""Test compression of blobs"""
@ -2855,8 +2863,10 @@ class TestFunctional(unittest.TestCase):
def testExtractCbfsRaw(self):
"""Test extracting CBFS compressed data without decompressing it"""
bintool = self.comp_bintools['lzma_alone']
self._CheckBintool(bintool)
data = self._RunExtractCmd('section/cbfs/u-boot-dtb', decomp=False)
dtb = comp_util.decompress(data, 'lzma', with_header=False)
dtb = bintool.decompress(data)
self.assertEqual(EXTRACT_DTB_SIZE, len(dtb))
def testExtractBadEntry(self):
@ -4412,6 +4422,15 @@ class TestFunctional(unittest.TestCase):
}
self.assertEqual(expected, props)
def testLz4Missing(self):
"""Test that binman still produces an image if lz4 is missing"""
with test_util.capture_sys_output() as (_, stderr):
self._DoTestFile('185_compress_section.dts',
force_missing_bintools='lz4')
err = stderr.getvalue()
self.assertRegex(err,
"Image 'main-section'.*missing bintools.*: lz4")
def testCompressExtra(self):
"""Test compression of a section with no fixed size"""
self._CheckLz4()
@ -4427,15 +4446,18 @@ class TestFunctional(unittest.TestCase):
rest = base[len(U_BOOT_DATA):]
# Check compressed data
section1 = self._decompress(rest)
expect1 = comp_util.compress(COMPRESS_DATA + U_BOOT_DATA, 'lz4')
self.assertEquals(expect1, rest[:len(expect1)])
bintool = self.comp_bintools['lz4']
expect1 = bintool.compress(COMPRESS_DATA + U_BOOT_DATA)
data1 = rest[:len(expect1)]
section1 = self._decompress(data1)
self.assertEquals(expect1, data1)
self.assertEquals(COMPRESS_DATA + U_BOOT_DATA, section1)
rest1 = rest[len(expect1):]
section2 = self._decompress(rest1)
expect2 = comp_util.compress(COMPRESS_DATA + COMPRESS_DATA, 'lz4')
self.assertEquals(expect2, rest1[:len(expect2)])
expect2 = bintool.compress(COMPRESS_DATA + COMPRESS_DATA)
data2 = rest1[:len(expect2)]
section2 = self._decompress(data2)
self.assertEquals(expect2, data2)
self.assertEquals(COMPRESS_DATA + COMPRESS_DATA, section2)
rest2 = rest1[len(expect2):]
@ -4807,6 +4829,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElf(self):
"""Test that we can update the devicetree in an ELF file"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed')
outfile = os.path.join(self._indir, 'u-boot.out')
begin_sym = 'dtb_embed_begin'
@ -4858,6 +4882,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElfNoSyms(self):
"""Test that missing symbols are detected with --update-fdt-in-elf"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed')
outfile = ''
begin_sym = 'wrong_begin'
@ -4871,6 +4897,8 @@ class TestFunctional(unittest.TestCase):
def testUpdateFdtInElfTooSmall(self):
"""Test that an over-large dtb is detected with --update-fdt-in-elf"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
infile = elf_fname = self.ElfTestFile('u_boot_binman_embed_sm')
outfile = os.path.join(self._indir, 'u-boot.out')
begin_sym = 'dtb_embed_begin'
@ -5209,15 +5237,6 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self._DoBinman(*args)
self.assertIn('failed to fetch with all methods', stdout.getvalue())
def testInvalidCompress(self):
with self.assertRaises(ValueError) as e:
comp_util.compress(b'', 'invalid')
self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
with self.assertRaises(ValueError) as e:
comp_util.decompress(b'1234', 'invalid')
self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
def testBintoolDocs(self):
"""Test for creation of bintool documentation"""
with test_util.capture_sys_output() as (stdout, stderr):
@ -5344,6 +5363,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElf(self):
"""Test an image with an FIT with an split-elf operation"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
@ -5421,6 +5442,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadElf(self):
"""Test a FIT split-elf operation with an invalid ELF file"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
TestFunctional._MakeInputFile('bad.elf', tools.get_bytes(100, 100))
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
@ -5440,6 +5463,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadDirective(self):
"""Test a FIT split-elf invalid fit,xxx directive in an image node"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
err = self._check_bad_fit('227_fit_bad_dir.dts')
self.assertIn(
"Node '/binman/fit': subnode 'images/@atf-SEQ': Unknown directive 'fit,something'",
@ -5447,13 +5472,24 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFitSplitElfBadDirectiveConfig(self):
"""Test a FIT split-elf with invalid fit,xxx directive in config"""
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
err = self._check_bad_fit('228_fit_bad_dir_config.dts')
self.assertEqual(
"Node '/binman/fit': subnode 'configurations/@config-SEQ': Unknown directive 'fit,config'",
err)
def checkFitSplitElf(self, **kwargs):
"""Test an split-elf FIT with a missing ELF file"""
"""Test a split-elf FIT with a missing ELF file
Args:
kwargs (dict of str): Arguments to pass to _DoTestFile()
Returns:
tuple:
str: stdout result
str: stderr result
"""
entry_args = {
'of-list': 'test-fdt1 test-fdt2',
'default-dt': 'test-fdt2',
@ -5464,23 +5500,36 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
with test_util.capture_sys_output() as (stdout, stderr):
self._DoTestFile(
'226_fit_split_elf.dts', entry_args=entry_args,
extra_indirs=[test_subdir], **kwargs)
err = stderr.getvalue()
return err
extra_indirs=[test_subdir], verbosity=3, **kwargs)
out = stdout.getvalue()
err = stderr.getvalue()
return out, err
def testFitSplitElfMissing(self):
"""Test an split-elf FIT with a missing ELF file"""
err = self.checkFitSplitElf(allow_missing=True)
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
out, err = self.checkFitSplitElf(allow_missing=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
self.assertNotRegex(out, '.*Faked blob.*')
fname = tools.get_output_filename('binman-fake/missing.elf')
self.assertFalse(os.path.exists(fname))
def testFitSplitElfFaked(self):
"""Test an split-elf FIT with faked ELF file"""
err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=True)
if not elf.ELF_TOOLS:
self.skipTest('Python elftools not available')
out, err = self.checkFitSplitElf(allow_missing=True, allow_fake_blobs=True)
self.assertRegex(
err,
"Image '.*' is missing external blobs and is non-functional: .*")
self.assertRegex(
out,
"Entry '/binman/fit/images/@tee-SEQ/tee-os': Faked blob '.*binman-fake/missing.elf")
fname = tools.get_output_filename('binman-fake/missing.elf')
self.assertTrue(os.path.exists(fname))
def testPreLoad(self):
"""Test an image with a pre-load header"""
@ -5694,14 +5743,159 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertIsNotNone(path)
self.assertEqual(expected_fdtmap, fdtmap)
@unittest.expectedFailure
def testReplaceSectionSimple(self):
"""Test replacing a simple section with arbitrary data"""
new_data = b'w' * len(COMPRESS_DATA + U_BOOT_DATA)
data, expected_fdtmap, _ = self._RunReplaceCmd(
'section', new_data,
dts='234_replace_section_simple.dts')
self.assertEqual(new_data, data)
with self.assertRaises(ValueError) as exc:
self._RunReplaceCmd('section', new_data,
dts='234_replace_section_simple.dts')
self.assertIn(
"Node '/section': Replacing sections is not implemented yet",
str(exc.exception))
def testMkimageImagename(self):
    """Test using mkimage with -n holding the data too"""
    data = self._DoReadFile('235_mkimage_name.dts')

    # Check that the data appears in the file somewhere
    self.assertIn(U_BOOT_SPL_DATA, data)

    # Get struct image_header -> ih_name
    name = data[0x20:0x40]

    # Build the filename that we expect to be placed in there, by virtue of
    # the -n parameter
    expect = os.path.join(tools.get_output_dir(), 'mkimage.mkimage')

    # Check that the image name is set to the temporary filename used
    self.assertEqual(expect.encode('utf-8')[:0x20], name)
def testMkimageImage(self):
    """Test using mkimage with -n holding the data too"""
    data = self._DoReadFile('236_mkimage_image.dts')

    # Check that the data appears in the file somewhere
    self.assertIn(U_BOOT_SPL_DATA, data)

    # Get struct image_header -> ih_name
    name = data[0x20:0x40]

    # Build the filename that we expect to be placed in there, by virtue of
    # the -n parameter
    expect = os.path.join(tools.get_output_dir(), 'mkimage-n.mkimage')

    # Check that the image name is set to the temporary filename used
    self.assertEqual(expect.encode('utf-8')[:0x20], name)

    # Check the correct data is in the imagename file
    self.assertEqual(U_BOOT_DATA, tools.read_file(expect))
def testMkimageImageNoContent(self):
"""Test using mkimage with -n and no data"""
with self.assertRaises(ValueError) as exc:
self._DoReadFile('237_mkimage_image_no_content.dts')
self.assertIn('Could not complete processing of contents',
str(exc.exception))
def testMkimageImageBad(self):
"""Test using mkimage with imagename node and data-to-imagename"""
with self.assertRaises(ValueError) as exc:
self._DoReadFile('238_mkimage_image_bad.dts')
self.assertIn('Cannot use both imagename node and data-to-imagename',
str(exc.exception))
def testCollectionOther(self):
"""Test a collection where the data comes from another section"""
data = self._DoReadFile('239_collection_other.dts')
self.assertEqual(U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA +
tools.get_bytes(0xff, 2) + U_BOOT_NODTB_DATA +
tools.get_bytes(0xfe, 3) + U_BOOT_DTB_DATA,
data)
def testMkimageCollection(self):
"""Test using a collection referring to an entry in a mkimage entry"""
data = self._DoReadFile('240_mkimage_coll.dts')
expect = U_BOOT_SPL_DATA + U_BOOT_DATA
self.assertEqual(expect, data[:len(expect)])
def testCompressDtbPrependInvalid(self):
"""Test that invalid header is detected"""
with self.assertRaises(ValueError) as e:
self._DoReadFileDtb('235_compress_dtb_prepend_invalid.dts')
self.assertIn("Node '/binman/u-boot-dtb': Invalid prepend in "
"'u-boot-dtb': 'invalid'", str(e.exception))
def testCompressDtbPrependLength(self):
"""Test that compress with length header works as expected"""
data = self._DoReadFileRealDtb('236_compress_dtb_prepend_length.dts')
image = control.images['image']
entries = image.GetEntries()
self.assertIn('u-boot-dtb', entries)
u_boot_dtb = entries['u-boot-dtb']
self.assertIn('fdtmap', entries)
fdtmap = entries['fdtmap']
image_fname = tools.get_output_filename('image.bin')
orig = control.ReadEntry(image_fname, 'u-boot-dtb')
dtb = fdt.Fdt.FromData(orig)
dtb.Scan()
props = self._GetPropTree(dtb, ['size', 'uncomp-size'])
expected = {
'u-boot:size': len(U_BOOT_DATA),
'u-boot-dtb:uncomp-size': len(orig),
'u-boot-dtb:size': u_boot_dtb.size,
'fdtmap:size': fdtmap.size,
'size': len(data),
}
self.assertEqual(expected, props)
# Check implementation
self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)])
rest = data[len(U_BOOT_DATA):]
comp_data_len = struct.unpack('<I', rest[:4])[0]
comp_data = rest[4:4 + comp_data_len]
orig2 = self._decompress(comp_data)
self.assertEqual(orig, orig2)
def testInvalidCompress(self):
"""Test that invalid compress algorithm is detected"""
with self.assertRaises(ValueError) as e:
self._DoTestFile('237_compress_dtb_invalid.dts')
self.assertIn("Unknown algorithm 'invalid'", str(e.exception))
def testCompUtilCompressions(self):
"""Test compression algorithms"""
for bintool in self.comp_bintools.values():
self._CheckBintool(bintool)
data = bintool.compress(COMPRESS_DATA)
self.assertNotEqual(COMPRESS_DATA, data)
orig = bintool.decompress(data)
self.assertEquals(COMPRESS_DATA, orig)
def testCompUtilVersions(self):
"""Test tool version of compression algorithms"""
for bintool in self.comp_bintools.values():
self._CheckBintool(bintool)
version = bintool.version()
self.assertRegex(version, '^v?[0-9]+[0-9.]*')
def testCompUtilPadding(self):
    """Test padding of compression algorithms"""
    for name, bintool in self.comp_bintools.items():
        if name == 'zstd':
            # zstd does not support trailing padding, so skip it
            continue
        self._CheckBintool(bintool)
        blob = bintool.compress(COMPRESS_DATA)
        self.assertNotEqual(COMPRESS_DATA, blob)
        blob += tools.get_bytes(0, 64)
        self.assertEquals(COMPRESS_DATA, bintool.decompress(blob))
def testCompressDtbZstd(self):
    """Test that zstd-compressing a devicetree fails without a length header"""
    with self.assertRaises(ValueError) as e:
        self._DoTestFile('238_compress_dtb_zstd.dts')
    self.assertIn("Node '/binman/u-boot-dtb': The zstd compression "
                  "requires a length header", str(e.exception))
if __name__ == "__main__":

View File

@ -0,0 +1,17 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
u-boot-dtb {
compress = "lz4";
prepend = "invalid";
};
};
};

View File

@ -0,0 +1,18 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-T script";
data-to-imagename;
u-boot-spl {
};
};
};
};

View File

@ -0,0 +1,19 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
u-boot-dtb {
compress = "lz4";
prepend = "length";
};
fdtmap {
};
};
};

View File

@ -0,0 +1,21 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-T script";
imagename {
type = "u-boot";
};
u-boot-spl {
};
};
};
};

View File

@ -0,0 +1,16 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
u-boot-dtb {
compress = "invalid";
};
};
};

View File

@ -0,0 +1,22 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-T script";
imagename {
type = "_testing";
return-unknown-contents;
};
u-boot-spl {
};
};
};
};

View File

@ -0,0 +1,16 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
u-boot {
};
u-boot-dtb {
compress = "zstd";
};
};
};

View File

@ -0,0 +1,22 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
mkimage {
args = "-T script";
data-to-imagename;
imagename {
type = "u-boot";
};
u-boot-spl {
};
};
};
};

View File

@ -0,0 +1,29 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
collection {
content = <&u_boot_nodtb &dtb>;
};
section {
fill {
size = <2>;
fill-byte = [ff];
};
u_boot_nodtb: u-boot-nodtb {
};
fill2 {
type = "fill";
size = <3>;
fill-byte = [fe];
};
};
dtb: u-boot-dtb {
};
};
};

View File

@ -0,0 +1,27 @@
// SPDX-License-Identifier: GPL-2.0+
/dts-v1/;
/ {
#address-cells = <1>;
#size-cells = <1>;
binman {
collection {
content = <&spl &u_boot>;
};
mkimage {
args = "-T script";
spl: u-boot-spl {
};
imagename {
type = "section";
u_boot: u-boot {
};
};
};
};
};

View File

@ -81,8 +81,7 @@ def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None
print(coverage)
if coverage != '100%':
print(stdout)
print("Type 'python3-coverage html' to get a report in "
'htmlcov/index.html')
print("To get a report in 'htmlcov/index.html', type: python3-coverage html")
print('Coverage error: %s, but should be 100%%' % coverage)
ok = False
if not ok:
@ -209,14 +208,14 @@ def run_test_suites(toolname, debug, verbosity, test_preserve_dirs, processes,
runner = unittest.TextTestRunner(
stream=sys.stdout,
verbosity=(1 if verbosity is None else verbosity),
buffer=buffer_outputs,
buffer=False if test_name else buffer_outputs,
resultclass=FullTextTestResult,
)
if use_concurrent and processes != 1:
suite = ConcurrentTestSuite(suite,
fork_for_tests(processes or multiprocessing.cpu_count(),
buffer=buffer_outputs))
buffer=False if test_name else buffer_outputs))
for module in class_and_module_list:
if isinstance(module, str) and (not test_name or test_name == module):