patman: Convert camel case in tools.py

Convert this file to snake case and update all files which use it.

Signed-off-by: Simon Glass <sjg@chromium.org>

This commit is contained in:
parent 82ee8bfe51
commit c1aa66e75d
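The hunks below mechanically rename the helpers in patman's tools.py from CamelCase to snake_case and update every caller in binman. A rough sketch of what a caller changes to, using a few names taken from the hunks below (illustrative only, not the complete list):

    # Old CamelCase call -> new snake_case call; examples drawn from this diff
    from patman import tools

    tools.prepare_output_dir(None)               # was tools.PrepareOutputDir(None)
    data = tools.read_file('u-boot.bin')         # was tools.ReadFile('u-boot.bin')
    tools.write_file('image.bin', data)          # was tools.WriteFile('image.bin', data)
    stdout = tools.run('objdump', '-t', 'u-boot') # was tools.Run(...)
    tools.finalise_output_dir()                  # was tools.FinaliseOutputDir()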
@@ -327,9 +327,9 @@ class Bintool:
"""
tmpdir = tempfile.mkdtemp(prefix='binmanf.')
print(f"- clone git repo '{git_repo}' to '{tmpdir}'")
- tools.Run('git', 'clone', '--depth', '1', git_repo, tmpdir)
+ tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir)
print(f"- build target '{make_target}'")
- tools.Run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
+ tools.run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
make_target)
fname = os.path.join(tmpdir, bintool_path)
if not os.path.exists(fname):
@@ -349,8 +349,8 @@ class Bintool:
str: Filename of fetched file to copy to a suitable directory
str: Name of temp directory to remove, or None
"""
- fname, tmpdir = tools.Download(url)
- tools.Run('chmod', 'a+x', fname)
+ fname, tmpdir = tools.download(url)
+ tools.run('chmod', 'a+x', fname)
return fname, tmpdir

@classmethod
@@ -384,7 +384,7 @@ class Bintool:
"""
args = ['sudo', 'apt', 'install', '-y', package]
print('- %s' % ' '.join(args))
- tools.Run(*args)
+ tools.run(*args)
return True

@staticmethod

@@ -80,7 +80,7 @@ class TestBintool(unittest.TestCase):

Args:
fake_download (function): Function to call instead of
- tools.Download()
+ tools.download()
method (bintool.FETCH_...: Fetch method to use

Returns:
@@ -88,7 +88,7 @@ class TestBintool(unittest.TestCase):
"""
btest = Bintool.create('_testing')
col = terminal.Color()
- with unittest.mock.patch.object(tools, 'Download',
+ with unittest.mock.patch.object(tools, 'download',
side_effect=fake_download):
with test_util.capture_sys_output() as (stdout, _):
btest.fetch_tool(method, col, False)
@@ -97,7 +97,7 @@ class TestBintool(unittest.TestCase):
def test_fetch_url_err(self):
"""Test an error while fetching a tool from a URL"""
def fail_download(url):
- """Take the tools.Download() function by raising an exception"""
+ """Take the tools.download() function by raising an exception"""
raise urllib.error.URLError('my error')

stdout = self.check_fetch_url(fail_download, bintool.FETCH_ANY)
@@ -114,7 +114,7 @@ class TestBintool(unittest.TestCase):
def test_fetch_method(self):
"""Test fetching using a particular method"""
def fail_download(url):
- """Take the tools.Download() function by raising an exception"""
+ """Take the tools.download() function by raising an exception"""
raise urllib.error.URLError('my error')

stdout = self.check_fetch_url(fail_download, bintool.FETCH_BIN)
@@ -123,11 +123,11 @@ class TestBintool(unittest.TestCase):
def test_fetch_pass_fail(self):
"""Test fetching multiple tools with some passing and some failing"""
def handle_download(_):
- """Take the tools.Download() function by writing a file"""
+ """Take the tools.download() function by writing a file"""
if self.seq:
raise urllib.error.URLError('not found')
self.seq += 1
- tools.WriteFile(fname, expected)
+ tools.write_file(fname, expected)
return fname, dirname

expected = b'this is a test'
@@ -140,12 +140,12 @@ class TestBintool(unittest.TestCase):
self.seq = 0

with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir):
- with unittest.mock.patch.object(tools, 'Download',
+ with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
with test_util.capture_sys_output() as (stdout, _):
Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2)
self.assertTrue(os.path.exists(dest_fname))
- data = tools.ReadFile(dest_fname)
+ data = tools.read_file(dest_fname)
self.assertEqual(expected, data)

lines = stdout.getvalue().splitlines()
@@ -245,14 +245,14 @@ class TestBintool(unittest.TestCase):
tmpdir = cmd[2]
self.fname = os.path.join(tmpdir, 'pathname')
if write_file:
- tools.WriteFile(self.fname, b'hello')
+ tools.write_file(self.fname, b'hello')

btest = Bintool.create('_testing')
col = terminal.Color()
self.fname = None
with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR',
self._indir):
- with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
+ with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
with test_util.capture_sys_output() as (stdout, _):
btest.fetch_tool(bintool.FETCH_BUILD, col, False)
fname = os.path.join(self._indir, '_testing')
@@ -275,7 +275,7 @@ class TestBintool(unittest.TestCase):
btest = Bintool.create('_testing')
btest.install = True
col = terminal.Color()
- with unittest.mock.patch.object(tools, 'Run', return_value=None):
+ with unittest.mock.patch.object(tools, 'run', return_value=None):
with test_util.capture_sys_output() as _:
result = btest.fetch_tool(bintool.FETCH_BIN, col, False)
self.assertEqual(bintool.FETCHED, result)
@@ -292,8 +292,8 @@ class TestBintool(unittest.TestCase):
def test_all_bintools(self):
"""Test that all bintools can handle all available fetch types"""
def handle_download(_):
- """Take the tools.Download() function by writing a file"""
- tools.WriteFile(fname, expected)
+ """Take the tools.download() function by writing a file"""
+ tools.write_file(fname, expected)
return fname, dirname

def fake_run(*cmd):
@@ -301,15 +301,15 @@ class TestBintool(unittest.TestCase):
# See Bintool.build_from_git()
tmpdir = cmd[2]
self.fname = os.path.join(tmpdir, 'pathname')
- tools.WriteFile(self.fname, b'hello')
+ tools.write_file(self.fname, b'hello')

expected = b'this is a test'
dirname = os.path.join(self._indir, 'download_dir')
os.mkdir(dirname)
fname = os.path.join(dirname, 'downloaded')

- with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
- with unittest.mock.patch.object(tools, 'Download',
+ with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
+ with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
with test_util.capture_sys_output() as _:
for name in Bintool.get_tool_list():
@@ -320,7 +320,7 @@ class TestBintool(unittest.TestCase):
if result is not True and result is not None:
result_fname, _ = result
self.assertTrue(os.path.exists(result_fname))
- data = tools.ReadFile(result_fname)
+ data = tools.read_file(result_fname)
self.assertEqual(expected, data)
os.remove(result_fname)

@@ -88,8 +88,8 @@ class Bintoollz4(bintool.Bintool):
bytes: Compressed data
"""
with tempfile.NamedTemporaryFile(prefix='comp.tmp',
- dir=tools.GetOutputDir()) as tmp:
- tools.WriteFile(tmp.name, indata)
+ dir=tools.get_output_dir()) as tmp:
+ tools.write_file(tmp.name, indata)
args = ['--no-frame-crc', '-B4', '-5', '-c', tmp.name]
return self.run_cmd(*args, binary=True)

@@ -103,8 +103,8 @@ class Bintoollz4(bintool.Bintool):
bytes: Decompressed data
"""
with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
- dir=tools.GetOutputDir()) as inf:
- tools.WriteFile(inf.name, indata)
+ dir=tools.get_output_dir()) as inf:
+ tools.write_file(inf.name, indata)
args = ['-cd', inf.name]
return self.run_cmd(*args, binary=True)

@@ -65,13 +65,13 @@ class Bintoollzma_alone(bintool.Bintool):
bytes: Compressed data
"""
with tempfile.NamedTemporaryFile(prefix='comp.tmp',
- dir=tools.GetOutputDir()) as inf:
- tools.WriteFile(inf.name, indata)
+ dir=tools.get_output_dir()) as inf:
+ tools.write_file(inf.name, indata)
with tempfile.NamedTemporaryFile(prefix='compo.otmp',
- dir=tools.GetOutputDir()) as outf:
+ dir=tools.get_output_dir()) as outf:
args = ['e', inf.name, outf.name, '-lc1', '-lp0', '-pb0', '-d8']
self.run_cmd(*args, binary=True)
- return tools.ReadFile(outf.name)
+ return tools.read_file(outf.name)

def decompress(self, indata):
"""Decompress data with lzma_alone
@@ -83,13 +83,13 @@ class Bintoollzma_alone(bintool.Bintool):
bytes: Decompressed data
"""
with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
- dir=tools.GetOutputDir()) as inf:
- tools.WriteFile(inf.name, indata)
+ dir=tools.get_output_dir()) as inf:
+ tools.write_file(inf.name, indata)
with tempfile.NamedTemporaryFile(prefix='compo.otmp',
- dir=tools.GetOutputDir()) as outf:
+ dir=tools.get_output_dir()) as outf:
args = ['d', inf.name, outf.name]
self.run_cmd(*args, binary=True)
- return tools.ReadFile(outf.name, binary=True)
+ return tools.read_file(outf.name, binary=True)

def fetch(self, method):
"""Fetch handler for lzma_alone

@@ -189,9 +189,9 @@ def _pack_string(instr):
Returns:
String with required padding (at least one 0x00 byte) at the end
"""
- val = tools.ToBytes(instr)
+ val = tools.to_bytes(instr)
pad_len = align_int(len(val) + 1, FILENAME_ALIGN)
- return val + tools.GetBytes(0, pad_len - len(val))
+ return val + tools.get_bytes(0, pad_len - len(val))


class CbfsFile(object):
@@ -371,7 +371,7 @@ class CbfsFile(object):
FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN,
self.compress, self.memlen)
elif self.ftype == TYPE_EMPTY:
- data = tools.GetBytes(self.erase_byte, self.size)
+ data = tools.get_bytes(self.erase_byte, self.size)
else:
raise ValueError('Unknown type %#x when writing\n' % self.ftype)
if attr:
@@ -388,7 +388,7 @@ class CbfsFile(object):
# possible.
raise ValueError("Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x" %
(self.name, self.cbfs_offset, offset))
- pad = tools.GetBytes(pad_byte, pad_len)
+ pad = tools.get_bytes(pad_byte, pad_len)
hdr_len += pad_len

# This is the offset of the start of the file's data,
@@ -414,7 +414,7 @@ class CbfsWriter(object):
Usage is something like:

cbw = CbfsWriter(size)
- cbw.add_file_raw('u-boot', tools.ReadFile('u-boot.bin'))
+ cbw.add_file_raw('u-boot', tools.read_file('u-boot.bin'))
...
data, cbfs_offset = cbw.get_data_and_offset()

@@ -482,7 +482,7 @@ class CbfsWriter(object):
if fd.tell() > offset:
raise ValueError('No space for data before offset %#x (current offset %#x)' %
(offset, fd.tell()))
- fd.write(tools.GetBytes(self._erase_byte, offset - fd.tell()))
+ fd.write(tools.get_bytes(self._erase_byte, offset - fd.tell()))

def _pad_to(self, fd, offset):
"""Write out pad bytes and/or an empty file until a given offset

@@ -36,7 +36,7 @@ class TestCbfs(unittest.TestCase):
def setUpClass(cls):
# Create a temporary directory for test files
cls._indir = tempfile.mkdtemp(prefix='cbfs_util.')
- tools.SetInputDirs([cls._indir])
+ tools.set_input_dirs([cls._indir])

# Set up some useful data files
TestCbfs._make_input_file('u-boot.bin', U_BOOT_DATA)
@@ -45,7 +45,7 @@ class TestCbfs(unittest.TestCase):

# Set up a temporary output directory, used by the tools library when
# compressing files
- tools.PrepareOutputDir(None)
+ tools.prepare_output_dir(None)

cls.cbfstool = bintool.Bintool.create('cbfstool')
cls.have_cbfstool = cls.cbfstool.is_present()
@@ -58,7 +58,7 @@ class TestCbfs(unittest.TestCase):
if cls._indir:
shutil.rmtree(cls._indir)
cls._indir = None
- tools.FinaliseOutputDir()
+ tools.finalise_output_dir()

@classmethod
def _make_input_file(cls, fname, contents):
@@ -71,7 +71,7 @@ class TestCbfs(unittest.TestCase):
Full pathname of file created
"""
pathname = os.path.join(cls._indir, fname)
- tools.WriteFile(pathname, contents)
+ tools.write_file(pathname, contents)
return pathname

def _check_hdr(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86):
@@ -176,12 +176,12 @@ class TestCbfs(unittest.TestCase):
base = [(1 << 32) - size + b for b in base]
self.cbfstool.add_raw(
cbfs_fname, 'u-boot',
- tools.GetInputFilename(compress and 'compress' or 'u-boot.bin'),
+ tools.get_input_filename(compress and 'compress' or 'u-boot.bin'),
compress[0] if compress else None,
base[0] if base else None)
self.cbfstool.add_raw(
cbfs_fname, 'u-boot-dtb',
- tools.GetInputFilename(compress and 'compress' or 'u-boot.dtb'),
+ tools.get_input_filename(compress and 'compress' or 'u-boot.dtb'),
compress[1] if compress else None,
base[1] if base else None)
return cbfs_fname
@@ -198,10 +198,10 @@ class TestCbfs(unittest.TestCase):
"""
if not self.have_cbfstool or not self.have_lz4:
return
- expect = tools.ReadFile(cbfstool_fname)
+ expect = tools.read_file(cbfstool_fname)
if expect != data:
- tools.WriteFile('/tmp/expect', expect)
- tools.WriteFile('/tmp/actual', data)
+ tools.write_file('/tmp/expect', expect)
+ tools.write_file('/tmp/actual', data)
print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff')
self.fail('cbfstool produced a different result')

@@ -482,7 +482,7 @@ class TestCbfs(unittest.TestCase):

size = 0xb0
cbw = CbfsWriter(size)
- cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname))
+ cbw.add_file_stage('u-boot', tools.read_file(elf_fname))

data = cbw.get_data()
cbfs = self._check_hdr(data, size)

@@ -258,7 +258,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
raise ValueError('Must specify exactly one entry path to write with -f')
entry = image.FindEntryPath(entry_paths[0])
data = entry.ReadData(decomp, alt_format)
- tools.WriteFile(output_fname, data)
+ tools.write_file(output_fname, data)
tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
return

@@ -281,7 +281,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
fname = os.path.join(fname, 'root')
tout.Notice("Write entry '%s' size %x to '%s'" %
(entry.GetPath(), len(data), fname))
- tools.WriteFile(fname, data)
+ tools.write_file(fname, data)
return einfos


@@ -398,7 +398,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
if len(entry_paths) != 1:
raise ValueError('Must specify exactly one entry path to write with -f')
entry = image.FindEntryPath(entry_paths[0])
- data = tools.ReadFile(input_fname)
+ data = tools.read_file(input_fname)
tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
WriteEntryToImage(image, entry, data, do_compress=do_compress,
allow_resize=allow_resize, write_map=write_map)
@@ -425,7 +425,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
if os.path.exists(fname):
tout.Notice("Write entry '%s' from file '%s'" %
(entry.GetPath(), fname))
- data = tools.ReadFile(fname)
+ data = tools.read_file(fname)
ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
else:
tout.Warning("Skipping entry '%s' from missing file '%s'" %
@@ -468,8 +468,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
# output into a file in our output directly. Then scan it for use
# in binman.
dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
- fname = tools.GetOutputFilename('u-boot.dtb.out')
- tools.WriteFile(fname, tools.ReadFile(dtb_fname))
+ fname = tools.get_output_filename('u-boot.dtb.out')
+ tools.write_file(fname, tools.read_file(dtb_fname))
dtb = fdt.FdtScan(fname)

node = _FindBinmanNode(dtb)
@@ -618,7 +618,7 @@ def Binman(args):
global state

if args.full_help:
- tools.PrintFullHelp(
+ tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README.rst')
)
return 0
@@ -630,7 +630,7 @@ def Binman(args):
if args.cmd in ['ls', 'extract', 'replace', 'tool']:
try:
tout.Init(args.verbosity)
- tools.PrepareOutputDir(None)
+ tools.prepare_output_dir(None)
if args.cmd == 'ls':
ListEntries(args.image, args.paths)

@@ -644,7 +644,7 @@ def Binman(args):
allow_resize=not args.fix_size, write_map=args.map)

if args.cmd == 'tool':
- tools.SetToolPaths(args.toolpath)
+ tools.set_tool_paths(args.toolpath)
if args.list:
bintool.Bintool.list_all()
elif args.fetch:
@@ -658,7 +658,7 @@ def Binman(args):
except:
raise
finally:
- tools.FinaliseOutputDir()
+ tools.finalise_output_dir()
return 0

elf_params = None
@@ -694,9 +694,9 @@ def Binman(args):
# runtime.
use_expanded = not args.no_expanded
try:
- tools.SetInputDirs(args.indir)
- tools.PrepareOutputDir(args.outdir, args.preserve)
- tools.SetToolPaths(args.toolpath)
+ tools.set_input_dirs(args.indir)
+ tools.prepare_output_dir(args.outdir, args.preserve)
+ tools.set_tool_paths(args.toolpath)
state.SetEntryArgs(args.entry_arg)
state.SetThreads(args.threads)

@@ -717,7 +717,7 @@ def Binman(args):

# Write the updated FDTs to our output files
for dtb_item in state.GetAllFdts():
- tools.WriteFile(dtb_item._fname, dtb_item.GetContents())
+ tools.write_file(dtb_item._fname, dtb_item.GetContents())

if elf_params:
data = state.GetFdtForEtype('u-boot-dtb').GetContents()
@@ -729,7 +729,7 @@ def Binman(args):
# Use this to debug the time take to pack the image
#state.TimingShow()
finally:
- tools.FinaliseOutputDir()
+ tools.finalise_output_dir()
finally:
tout.Uninit()

@@ -54,7 +54,7 @@ def GetSymbols(fname, patterns):
key: Name of symbol
value: Hex value of symbol
"""
- stdout = tools.Run('objdump', '-t', fname)
+ stdout = tools.run('objdump', '-t', fname)
lines = stdout.splitlines()
if patterns:
re_syms = re.compile('|'.join(patterns))
@@ -154,7 +154,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section):
entry: Entry to process
section: Section which can be used to lookup symbol values
"""
- fname = tools.GetInputFilename(elf_fname)
+ fname = tools.get_input_filename(elf_fname)
syms = GetSymbols(fname, ['image', 'binman'])
if not syms:
return
@@ -282,7 +282,7 @@ SECTIONS
# text section at the start
# -m32: Build for 32-bit x86
# -T...: Specifies the link script, which sets the start address
- cc, args = tools.GetTargetCompileTool('cc')
+ cc, args = tools.get_target_compile_tool('cc')
args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T',
lds_file, '-o', elf_fname, s_file]
stdout = command.Output(cc, *args)
@@ -363,9 +363,9 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert):
raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" %
(infile, len(insert), len(insert), size, size))

- data = tools.ReadFile(infile)
+ data = tools.read_file(infile)
newdata = data[:syms[start_sym].offset]
- newdata += insert + tools.GetBytes(0, size - len(insert))
+ newdata += insert + tools.get_bytes(0, size - len(insert))
newdata += data[syms[end_sym].offset:]
- tools.WriteFile(outfile, newdata)
+ tools.write_file(outfile, newdata)
tout.Info('Written to offset %#x' % syms[start_sym].offset)

@@ -27,7 +27,7 @@ class FakeEntry:
"""
def __init__(self, contents_size):
self.contents_size = contents_size
- self.data = tools.GetBytes(ord('a'), contents_size)
+ self.data = tools.get_bytes(ord('a'), contents_size)

def GetPath(self):
return 'entry_path'
@@ -72,7 +72,7 @@ def BuildElfTestFiles(target_dir):
if 'MAKEFLAGS' in os.environ:
del os.environ['MAKEFLAGS']
try:
- tools.Run('make', '-C', target_dir, '-f',
+ tools.run('make', '-C', target_dir, '-f',
os.path.join(testdir, 'Makefile'), 'SRC=%s/' % testdir)
except ValueError as e:
# The test system seems to suppress this in a strange way
@@ -83,7 +83,7 @@ class TestElf(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._indir = tempfile.mkdtemp(prefix='elf.')
- tools.SetInputDirs(['.'])
+ tools.set_input_dirs(['.'])
BuildElfTestFiles(cls._indir)

@classmethod
@@ -166,7 +166,7 @@ class TestElf(unittest.TestCase):
section = FakeSection(sym_value=None)
elf_fname = self.ElfTestFile('u_boot_binman_syms')
syms = elf.LookupAndWriteSymbols(elf_fname, entry, section)
- self.assertEqual(tools.GetBytes(255, 20) + tools.GetBytes(ord('a'), 4),
+ self.assertEqual(tools.get_bytes(255, 20) + tools.get_bytes(ord('a'), 4),
entry.data)

def testDebug(self):
@@ -193,7 +193,7 @@ class TestElf(unittest.TestCase):
# Make an Elf file and then convert it to a fkat binary file. This
# should produce the original data.
elf.MakeElf(elf_fname, expected_text, expected_data)
- objcopy, args = tools.GetTargetCompileTool('objcopy')
+ objcopy, args = tools.get_target_compile_tool('objcopy')
args += ['-O', 'binary', elf_fname, bin_fname]
stdout = command.Output(objcopy, *args)
with open(bin_fname, 'rb') as fd:
@@ -210,7 +210,7 @@ class TestElf(unittest.TestCase):
expected_data = b'wxyz'
elf_fname = os.path.join(outdir, 'elf')
elf.MakeElf(elf_fname, expected_text, expected_data)
- data = tools.ReadFile(elf_fname)
+ data = tools.read_file(elf_fname)

load = 0xfef20000
entry = load + 2
@@ -231,7 +231,7 @@ class TestElf(unittest.TestCase):
offset = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
start = offset['embed_start'].offset
end = offset['embed_end'].offset
- data = tools.ReadFile(fname)
+ data = tools.read_file(fname)
embed_data = data[start:end]
expect = struct.pack('<III', 0x1234, 0x5678, 0)
self.assertEqual(expect, embed_data)

@@ -14,7 +14,7 @@ from binman import bintool
from binman import comp_util
from dtoc import fdt_util
from patman import tools
- from patman.tools import ToHex, ToHexSize
+ from patman.tools import to_hex, to_hex_size
from patman import tout

modules = {}
@@ -244,7 +244,7 @@ class Entry(object):
self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')

self.align = fdt_util.GetInt(self._node, 'align')
- if tools.NotPowerOfTwo(self.align):
+ if tools.not_power_of_two(self.align):
raise ValueError("Node '%s': Alignment %s must be a power of two" %
(self._node.path, self.align))
if self.section and self.align is None:
@@ -252,7 +252,7 @@ class Entry(object):
self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
self.align_size = fdt_util.GetInt(self._node, 'align-size')
- if tools.NotPowerOfTwo(self.align_size):
+ if tools.not_power_of_two(self.align_size):
self.Raise("Alignment size %s must be a power of two" %
self.align_size)
self.align_end = fdt_util.GetInt(self._node, 'align-end')
@@ -397,12 +397,12 @@ class Entry(object):

# Don't let the data shrink. Pad it if necessary
if size_ok and new_size < self.contents_size:
- data += tools.GetBytes(0, self.contents_size - new_size)
+ data += tools.get_bytes(0, self.contents_size - new_size)

if not size_ok:
tout.Debug("Entry '%s' size change from %s to %s" % (
- self._node.path, ToHex(self.contents_size),
- ToHex(new_size)))
+ self._node.path, to_hex(self.contents_size),
+ to_hex(new_size)))
self.SetContents(data)
return size_ok

@@ -419,8 +419,8 @@ class Entry(object):
def ResetForPack(self):
"""Reset offset/size fields so that packing can be done again"""
self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
- (ToHex(self.offset), ToHex(self.orig_offset),
- ToHex(self.size), ToHex(self.orig_size)))
+ (to_hex(self.offset), to_hex(self.orig_offset),
+ to_hex(self.size), to_hex(self.orig_size)))
self.pre_reset_size = self.size
self.offset = self.orig_offset
self.size = self.orig_size
@@ -444,20 +444,20 @@ class Entry(object):
New section offset pointer (after this entry)
"""
self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
- (ToHex(self.offset), ToHex(self.size),
+ (to_hex(self.offset), to_hex(self.size),
self.contents_size))
if self.offset is None:
if self.offset_unset:
self.Raise('No offset set with offset-unset: should another '
'entry provide this correct offset?')
- self.offset = tools.Align(offset, self.align)
+ self.offset = tools.align(offset, self.align)
needed = self.pad_before + self.contents_size + self.pad_after
- needed = tools.Align(needed, self.align_size)
+ needed = tools.align(needed, self.align_size)
size = self.size
if not size:
size = needed
new_offset = self.offset + size
- aligned_offset = tools.Align(new_offset, self.align_end)
+ aligned_offset = tools.align(new_offset, self.align_end)
if aligned_offset != new_offset:
size = aligned_offset - self.offset
new_offset = aligned_offset
@@ -471,10 +471,10 @@ class Entry(object):
# Check that the alignment is correct. It could be wrong if the
# and offset or size values were provided (i.e. not calculated), but
# conflict with the provided alignment values
- if self.size != tools.Align(self.size, self.align_size):
+ if self.size != tools.align(self.size, self.align_size):
self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
(self.size, self.size, self.align_size, self.align_size))
- if self.offset != tools.Align(self.offset, self.align):
+ if self.offset != tools.align(self.offset, self.align):
self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
(self.offset, self.offset, self.align, self.align))
self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
@@ -541,7 +541,7 @@ class Entry(object):
bytes content of the entry, excluding any padding. If the entry is
compressed, the compressed data is returned
"""
- self.Detail('GetData: size %s' % ToHexSize(self.data))
+ self.Detail('GetData: size %s' % to_hex_size(self.data))
return self.data

def GetPaddedData(self, data=None):
@@ -991,7 +991,7 @@ features to produce new behaviours.
fname (str): Filename of faked file
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
- outfname = tools.GetOutputFilename(os.path.basename(fname))
+ outfname = tools.get_output_filename(os.path.basename(fname))
with open(outfname, "wb") as out:
out.truncate(1024)
self.faked = True

@@ -17,10 +17,10 @@ from patman import tools

class TestEntry(unittest.TestCase):
def setUp(self):
- tools.PrepareOutputDir(None)
+ tools.prepare_output_dir(None)

def tearDown(self):
- tools.FinaliseOutputDir()
+ tools.finalise_output_dir()

def GetNode(self):
binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))

@@ -181,7 +181,7 @@ class Entry_atf_fip(Entry_section):
self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0)
self._fip_flags = fdt_util.GetInt64(self._node, 'fip-hdr-flags', 0)
self._fip_align = fdt_util.GetInt(self._node, 'fip-align', 1)
- if tools.NotPowerOfTwo(self._fip_align):
+ if tools.not_power_of_two(self._fip_align):
raise ValueError("Node '%s': FIP alignment %s must be a power of two" %
(self._node.path, self._fip_align))
self.ReadEntries()

@@ -37,7 +37,7 @@ class Entry_blob(Entry):

def ObtainContents(self):
self._filename = self.GetDefaultFilename()
- self._pathname = tools.GetInputFilename(self._filename,
+ self._pathname = tools.get_input_filename(self._filename,
self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not self._pathname:
@@ -68,7 +68,7 @@ class Entry_blob(Entry):
bytes: Data read
"""
state.TimingStart('read')
- indata = tools.ReadFile(pathname)
+ indata = tools.read_file(pathname)
state.TimingAccum('read')
state.TimingStart('compress')
data = self.CompressData(indata)

@@ -38,7 +38,7 @@ class Entry_blob_ext_list(Entry_blob):
pathnames = []
for fname in self._filenames:
fname = self.check_fake_fname(fname)
- pathname = tools.GetInputFilename(
+ pathname = tools.get_input_filename(
fname, self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not pathname:

@@ -140,7 +140,7 @@ class Entry_fdtmap(Entry):
fdt.pack()
outfdt = Fdt.FromData(fdt.as_bytearray())
data = outfdt.GetContents()
- data = FDTMAP_MAGIC + tools.GetBytes(0, 8) + data
+ data = FDTMAP_MAGIC + tools.get_bytes(0, 8) + data
return data

def ObtainContents(self):

@@ -47,7 +47,7 @@ class Entry_files(Entry_section):
'require-matches')

def ExpandEntries(self):
- files = tools.GetInputFilenameGlob(self._pattern)
+ files = tools.get_input_filename_glob(self._pattern)
if self._require_matches and not files:
self.Raise("Pattern '%s' matched no files" % self._pattern)
for fname in files:

@@ -31,5 +31,5 @@ class Entry_fill(Entry):
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)

def ObtainContents(self):
- self.SetContents(tools.GetBytes(self.fill_value, self.size))
+ self.SetContents(tools.get_bytes(self.fill_value, self.size))
return True

@@ -200,19 +200,19 @@ class Entry_fit(Entry):
for seq, fdt_fname in enumerate(self._fdts):
node_name = subnode.name[1:].replace('SEQ',
str(seq + 1))
- fname = tools.GetInputFilename(fdt_fname + '.dtb')
+ fname = tools.get_input_filename(fdt_fname + '.dtb')
with fsw.add_node(node_name):
for pname, prop in subnode.props.items():
val = prop.bytes.replace(
- b'NAME', tools.ToBytes(fdt_fname))
+ b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
- b'SEQ', tools.ToBytes(str(seq + 1)))
+ b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)

# Add data for 'fdt' nodes (but not 'config')
if depth == 1 and in_images:
fsw.property('data',
- tools.ReadFile(fname))
+ tools.read_file(fname))
else:
if self._fdts is None:
if self._fit_list_prop:
@@ -246,10 +246,10 @@ class Entry_fit(Entry):
# self._BuildInput() either returns bytes or raises an exception.
data = self._BuildInput(self._fdt)
uniq = self.GetUniqueName()
- input_fname = tools.GetOutputFilename('%s.itb' % uniq)
- output_fname = tools.GetOutputFilename('%s.fit' % uniq)
- tools.WriteFile(input_fname, data)
- tools.WriteFile(output_fname, data)
+ input_fname = tools.get_output_filename('%s.itb' % uniq)
+ output_fname = tools.get_output_filename('%s.fit' % uniq)
+ tools.write_file(input_fname, data)
+ tools.write_file(output_fname, data)

args = {}
ext_offset = self._fit_props.get('fit,external-offset')
@@ -260,11 +260,11 @@ class Entry_fit(Entry):
}
if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
**args) is not None:
- self.SetContents(tools.ReadFile(output_fname))
+ self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use empty data as the output
self.record_missing_bintool(self.mkimage)
- self.SetContents(tools.GetBytes(0, 1024))
+ self.SetContents(tools.get_bytes(0, 1024))

return True

@@ -8,7 +8,7 @@
from binman.entry import Entry
from binman import fmap_util
from patman import tools
- from patman.tools import ToHexSize
+ from patman.tools import to_hex_size
from patman import tout


@@ -47,7 +47,7 @@ class Entry_fmap(Entry):
def _AddEntries(areas, entry):
entries = entry.GetEntries()
tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
- (entry.GetPath(), entry.etype, ToHexSize(entries)))
+ (entry.GetPath(), entry.etype, to_hex_size(entries)))
if entries and entry.etype != 'cbfs':
# Create an area for the section, which encompasses all entries
# within it

@@ -70,14 +70,14 @@ class Entry_gbb(Entry):

def ObtainContents(self):
gbb = 'gbb.bin'
- fname = tools.GetOutputFilename(gbb)
+ fname = tools.get_output_filename(gbb)
if not self.size:
self.Raise('GBB must have a fixed size')
gbb_size = self.size
bmpfv_size = gbb_size - 0x2180
if bmpfv_size < 0:
self.Raise('GBB is too small (minimum 0x2180 bytes)')
- keydir = tools.GetInputFilename(self.keydir)
+ keydir = tools.get_input_filename(self.keydir)

stdout = self.futility.gbb_create(
fname, [0x100, 0x1000, bmpfv_size, 0x1000])
@@ -88,14 +88,14 @@ class Entry_gbb(Entry):
rootkey='%s/root_key.vbpubk' % keydir,
recoverykey='%s/recovery_key.vbpubk' % keydir,
flags=self.gbb_flags,
- bmpfv=tools.GetInputFilename(self.bmpblk))
+ bmpfv=tools.get_input_filename(self.bmpblk))

if stdout is not None:
- self.SetContents(tools.ReadFile(fname))
+ self.SetContents(tools.read_file(fname))
else:
# Bintool is missing; just use the required amount of zero data
self.record_missing_bintool(self.futility)
- self.SetContents(tools.GetBytes(0, gbb_size))
+ self.SetContents(tools.get_bytes(0, gbb_size))

return True

@@ -58,11 +58,11 @@ class Entry_intel_ifwi(Entry_blob_ext):
# Create the IFWI file if needed
if self._convert_fit:
inname = self._pathname
- outname = tools.GetOutputFilename('ifwi.bin')
+ outname = tools.get_output_filename('ifwi.bin')
if self.ifwitool.create_ifwi(inname, outname) is None:
# Bintool is missing; just create a zeroed ifwi.bin
self.record_missing_bintool(self.ifwitool)
- self.SetContents(tools.GetBytes(0, 1024))
+ self.SetContents(tools.get_bytes(0, 1024))

self._filename = 'ifwi.bin'
self._pathname = outname
@@ -74,15 +74,15 @@ class Entry_intel_ifwi(Entry_blob_ext):
if self.ifwitool.delete_subpart(outname, 'OBBP') is None:
# Bintool is missing; just use zero data
self.record_missing_bintool(self.ifwitool)
- self.SetContents(tools.GetBytes(0, 1024))
+ self.SetContents(tools.get_bytes(0, 1024))
return True

for entry in self._ifwi_entries.values():
# First get the input data and put it in a file
data = entry.GetPaddedData()
uniq = self.GetUniqueName()
- input_fname = tools.GetOutputFilename('input.%s' % uniq)
- tools.WriteFile(input_fname, data)
+ input_fname = tools.get_output_filename('input.%s' % uniq)
+ tools.write_file(input_fname, data)

# At this point we know that ifwitool is present, so we don't need
# to check for None here
@@ -107,7 +107,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
After that we delete the OBBP sub-partition and add each of the files
that we want in the IFWI file, one for each sub-entry of the IWFI node.
"""
- self._pathname = tools.GetInputFilename(self._filename,
+ self._pathname = tools.get_input_filename(self._filename,
self.section.GetAllowMissing())
# Allow the file to be missing
if not self._pathname:

@@ -48,12 +48,12 @@ class Entry_mkimage(Entry):
return False
data += entry.GetData()
uniq = self.GetUniqueName()
- input_fname = tools.GetOutputFilename('mkimage.%s' % uniq)
- tools.WriteFile(input_fname, data)
- output_fname = tools.GetOutputFilename('mkimage-out.%s' % uniq)
+ input_fname = tools.get_output_filename('mkimage.%s' % uniq)
+ tools.write_file(input_fname, data)
+ output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
if self.mkimage.run_cmd('-d', input_fname, *self._args,
output_fname) is not None:
- self.SetContents(tools.ReadFile(output_fname))
+ self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use the input data as the output
self.record_missing_bintool(self.mkimage)

@@ -19,7 +19,7 @@ from binman import state
from dtoc import fdt_util
from patman import tools
from patman import tout
- from patman.tools import ToHexSize
+ from patman.tools import to_hex_size


class Entry_section(Entry):
@@ -269,19 +269,19 @@ class Entry_section(Entry):
data = bytearray()
# Handle padding before the entry
if entry.pad_before:
- data += tools.GetBytes(self._pad_byte, entry.pad_before)
+ data += tools.get_bytes(self._pad_byte, entry.pad_before)

# Add in the actual entry data
data += entry_data

# Handle padding after the entry
if entry.pad_after:
- data += tools.GetBytes(self._pad_byte, entry.pad_after)
+ data += tools.get_bytes(self._pad_byte, entry.pad_after)

if entry.size:
- data += tools.GetBytes(pad_byte, entry.size - len(data))
+ data += tools.get_bytes(pad_byte, entry.size - len(data))

- self.Detail('GetPaddedDataForEntry: size %s' % ToHexSize(self.data))
+ self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data))

return data

@@ -316,7 +316,7 @@ class Entry_section(Entry):
# Handle empty space before the entry
pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
if pad > 0:
- section_data += tools.GetBytes(self._pad_byte, pad)
+ section_data += tools.get_bytes(self._pad_byte, pad)

# Add in the actual entry data
section_data += data
@@ -709,14 +709,14 @@ class Entry_section(Entry):
if not size:
data = self.GetPaddedData(self.data)
size = len(data)
- size = tools.Align(size, self.align_size)
+ size = tools.align(size, self.align_size)

if self.size and contents_size > self.size:
self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" %
(contents_size, contents_size, self.size, self.size))
if not self.size:
self.size = size
- if self.size != tools.Align(self.size, self.align_size):
+ if self.size != tools.align(self.size, self.align_size):
self._Raise("Size %#x (%d) does not match align-size %#x (%d)" %
(self.size, self.size, self.align_size,
self.align_size))

@@ -60,14 +60,14 @@ class Entry_text(Entry):
super().__init__(section, etype, node)
value = fdt_util.GetString(self._node, 'text')
if value:
- value = tools.ToBytes(value)
+ value = tools.to_bytes(value)
else:
label, = self.GetEntryArgsOrProps([EntryArg('text-label', str)])
self.text_label = label
if self.text_label:
value, = self.GetEntryArgsOrProps([EntryArg(self.text_label,
str)])
- value = tools.ToBytes(value) if value is not None else value
+ value = tools.to_bytes(value) if value is not None else value
self.value = value

def ObtainContents(self):

@@ -27,9 +27,9 @@ class Entry_u_boot_elf(Entry_blob):
def ReadBlobContents(self):
if self._strip:
uniq = self.GetUniqueName()
- out_fname = tools.GetOutputFilename('%s.stripped' % uniq)
- tools.WriteFile(out_fname, tools.ReadFile(self._pathname))
- tools.Run('strip', out_fname)
+ out_fname = tools.get_output_filename('%s.stripped' % uniq)
+ tools.write_file(out_fname, tools.read_file(self._pathname))
+ tools.run('strip', out_fname)
self._pathname = out_fname
super().ReadBlobContents()
return True

@@ -27,7 +27,7 @@ class Entry_u_boot_env(Entry_blob):
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)

def ReadBlobContents(self):
- indata = tools.ReadFile(self._pathname)
+ indata = tools.read_file(self._pathname)
data = b''
for line in indata.splitlines():
data += line + b'\0'
@@ -35,7 +35,7 @@ class Entry_u_boot_env(Entry_blob):
pad = self.size - len(data) - 5
if pad < 0:
self.Raise("'u-boot-env' entry too small to hold data (need %#x more bytes)" % -pad)
- data += tools.GetBytes(self.fill_value, pad)
+ data += tools.get_bytes(self.fill_value, pad)
crc = zlib.crc32(data)
buf = struct.pack('<I', crc) + b'\x01' + data
self.SetContents(buf)

@@ -36,9 +36,9 @@ class Entry_u_boot_spl_bss_pad(Entry_blob):
super().__init__(section, etype, node)

def ObtainContents(self):
- fname = tools.GetInputFilename('spl/u-boot-spl')
+ fname = tools.get_input_filename('spl/u-boot-spl')
bss_size = elf.GetSymbolAddress(fname, '__bss_size')
if not bss_size:
self.Raise('Expected __bss_size symbol in spl/u-boot-spl')
- self.SetContents(tools.GetBytes(0, bss_size))
+ self.SetContents(tools.get_bytes(0, bss_size))
return True

@@ -36,9 +36,9 @@ class Entry_u_boot_tpl_bss_pad(Entry_blob):
super().__init__(section, etype, node)

def ObtainContents(self):
- fname = tools.GetInputFilename('tpl/u-boot-tpl')
+ fname = tools.get_input_filename('tpl/u-boot-tpl')
bss_size = elf.GetSymbolAddress(fname, '__bss_size')
if not bss_size:
self.Raise('Expected __bss_size symbol in tpl/u-boot-tpl')
- self.SetContents(tools.GetBytes(0, bss_size))
+ self.SetContents(tools.get_bytes(0, bss_size))
return True

@@ -92,8 +92,8 @@ class Entry_u_boot_ucode(Entry_blob):
return True

# Write it out to a file
- self._pathname = tools.GetOutputFilename('u-boot-ucode.bin')
- tools.WriteFile(self._pathname, fdt_entry.ucode_data)
+ self._pathname = tools.get_output_filename('u-boot-ucode.bin')
+ tools.write_file(self._pathname, fdt_entry.ucode_data)

self.ReadBlobContents()


@@ -38,7 +38,7 @@ class Entry_u_boot_with_ucode_ptr(Entry_blob):

def ProcessFdt(self, fdt):
# Figure out where to put the microcode pointer
- fname = tools.GetInputFilename(self.elf_fname)
+ fname = tools.get_input_filename(self.elf_fname)
sym = elf.GetSymbolAddress(fname, '_dt_ucode_base_size')
if sym:
self.target_offset = sym

@@ -65,9 +65,9 @@ class Entry_vblock(Entry_collection):
return None

uniq = self.GetUniqueName()
- output_fname = tools.GetOutputFilename('vblock.%s' % uniq)
- input_fname = tools.GetOutputFilename('input.%s' % uniq)
- tools.WriteFile(input_fname, input_data)
+ output_fname = tools.get_output_filename('vblock.%s' % uniq)
+ input_fname = tools.get_output_filename('input.%s' % uniq)
+ tools.write_file(input_fname, input_data)
prefix = self.keydir + '/'
stdout = self.futility.sign_firmware(
vblock=output_fname,
@@ -78,11 +78,11 @@ class Entry_vblock(Entry_collection):
kernelkey=prefix + self.kernelkey,
flags=f'{self.preamble_flags}')
if stdout is not None:
- data = tools.ReadFile(output_fname)
+ data = tools.read_file(output_fname)
else:
# Bintool is missing; just use 4KB of zero data
self.record_missing_bintool(self.futility)
- data = tools.GetBytes(0, 4096)
+ data = tools.get_bytes(0, 4096)
return data

def ObtainContents(self):

@@ -19,11 +19,11 @@ class TestFdt(unittest.TestCase):
def setUpClass(self):
self._binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
self._indir = tempfile.mkdtemp(prefix='binmant.')
- tools.PrepareOutputDir(self._indir, True)
+ tools.prepare_output_dir(self._indir, True)

@classmethod
def tearDownClass(self):
- tools._FinaliseForTest()
+ tools._finalise_for_test()

def TestFile(self, fname):
return os.path.join(self._binman_dir, 'test', fname)

@@ -248,7 +248,7 @@ class FipEntry:
self.flags = flags
self.fip_type = None
self.data = None
- self.valid = uuid != tools.GetBytes(0, UUID_LEN)
+ self.valid = uuid != tools.get_bytes(0, UUID_LEN)
if self.valid:
# Look up the friendly name
matches = {val for (key, val) in FIP_TYPES.items()
@@ -309,7 +309,7 @@ class FipWriter:
Usage is something like:

fip = FipWriter(size)
- fip.add_entry('scp-fwu-cfg', tools.ReadFile('something.bin'))
+ fip.add_entry('scp-fwu-cfg', tools.read_file('something.bin'))
...
data = cbw.get_data()

@@ -354,7 +354,7 @@ class FipWriter:
offset += ENTRY_SIZE # terminating entry

for fent in self._fip_entries:
- offset = tools.Align(offset, self._align)
+ offset = tools.align(offset, self._align)
fent.offset = offset
offset += fent.size

@@ -443,7 +443,7 @@ def parse_macros(srcdir):
re_uuid = re.compile('0x[0-9a-fA-F]{2}')
re_comment = re.compile(r'^/\* (.*) \*/$')
fname = os.path.join(srcdir, 'include/tools_share/firmware_image_package.h')
- data = tools.ReadFile(fname, binary=False)
+ data = tools.read_file(fname, binary=False)
macros = collections.OrderedDict()
comment = None
for linenum, line in enumerate(data.splitlines()):
@@ -489,7 +489,7 @@ def parse_names(srcdir):
re_data = re.compile(r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"',
re.S)
fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c')
- data = tools.ReadFile(fname, binary=False)
+ data = tools.read_file(fname, binary=False)

# Example entry:
# {
@@ -574,21 +574,21 @@ def parse_atf_source(srcdir, dstfile, oldfile):
raise ValueError(
f"Expected file '{readme_fname}' - try using -s to specify the "
'arm-trusted-firmware directory')
- readme = tools.ReadFile(readme_fname, binary=False)
+ readme = tools.read_file(readme_fname, binary=False)
first_line = 'Trusted Firmware-A'
if readme.splitlines()[0] != first_line:
raise ValueError(f"'{readme_fname}' does not start with '{first_line}'")
macros = parse_macros(srcdir)
names = parse_names(srcdir)
output = create_code_output(macros, names)
- orig = tools.ReadFile(oldfile, binary=False)
+ orig = tools.read_file(oldfile, binary=False)
re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S)
mat = re_fip_list.match(orig)
new_code = mat.group(1) + '\n' + output + mat.group(2) if mat else output
if new_code == orig:
print(f"Existing code in '{oldfile}' is up-to-date")
else:
- tools.WriteFile(dstfile, new_code, binary=False)
+ tools.write_file(dstfile, new_code, binary=False)
print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}')

@@ -35,14 +35,14 @@ class TestFip(unittest.TestCase):
def setUp(self):
# Create a temporary directory for test files
self._indir = tempfile.mkdtemp(prefix='fip_util.')
- tools.SetInputDirs([self._indir])
+ tools.set_input_dirs([self._indir])

# Set up a temporary output directory, used by the tools library when
# compressing files
- tools.PrepareOutputDir(None)
+ tools.prepare_output_dir(None)

self.src_file = os.path.join(self._indir, 'orig.py')
- self.outname = tools.GetOutputFilename('out.py')
+ self.outname = tools.get_output_filename('out.py')
self.args = ['-D', '-s', self._indir, '-o', self.outname]
self.readme = os.path.join(self._indir, 'readme.rst')
self.macro_dir = os.path.join(self._indir, 'include/tools_share')
@@ -78,25 +78,25 @@ toc_entry_t toc_entries[] = {

def setup_readme(self):
"""Set up the readme.txt file"""
- tools.WriteFile(self.readme, 'Trusted Firmware-A\n==================',
+ tools.write_file(self.readme, 'Trusted Firmware-A\n==================',
binary=False)

def setup_macro(self, data=macro_contents):
"""Set up the tbbr_config.c file"""
os.makedirs(self.macro_dir)
- tools.WriteFile(self.macro_fname, data, binary=False)
+ tools.write_file(self.macro_fname, data, binary=False)

def setup_name(self, data=name_contents):
"""Set up the firmware_image_package.h file"""
os.makedirs(self.name_dir)
- tools.WriteFile(self.name_fname, data, binary=False)
+ tools.write_file(self.name_fname, data, binary=False)

def tearDown(self):
"""Remove the temporary input directory and its contents"""
if self._indir:
shutil.rmtree(self._indir)
self._indir = None
- tools.FinaliseOutputDir()
+ tools.finalise_output_dir()

def test_no_readme(self):
"""Test handling of a missing readme.rst"""
@@ -106,7 +106,7 @@ toc_entry_t toc_entries[] = {

def test_invalid_readme(self):
"""Test that an invalid readme.rst is detected"""
- tools.WriteFile(self.readme, 'blah', binary=False)
+ tools.write_file(self.readme, 'blah', binary=False)
with self.assertRaises(Exception) as err:
fip_util.main(self.args, self.src_file)
self.assertIn('does not start with', str(err.exception))
@@ -228,7 +228,7 @@ toc_entry_t toc_entries[] = {
self.setup_name()

# Check generating the file when changes are needed
- tools.WriteFile(self.src_file, '''
+ tools.write_file(self.src_file, '''

# This is taken from tbbr_config.c in ARM Trusted Firmware
FIP_TYPE_LIST = [
@@ -244,7 +244,7 @@ blah de blah
self.assertIn('Needs update', stdout.getvalue())

# Check generating the file when no changes are needed
- tools.WriteFile(self.src_file, '''
+ tools.write_file(self.src_file, '''
# This is taken from tbbr_config.c in ARM Trusted Firmware
FIP_TYPE_LIST = [
# ToC Entry UUIDs
@@ -268,7 +268,7 @@ blah blah''', binary=False)

args = self.args.copy()
args.remove('-D')
- tools.WriteFile(self.src_file, '', binary=False)
+ tools.write_file(self.src_file, '', binary=False)
with test_util.capture_sys_output():
fip_util.main(args, self.src_file)

@@ -282,8 +282,8 @@ blah blah''', binary=False)
fip.add_entry('tb-fw', tb_fw, 0)
fip.add_entry(bytes(range(16)), tb_fw, 0)
data = fip.get_data()
- fname = tools.GetOutputFilename('data.fip')
- tools.WriteFile(fname, data)
+ fname = tools.get_output_filename('data.fip')
+ tools.write_file(fname, data)
result = FIPTOOL.info(fname)
self.assertEqual(
'''Firmware Updater NS_BL2U: offset=0xB0, size=0x7, cmdline="--fwu"
@@ -303,19 +303,19 @@ Trusted Boot Firmware BL2: offset=0xC0, size=0xE, cmdline="--tb-fw"
FipReader: reader for the image
"""
fwu = os.path.join(self._indir, 'fwu')
- tools.WriteFile(fwu, self.fwu_data)
+ tools.write_file(fwu, self.fwu_data)

tb_fw = os.path.join(self._indir, 'tb_fw')
- tools.WriteFile(tb_fw, self.tb_fw_data)
+ tools.write_file(tb_fw, self.tb_fw_data)

other_fw = os.path.join(self._indir, 'other_fw')
- tools.WriteFile(other_fw, self.other_fw_data)
+ tools.write_file(other_fw, self.other_fw_data)

- fname = tools.GetOutputFilename('data.fip')
+ fname = tools.get_output_filename('data.fip')
uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788'
FIPTOOL.create_new(fname, 8, 0x123, fwu, tb_fw, uuid, other_fw)

- return fip_util.FipReader(tools.ReadFile(fname))
+ return fip_util.FipReader(tools.read_file(fname))

@unittest.skipIf(not HAVE_FIPTOOL, 'No fiptool available')
def test_fiptool_create(self):

@ -70,7 +70,7 @@ def ConvertName(field_names, fields):
|
||||
value: value of that field (string for the ones we support)
|
||||
"""
|
||||
name_index = field_names.index('name')
|
||||
fields[name_index] = tools.ToBytes(NameToFmap(fields[name_index]))
|
||||
fields[name_index] = tools.to_bytes(NameToFmap(fields[name_index]))
|
||||
|
||||
def DecodeFmap(data):
|
||||
"""Decode a flashmap into a header and list of areas
|
||||
|
@ -174,7 +174,7 @@ class TestFunctional(unittest.TestCase):
|
||||
|
||||
# ELF file with a '_dt_ucode_base_size' symbol
|
||||
TestFunctional._MakeInputFile('u-boot',
|
||||
tools.ReadFile(cls.ElfTestFile('u_boot_ucode_ptr')))
|
||||
tools.read_file(cls.ElfTestFile('u_boot_ucode_ptr')))
|
||||
|
||||
# Intel flash descriptor file
|
||||
cls._SetupDescriptor()
|
||||
@ -236,7 +236,7 @@ class TestFunctional(unittest.TestCase):
|
||||
if self.preserve_outdirs:
|
||||
print('Preserving output dir: %s' % tools.outdir)
|
||||
else:
|
||||
tools._FinaliseForTest()
|
||||
tools._finalise_for_test()
|
||||
|
||||
def setUp(self):
|
||||
# Enable this to turn on debugging output
|
||||
@ -262,10 +262,10 @@ class TestFunctional(unittest.TestCase):
|
||||
Temporary directory to use
|
||||
New image filename
|
||||
"""
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
tmpdir = tempfile.mkdtemp(prefix='binman.')
|
||||
updated_fname = os.path.join(tmpdir, 'image-updated.bin')
|
||||
tools.WriteFile(updated_fname, tools.ReadFile(image_fname))
|
||||
tools.write_file(updated_fname, tools.read_file(image_fname))
|
||||
self._CleanupOutputDir()
|
||||
return tmpdir, updated_fname
|
||||
|
||||
@ -492,14 +492,14 @@ class TestFunctional(unittest.TestCase):
use_expanded=use_expanded, extra_indirs=extra_indirs,
threads=threads)
self.assertEqual(0, retcode)
out_dtb_fname = tools.GetOutputFilename('u-boot.dtb.out')
out_dtb_fname = tools.get_output_filename('u-boot.dtb.out')

# Find the (only) image, read it and return its contents
image = control.images['image']
image_fname = tools.GetOutputFilename('image.bin')
image_fname = tools.get_output_filename('image.bin')
self.assertTrue(os.path.exists(image_fname))
if map:
map_fname = tools.GetOutputFilename('image.map')
map_fname = tools.get_output_filename('image.map')
with open(map_fname) as fd:
map_data = fd.read()
else:
@ -578,7 +578,7 @@ class TestFunctional(unittest.TestCase):
Filename of ELF file to use as SPL
"""
TestFunctional._MakeInputFile('spl/u-boot-spl',
tools.ReadFile(cls.ElfTestFile(src_fname)))
tools.read_file(cls.ElfTestFile(src_fname)))

@classmethod
def _SetupTplElf(cls, src_fname='bss_data'):
@ -588,7 +588,7 @@ class TestFunctional(unittest.TestCase):
Filename of ELF file to use as TPL
"""
TestFunctional._MakeInputFile('tpl/u-boot-tpl',
tools.ReadFile(cls.ElfTestFile(src_fname)))
tools.read_file(cls.ElfTestFile(src_fname)))

@classmethod
def _SetupDescriptor(cls):
@ -756,7 +756,7 @@ class TestFunctional(unittest.TestCase):

image = control.images['image1']
self.assertEqual(len(U_BOOT_DATA), image.size)
fname = tools.GetOutputFilename('image1.bin')
fname = tools.get_output_filename('image1.bin')
self.assertTrue(os.path.exists(fname))
with open(fname, 'rb') as fd:
data = fd.read()
@ -764,13 +764,13 @@ class TestFunctional(unittest.TestCase):

image = control.images['image2']
self.assertEqual(3 + len(U_BOOT_DATA) + 5, image.size)
fname = tools.GetOutputFilename('image2.bin')
fname = tools.get_output_filename('image2.bin')
self.assertTrue(os.path.exists(fname))
with open(fname, 'rb') as fd:
data = fd.read()
self.assertEqual(U_BOOT_DATA, data[3:7])
self.assertEqual(tools.GetBytes(0, 3), data[:3])
self.assertEqual(tools.GetBytes(0, 5), data[7:])
self.assertEqual(tools.get_bytes(0, 3), data[:3])
self.assertEqual(tools.get_bytes(0, 5), data[7:])

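The padding assertions above assume tools.get_bytes(byte, size) returns size copies of the given byte value; a hedged sketch of that behaviour and of the pad-before/pad-after arithmetic used by the test (U_BOOT_DATA here is a placeholder, not the test's real input):

# Assumed behaviour of tools.get_bytes(): a run of identical byte values
def get_bytes(byte, size):
    return bytes([byte]) * size

U_BOOT_DATA = b'1234'                # placeholder contents
data = get_bytes(0, 3) + U_BOOT_DATA + get_bytes(0, 5)

assert data[3:7] == U_BOOT_DATA      # entry follows 3 bytes of pad-before
assert data[:3] == get_bytes(0, 3)   # pad-before
assert data[7:] == get_bytes(0, 5)   # pad-after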
||||
def testBadAlign(self):
|
||||
"""Test that an invalid alignment value is detected"""
|
||||
@ -838,8 +838,8 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertEqual(3, entry.pad_before)
|
||||
self.assertEqual(3 + 5 + len(U_BOOT_DATA), entry.size)
|
||||
self.assertEqual(U_BOOT_DATA, entry.data)
|
||||
self.assertEqual(tools.GetBytes(0, 3) + U_BOOT_DATA +
|
||||
tools.GetBytes(0, 5), data[:entry.size])
|
||||
self.assertEqual(tools.get_bytes(0, 3) + U_BOOT_DATA +
|
||||
tools.get_bytes(0, 5), data[:entry.size])
|
||||
pos = entry.size
|
||||
|
||||
# Second u-boot has an aligned size, but it has no effect
|
||||
@ -857,7 +857,7 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertEqual(pos, entry.offset)
|
||||
self.assertEqual(32, entry.size)
|
||||
self.assertEqual(U_BOOT_DATA, entry.data)
|
||||
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 32 - len(U_BOOT_DATA)),
|
||||
self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 32 - len(U_BOOT_DATA)),
|
||||
data[pos:pos + entry.size])
|
||||
pos += entry.size
|
||||
|
||||
@ -867,7 +867,7 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertEqual(48, entry.offset)
|
||||
self.assertEqual(16, entry.size)
|
||||
self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)])
|
||||
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 16 - len(U_BOOT_DATA)),
|
||||
self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 16 - len(U_BOOT_DATA)),
|
||||
data[pos:pos + entry.size])
|
||||
pos += entry.size
|
||||
|
||||
@ -877,7 +877,7 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertEqual(64, entry.offset)
|
||||
self.assertEqual(64, entry.size)
|
||||
self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)])
|
||||
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 64 - len(U_BOOT_DATA)),
|
||||
self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 64 - len(U_BOOT_DATA)),
|
||||
data[pos:pos + entry.size])
|
||||
|
||||
self.CheckNoGaps(entries)
|
||||
@ -997,7 +997,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test that the image pad byte can be specified"""
|
||||
self._SetupSplElf()
|
||||
data = self._DoReadFile('021_image_pad.dts')
|
||||
self.assertEqual(U_BOOT_SPL_DATA + tools.GetBytes(0xff, 1) +
|
||||
self.assertEqual(U_BOOT_SPL_DATA + tools.get_bytes(0xff, 1) +
|
||||
U_BOOT_DATA, data)
|
||||
|
||||
def testImageName(self):
|
||||
@ -1005,11 +1005,11 @@ class TestFunctional(unittest.TestCase):
|
||||
retcode = self._DoTestFile('022_image_name.dts')
|
||||
self.assertEqual(0, retcode)
|
||||
image = control.images['image1']
|
||||
fname = tools.GetOutputFilename('test-name')
|
||||
fname = tools.get_output_filename('test-name')
|
||||
self.assertTrue(os.path.exists(fname))
|
||||
|
||||
image = control.images['image2']
|
||||
fname = tools.GetOutputFilename('test-name.xx')
|
||||
fname = tools.get_output_filename('test-name.xx')
|
||||
self.assertTrue(os.path.exists(fname))
|
||||
|
||||
def testBlobFilename(self):
|
||||
@ -1021,8 +1021,8 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test that entries can be sorted"""
|
||||
self._SetupSplElf()
|
||||
data = self._DoReadFile('024_sorted.dts')
|
||||
self.assertEqual(tools.GetBytes(0, 1) + U_BOOT_SPL_DATA +
|
||||
tools.GetBytes(0, 2) + U_BOOT_DATA, data)
|
||||
self.assertEqual(tools.get_bytes(0, 1) + U_BOOT_SPL_DATA +
|
||||
tools.get_bytes(0, 2) + U_BOOT_DATA, data)
|
||||
|
||||
def testPackZeroOffset(self):
|
||||
"""Test that an entry at offset 0 is not given a new offset"""
|
||||
@ -1065,8 +1065,8 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test that a basic x86 ROM can be created"""
|
||||
self._SetupSplElf()
|
||||
data = self._DoReadFile('029_x86_rom.dts')
|
||||
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 3) + U_BOOT_SPL_DATA +
|
||||
tools.GetBytes(0, 2), data)
|
||||
self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 3) + U_BOOT_SPL_DATA +
|
||||
tools.get_bytes(0, 2), data)
|
||||
|
||||
def testPackX86RomMeNoDesc(self):
|
||||
"""Test that an invalid Intel descriptor entry is detected"""
|
||||
@ -1090,7 +1090,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testPackX86RomMe(self):
|
||||
"""Test that an x86 ROM with an ME region can be created"""
|
||||
data = self._DoReadFile('031_x86_rom_me.dts')
|
||||
expected_desc = tools.ReadFile(self.TestFile('descriptor.bin'))
|
||||
expected_desc = tools.read_file(self.TestFile('descriptor.bin'))
|
||||
if data[:0x1000] != expected_desc:
|
||||
self.fail('Expected descriptor binary at start of image')
|
||||
self.assertEqual(ME_DATA, data[0x1000:0x1000 + len(ME_DATA)])
|
||||
@ -1139,7 +1139,7 @@ class TestFunctional(unittest.TestCase):
|
||||
fdt_len = self.GetFdtLen(dtb_with_ucode)
|
||||
ucode_content = dtb_with_ucode[fdt_len:]
|
||||
ucode_pos = len(nodtb_data) + fdt_len
|
||||
fname = tools.GetOutputFilename('test.dtb')
|
||||
fname = tools.get_output_filename('test.dtb')
|
||||
with open(fname, 'wb') as fd:
|
||||
fd.write(dtb_with_ucode)
|
||||
dtb = fdt.FdtScan(fname)
|
||||
@ -1244,7 +1244,7 @@ class TestFunctional(unittest.TestCase):
|
||||
# ELF file without a '_dt_ucode_base_size' symbol
|
||||
try:
|
||||
TestFunctional._MakeInputFile('u-boot',
|
||||
tools.ReadFile(self.ElfTestFile('u_boot_no_ucode_ptr')))
|
||||
tools.read_file(self.ElfTestFile('u_boot_no_ucode_ptr')))
|
||||
|
||||
with self.assertRaises(ValueError) as e:
|
||||
self._RunPackUbootSingleMicrocode()
|
||||
@ -1254,7 +1254,7 @@ class TestFunctional(unittest.TestCase):
|
||||
finally:
|
||||
# Put the original file back
|
||||
TestFunctional._MakeInputFile('u-boot',
|
||||
tools.ReadFile(self.ElfTestFile('u_boot_ucode_ptr')))
|
||||
tools.read_file(self.ElfTestFile('u_boot_ucode_ptr')))
|
||||
|
||||
def testMicrocodeNotInImage(self):
|
||||
"""Test that microcode must be placed within the image"""
|
||||
@ -1267,7 +1267,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testWithoutMicrocode(self):
|
||||
"""Test that we can cope with an image without microcode (e.g. qemu)"""
|
||||
TestFunctional._MakeInputFile('u-boot',
|
||||
tools.ReadFile(self.ElfTestFile('u_boot_no_ucode_ptr')))
|
||||
tools.read_file(self.ElfTestFile('u_boot_no_ucode_ptr')))
|
||||
data, dtb, _, _ = self._DoReadFileDtb('044_x86_optional_ucode.dts', True)
|
||||
|
||||
# Now check the device tree has no microcode
|
||||
@ -1279,7 +1279,7 @@ class TestFunctional(unittest.TestCase):
|
||||
|
||||
used_len = len(U_BOOT_NODTB_DATA) + fdt_len
|
||||
third = data[used_len:]
|
||||
self.assertEqual(tools.GetBytes(0, 0x200 - used_len), third)
|
||||
self.assertEqual(tools.get_bytes(0, 0x200 - used_len), third)
|
||||
|
||||
def testUnknownPosSize(self):
|
||||
"""Test that microcode must be placed within the image"""
|
||||
@ -1308,7 +1308,7 @@ class TestFunctional(unittest.TestCase):
|
||||
# ELF file with a '__bss_size' symbol
|
||||
self._SetupSplElf()
|
||||
data = self._DoReadFile('047_spl_bss_pad.dts')
|
||||
self.assertEqual(U_BOOT_SPL_DATA + tools.GetBytes(0, 10) + U_BOOT_DATA,
|
||||
self.assertEqual(U_BOOT_SPL_DATA + tools.get_bytes(0, 10) + U_BOOT_DATA,
|
||||
data)
|
||||
|
||||
def testSplBssPadMissing(self):
|
||||
@ -1404,7 +1404,7 @@ class TestFunctional(unittest.TestCase):
|
||||
u_boot_offset + len(U_BOOT_DATA),
|
||||
0x10 + u_boot_offset, 0x04)
|
||||
expected = (sym_values + base_data[20:] +
|
||||
tools.GetBytes(0xff, 1) + U_BOOT_DATA + sym_values +
|
||||
tools.get_bytes(0xff, 1) + U_BOOT_DATA + sym_values +
|
||||
base_data[20:])
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
@ -1426,9 +1426,9 @@ class TestFunctional(unittest.TestCase):
|
||||
def testSections(self):
|
||||
"""Basic test of sections"""
|
||||
data = self._DoReadFile('055_sections.dts')
|
||||
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('a'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('&'), 4))
|
||||
expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('a'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('&'), 4))
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testMap(self):
|
||||
@ -1593,9 +1593,9 @@ class TestFunctional(unittest.TestCase):
|
||||
}
|
||||
data, _, _, _ = self._DoReadFileDtb('066_text.dts',
|
||||
entry_args=entry_args)
|
||||
expected = (tools.ToBytes(TEXT_DATA) +
|
||||
tools.GetBytes(0, 8 - len(TEXT_DATA)) +
|
||||
tools.ToBytes(TEXT_DATA2) + tools.ToBytes(TEXT_DATA3) +
|
||||
expected = (tools.to_bytes(TEXT_DATA) +
|
||||
tools.get_bytes(0, 8 - len(TEXT_DATA)) +
|
||||
tools.to_bytes(TEXT_DATA2) + tools.to_bytes(TEXT_DATA3) +
|
||||
b'some text' + b'more text')
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
@ -1617,8 +1617,8 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Basic test of generation of a flashrom fmap"""
|
||||
data = self._DoReadFile('067_fmap.dts')
|
||||
fhdr, fentries = fmap_util.DecodeFmap(data[32:])
|
||||
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('a'), 12))
|
||||
expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('a'), 12))
|
||||
self.assertEqual(expected, data[:32])
|
||||
self.assertEqual(b'__FMAP__', fhdr.signature)
|
||||
self.assertEqual(1, fhdr.ver_major)
|
||||
@ -1670,7 +1670,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testFill(self):
|
||||
"""Test for an fill entry type"""
|
||||
data = self._DoReadFile('069_fill.dts')
|
||||
expected = tools.GetBytes(0xff, 8) + tools.GetBytes(0, 8)
|
||||
expected = tools.get_bytes(0xff, 8) + tools.get_bytes(0, 8)
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testFillNoSize(self):
|
||||
@ -1700,8 +1700,8 @@ class TestFunctional(unittest.TestCase):
|
||||
data, _, _, _ = self._DoReadFileDtb('071_gbb.dts', entry_args=entry_args)
|
||||
|
||||
# Since futility
|
||||
expected = (GBB_DATA + GBB_DATA + tools.GetBytes(0, 8) +
|
||||
tools.GetBytes(0, 0x2180 - 16))
|
||||
expected = (GBB_DATA + GBB_DATA + tools.get_bytes(0, 8) +
|
||||
tools.get_bytes(0, 0x2180 - 16))
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testGbbTooSmall(self):
|
||||
@ -1751,7 +1751,7 @@ class TestFunctional(unittest.TestCase):
|
||||
if self._hash_data:
|
||||
infile = pipe_list[0][11]
|
||||
m = hashlib.sha256()
|
||||
data = tools.ReadFile(infile)
|
||||
data = tools.read_file(infile)
|
||||
m.update(data)
|
||||
fd.write(m.digest())
|
||||
else:
|
||||
@ -1845,7 +1845,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testFillZero(self):
|
||||
"""Test for an fill entry type with a size of 0"""
|
||||
data = self._DoReadFile('080_fill_empty.dts')
|
||||
self.assertEqual(tools.GetBytes(0, 16), data)
|
||||
self.assertEqual(tools.get_bytes(0, 16), data)
|
||||
|
||||
def testTextMissing(self):
|
||||
"""Test for a text entry type where there is no text"""
|
||||
@ -1875,8 +1875,8 @@ class TestFunctional(unittest.TestCase):
|
||||
else:
|
||||
self.assertNotIn(expected, stdout.getvalue())
|
||||
|
||||
self.assertFalse(os.path.exists(tools.GetOutputFilename('image1.bin')))
|
||||
self.assertTrue(os.path.exists(tools.GetOutputFilename('image2.bin')))
|
||||
self.assertFalse(os.path.exists(tools.get_output_filename('image1.bin')))
|
||||
self.assertTrue(os.path.exists(tools.get_output_filename('image2.bin')))
|
||||
self._CleanupOutputDir()
|
||||
|
||||
def testUpdateFdtAll(self):
|
||||
@ -1933,8 +1933,8 @@ class TestFunctional(unittest.TestCase):
|
||||
'tpl/u-boot-tpl.dtb.out']:
|
||||
dtb = fdt.Fdt.FromData(data[start:])
|
||||
size = dtb._fdt_obj.totalsize()
|
||||
pathname = tools.GetOutputFilename(os.path.split(fname)[1])
|
||||
outdata = tools.ReadFile(pathname)
|
||||
pathname = tools.get_output_filename(os.path.split(fname)[1])
|
||||
outdata = tools.read_file(pathname)
|
||||
name = os.path.split(fname)[0]
|
||||
|
||||
if name:
|
||||
@ -2027,10 +2027,10 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test an expanding entry"""
|
||||
data, _, map_data, _ = self._DoReadFileDtb('088_expand_size.dts',
|
||||
map=True)
|
||||
expect = (tools.GetBytes(ord('a'), 8) + U_BOOT_DATA +
|
||||
MRC_DATA + tools.GetBytes(ord('b'), 1) + U_BOOT_DATA +
|
||||
tools.GetBytes(ord('c'), 8) + U_BOOT_DATA +
|
||||
tools.GetBytes(ord('d'), 8))
|
||||
expect = (tools.get_bytes(ord('a'), 8) + U_BOOT_DATA +
|
||||
MRC_DATA + tools.get_bytes(ord('b'), 1) + U_BOOT_DATA +
|
||||
tools.get_bytes(ord('c'), 8) + U_BOOT_DATA +
|
||||
tools.get_bytes(ord('d'), 8))
|
||||
self.assertEqual(expect, data)
|
||||
self.assertEqual('''ImagePos Offset Size Name
|
||||
00000000 00000000 00000028 main-section
|
||||
@ -2085,7 +2085,7 @@ class TestFunctional(unittest.TestCase):
|
||||
hash_node = dtb.GetNode('/binman/section/hash').props['value']
|
||||
m = hashlib.sha256()
|
||||
m.update(U_BOOT_DATA)
|
||||
m.update(tools.GetBytes(ord('a'), 16))
|
||||
m.update(tools.get_bytes(ord('a'), 16))
|
||||
self.assertEqual(m.digest(), b''.join(hash_node.value))
|
||||
|
||||
def testPackUBootTplMicrocode(self):
|
||||
@ -2107,7 +2107,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Basic test of generation of a flashrom fmap"""
|
||||
data = self._DoReadFile('094_fmap_x86.dts')
|
||||
fhdr, fentries = fmap_util.DecodeFmap(data[32:])
|
||||
expected = U_BOOT_DATA + MRC_DATA + tools.GetBytes(ord('a'), 32 - 7)
|
||||
expected = U_BOOT_DATA + MRC_DATA + tools.get_bytes(ord('a'), 32 - 7)
|
||||
self.assertEqual(expected, data[:32])
|
||||
fhdr, fentries = fmap_util.DecodeFmap(data[32:])
|
||||
|
||||
@ -2129,7 +2129,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testFmapX86Section(self):
|
||||
"""Basic test of generation of a flashrom fmap"""
|
||||
data = self._DoReadFile('095_fmap_x86_section.dts')
|
||||
expected = U_BOOT_DATA + MRC_DATA + tools.GetBytes(ord('b'), 32 - 7)
|
||||
expected = U_BOOT_DATA + MRC_DATA + tools.get_bytes(ord('b'), 32 - 7)
|
||||
self.assertEqual(expected, data[:32])
|
||||
fhdr, fentries = fmap_util.DecodeFmap(data[36:])
|
||||
|
||||
@ -2177,14 +2177,14 @@ class TestFunctional(unittest.TestCase):
|
||||
with test_util.capture_sys_output() as (stdout, stderr):
|
||||
with self.assertRaises(ValueError) as e:
|
||||
self._DoTestFile('014_pack_overlap.dts', map=True)
|
||||
map_fname = tools.GetOutputFilename('image.map')
|
||||
map_fname = tools.get_output_filename('image.map')
|
||||
self.assertEqual("Wrote map file '%s' to show errors\n" % map_fname,
|
||||
stdout.getvalue())
|
||||
|
||||
# We should not get an image, but there should be a map file
|
||||
self.assertFalse(os.path.exists(tools.GetOutputFilename('image.bin')))
|
||||
self.assertFalse(os.path.exists(tools.get_output_filename('image.bin')))
|
||||
self.assertTrue(os.path.exists(map_fname))
|
||||
map_data = tools.ReadFile(map_fname, binary=False)
|
||||
map_data = tools.read_file(map_fname, binary=False)
|
||||
self.assertEqual('''ImagePos Offset Size Name
|
||||
<none> 00000000 00000008 main-section
|
||||
<none> 00000000 00000004 u-boot
|
||||
@ -2210,12 +2210,12 @@ class TestFunctional(unittest.TestCase):
|
||||
0000002c 00000000 00000004 u-boot
|
||||
''', map_data)
|
||||
self.assertEqual(data,
|
||||
tools.GetBytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.GetBytes(0x21, 12) +
|
||||
tools.GetBytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.GetBytes(0x61, 12) +
|
||||
tools.GetBytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.GetBytes(0x26, 8))
|
||||
tools.get_bytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.get_bytes(0x21, 12) +
|
||||
tools.get_bytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.get_bytes(0x61, 12) +
|
||||
tools.get_bytes(0x26, 4) + U_BOOT_DATA +
|
||||
tools.get_bytes(0x26, 8))
|
||||
|
||||
def testCbfsRaw(self):
|
||||
"""Test base handling of a Coreboot Filesystem (CBFS)
|
||||
@ -2332,17 +2332,17 @@ class TestFunctional(unittest.TestCase):
|
||||
Args:
|
||||
data: Contents of output file
|
||||
"""
|
||||
expected_desc = tools.ReadFile(self.TestFile('descriptor.bin'))
|
||||
expected_desc = tools.read_file(self.TestFile('descriptor.bin'))
|
||||
if data[:0x1000] != expected_desc:
|
||||
self.fail('Expected descriptor binary at start of image')
|
||||
|
||||
# We expect to find the TPL in subpart IBBP entry IBBL
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
tpl_fname = tools.GetOutputFilename('tpl.out')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
tpl_fname = tools.get_output_filename('tpl.out')
|
||||
ifwitool = bintool.Bintool.create('ifwitool')
|
||||
ifwitool.extract(image_fname, 'IBBP', 'IBBL', tpl_fname)
|
||||
|
||||
tpl_data = tools.ReadFile(tpl_fname)
|
||||
tpl_data = tools.read_file(tpl_fname)
|
||||
self.assertEqual(U_BOOT_TPL_DATA, tpl_data[:len(U_BOOT_TPL_DATA)])
|
||||
|
||||
def testPackX86RomIfwi(self):
|
||||
@ -2403,7 +2403,7 @@ class TestFunctional(unittest.TestCase):
|
||||
fdtmap_data = data[len(U_BOOT_DATA):]
|
||||
magic = fdtmap_data[:8]
|
||||
self.assertEqual(b'_FDTMAP_', magic)
|
||||
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16])
|
||||
self.assertEqual(tools.get_bytes(0, 8), fdtmap_data[8:16])
|
||||
|
||||
fdt_data = fdtmap_data[16:]
|
||||
dtb = fdt.Fdt.FromData(fdt_data)
|
||||
@ -2668,7 +2668,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test reading an image and accessing its FDT map"""
|
||||
self._CheckLz4()
|
||||
data = self.data = self._DoReadFileRealDtb('128_decode_image.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
orig_image = control.images['image']
|
||||
image = Image.FromFile(image_fname)
|
||||
self.assertEqual(orig_image.GetEntries().keys(),
|
||||
@ -2684,7 +2684,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test accessing an image's FDT map without an image header"""
|
||||
self._CheckLz4()
|
||||
data = self._DoReadFileRealDtb('129_decode_image_nohdr.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
image = Image.FromFile(image_fname)
|
||||
self.assertTrue(isinstance(image, Image))
|
||||
self.assertEqual('image', image.image_name[-5:])
|
||||
@ -2692,7 +2692,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testReadImageFail(self):
|
||||
"""Test failing to read an image image's FDT map"""
|
||||
self._DoReadFile('005_simple.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
image = Image.FromFile(image_fname)
|
||||
self.assertIn("Cannot find FDT map in image", str(e.exception))
|
||||
@ -2752,7 +2752,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
image = Image.FromFile(image_fname)
|
||||
lines = image.GetListEntries(paths)[1]
|
||||
files = [line[0].strip() for line in lines[1:]]
|
||||
@ -2798,7 +2798,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
return control.ReadEntry(image_fname, entry_name, decomp)
|
||||
|
||||
def testExtractSimple(self):
|
||||
@ -2858,7 +2858,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testExtractBadFile(self):
|
||||
"""Test extracting an invalid file"""
|
||||
fname = os.path.join(self._indir, 'badfile')
|
||||
tools.WriteFile(fname, b'')
|
||||
tools.write_file(fname, b'')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
control.ReadEntry(fname, 'name')
|
||||
|
||||
@ -2874,17 +2874,17 @@ class TestFunctional(unittest.TestCase):
|
||||
'-f', fname)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
data = tools.ReadFile(fname)
|
||||
data = tools.read_file(fname)
|
||||
self.assertEqual(U_BOOT_DATA, data)
|
||||
|
||||
def testExtractOneEntry(self):
|
||||
"""Test extracting a single entry fron an image """
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
fname = os.path.join(self._indir, 'output.extact')
|
||||
control.ExtractEntries(image_fname, fname, None, ['u-boot'])
|
||||
data = tools.ReadFile(fname)
|
||||
data = tools.read_file(fname)
|
||||
self.assertEqual(U_BOOT_DATA, data)
|
||||
|
||||
def _CheckExtractOutput(self, decomp):
|
||||
@ -2906,7 +2906,7 @@ class TestFunctional(unittest.TestCase):
|
||||
expect_size: Size of data to expect in file, or None to skip
|
||||
"""
|
||||
path = os.path.join(outdir, entry_path)
|
||||
data = tools.ReadFile(path)
|
||||
data = tools.read_file(path)
|
||||
os.remove(path)
|
||||
if expect_data:
|
||||
self.assertEqual(expect_data, data)
|
||||
@ -2926,7 +2926,7 @@ class TestFunctional(unittest.TestCase):
|
||||
os.rmdir(path)
|
||||
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
outdir = os.path.join(self._indir, 'extract')
|
||||
einfos = control.ExtractEntries(image_fname, None, outdir, [], decomp)
|
||||
|
||||
@ -2962,7 +2962,7 @@ class TestFunctional(unittest.TestCase):
|
||||
_CheckPresent('section/root', section.data)
|
||||
cbfs = section_entries['cbfs']
|
||||
_CheckPresent('section/cbfs/root', cbfs.data)
|
||||
data = tools.ReadFile(image_fname)
|
||||
data = tools.read_file(image_fname)
|
||||
_CheckPresent('root', data)
|
||||
|
||||
# There should be no files left. Remove all the directories to check.
|
||||
@ -2987,7 +2987,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test extracting some entries"""
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
outdir = os.path.join(self._indir, 'extract')
|
||||
einfos = control.ExtractEntries(image_fname, None, outdir,
|
||||
['*cb*', '*head*'])
|
||||
@ -3002,7 +3002,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test extracting some entries"""
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
control.ExtractEntries(image_fname, 'fname', None, [])
|
||||
self.assertIn('Must specify an entry path to write with -f',
|
||||
@ -3012,7 +3012,7 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test extracting some entries"""
|
||||
self._CheckLz4()
|
||||
self._DoReadFileRealDtb('130_list_fdtmap.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
control.ExtractEntries(image_fname, 'fname', None, ['a', 'b'])
|
||||
self.assertIn('Must specify exactly one entry path to write with -f',
|
||||
@ -3113,9 +3113,9 @@ class TestFunctional(unittest.TestCase):
|
||||
orig_dtb_data = entries['u-boot-dtb'].data
|
||||
orig_fdtmap_data = entries['fdtmap'].data
|
||||
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, tools.ReadFile(image_fname))
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, tools.read_file(image_fname))
|
||||
image = control.WriteEntry(updated_fname, entry_name, data, decomp,
|
||||
allow_resize)
|
||||
data = control.ReadEntry(updated_fname, entry_name, decomp)
|
||||
@ -3170,8 +3170,8 @@ class TestFunctional(unittest.TestCase):
|
||||
data = self._DoReadFileDtb('133_replace_multi.dts', use_real_dtb=True,
|
||||
update_dtb=True)[0]
|
||||
expected = b'x' * len(U_BOOT_DATA)
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, data)
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, data)
|
||||
entry_name = 'u-boot'
|
||||
control.WriteEntry(updated_fname, entry_name, expected,
|
||||
allow_resize=False)
|
||||
@ -3182,9 +3182,9 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertEqual('/binman/image', state.fdt_path_prefix)
|
||||
|
||||
# Now check we can write the first image
|
||||
image_fname = tools.GetOutputFilename('first-image.bin')
|
||||
updated_fname = tools.GetOutputFilename('first-updated.bin')
|
||||
tools.WriteFile(updated_fname, tools.ReadFile(image_fname))
|
||||
image_fname = tools.get_output_filename('first-image.bin')
|
||||
updated_fname = tools.get_output_filename('first-updated.bin')
|
||||
tools.write_file(updated_fname, tools.read_file(image_fname))
|
||||
entry_name = 'u-boot'
|
||||
control.WriteEntry(updated_fname, entry_name, expected,
|
||||
allow_resize=False)
|
||||
@ -3348,8 +3348,8 @@ class TestFunctional(unittest.TestCase):
|
||||
self._CheckLz4()
|
||||
expected = b'x' * len(U_BOOT_DATA)
|
||||
data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, data)
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, data)
|
||||
entry_name = 'section/cbfs/u-boot'
|
||||
control.WriteEntry(updated_fname, entry_name, expected,
|
||||
allow_resize=True)
|
||||
@ -3361,8 +3361,8 @@ class TestFunctional(unittest.TestCase):
|
||||
self._CheckLz4()
|
||||
expected = U_BOOT_DATA + b'x'
|
||||
data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, data)
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, data)
|
||||
entry_name = 'section/cbfs/u-boot'
|
||||
control.WriteEntry(updated_fname, entry_name, expected,
|
||||
allow_resize=True)
|
||||
@ -3383,23 +3383,23 @@ class TestFunctional(unittest.TestCase):
|
||||
"""
|
||||
data = self._DoReadFileRealDtb('143_replace_all.dts')
|
||||
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, data)
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, data)
|
||||
|
||||
outdir = os.path.join(self._indir, 'extract')
|
||||
einfos = control.ExtractEntries(updated_fname, None, outdir, [])
|
||||
|
||||
expected1 = b'x' + U_BOOT_DATA + b'y'
|
||||
u_boot_fname1 = os.path.join(outdir, 'u-boot')
|
||||
tools.WriteFile(u_boot_fname1, expected1)
|
||||
tools.write_file(u_boot_fname1, expected1)
|
||||
|
||||
expected2 = b'a' + U_BOOT_DATA + b'b'
|
||||
u_boot_fname2 = os.path.join(outdir, 'u-boot2')
|
||||
tools.WriteFile(u_boot_fname2, expected2)
|
||||
tools.write_file(u_boot_fname2, expected2)
|
||||
|
||||
expected_text = b'not the same text'
|
||||
text_fname = os.path.join(outdir, 'text')
|
||||
tools.WriteFile(text_fname, expected_text)
|
||||
tools.write_file(text_fname, expected_text)
|
||||
|
||||
dtb_fname = os.path.join(outdir, 'u-boot-dtb')
|
||||
dtb = fdt.FdtScan(dtb_fname)
|
||||
@ -3475,10 +3475,10 @@ class TestFunctional(unittest.TestCase):
|
||||
|
||||
fname = os.path.join(tmpdir, 'update-u-boot.bin')
|
||||
expected = b'x' * len(U_BOOT_DATA)
|
||||
tools.WriteFile(fname, expected)
|
||||
tools.write_file(fname, expected)
|
||||
|
||||
self._DoBinman('replace', '-i', updated_fname, 'u-boot', '-f', fname)
|
||||
data = tools.ReadFile(updated_fname)
|
||||
data = tools.read_file(updated_fname)
|
||||
self.assertEqual(expected, data[:len(expected)])
|
||||
map_fname = os.path.join(tmpdir, 'image-updated.map')
|
||||
self.assertFalse(os.path.exists(map_fname))
|
||||
@ -3493,7 +3493,7 @@ class TestFunctional(unittest.TestCase):
|
||||
self._DoBinman('replace', '-i', updated_fname, '-I', outdir,
|
||||
'u-boot2', 'text')
|
||||
|
||||
tools.PrepareOutputDir(None)
|
||||
tools.prepare_output_dir(None)
|
||||
image = Image.FromFile(updated_fname)
|
||||
image.LoadData()
|
||||
entries = image.GetEntries()
|
||||
@ -3531,7 +3531,7 @@ class TestFunctional(unittest.TestCase):
|
||||
|
||||
fname = os.path.join(self._indir, 'update-u-boot.bin')
|
||||
expected = b'x' * len(U_BOOT_DATA)
|
||||
tools.WriteFile(fname, expected)
|
||||
tools.write_file(fname, expected)
|
||||
|
||||
self._DoBinman('replace', '-i', updated_fname, 'u-boot',
|
||||
'-f', fname, '-m')
|
||||
@ -3543,7 +3543,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testReplaceNoEntryPaths(self):
|
||||
"""Test replacing an entry without an entry path"""
|
||||
self._DoReadFileRealDtb('143_replace_all.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
control.ReplaceEntries(image_fname, 'fname', None, [])
|
||||
self.assertIn('Must specify an entry path to read with -f',
|
||||
@ -3552,7 +3552,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testReplaceTooManyEntryPaths(self):
|
||||
"""Test extracting some entries"""
|
||||
self._DoReadFileRealDtb('143_replace_all.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
with self.assertRaises(ValueError) as e:
|
||||
control.ReplaceEntries(image_fname, 'fname', None, ['a', 'b'])
|
||||
self.assertIn('Must specify exactly one entry path to write with -f',
|
||||
@ -3597,15 +3597,15 @@ class TestFunctional(unittest.TestCase):
|
||||
data = self._DoReadFile(dts)
|
||||
sym_values = struct.pack('<LQLL', *expected_vals)
|
||||
upto1 = 4 + len(U_BOOT_SPL_DATA)
|
||||
expected1 = tools.GetBytes(0xff, 4) + sym_values + U_BOOT_SPL_DATA[20:]
|
||||
expected1 = tools.get_bytes(0xff, 4) + sym_values + U_BOOT_SPL_DATA[20:]
|
||||
self.assertEqual(expected1, data[:upto1])
|
||||
|
||||
upto2 = upto1 + 1 + len(U_BOOT_SPL_DATA)
|
||||
expected2 = tools.GetBytes(0xff, 1) + sym_values + U_BOOT_SPL_DATA[20:]
|
||||
expected2 = tools.get_bytes(0xff, 1) + sym_values + U_BOOT_SPL_DATA[20:]
|
||||
self.assertEqual(expected2, data[upto1:upto2])
|
||||
|
||||
upto3 = 0x34 + len(U_BOOT_DATA)
|
||||
expected3 = tools.GetBytes(0xff, 1) + U_BOOT_DATA
|
||||
expected3 = tools.get_bytes(0xff, 1) + U_BOOT_DATA
|
||||
self.assertEqual(expected3, data[upto2:upto3])
|
||||
|
||||
expected4 = sym_values + U_BOOT_TPL_DATA[20:]
|
||||
@ -3727,8 +3727,8 @@ class TestFunctional(unittest.TestCase):
|
||||
self.assertIn('data', fnode.props)
|
||||
|
||||
fname = os.path.join(self._indir, 'fit_data.fit')
|
||||
tools.WriteFile(fname, fit_data)
|
||||
out = tools.Run('dumpimage', '-l', fname)
|
||||
tools.write_file(fname, fit_data)
|
||||
out = tools.run('dumpimage', '-l', fname)
|
||||
|
||||
# Check a few features to make sure the plumbing works. We don't need
|
||||
# to test the operation of mkimage or dumpimage here. First convert the
|
||||
@ -3763,7 +3763,7 @@ class TestFunctional(unittest.TestCase):
|
||||
# Size of the external-data region as set up by mkimage
|
||||
external_data_size = len(U_BOOT_DATA) + 2
|
||||
expected_size = (len(U_BOOT_DATA) + 0x400 +
|
||||
tools.Align(external_data_size, 4) +
|
||||
tools.align(external_data_size, 4) +
|
||||
len(U_BOOT_NODTB_DATA))
|
||||
|
||||
# The data should be outside the FIT
|
||||
@ -3802,8 +3802,8 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test pad-before, pad-after for entries in sections"""
|
||||
data, _, _, out_dtb_fname = self._DoReadFileDtb(
|
||||
'166_pad_in_sections.dts', update_dtb=True)
|
||||
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('!'), 6) +
|
||||
expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('!'), 6) +
|
||||
U_BOOT_DATA)
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
@ -3846,14 +3846,14 @@ class TestFunctional(unittest.TestCase):
|
||||
node = dtb.GetNode('/images/kernel')
|
||||
data = dtb.GetProps(node)["data"].bytes
|
||||
align_pad = 0x10 - (len(U_BOOT_SPL_DATA) % 0x10)
|
||||
expected = (tools.GetBytes(0, 0x20) + U_BOOT_SPL_DATA +
|
||||
tools.GetBytes(0, align_pad) + U_BOOT_DATA)
|
||||
expected = (tools.get_bytes(0, 0x20) + U_BOOT_SPL_DATA +
|
||||
tools.get_bytes(0, align_pad) + U_BOOT_DATA)
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
node = dtb.GetNode('/images/fdt-1')
|
||||
data = dtb.GetProps(node)["data"].bytes
|
||||
expected = (U_BOOT_SPL_DTB_DATA + tools.GetBytes(0, 20) +
|
||||
tools.ToBytes(TEXT_DATA) + tools.GetBytes(0, 30) +
|
||||
expected = (U_BOOT_SPL_DTB_DATA + tools.get_bytes(0, 20) +
|
||||
tools.to_bytes(TEXT_DATA) + tools.get_bytes(0, 30) +
|
||||
U_BOOT_DTB_DATA)
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
@ -4069,8 +4069,8 @@ class TestFunctional(unittest.TestCase):
|
||||
def testSkipAtStartPad(self):
|
||||
"""Test handling of skip-at-start section with padded entry"""
|
||||
data = self._DoReadFile('178_skip_at_start_pad.dts')
|
||||
before = tools.GetBytes(0, 8)
|
||||
after = tools.GetBytes(0, 4)
|
||||
before = tools.get_bytes(0, 8)
|
||||
after = tools.get_bytes(0, 4)
|
||||
all = before + U_BOOT_DATA + after
|
||||
self.assertEqual(all, data)
|
||||
|
||||
@ -4089,8 +4089,8 @@ class TestFunctional(unittest.TestCase):
|
||||
def testSkipAtStartSectionPad(self):
|
||||
"""Test handling of skip-at-start section with padding"""
|
||||
data = self._DoReadFile('179_skip_at_start_section_pad.dts')
|
||||
before = tools.GetBytes(0, 8)
|
||||
after = tools.GetBytes(0, 4)
|
||||
before = tools.get_bytes(0, 8)
|
||||
after = tools.get_bytes(0, 4)
|
||||
all = before + U_BOOT_DATA + after
|
||||
self.assertEqual(all, data)
|
||||
|
||||
@ -4110,23 +4110,23 @@ class TestFunctional(unittest.TestCase):
|
||||
def testSectionPad(self):
|
||||
"""Testing padding with sections"""
|
||||
data = self._DoReadFile('180_section_pad.dts')
|
||||
expected = (tools.GetBytes(ord('&'), 3) +
|
||||
tools.GetBytes(ord('!'), 5) +
|
||||
expected = (tools.get_bytes(ord('&'), 3) +
|
||||
tools.get_bytes(ord('!'), 5) +
|
||||
U_BOOT_DATA +
|
||||
tools.GetBytes(ord('!'), 1) +
|
||||
tools.GetBytes(ord('&'), 2))
|
||||
tools.get_bytes(ord('!'), 1) +
|
||||
tools.get_bytes(ord('&'), 2))
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testSectionAlign(self):
|
||||
"""Testing alignment with sections"""
|
||||
data = self._DoReadFileDtb('181_section_align.dts', map=True)[0]
|
||||
expected = (b'\0' + # fill section
|
||||
tools.GetBytes(ord('&'), 1) + # padding to section align
|
||||
tools.get_bytes(ord('&'), 1) + # padding to section align
|
||||
b'\0' + # fill section
|
||||
tools.GetBytes(ord('!'), 3) + # padding to u-boot align
|
||||
tools.get_bytes(ord('!'), 3) + # padding to u-boot align
|
||||
U_BOOT_DATA +
|
||||
tools.GetBytes(ord('!'), 4) + # padding to u-boot size
|
||||
tools.GetBytes(ord('!'), 4)) # padding to section size
|
||||
tools.get_bytes(ord('!'), 4) + # padding to u-boot size
|
||||
tools.get_bytes(ord('!'), 4)) # padding to section size
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testCompressImage(self):
|
||||
@ -4357,7 +4357,7 @@ class TestFunctional(unittest.TestCase):
|
||||
'188_image_entryarg.dts',use_real_dtb=True, update_dtb=True,
|
||||
entry_args=entry_args)
|
||||
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
orig_image = control.images['image']
|
||||
|
||||
# This should not generate an error about the missing 'cros-ec-rw-path'
|
||||
@ -4378,7 +4378,7 @@ class TestFunctional(unittest.TestCase):
|
||||
def testReadImageSkip(self):
|
||||
"""Test reading an image and accessing its FDT map"""
|
||||
data = self.data = self._DoReadFileRealDtb('191_read_image_skip.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
orig_image = control.images['image']
|
||||
image = Image.FromFile(image_fname)
|
||||
self.assertEqual(orig_image.GetEntries().keys(),
|
||||
@ -4406,7 +4406,7 @@ class TestFunctional(unittest.TestCase):
|
||||
# ELF file with a '__bss_size' symbol
|
||||
self._SetupTplElf()
|
||||
data = self._DoReadFile('193_tpl_bss_pad.dts')
|
||||
self.assertEqual(U_BOOT_TPL_DATA + tools.GetBytes(0, 10) + U_BOOT_DATA,
|
||||
self.assertEqual(U_BOOT_TPL_DATA + tools.get_bytes(0, 10) + U_BOOT_DATA,
|
||||
data)
|
||||
|
||||
def testTplBssPadMissing(self):
|
||||
@ -4605,8 +4605,8 @@ class TestFunctional(unittest.TestCase):
|
||||
"""Test a collection"""
|
||||
data = self._DoReadFile('198_collection.dts')
|
||||
self.assertEqual(U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA +
|
||||
tools.GetBytes(0xff, 2) + U_BOOT_NODTB_DATA +
|
||||
tools.GetBytes(0xfe, 3) + U_BOOT_DTB_DATA,
|
||||
tools.get_bytes(0xff, 2) + U_BOOT_NODTB_DATA +
|
||||
tools.get_bytes(0xfe, 3) + U_BOOT_DTB_DATA,
|
||||
data)
|
||||
|
||||
def testCollectionSection(self):
|
||||
@ -4617,21 +4617,21 @@ class TestFunctional(unittest.TestCase):
|
||||
# missing.
|
||||
data = self._DoReadFile('199_collection_section.dts')
|
||||
section = U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA
|
||||
self.assertEqual(section + U_BOOT_DATA + tools.GetBytes(0xff, 2) +
|
||||
section + tools.GetBytes(0xfe, 3) + U_BOOT_DATA,
|
||||
self.assertEqual(section + U_BOOT_DATA + tools.get_bytes(0xff, 2) +
|
||||
section + tools.get_bytes(0xfe, 3) + U_BOOT_DATA,
|
||||
data)
|
||||
|
||||
def testAlignDefault(self):
|
||||
"""Test that default alignment works on sections"""
|
||||
data = self._DoReadFile('200_align_default.dts')
|
||||
expected = (U_BOOT_DATA + tools.GetBytes(0, 8 - len(U_BOOT_DATA)) +
|
||||
expected = (U_BOOT_DATA + tools.get_bytes(0, 8 - len(U_BOOT_DATA)) +
|
||||
U_BOOT_DATA)
|
||||
# Special alignment for section
|
||||
expected += tools.GetBytes(0, 32 - len(expected))
|
||||
expected += tools.get_bytes(0, 32 - len(expected))
|
||||
# No alignment within the nested section
|
||||
expected += U_BOOT_DATA + U_BOOT_NODTB_DATA;
|
||||
# Now the final piece, which should be default-aligned
|
||||
expected += tools.GetBytes(0, 88 - len(expected)) + U_BOOT_NODTB_DATA
|
||||
expected += tools.get_bytes(0, 88 - len(expected)) + U_BOOT_NODTB_DATA
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testPackOpenSBI(self):
|
||||
@ -4642,9 +4642,9 @@ class TestFunctional(unittest.TestCase):
|
||||
def testSectionsSingleThread(self):
|
||||
"""Test sections without multithreading"""
|
||||
data = self._DoReadFileDtb('055_sections.dts', threads=0)[0]
|
||||
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('a'), 12) +
|
||||
U_BOOT_DATA + tools.GetBytes(ord('&'), 4))
|
||||
expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('a'), 12) +
|
||||
U_BOOT_DATA + tools.get_bytes(ord('&'), 4))
|
||||
self.assertEqual(expected, data)
|
||||
|
||||
def testThreadTimeout(self):
|
||||
@ -4677,7 +4677,7 @@ class TestFunctional(unittest.TestCase):
|
||||
# definition in the correct place
|
||||
syms = elf.GetSymbolFileOffset(infile,
|
||||
['dtb_embed_begin', 'dtb_embed_end'])
|
||||
data = tools.ReadFile(outfile)
|
||||
data = tools.read_file(outfile)
|
||||
dtb_data = data[syms['dtb_embed_begin'].offset:
|
||||
syms['dtb_embed_end'].offset]
|
||||
|
||||
@ -4756,7 +4756,7 @@ class TestFunctional(unittest.TestCase):
|
||||
|
||||
# Set up a version file to make sure that works
|
||||
version = 'v2025.01-rc2'
|
||||
tools.WriteFile(os.path.join(self._indir, 'version'), version,
|
||||
tools.write_file(os.path.join(self._indir, 'version'), version,
|
||||
binary=False)
|
||||
self.assertEqual(version, state.GetVersion(self._indir))
|
||||
|
||||
@ -4780,7 +4780,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
|
||||
# Check that we can read it and it can be scanned, meaning it does
|
||||
# not have a 16-byte fdtmap header
|
||||
data = tools.ReadFile(dtb)
|
||||
data = tools.read_file(dtb)
|
||||
dtb = fdt.Fdt.FromData(data)
|
||||
dtb.Scan()
|
||||
|
||||
@ -4788,7 +4788,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
fname = os.path.join(tmpdir, 'fdt.dtb')
|
||||
self._DoBinman('extract', '-i', updated_fname, '-F', 'dummy',
|
||||
'-f', fname, 'u-boot')
|
||||
data = tools.ReadFile(fname)
|
||||
data = tools.read_file(fname)
|
||||
self.assertEqual(U_BOOT_DATA, data)
|
||||
|
||||
finally:
|
||||
@ -4917,7 +4917,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
fdtmap_data = data[fdtmap.image_pos:fdtmap.image_pos + fdtmap.size]
|
||||
magic = fdtmap_data[:8]
|
||||
self.assertEqual(b'_FDTMAP_', magic)
|
||||
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16])
|
||||
self.assertEqual(tools.get_bytes(0, 8), fdtmap_data[8:16])
|
||||
|
||||
fdt_data = fdtmap_data[16:]
|
||||
dtb = fdt.Fdt.FromData(fdt_data)
|
||||
@ -4944,25 +4944,25 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
def testFipExtractOneEntry(self):
|
||||
"""Test extracting a single entry fron an FIP"""
|
||||
self._DoReadFileRealDtb('207_fip_ls.dts')
|
||||
image_fname = tools.GetOutputFilename('image.bin')
|
||||
image_fname = tools.get_output_filename('image.bin')
|
||||
fname = os.path.join(self._indir, 'output.extact')
|
||||
control.ExtractEntries(image_fname, fname, None, ['atf-fip/u-boot'])
|
||||
data = tools.ReadFile(fname)
|
||||
data = tools.read_file(fname)
|
||||
self.assertEqual(U_BOOT_DATA, data)
|
||||
|
||||
def testFipReplace(self):
|
||||
"""Test replacing a single file in a FIP"""
|
||||
expected = U_BOOT_DATA + tools.GetBytes(0x78, 50)
|
||||
expected = U_BOOT_DATA + tools.get_bytes(0x78, 50)
|
||||
data = self._DoReadFileRealDtb('208_fip_replace.dts')
|
||||
updated_fname = tools.GetOutputFilename('image-updated.bin')
|
||||
tools.WriteFile(updated_fname, data)
|
||||
updated_fname = tools.get_output_filename('image-updated.bin')
|
||||
tools.write_file(updated_fname, data)
|
||||
entry_name = 'atf-fip/u-boot'
|
||||
control.WriteEntry(updated_fname, entry_name, expected,
|
||||
allow_resize=True)
|
||||
actual = control.ReadEntry(updated_fname, entry_name)
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
new_data = tools.ReadFile(updated_fname)
|
||||
new_data = tools.read_file(updated_fname)
|
||||
hdr, fents = fip_util.decode_fip(new_data)
|
||||
|
||||
self.assertEqual(2, len(fents))
|
||||
@ -4999,7 +4999,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
self.assertEqual(True, fent.valid)
|
||||
|
||||
rest = data[0x60 + len(ATF_BL31_DATA):0x100]
|
||||
self.assertEqual(tools.GetBytes(0xff, len(rest)), rest)
|
||||
self.assertEqual(tools.get_bytes(0xff, len(rest)), rest)
|
||||
|
||||
def testFipBadAlign(self):
|
||||
"""Test that an invalid alignment value in a FIP is detected"""
|
||||
@ -5055,7 +5055,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
|
||||
def testFetchBintools(self):
|
||||
def fail_download(url):
|
||||
"""Take the tools.Download() function by raising an exception"""
|
||||
"""Take the tools.download() function by raising an exception"""
|
||||
raise urllib.error.URLError('my error')
|
||||
|
||||
args = ['tool']
|
||||
@ -5070,7 +5070,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
|
||||
self.assertIn('Please specify bintools to fetch', str(e.exception))
|
||||
|
||||
args = ['tool', '--fetch', '_testing']
|
||||
with unittest.mock.patch.object(tools, 'Download',
|
||||
with unittest.mock.patch.object(tools, 'download',
|
||||
side_effect=fail_download):
|
||||
with test_util.capture_sys_output() as (stdout, _):
|
||||
self._DoBinman(*args)
|
||||
|

@ -111,7 +111,7 @@ class Image(section.Entry_section):
Raises:
ValueError if something goes wrong
"""
data = tools.ReadFile(fname)
data = tools.read_file(fname)
size = len(data)

# First look for an image header
@ -128,8 +128,8 @@ class Image(section.Entry_section):
dtb_size = probe_dtb.GetFdtObj().totalsize()
fdtmap_data = data[pos:pos + dtb_size + fdtmap.FDTMAP_HDR_LEN]
fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:]
out_fname = tools.GetOutputFilename('fdtmap.in.dtb')
tools.WriteFile(out_fname, fdt_data)
out_fname = tools.get_output_filename('fdtmap.in.dtb')
tools.write_file(out_fname, fdt_data)
dtb = fdt.Fdt(out_fname)
dtb.Scan()

@ -174,7 +174,7 @@ class Image(section.Entry_section):

def BuildImage(self):
"""Write the image to a file"""
fname = tools.GetOutputFilename(self._filename)
fname = tools.get_output_filename(self._filename)
tout.Info("Writing image to '%s'" % fname)
with open(fname, 'wb') as fd:
data = self.GetPaddedData()
@ -188,7 +188,7 @@ class Image(section.Entry_section):
Filename of map file written
"""
filename = '%s.map' % self.image_name
fname = tools.GetOutputFilename(filename)
fname = tools.get_output_filename(filename)
with open(fname, 'w') as fd:
print('%8s %8s %8s %s' % ('ImagePos', 'Offset', 'Size', 'Name'),
file=fd)

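The call-site changes in this series are mechanical, so a rough sketch of how the tools.* renames could be applied with a regular expression may help review; this assumes a plain CamelCase-to-snake_case rule and is not the script actually used:

import re

def to_snake(name):
    # GetOutputFilename -> get_output_filename, ReadFile -> read_file
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', name).lower()

def convert_calls(source):
    # Rewrite tools.CamelCase(...) call sites only, leaving other code alone
    return re.sub(r'\btools\.([A-Z]\w*)\(',
                  lambda m: 'tools.%s(' % to_snake(m.group(1)),
                  source)

print(convert_calls('data = tools.ReadFile(fname)'))
# data = tools.read_file(fname)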
@ -138,8 +138,8 @@ def GetFdtContents(etype='u-boot-dtb'):
data = GetFdtForEtype(etype).GetContents()
else:
fname = output_fdt_info[etype][1]
pathname = tools.GetInputFilename(fname)
data = tools.ReadFile(pathname)
pathname = tools.get_input_filename(fname)
data = tools.read_file(pathname)
return pathname, data

def UpdateFdtContents(etype, data):
@ -154,7 +154,7 @@ def UpdateFdtContents(etype, data):
"""
dtb, fname = output_fdt_info[etype]
dtb_fname = dtb.GetFilename()
tools.WriteFile(dtb_fname, data)
tools.write_file(dtb_fname, data)
dtb = fdt.FdtScan(dtb_fname)
output_fdt_info[etype] = [dtb, fname]

@ -235,12 +235,12 @@ def Prepare(images, dtb):
else:
fdt_set = {}
for etype, fname in DTB_TYPE_FNAME.items():
infile = tools.GetInputFilename(fname, allow_missing=True)
infile = tools.get_input_filename(fname, allow_missing=True)
if infile and os.path.exists(infile):
fname_dtb = fdt_util.EnsureCompiled(infile)
out_fname = tools.GetOutputFilename('%s.out' %
out_fname = tools.get_output_filename('%s.out' %
os.path.split(fname)[1])
tools.WriteFile(out_fname, tools.ReadFile(fname_dtb))
tools.write_file(out_fname, tools.read_file(fname_dtb))
other_dtb = fdt.FdtScan(out_fname)
output_fdt_info[etype] = [other_dtb, out_fname]

@ -271,13 +271,13 @@ def PrepareFromLoadedData(image):
tout.Info(" Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name)
for etype, value in image.GetFdts().items():
entry, fname = value
out_fname = tools.GetOutputFilename('%s.dtb' % entry.etype)
out_fname = tools.get_output_filename('%s.dtb' % entry.etype)
tout.Info(" Found device tree type '%s' at '%s' path '%s'" %
(etype, out_fname, entry.GetPath()))
entry._filename = entry.GetDefaultFilename()
data = entry.ReadData()

tools.WriteFile(out_fname, data)
tools.write_file(out_fname, data)
dtb = fdt.Fdt(out_fname)
dtb.Scan()
image_node = dtb.GetNode('/binman')
@ -529,7 +529,7 @@ def GetVersion(path=OUR_PATH):
"""
version_fname = os.path.join(path, 'version')
if os.path.exists(version_fname):
version = tools.ReadFile(version_fname, binary=False)
version = tools.read_file(version_fname, binary=False)
else:
version = '(unreleased)'
return version

@ -135,7 +135,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
global builder

if options.full_help:
tools.PrintFullHelp(
tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
)
return 0

@ -422,7 +422,7 @@ class TestFunctional(unittest.TestCase):
if arg.startswith('O='):
out_dir = arg[2:]
fname = os.path.join(cwd or '', out_dir, 'u-boot')
tools.WriteFile(fname, b'U-Boot')
tools.write_file(fname, b'U-Boot')
if type(commit) is not str:
stderr = self._error.get((brd.target, commit.sequence))
if stderr:

@ -607,7 +607,7 @@ class TestBuild(unittest.TestCase):

def testPrepareOutputSpace(self):
def _Touch(fname):
tools.WriteFile(os.path.join(base_dir, fname), b'')
tools.write_file(os.path.join(base_dir, fname), b'')

base_dir = tempfile.mkdtemp()

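The toolchain hunk below relies on byte/str conversion helpers; a minimal sketch of the assumed semantics of tools.to_bytes()/tools.to_string() (the real helpers may handle more cases):

# Assumed semantics: encode str to UTF-8 bytes, pass bytes through unchanged
def to_bytes(data):
    return data if isinstance(data, bytes) else data.encode('utf-8')

def to_string(data):
    return data if isinstance(data, str) else data.decode('utf-8')

env = {b'PATH': b'/usr/bin'}
env[b'CROSS_COMPILE'] = to_bytes('ccache ' + 'aarch64-linux-')
env[b'PATH'] = to_bytes('/opt/toolchain/bin') + b':' + env[b'PATH']
assert to_string(env[b'CROSS_COMPILE']) == 'ccache aarch64-linux-'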
@ -201,11 +201,11 @@ class Toolchain:
# We'll use MakeArgs() to provide this
pass
elif full_path:
env[b'CROSS_COMPILE'] = tools.ToBytes(
env[b'CROSS_COMPILE'] = tools.to_bytes(
wrapper + os.path.join(self.path, self.cross))
else:
env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross)
env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH']
env[b'CROSS_COMPILE'] = tools.to_bytes(wrapper + self.cross)
env[b'PATH'] = tools.to_bytes(self.path) + b':' + env[b'PATH']

env[b'LC_ALL'] = b'C'

@ -504,7 +504,7 @@ class Toolchains:
url = '%s/%s/%s/' % (base, arch, version)
print('Checking: %s' % url)
response = urllib.request.urlopen(url)
html = tools.ToString(response.read())
html = tools.to_string(response.read())
parser = MyHTMLParser(fetch_arch)
parser.feed(html)
if fetch_arch == 'list':
@ -571,7 +571,7 @@ class Toolchains:
os.mkdir(dest)

# Download the tar file for this toolchain and unpack it
tarfile, tmpdir = tools.Download(url, '.buildman')
tarfile, tmpdir = tools.download(url, '.buildman')
if not tarfile:
return 1
print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
||||
|

@ -396,7 +396,7 @@ class Node:
prop_name: Name of property
"""
self.props[prop_name] = Prop(self, None, prop_name,
tools.GetBytes(0, 4))
tools.get_bytes(0, 4))

def AddEmptyProp(self, prop_name, len):
"""Add a property with a fixed data size, for filling in later
@ -408,7 +408,7 @@ class Node:
prop_name: Name of property
len: Length of data in property
"""
value = tools.GetBytes(0, len)
value = tools.get_bytes(0, len)
self.props[prop_name] = Prop(self, None, prop_name, value)

def _CheckProp(self, prop_name):

@ -75,12 +75,12 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
dts_input = os.path.join(tmpdir, 'source.dts')
dtb_output = os.path.join(tmpdir, 'source.dtb')
else:
dts_input = tools.GetOutputFilename('source.dts')
dtb_output = tools.GetOutputFilename('source.dtb')
dts_input = tools.get_output_filename('source.dts')
dtb_output = tools.get_output_filename('source.dtb')

search_paths = [os.path.join(os.getcwd(), 'include')]
root, _ = os.path.splitext(fname)
cc, args = tools.GetTargetCompileTool('cc')
cc, args = tools.get_target_compile_tool('cc')
args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__']
args += ['-Ulinux']
for path in search_paths:
@ -92,7 +92,7 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
search_list = []
for path in search_paths:
search_list.extend(['-i', path])
dtc, args = tools.GetTargetCompileTool('dtc')
dtc, args = tools.get_target_compile_tool('dtc')
args += ['-I', 'dts', '-o', dtb_output, '-O', 'dtb',
'-W', 'no-unit_address_vs_reg']
args.extend(search_list)

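The dtoc tests below go through an output-directory lifecycle; a small sketch of the assumed flow of prepare_output_dir()/get_output_filename()/finalise_output_dir(), using a temporary directory rather than the real implementation:

import os
import shutil
import tempfile

outdir = None

def prepare_output_dir(dirname=None):
    # Assumed: create (or adopt) a directory for generated files
    global outdir
    outdir = dirname or tempfile.mkdtemp(prefix='dtoc.')

def get_output_filename(fname):
    return os.path.join(outdir, fname)

def finalise_output_dir():
    shutil.rmtree(outdir)

prepare_output_dir()
output = get_output_filename('output')
with open(output, 'w') as fd:
    fd.write('/* generated */')
finalise_output_dir()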
@ -112,12 +112,12 @@ class TestDtoc(unittest.TestCase):
"""Tests for dtoc"""
@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)
cls.maxDiff = None

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

@staticmethod
def _write_python_string(fname, data):
@ -218,7 +218,7 @@ class TestDtoc(unittest.TestCase):
def test_empty_file(self):
"""Test output from a device tree file with no nodes"""
dtb_file = get_dtb_file('dtoc_test_empty.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')

# Run this one without saved_scan to complete test coverage
dtb_platdata.run_steps(['struct'], dtb_file, False, output, [], None,
||||
@ -801,7 +801,7 @@ DM_DEVICE_INST(test0) = {
|
||||
def test_simple(self):
|
||||
"""Test output from some simple nodes with various types of data"""
|
||||
dtb_file = get_dtb_file('dtoc_test_simple.dts')
|
||||
output = tools.GetOutputFilename('output')
|
||||
output = tools.get_output_filename('output')
|
||||
self.run_test(['struct'], dtb_file, output)
|
||||
with open(output) as infile:
|
||||
data = infile.read()
|
||||
@ -822,14 +822,14 @@ DM_DEVICE_INST(test0) = {
|
||||
|
||||
# Try the 'all' command
|
||||
self.run_test(['all'], dtb_file, output)
|
||||
data = tools.ReadFile(output, binary=False)
|
||||
data = tools.read_file(output, binary=False)
|
||||
self._check_strings(
|
||||
self.decl_text + self.platdata_text + self.struct_text, data)
|
||||
|
||||
def test_driver_alias(self):
|
||||
"""Test output from a device tree file with a driver alias"""
|
||||
dtb_file = get_dtb_file('dtoc_test_driver_alias.dts')
|
||||
output = tools.GetOutputFilename('output')
|
||||
output = tools.get_output_filename('output')
|
||||
self.run_test(['struct'], dtb_file, output)
|
||||
with open(output) as infile:
|
||||
data = infile.read()
|
||||
@ -875,7 +875,7 @@ U_BOOT_DRVINFO(gpios_at_0) = {
|
||||
def test_invalid_driver(self):
|
||||
"""Test output from a device tree file with an invalid driver"""
|
||||
dtb_file = get_dtb_file('dtoc_test_invalid_driver.dts')
|
||||
output = tools.GetOutputFilename('output')
|
||||
output = tools.get_output_filename('output')
|
||||
with test_util.capture_sys_output() as _:
|
||||
dtb_platdata.run_steps(
|
||||
['struct'], dtb_file, False, output, [], None, False,
|
||||
@ -918,7 +918,7 @@ U_BOOT_DRVINFO(spl_test) = {
|
||||
def test_phandle(self):
|
||||
"""Test output from a node containing a phandle reference"""
|
||||
dtb_file = get_dtb_file('dtoc_test_phandle.dts')
|
||||
output = tools.GetOutputFilename('output')
|
||||
output = tools.get_output_filename('output')
|
||||
self.run_test(['struct'], dtb_file, output)
|
||||
with open(output) as infile:
|
||||
data = infile.read()
|
||||
@ -1013,7 +1013,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_single(self):
"""Test output from a node containing a phandle reference"""
dtb_file = get_dtb_file('dtoc_test_phandle_single.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1029,7 +1029,7 @@ struct dtd_target {
def test_phandle_reorder(self):
"""Test that phandle targets are generated before their references"""
dtb_file = get_dtb_file('dtoc_test_phandle_reorder.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['platdata'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1071,7 +1071,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_cd_gpio(self):
"""Test that phandle targets are generated when unsing cd-gpios"""
dtb_file = get_dtb_file('dtoc_test_phandle_cd_gpios.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
dtb_platdata.run_steps(
['platdata'], dtb_file, False, output, [], None, False,
warning_disabled=True, scan=copy_scan())
@ -1157,7 +1157,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a node containing an invalid phandle fails"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad.dts',
capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot parse 'clocks' in node 'phandle-source'",
@ -1167,7 +1167,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a phandle target missing its #*-cells property"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad2.dts',
capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'phandle-target' has no cells property",
@ -1176,7 +1176,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_addresses64(self):
"""Test output from a node with a 'reg' property with na=2, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr64.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1245,7 +1245,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32(self):
"""Test output from a node with a 'reg' property with na=1, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr32.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1299,7 +1299,7 @@ U_BOOT_DRVINFO(test2) = {
def test_addresses64_32(self):
"""Test output from a node with a 'reg' property with na=2, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr64_32.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1368,7 +1368,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32_64(self):
"""Test output from a node with a 'reg' property with na=1, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr32_64.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1438,7 +1438,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid type generates an error"""
# Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg.dts', capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'spl-test' reg property is not an int",
@ -1448,7 +1448,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid cell count is detected"""
# Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg2.dts', capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn(
@ -1458,7 +1458,7 @@ U_BOOT_DRVINFO(test3) = {
def test_add_prop(self):
"""Test that a subequent node can add a new property to a struct"""
dtb_file = get_dtb_file('dtoc_test_add_prop.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@ -1523,9 +1523,9 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_multi_to_file(self):
"""Test output of multiple pieces to a single file"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False)
data = tools.read_file(output, binary=False)
self._check_strings(
self.decl_text + self.platdata_text + self.struct_text, data)

@ -1539,7 +1539,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_bad_command(self):
"""Test running dtoc with an invalid command"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['invalid-cmd'], dtb_file, output)
self.assertIn(
@ -1557,12 +1557,12 @@ U_BOOT_DRVINFO(spl_test2) = {

def check_output_dirs(self, instantiate):
# Remove the directory so that files from other tests are not there
tools._RemoveOutputDir()
tools.PrepareOutputDir(None)
tools._remove_output_dir()
tools.prepare_output_dir(None)

# This should create the .dts and .dtb in the output directory
dtb_file = get_dtb_file('dtoc_test_simple.dts')
outdir = tools.GetOutputDir()
outdir = tools.get_output_dir()
fnames = glob.glob(outdir + '/*')
self.assertEqual(2, len(fnames))

@ -1606,7 +1606,7 @@ U_BOOT_DRVINFO(spl_test2) = {
Scanner: scanner to use
"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')

# Take a copy before messing with it
scan = copy_scan()
@ -1694,7 +1694,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read(self):
"""Test obtaining aliases"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output)

scan = plat._scan
@ -1716,7 +1716,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad(self):
"""Test invalid alias property name"""
dtb_file = get_dtb_file('dtoc_test_alias_bad.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot decode alias 'i2c4-'", str(exc.exception))
@ -1728,7 +1728,7 @@ U_BOOT_DRVINFO(spl_test2) = {
# node (/does/not/exist)
dtb_file = get_dtb_file('dtoc_test_alias_bad_path.dts', True)

output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Alias 'i2c4' path '/does/not/exist' not found",
@ -1737,7 +1737,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad_uclass(self):
"""Test alias for a uclass that doesn't exist"""
dtb_file = get_dtb_file('dtoc_test_alias_bad_uc.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with test_util.capture_sys_output() as (stdout, _):
plat = self.run_test(['struct'], dtb_file, output)
self.assertEqual("Could not find uclass for alias 'other1'",
@ -1746,7 +1746,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_sequence(self):
"""Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output)

scan = plat._scan
@ -1762,7 +1762,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_process_root(self):
"""Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')

# Take a copy before messing with it
scan = copy_scan()
@ -1781,7 +1781,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_simple_inst(self):
"""Test output from some simple nodes with instantiate enabled"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')

self.run_test(['decl'], dtb_file, output, True)
with open(output) as infile:
@ -1804,7 +1804,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_inst_no_hdr(self):
"""Test dealing with a struct tsssshat has no header"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')

# Run it once to set everything up
plat = self.run_test(['decl'], dtb_file, output, True)
@ -1824,7 +1824,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_missing_props(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noprops.dts', capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Parent node '/i2c@0' has no properties - do you need",
@ -1833,13 +1833,13 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_single_reg(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_single_reg.dts')
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)

def test_missing_parent(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noparent.dts', capture_stderr=True)
output = tools.GetOutputFilename('output')
output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['device'], dtb_file, output, instantiate=True)
self.assertIn("Node '/i2c@0/spl-test/pmic@9' requires parent node "
|
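The fixture pattern repeated across these tests stays the same apart from the new names; an illustrative, self-contained sketch (not taken from the patch):

    import unittest

    from patman import tools

    class ExampleOutputDirTest(unittest.TestCase):
        """Hypothetical test using the renamed output-directory helpers"""
        @classmethod
        def setUpClass(cls):
            tools.prepare_output_dir(None)

        @classmethod
        def tearDownClass(cls):
            tools.finalise_output_dir()

        def test_round_trip(self):
            fname = tools.get_output_filename('example.bin')
            tools.write_file(fname, tools.get_bytes(0, 4))
            self.assertEqual(b'\x00' * 4, tools.read_file(fname))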
@ -74,11 +74,11 @@ class TestFdt(unittest.TestCase):
"""
@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -152,11 +152,11 @@ class TestNode(unittest.TestCase):

@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -294,11 +294,11 @@ class TestProp(unittest.TestCase):

@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -370,7 +370,7 @@ class TestProp(unittest.TestCase):
"""Tests the GetEmpty() function for the various supported types"""
self.assertEqual(True, fdt.Prop.GetEmpty(Type.BOOL))
self.assertEqual(chr(0), fdt.Prop.GetEmpty(Type.BYTE))
self.assertEqual(tools.GetBytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
self.assertEqual(tools.get_bytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
self.assertEqual('', fdt.Prop.GetEmpty(Type.STRING))

def testGetOffset(self):
@ -501,7 +501,7 @@ class TestProp(unittest.TestCase):
self.node.AddString('string', val)
self.dtb.Sync(auto_resize=True)
data = self.fdt.getprop(self.node.Offset(), 'string')
self.assertEqual(tools.ToBytes(val) + b'\0', data)
self.assertEqual(tools.to_bytes(val) + b'\0', data)

self.fdt.pack()
self.node.SetString('string', val + 'x')
@ -511,24 +511,24 @@ class TestProp(unittest.TestCase):
self.node.SetString('string', val[:-1])

prop = self.node.props['string']
prop.SetData(tools.ToBytes(val))
prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'string')
self.assertEqual(tools.ToBytes(val), data)
self.assertEqual(tools.to_bytes(val), data)

self.node.AddEmptyProp('empty', 5)
self.dtb.Sync(auto_resize=True)
prop = self.node.props['empty']
prop.SetData(tools.ToBytes(val))
prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'empty')
self.assertEqual(tools.ToBytes(val), data)
self.assertEqual(tools.to_bytes(val), data)

self.node.SetData('empty', b'123')
self.assertEqual(b'123', prop.bytes)

# Trying adding a lot of data at once
self.node.AddData('data', tools.GetBytes(65, 20000))
self.node.AddData('data', tools.get_bytes(65, 20000))
self.dtb.Sync(auto_resize=True)

def testFromData(self):
@ -562,7 +562,7 @@ class TestProp(unittest.TestCase):

def testGetFilename(self):
"""Test the dtb filename can be provided"""
self.assertEqual(tools.GetOutputFilename('source.dtb'),
self.assertEqual(tools.get_output_filename('source.dtb'),
self.dtb.GetFilename())


@ -575,11 +575,11 @@ class TestFdtUtil(unittest.TestCase):
"""
@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
|
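The byte/string helpers exercised above keep their behaviour under the new names; a quick illustrative snippet (not from the patch):

    from patman import tools

    print(tools.get_bytes(0x61, 4))    # b'aaaa' - a run of identical bytes
    print(tools.to_bytes('hello'))     # b'hello' - str encoded as UTF-8
    print(tools.to_string(b'hello'))   # 'hello' - bytes decoded back to str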
@ -43,11 +43,11 @@ class TestSrcScan(unittest.TestCase):
"""Tests for src_scan"""
@classmethod
def setUpClass(cls):
tools.PrepareOutputDir(None)
tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
tools.FinaliseOutputDir()
tools.finalise_output_dir()

def test_simple(self):
"""Simple test of scanning drivers"""
@ -113,7 +113,7 @@ class TestSrcScan(unittest.TestCase):
pathname = os.path.join(indir, fname)
dirname = os.path.dirname(pathname)
os.makedirs(dirname, exist_ok=True)
tools.WriteFile(pathname, '', binary=False)
tools.write_file(pathname, '', binary=False)
fname_list.append(pathname)

try:
@ -142,7 +142,7 @@ class TestSrcScan(unittest.TestCase):
def test_scan(self):
"""Test scanning of a driver"""
fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c')
buff = tools.ReadFile(fname, False)
buff = tools.read_file(fname, False)
scan = src_scan.Scanner(None, None)
scan._parse_driver(fname, buff)
self.assertIn('i2c_tegra', scan._drivers)
@ -374,8 +374,8 @@ struct another_struct {

def test_struct_scan_errors(self):
"""Test scanning a header file with an invalid unicode file"""
output = tools.GetOutputFilename('output.h')
tools.WriteFile(output, b'struct this is a test \x81 of bad unicode')
output = tools.get_output_filename('output.h')
tools.write_file(output, b'struct this is a test \x81 of bad unicode')

scan = src_scan.Scanner(None, None)
with test_util.capture_sys_output() as (stdout, _):
|
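write_file()/read_file() keep their text/binary handling; a minimal round-trip sketch (illustrative only, with a hypothetical filename):

    from patman import tools

    tools.prepare_output_dir(None)
    try:
        fname = tools.get_output_filename('scan_input.h')  # hypothetical name
        tools.write_file(fname, 'struct example { int x; };', binary=False)
        print(tools.read_file(fname, binary=False))
    finally:
        tools.finalise_output_dir()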
@ -338,7 +338,7 @@ Changes in v2:
text (str): Text to put into the file
"""
path = os.path.join(self.gitdir, fname)
tools.WriteFile(path, text, binary=False)
tools.write_file(path, text, binary=False)
index = self.repo.index
index.add(fname)
author = pygit2.Signature('Test user', 'test@email.com')
|
@ -159,7 +159,7 @@ elif args.cmd == 'send':
fd.close()

elif args.full_help:
tools.PrintFullHelp(
tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
)

|
@ -23,7 +23,7 @@ preserve_outdir = False
# Path to the Chrome OS chroot, if we know it
chroot_path = None

# Search paths to use for Filename(), used to find files
# Search paths to use for filename(), used to find files
search_paths = []

tool_search_paths = []
@ -36,7 +36,7 @@ packages = {
# List of paths to use when looking for an input file
indir = []

def PrepareOutputDir(dirname, preserve=False):
def prepare_output_dir(dirname, preserve=False):
"""Select an output directory, ensuring it exists.

This either creates a temporary directory or checks that the one supplied
@ -69,22 +69,22 @@ def PrepareOutputDir(dirname, preserve=False):
outdir = tempfile.mkdtemp(prefix='binman.')
tout.Debug("Using temporary directory '%s'" % outdir)

def _RemoveOutputDir():
def _remove_output_dir():
global outdir

shutil.rmtree(outdir)
tout.Debug("Deleted temporary directory '%s'" % outdir)
outdir = None

def FinaliseOutputDir():
def finalise_output_dir():
global outdir, preserve_outdir

"""Tidy up: delete output directory if temporary and not preserved."""
if outdir and not preserve_outdir:
_RemoveOutputDir()
_remove_output_dir()
outdir = None

def GetOutputFilename(fname):
def get_output_filename(fname):
"""Return a filename within the output directory.

Args:
@ -95,7 +95,7 @@ def GetOutputFilename(fname):
"""
return os.path.join(outdir, fname)

def GetOutputDir():
def get_output_dir():
"""Return the current output directory

Returns:
@ -103,15 +103,15 @@ def GetOutputDir():
"""
return outdir

def _FinaliseForTest():
def _finalise_for_test():
"""Remove the output directory (for use by tests)"""
global outdir

if outdir:
_RemoveOutputDir()
_remove_output_dir()
outdir = None

def SetInputDirs(dirname):
def set_input_dirs(dirname):
"""Add a list of input directories, where input files are kept.

Args:
@ -123,7 +123,7 @@ def SetInputDirs(dirname):
indir = dirname
tout.Debug("Using input directories %s" % indir)

def GetInputFilename(fname, allow_missing=False):
def get_input_filename(fname, allow_missing=False):
"""Return a filename for use as input.

Args:
@ -150,7 +150,7 @@ def GetInputFilename(fname, allow_missing=False):
raise ValueError("Filename '%s' not found in input path (%s) (cwd='%s')" %
(fname, ','.join(indir), os.getcwd()))

def GetInputFilenameGlob(pattern):
def get_input_filename_glob(pattern):
"""Return a list of filenames for use as input.

Args:
@ -167,26 +167,26 @@ def GetInputFilenameGlob(pattern):
files += glob.glob(pathname)
return sorted(files)

def Align(pos, align):
def align(pos, align):
if align:
mask = align - 1
pos = (pos + mask) & ~mask
return pos

def NotPowerOfTwo(num):
def not_power_of_two(num):
return num and (num & (num - 1))

def SetToolPaths(toolpaths):
def set_tool_paths(toolpaths):
"""Set the path to search for tools

Args:
toolpaths: List of paths to search for tools executed by Run()
toolpaths: List of paths to search for tools executed by run()
"""
global tool_search_paths

tool_search_paths = toolpaths

def PathHasFile(path_spec, fname):
def path_has_file(path_spec, fname):
"""Check if a given filename is in the PATH

Args:
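The helpers renamed in the hunks above are small pure functions, so the change is mechanical; an illustrative check of the new spellings (the extra search path is hypothetical):

    import os

    from patman import tools

    print(hex(tools.align(0x123, 0x100)))        # 0x200 - rounded up
    print(hex(tools.align(0x200, 0x100)))        # 0x200 - already aligned
    print(bool(tools.not_power_of_two(0x1000)))  # False
    print(bool(tools.not_power_of_two(0x1800)))  # True
    print(tools.path_has_file(os.environ['PATH'], 'sh'))  # True on most systems
    tools.set_tool_paths(['/opt/tools/bin'])     # hypothetical extra search path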
@ -201,7 +201,7 @@ def PathHasFile(path_spec, fname):
return True
return False

def GetHostCompileTool(name):
def get_host_compile_tool(name):
"""Get the host-specific version for a compile tool

This checks the environment variables that specify which version of
@ -244,7 +244,7 @@ def GetHostCompileTool(name):
return host_name, extra_args
return name, []

def GetTargetCompileTool(name, cross_compile=None):
def get_target_compile_tool(name, cross_compile=None):
"""Get the target-specific version for a compile tool

This first checks the environment variables that specify which
@ -298,7 +298,7 @@ def GetTargetCompileTool(name, cross_compile=None):
target_name = cross_compile + name
elif name == 'ld':
try:
if Run(cross_compile + 'ld.bfd', '-v'):
if run(cross_compile + 'ld.bfd', '-v'):
target_name = cross_compile + 'ld.bfd'
except:
target_name = cross_compile + 'ld'
@ -353,10 +353,10 @@ def run_result(name, *args, **kwargs):
raise_on_error = kwargs.get('raise_on_error', True)
env = get_env_with_path()
if for_target:
name, extra_args = GetTargetCompileTool(name)
name, extra_args = get_target_compile_tool(name)
args = tuple(extra_args) + args
elif for_host:
name, extra_args = GetHostCompileTool(name)
name, extra_args = get_host_compile_tool(name)
args = tuple(extra_args) + args
name = os.path.expanduser(name) # Expand paths containing ~
all_args = (name,) + args
@ -369,7 +369,7 @@ def run_result(name, *args, **kwargs):
result.stderr or result.stdout))
return result
except ValueError:
if env and not PathHasFile(env['PATH'], name):
if env and not path_has_file(env['PATH'], name):
msg = "Please install tool '%s'" % name
package = packages.get(name)
if package:
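run_result()/run() keep their calling convention; only the helpers they call were renamed. A minimal illustrative call:

    from patman import tools

    # run() resolves the tool via PATH plus any set_tool_paths() entries,
    # executes it and returns the captured stdout as a string.
    print(tools.run('echo', 'hello'))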
@ -380,7 +380,7 @@ def run_result(name, *args, **kwargs):
def tool_find(name):
"""Search the current path for a tool

This uses both PATH and any value from SetToolPaths() to search for a tool
This uses both PATH and any value from set_tool_paths() to search for a tool

Args:
name (str): Name of tool to locate
@ -400,7 +400,7 @@ def tool_find(name):
if os.path.isfile(fname) and os.access(fname, os.X_OK):
return fname

def Run(name, *args, **kwargs):
def run(name, *args, **kwargs):
"""Run a tool with some arguments

This runs a 'tool', which is a program used by binman to process files and
@ -421,7 +421,7 @@ def Run(name, *args, **kwargs):
if result is not None:
return result.stdout

def Filename(fname):
def filename(fname):
"""Resolve a file path to an absolute path.

If fname starts with ##/ and chroot is available, ##/ gets replaced with
@ -455,7 +455,7 @@ def Filename(fname):
# If not found, just return the standard, unchanged path
return fname

def ReadFile(fname, binary=True):
def read_file(fname, binary=True):
"""Read and return the contents of a file.

Args:
@ -464,13 +464,13 @@ def ReadFile(fname, binary=True):
Returns:
data read from file, as a string.
"""
with open(Filename(fname), binary and 'rb' or 'r') as fd:
with open(filename(fname), binary and 'rb' or 'r') as fd:
data = fd.read()
#self._out.Info("Read file '%s' size %d (%#0x)" %
#(fname, len(data), len(data)))
return data

def WriteFile(fname, data, binary=True):
def write_file(fname, data, binary=True):
"""Write data into a file.

Args:
@ -479,10 +479,10 @@ def WriteFile(fname, data, binary=True):
"""
#self._out.Info("Write file '%s' size %d (%#0x)" %
#(fname, len(data), len(data)))
with open(Filename(fname), binary and 'wb' or 'w') as fd:
with open(filename(fname), binary and 'wb' or 'w') as fd:
fd.write(data)

def GetBytes(byte, size):
def get_bytes(byte, size):
"""Get a string of bytes of a given size

Args:
@ -494,7 +494,7 @@ def GetBytes(byte, size):
"""
return bytes([byte]) * size

def ToBytes(string):
def to_bytes(string):
"""Convert a str type into a bytes type

Args:
@ -505,7 +505,7 @@ def ToBytes(string):
"""
return string.encode('utf-8')

def ToString(bval):
def to_string(bval):
"""Convert a bytes type into a str type

Args:
@ -517,7 +517,7 @@ def ToString(bval):
"""
return bval.decode('utf-8')

def ToHex(val):
def to_hex(val):
"""Convert an integer value (or None) to a string

Returns:
@ -525,7 +525,7 @@ def ToHex(val):
"""
return 'None' if val is None else '%#x' % val

def ToHexSize(val):
def to_hex_size(val):
"""Return the size of an object in hex

Returns:
@ -533,7 +533,7 @@ def ToHexSize(val):
"""
return 'None' if val is None else '%#x' % len(val)

def PrintFullHelp(fname):
def print_full_help(fname):
"""Print the full help message for a tool using an appropriate pager.

Args:
@ -547,7 +547,7 @@ def PrintFullHelp(fname):
pager = ['more']
command.Run(*pager, fname)

def Download(url, tmpdir_pattern='.patman'):
def download(url, tmpdir_pattern='.patman'):
"""Download a file to a temporary directory

Args: