patman snake-case conversion
Merge tag 'dm-pull-8feb22-take3' of https://gitlab.denx.de/u-boot/custodians/u-boot-dm

patman snake-case conversion
binman fit improvements
ACPI fixes and making MCFG available to ARM

[trini: Update scripts/pylint.base]
Signed-off-by: Tom Rini <trini@konsulko.com>
commit 2ccd2bc8c3
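A note for orientation before the diffs: the ACPI part of this pull converts acpi_fill_mcfg() from returning an updated table address (u32) to filling through a struct acpi_ctx, which is what lets the MCFG writer move into generic code and become usable on ARM. A minimal sketch of a board-side override under the new contract — the ECAM base and bus range below are illustrative placeholders, not values from this commit:

    #include <acpi/acpi_table.h>
    #include <dm/acpi.h>

    /* hypothetical ECAM window, for illustration only */
    #define BOARD_ECAM_BASE    0xe0000000
    #define BOARD_ECAM_END_BUS 255

    int acpi_fill_mcfg(struct acpi_ctx *ctx)
    {
            size_t size;

            /* emit one entry: PCI segment 0, buses 0..255, at the ECAM base */
            size = acpi_create_mcfg_mmconfig((struct acpi_mcfg_mmconfig *)ctx->current,
                                             BOARD_ECAM_BASE, 0, 0,
                                             BOARD_ECAM_END_BUS);
            acpi_inc(ctx, size);    /* advance the context's write pointer */

            return 0;
    }

Boards that provide no override fall back to the new __weak default in lib/acpi/mcfg.c below, which returns -ENOENT, presumably so that boards without ECAM emit no MCFG table.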
@@ -8,6 +8,8 @@ dtb-$(CONFIG_TARGET_EMSDP) += emsdp.dtb
 dtb-$(CONFIG_TARGET_HSDK) += hsdk.dtb hsdk-4xd.dtb
 dtb-$(CONFIG_TARGET_IOT_DEVKIT) += iot_devkit.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -1205,6 +1205,8 @@ dtb-$(CONFIG_TARGET_EA_LPC3250DEVKITV2) += lpc3250-ea3250.dtb
 
 dtb-$(CONFIG_ARCH_QEMU) += qemu-arm.dtb qemu-arm64.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here

@@ -18,6 +18,8 @@ dtb-$(CONFIG_TARGET_M5373EVB) += M5373EVB.dtb
 dtb-$(CONFIG_TARGET_AMCORE) += amcore.dtb
 dtb-$(CONFIG_TARGET_STMARK2) += stmark2.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -2,6 +2,8 @@
 
 dtb-y += $(shell echo $(CONFIG_DEFAULT_DEVICE_TREE)).dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -34,6 +34,8 @@ dtb-$(CONFIG_SOC_JR2) += jr2_pcb110.dtb jr2_pcb111.dtb serval2_pcb112.dtb
 dtb-$(CONFIG_SOC_SERVALT) += servalt_pcb116.dtb
 dtb-$(CONFIG_SOC_SERVAL) += serval_pcb105.dtb serval_pcb106.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here

@@ -2,6 +2,8 @@
 
 dtb-$(CONFIG_TARGET_ADP_AG101P) += ag101p.dtb
 dtb-$(CONFIG_TARGET_ADP_AE3XX) += ae3xx.dtb
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -2,6 +2,8 @@
 
 dtb-y += $(CONFIG_DEFAULT_DEVICE_TREE:"%"=%).dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -30,6 +30,8 @@ dtb-$(CONFIG_TARGET_TUXX1) += kmtuxa1.dtb
 dtb-$(CONFIG_TARGET_MCR3000) += mcr3000.dtb
 dtb-$(CONFIG_TARGET_GAZERBEAM) += gazerbeam.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here

@@ -8,6 +8,8 @@ dtb-$(CONFIG_TARGET_SIFIVE_UNLEASHED) += hifive-unleashed-a00.dtb
 dtb-$(CONFIG_TARGET_SIFIVE_UNMATCHED) += hifive-unmatched-a00.dtb
 dtb-$(CONFIG_TARGET_SIPEED_MAIX) += k210-maix-bit.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
@@ -4,3 +4,10 @@ head-y := arch/sandbox/cpu/start.o arch/sandbox/cpu/os.o
 head-$(CONFIG_SANDBOX_SDL) += arch/sandbox/cpu/sdl.o
 libs-y += arch/sandbox/cpu/
 libs-y += arch/sandbox/lib/
+
+# sdl.c fails to compile with -fshort-wchar using musl.
+cmd_cc_sdl.o = $(CC) $(filter-out -nostdinc -fshort-wchar, \
+		$(patsubst -I%,-idirafter%,$(c_flags))) -fno-lto -c -o $@ $<
+
+$(obj)/sdl.o: $(src)/sdl.c FORCE
+	$(call if_changed_dep,cc_sdl.o)

@@ -7,7 +7,7 @@
 
 obj-y := cache.o cpu.o state.o
 extra-y := start.o os.o
-extra-$(CONFIG_SANDBOX_SDL) += sdl.o
+extra-$(CONFIG_SANDBOX_SDL) += sdl.o
 obj-$(CONFIG_SPL_BUILD) += spl.o
 obj-$(CONFIG_ETH_SANDBOX_RAW) += eth-raw-os.o
 

@@ -19,8 +19,6 @@ cmd_cc_os.o = $(CC) $(filter-out -nostdinc, \
 
 $(obj)/os.o: $(src)/os.c FORCE
 	$(call if_changed_dep,cc_os.o)
-$(obj)/sdl.o: $(src)/sdl.c FORCE
-	$(call if_changed_dep,cc_os.o)
 
 # eth-raw-os.c is built in the system env, so needs standard includes
 # CFLAGS_REMOVE_eth-raw-os.o cannot be used to drop header include path

@@ -30,3 +28,10 @@ cmd_cc_eth-raw-os.o = $(CC) $(filter-out -nostdinc, \
 
 $(obj)/eth-raw-os.o: $(src)/eth-raw-os.c FORCE
 	$(call if_changed_dep,cc_eth-raw-os.o)
+
+# sdl.c fails to build with -fshort-wchar using musl
+cmd_cc_sdl.o = $(CC) $(filter-out -nostdinc -fshort-wchar, \
+		$(patsubst -I%,-idirafter%,$(c_flags))) -fno-lto -c -o $@ $<
+
+$(obj)/sdl.o: $(src)/sdl.c FORCE
+	$(call if_changed_dep,cc_sdl.o)
@@ -8,6 +8,8 @@ endif
 dtb-$(CONFIG_UT_DM) += test.dtb
 dtb-$(CONFIG_CMD_EXTENSION) += overlay0.dtbo overlay1.dtbo
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000

@@ -1,5 +1,7 @@
 dtb-y += sh7751-r2dplus.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here
@@ -31,14 +31,17 @@
 #include <linux/err.h>
 #include <power/acpi_pmc.h>
 
-u32 acpi_fill_mcfg(u32 current)
+int acpi_fill_mcfg(struct acpi_ctx *ctx)
 {
+	size_t size;
+
 	/* PCI Segment Group 0, Start Bus Number 0, End Bus Number is 255 */
-	current += acpi_create_mcfg_mmconfig((void *)current,
-					     CONFIG_MMCONF_BASE_ADDRESS, 0, 0,
-					     (CONFIG_SA_PCIEX_LENGTH >> 20)
-					     - 1);
-	return current;
+	size = acpi_create_mcfg_mmconfig((void *)ctx->current,
+					 CONFIG_MMCONF_BASE_ADDRESS, 0, 0,
+					 (CONFIG_SA_PCIEX_LENGTH >> 20) - 1);
+	acpi_inc(ctx, size);
+
+	return 0;
 }
 
 static int acpi_sci_irq(void)
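A note on the end-bus arithmetic kept by this hunk: ECAM gives each bus number 32 devices x 8 functions x 4 KiB = 1 MiB of configuration space, so a window of N MiB covers buses 0 through N-1, which is why the end bus is (CONFIG_SA_PCIEX_LENGTH >> 20) - 1. A compile-time restatement of that reasoning, using a made-up window size:

    #include <assert.h>

    #define ECAM_LENGTH (256 << 20) /* hypothetical 256 MiB ECAM window */

    /* 1 MiB of ECAM per bus: a 256 MiB window ends at bus 255 */
    static_assert((ECAM_LENGTH >> 20) - 1 == 255,
                  "end bus of a 256 MiB ECAM window");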
@@ -68,14 +68,17 @@ u32 acpi_fill_madt(u32 current)
 	return current;
 }
 
-u32 acpi_fill_mcfg(u32 current)
+int acpi_fill_mcfg(struct acpi_ctx *ctx)
 {
-	/* TODO: Derive parameters from SFI MCFG table */
-	current += acpi_create_mcfg_mmconfig
-			((struct acpi_mcfg_mmconfig *)current,
-			MCFG_BASE_ADDRESS, 0x0, 0x0, 0x0);
+	size_t size;
 
-	return current;
+	/* TODO: Derive parameters from SFI MCFG table */
+	size = acpi_create_mcfg_mmconfig
+		((struct acpi_mcfg_mmconfig *)ctx->current,
+		MCFG_BASE_ADDRESS, 0x0, 0x0, 0x0);
+	acpi_inc(ctx, size);
+
+	return 0;
 }
 
 static u32 acpi_fill_csrt_dma(struct acpi_csrt_group *grp)
@@ -22,6 +22,8 @@ dtb-y += bayleybay.dtb \
 	slimbootloader.dtb \
 	baytrail_som-db5800-som-6867.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p $(if $(CONFIG_EFI_APP),0x8000,0x1000)
@@ -34,7 +34,6 @@ int acpi_create_madt_lapic_nmi(struct acpi_madt_lapic_nmi *lapic_nmi,
 u32 acpi_fill_madt(u32 current);
 int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base,
 			      u16 seg_nr, u8 start, u8 end);
-u32 acpi_fill_mcfg(u32 current);
 
 /**
  * acpi_write_hpet() - Write out a HPET table
@@ -161,28 +161,6 @@ int acpi_write_madt(struct acpi_ctx *ctx, const struct acpi_writer *entry)
 }
 ACPI_WRITER(5x86, NULL, acpi_write_madt, 0);
 
-int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base,
-			      u16 seg_nr, u8 start, u8 end)
-{
-	memset(mmconfig, 0, sizeof(*mmconfig));
-	mmconfig->base_address_l = base;
-	mmconfig->base_address_h = 0;
-	mmconfig->pci_segment_group_number = seg_nr;
-	mmconfig->start_bus_number = start;
-	mmconfig->end_bus_number = end;
-
-	return sizeof(struct acpi_mcfg_mmconfig);
-}
-
-__weak u32 acpi_fill_mcfg(u32 current)
-{
-	current += acpi_create_mcfg_mmconfig
-			((struct acpi_mcfg_mmconfig *)current,
-			CONFIG_PCIE_ECAM_BASE, 0x0, 0x0, 255);
-
-	return current;
-}
-
 /**
  * acpi_create_tcpa() - Create a TCPA table
 *
@@ -480,36 +458,6 @@ int acpi_write_gnvs(struct acpi_ctx *ctx, const struct acpi_writer *entry)
 }
 ACPI_WRITER(4gnvs, "GNVS", acpi_write_gnvs, 0);
 
-/* MCFG is defined in the PCI Firmware Specification 3.0 */
-int acpi_write_mcfg(struct acpi_ctx *ctx, const struct acpi_writer *entry)
-{
-	struct acpi_table_header *header;
-	struct acpi_mcfg *mcfg;
-	u32 current;
-
-	mcfg = ctx->current;
-	header = &mcfg->header;
-
-	current = (u32)mcfg + sizeof(struct acpi_mcfg);
-
-	memset(mcfg, '\0', sizeof(struct acpi_mcfg));
-
-	/* Fill out header fields */
-	acpi_fill_header(header, "MCFG");
-	header->length = sizeof(struct acpi_mcfg);
-	header->revision = 1;
-
-	/* (Re)calculate length and checksum */
-	header->length = current - (u32)mcfg;
-	header->checksum = table_compute_checksum(mcfg, header->length);
-
-	acpi_inc(ctx, mcfg->header.length);
-	acpi_add_table(ctx, mcfg);
-
-	return 0;
-}
-ACPI_WRITER(5mcfg, "MCFG", acpi_write_mcfg, 0);
-
 /**
  * acpi_write_hpet() - Write out a HPET table
 *
@@ -2,6 +2,8 @@
 
 dtb-$(CONFIG_XTFPGA) += ml605.dtb ml605_nommu.dtb kc705.dtb kc705_nommu.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS +=
@@ -357,7 +357,6 @@ static void serial_stub_putc(struct stdio_dev *sdev, const char ch)
 {
 	_serial_putc(sdev->priv, ch);
 }
-#endif
 
 static void serial_stub_puts(struct stdio_dev *sdev, const char *str)
 {

@@ -374,6 +373,7 @@ static int serial_stub_tstc(struct stdio_dev *sdev)
 	return _serial_tstc(sdev->priv);
 }
 #endif
+#endif
 
 /**
  * on_baudrate() - Update the actual baudrate when the env var changes
@@ -22,11 +22,6 @@ static int nulldev_serial_pending(struct udevice *dev, bool input)
 	return 0;
 }
 
-static int nulldev_serial_input(struct udevice *dev)
-{
-	return 0;
-}
-
 static int nulldev_serial_putc(struct udevice *dev, const char ch)
 {
 	return 0;
@@ -157,8 +157,8 @@ config DEVICE_TREE_INCLUDES
 	  .dtsi files that will also be used.
 
 config OF_LIST
-	string "List of device tree files to include for DT control"
-	depends on SPL_LOAD_FIT || MULTI_DTB_FIT
+	string "List of device tree files to include for DT control" if SPL_LOAD_FIT || MULTI_DTB_FIT
+	depends on OF_CONTROL
 	default DEFAULT_DEVICE_TREE
 	help
 	  This option specifies a list of device tree files to use for DT

@@ -264,8 +264,8 @@ config SPL_MULTI_DTB_FIT
 	  capabilities, pad configurations).
 
 config SPL_OF_LIST
-	string "List of device tree files to include for DT control in SPL"
-	depends on SPL_MULTI_DTB_FIT
+	string "List of device tree files to include for DT control in SPL" if SPL_MULTI_DTB_FIT
+	depends on SPL_OF_CONTROL
 	default OF_LIST
 	help
 	  This option specifies a list of device tree files to use for DT
@@ -212,7 +212,7 @@ struct udevice_rt {
 #define DM_MAX_SEQ_STR	3
 
 /* Returns the operations for a device */
-#define device_get_ops(dev)	(dev->driver->ops)
+#define device_get_ops(dev)	((dev)->driver->ops)
 
 #if CONFIG_IS_ENABLED(OF_PLATDATA_RT)
 u32 dev_get_flags(const struct udevice *dev);
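The one-character device_get_ops() change above is a macro-hygiene fix: parenthesizing the parameter makes a non-trivial argument expression expand as a single operand. A sketch of the failure it prevents (the call is hypothetical, not from this diff):

    /*
     * Old: #define device_get_ops(dev)	(dev->driver->ops)
     *   device_get_ops(flag ? dev_a : dev_b)
     *     expands to (flag ? dev_a : dev_b->driver->ops)
     *   so '->' binds to dev_b alone and the expression is ill-typed.
     *
     * New: #define device_get_ops(dev)	((dev)->driver->ops)
     *   device_get_ops(flag ? dev_a : dev_b)
     *     expands to ((flag ? dev_a : dev_b)->driver->ops)
     *   which dereferences whichever device the condition selects.
     */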
@@ -11,6 +11,7 @@ obj-y += acpi_writer.o
 ifndef CONFIG_QEMU
 obj-y += base.o
 obj-y += csrt.o
+obj-y += mcfg.o
 
 # Sandbox does not build a .asl file
 ifndef CONFIG_SANDBOX
lib/acpi/mcfg.c (new file, 64 lines)
@@ -0,0 +1,64 @@
+// SPDX-License-Identifier: GPL-2.0+
+/*
+ * Write an ACPI MCFG table
+ *
+ * Copyright 2022 Google LLC
+ */
+
+#define LOG_CATEGORY LOGC_ACPI
+
+#include <common.h>
+#include <mapmem.h>
+#include <tables_csum.h>
+#include <acpi/acpi_table.h>
+#include <dm/acpi.h>
+
+int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base,
+			      u16 seg_nr, u8 start, u8 end)
+{
+	memset(mmconfig, 0, sizeof(*mmconfig));
+	mmconfig->base_address_l = base;
+	mmconfig->base_address_h = 0;
+	mmconfig->pci_segment_group_number = seg_nr;
+	mmconfig->start_bus_number = start;
+	mmconfig->end_bus_number = end;
+
+	return sizeof(struct acpi_mcfg_mmconfig);
+}
+
+__weak int acpi_fill_mcfg(struct acpi_ctx *ctx)
+{
+	return -ENOENT;
+}
+
+/* MCFG is defined in the PCI Firmware Specification 3.0 */
+int acpi_write_mcfg(struct acpi_ctx *ctx, const struct acpi_writer *entry)
+{
+	struct acpi_table_header *header;
+	struct acpi_mcfg *mcfg;
+	int ret;
+
+	mcfg = ctx->current;
+	header = &mcfg->header;
+
+	memset(mcfg, '\0', sizeof(struct acpi_mcfg));
+
+	/* Fill out header fields */
+	acpi_fill_header(header, "MCFG");
+	header->length = sizeof(struct acpi_mcfg);
+	header->revision = 1;
+	acpi_inc(ctx, sizeof(*mcfg));
+
+	ret = acpi_fill_mcfg(ctx);
+	if (ret)
+		return log_msg_ret("fill", ret);
+
+	/* (Re)calculate length and checksum */
+	header->length = (ulong)ctx->current - (ulong)mcfg;
+	header->checksum = table_compute_checksum(mcfg, header->length);
+
+	acpi_add_table(ctx, mcfg);
+
+	return 0;
+}
+ACPI_WRITER(5mcfg, "MCFG", acpi_write_mcfg, 0);
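One detail of the new writer worth calling out: header->length starts as the bare header size, is re-derived from ctx->current after the fill hook has advanced it, and only then is the table checksummed. The ACPI rule that table_compute_checksum() relies on is that the checksum byte makes every byte of the table sum to zero modulo 256; an illustrative stand-in (a sketch, not the U-Boot source) is:

    #include <stdint.h>
    #include <stddef.h>

    static uint8_t acpi_table_checksum(const void *table, size_t len)
    {
            const uint8_t *p = table;
            uint8_t sum = 0;

            while (len--)
                    sum += *p++;

            return (uint8_t)-sum;   /* storing this makes the total 0 mod 256 */
    }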
scripts/Makefile.dts (new file, 3 lines)
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0+
+
+dtb-y += $(patsubst %,%.dtb,$(subst ",,$(CONFIG_$(SPL_)OF_LIST)))
@@ -44,7 +44,6 @@ test_tests_test_gpt.py 7.67
 test_tests_test_handoff.py 5.00
 test_tests_test_help.py 5.00
 test_tests_test_hush_if_test.py 9.27
-test_tests_test_kconfig.py 5.00
 test_tests_test_log.py 8.64
 test_tests_test_lsblk.py 8.00
 test_tests_test_md.py 3.64
@@ -109,7 +108,7 @@ tools_binman_etype_cros_ec_rw -6.00
 tools_binman_etype_fdtmap -3.28
 tools_binman_etype_files -7.43
 tools_binman_etype_fill -6.43
-tools_binman_etype_fit 5.26
+tools_binman_etype_fit 4.64
 tools_binman_etype_fmap -0.29
 tools_binman_etype_gbb 0.83
 tools_binman_etype_image_header 5.77

@@ -131,7 +130,7 @@ tools_binman_etype_mkimage 2.31
 tools_binman_etype_opensbi -6.00
 tools_binman_etype_powerpc_mpc85xx_bootpg_resetvec -10.00
 tools_binman_etype_scp -6.00
-tools_binman_etype_section 4.56
+tools_binman_etype_section 4.57
 tools_binman_etype_text -0.48
 tools_binman_etype_u_boot -15.71
 tools_binman_etype_u_boot_dtb -12.22

@@ -169,7 +168,7 @@ tools_binman_fdt_test 3.23
 tools_binman_fip_util 9.85
 tools_binman_fip_util_test 10.00
 tools_binman_fmap_util 6.88
-tools_binman_ftest 7.44
+tools_binman_ftest 7.45
 tools_binman_image 7.12
 tools_binman_image_test 4.48
 tools_binman_main 4.86

@@ -179,7 +178,7 @@ tools_buildman_board 7.82
 tools_buildman_bsettings 1.71
 tools_buildman_builder 6.92
 tools_buildman_builderthread 7.48
-tools_buildman_cfgutil 10.00
+tools_buildman_cfgutil 7.83
 tools_buildman_cmdline 8.89
 tools_buildman_control 8.12
 tools_buildman_func_test 7.18

@@ -203,26 +202,26 @@ tools_genboardscfg 7.95
 tools_microcode-tool 7.25
 tools_moveconfig 8.32
 tools_patman___init__ 0.00
-tools_patman_checkpatch 8.04
-tools_patman_command 4.74
-tools_patman_commit 3.25
+tools_patman_checkpatch 8.48
+tools_patman_command 5.51
+tools_patman_commit 4.50
 tools_patman_control 8.14
-tools_patman_cros_subprocess 7.56
-tools_patman_func_test 8.14
-tools_patman_get_maintainer 6.47
-tools_patman_gitutil 5.62
+tools_patman_cros_subprocess 7.76
+tools_patman_func_test 8.51
+tools_patman_get_maintainer 7.06
+tools_patman_gitutil 6.65
 tools_patman_main 7.90
 tools_patman_patchstream 9.11
-tools_patman_project 6.67
+tools_patman_project 7.78
 tools_patman_series 6.16
 tools_patman_settings 5.89
 tools_patman_setup 5.00
 tools_patman_status 8.62
-tools_patman_terminal 7.05
-tools_patman_test_checkpatch 6.81
-tools_patman_test_util 7.36
-tools_patman_tools 4.69
-tools_patman_tout 3.12
+tools_patman_terminal 8.00
+tools_patman_test_checkpatch 7.75
+tools_patman_test_util 7.64
+tools_patman_tools 5.68
+tools_patman_tout 5.31
 tools_rkmux 6.90
 tools_rmboard 7.76
 tools_zynqmp_pm_cfg_obj_convert 6.67
@@ -174,7 +174,7 @@ class Bintool:
             res = self.fetch(meth)
         except urllib.error.URLError as uerr:
             message = uerr.reason
-            print(col.Color(col.RED, f'- {message}'))
+            print(col.build(col.RED, f'- {message}'))

         except ValueError as exc:
             print(f'Exception: {exc}')

@@ -182,7 +182,7 @@ class Bintool:

         if skip_present and self.is_present():
             return PRESENT
-        print(col.Color(col.YELLOW, 'Fetch: %s' % self.name))
+        print(col.build(col.YELLOW, 'Fetch: %s' % self.name))
         if method == FETCH_ANY:
             for try_method in range(1, FETCH_COUNT):
                 print(f'- trying method: {FETCH_NAMES[try_method]}')

@@ -216,7 +216,7 @@ class Bintool:
             True on success, False on failure
         """
         def show_status(color, prompt, names):
-            print(col.Color(
+            print(col.build(
                 color, f'{prompt}:%s{len(names):2}: %s' %
                 (' ' * (16 - len(prompt)), ' '.join(names))))

@@ -227,7 +227,7 @@ class Bintool:
         name_list = Bintool.get_tool_list()
         if names_to_fetch[0] == 'missing':
             skip_present = True
-        print(col.Color(col.YELLOW,
+        print(col.build(col.YELLOW,
                         'Fetching tools: %s' % ' '.join(name_list)))
         status = collections.defaultdict(list)
         for name in name_list:

@@ -267,8 +267,8 @@ class Bintool:
         name = os.path.expanduser(self.name)  # Expand paths containing ~
         all_args = (name,) + args
         env = tools.get_env_with_path()
-        tout.Detail(f"bintool: {' '.join(all_args)}")
-        result = command.RunPipe(
+        tout.detail(f"bintool: {' '.join(all_args)}")
+        result = command.run_pipe(
             [all_args], capture=True, capture_stderr=True, env=env,
             raise_on_error=False, binary=binary)

@@ -278,17 +278,17 @@ class Bintool:
         # try to run it (as above) since RunPipe() allows faking the tool's
         # output
         if not any([result.stdout, result.stderr, tools.tool_find(name)]):
-            tout.Info(f"bintool '{name}' not found")
+            tout.info(f"bintool '{name}' not found")
             return None
         if raise_on_error:
-            tout.Info(f"bintool '{name}' failed")
+            tout.info(f"bintool '{name}' failed")
             raise ValueError("Error %d running '%s': %s" %
                              (result.return_code, ' '.join(all_args),
                               result.stderr or result.stdout))
         if result.stdout:
-            tout.Debug(result.stdout)
+            tout.debug(result.stdout)
         if result.stderr:
-            tout.Debug(result.stderr)
+            tout.debug(result.stderr)
         return result

     def run_cmd(self, *args, binary=False):

@@ -327,9 +327,9 @@ class Bintool:
         """
         tmpdir = tempfile.mkdtemp(prefix='binmanf.')
         print(f"- clone git repo '{git_repo}' to '{tmpdir}'")
-        tools.Run('git', 'clone', '--depth', '1', git_repo, tmpdir)
+        tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir)
         print(f"- build target '{make_target}'")
-        tools.Run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
+        tools.run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
                   make_target)
         fname = os.path.join(tmpdir, bintool_path)
         if not os.path.exists(fname):

@@ -349,8 +349,8 @@ class Bintool:
             str: Filename of fetched file to copy to a suitable directory
             str: Name of temp directory to remove, or None
         """
-        fname, tmpdir = tools.Download(url)
-        tools.Run('chmod', 'a+x', fname)
+        fname, tmpdir = tools.download(url)
+        tools.run('chmod', 'a+x', fname)
         return fname, tmpdir

     @classmethod

@@ -384,7 +384,7 @@ class Bintool:
         """
         args = ['sudo', 'apt', 'install', '-y', package]
         print('- %s' % ' '.join(args))
-        tools.Run(*args)
+        tools.run(*args)
         return True

     @staticmethod
@@ -80,7 +80,7 @@ class TestBintool(unittest.TestCase):

         Args:
             fake_download (function): Function to call instead of
-                tools.Download()
+                tools.download()
             method (bintool.FETCH_...: Fetch method to use

         Returns:

@@ -88,7 +88,7 @@ class TestBintool(unittest.TestCase):
         """
         btest = Bintool.create('_testing')
         col = terminal.Color()
-        with unittest.mock.patch.object(tools, 'Download',
+        with unittest.mock.patch.object(tools, 'download',
                                         side_effect=fake_download):
             with test_util.capture_sys_output() as (stdout, _):
                 btest.fetch_tool(method, col, False)

@@ -97,7 +97,7 @@ class TestBintool(unittest.TestCase):
     def test_fetch_url_err(self):
         """Test an error while fetching a tool from a URL"""
         def fail_download(url):
-            """Take the tools.Download() function by raising an exception"""
+            """Take the tools.download() function by raising an exception"""
             raise urllib.error.URLError('my error')

         stdout = self.check_fetch_url(fail_download, bintool.FETCH_ANY)

@@ -114,7 +114,7 @@ class TestBintool(unittest.TestCase):
     def test_fetch_method(self):
         """Test fetching using a particular method"""
         def fail_download(url):
-            """Take the tools.Download() function by raising an exception"""
+            """Take the tools.download() function by raising an exception"""
             raise urllib.error.URLError('my error')

         stdout = self.check_fetch_url(fail_download, bintool.FETCH_BIN)

@@ -123,11 +123,11 @@ class TestBintool(unittest.TestCase):
     def test_fetch_pass_fail(self):
         """Test fetching multiple tools with some passing and some failing"""
         def handle_download(_):
-            """Take the tools.Download() function by writing a file"""
+            """Take the tools.download() function by writing a file"""
             if self.seq:
                 raise urllib.error.URLError('not found')
             self.seq += 1
-            tools.WriteFile(fname, expected)
+            tools.write_file(fname, expected)
             return fname, dirname

         expected = b'this is a test'

@@ -140,12 +140,12 @@ class TestBintool(unittest.TestCase):
         self.seq = 0

         with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir):
-            with unittest.mock.patch.object(tools, 'Download',
+            with unittest.mock.patch.object(tools, 'download',
                                             side_effect=handle_download):
                 with test_util.capture_sys_output() as (stdout, _):
                     Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2)
         self.assertTrue(os.path.exists(dest_fname))
-        data = tools.ReadFile(dest_fname)
+        data = tools.read_file(dest_fname)
         self.assertEqual(expected, data)

         lines = stdout.getvalue().splitlines()

@@ -245,14 +245,14 @@ class TestBintool(unittest.TestCase):
             tmpdir = cmd[2]
             self.fname = os.path.join(tmpdir, 'pathname')
             if write_file:
-                tools.WriteFile(self.fname, b'hello')
+                tools.write_file(self.fname, b'hello')

         btest = Bintool.create('_testing')
         col = terminal.Color()
         self.fname = None
         with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR',
                                         self._indir):
-            with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
+            with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
                 with test_util.capture_sys_output() as (stdout, _):
                     btest.fetch_tool(bintool.FETCH_BUILD, col, False)
         fname = os.path.join(self._indir, '_testing')

@@ -275,7 +275,7 @@ class TestBintool(unittest.TestCase):
         btest = Bintool.create('_testing')
         btest.install = True
         col = terminal.Color()
-        with unittest.mock.patch.object(tools, 'Run', return_value=None):
+        with unittest.mock.patch.object(tools, 'run', return_value=None):
             with test_util.capture_sys_output() as _:
                 result = btest.fetch_tool(bintool.FETCH_BIN, col, False)
         self.assertEqual(bintool.FETCHED, result)

@@ -292,8 +292,8 @@ class TestBintool(unittest.TestCase):
     def test_all_bintools(self):
         """Test that all bintools can handle all available fetch types"""
         def handle_download(_):
-            """Take the tools.Download() function by writing a file"""
-            tools.WriteFile(fname, expected)
+            """Take the tools.download() function by writing a file"""
+            tools.write_file(fname, expected)
             return fname, dirname

         def fake_run(*cmd):

@@ -301,15 +301,15 @@ class TestBintool(unittest.TestCase):
             # See Bintool.build_from_git()
             tmpdir = cmd[2]
             self.fname = os.path.join(tmpdir, 'pathname')
-            tools.WriteFile(self.fname, b'hello')
+            tools.write_file(self.fname, b'hello')

         expected = b'this is a test'
         dirname = os.path.join(self._indir, 'download_dir')
         os.mkdir(dirname)
         fname = os.path.join(dirname, 'downloaded')

-        with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
-            with unittest.mock.patch.object(tools, 'Download',
+        with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
+            with unittest.mock.patch.object(tools, 'download',
                                             side_effect=handle_download):
                 with test_util.capture_sys_output() as _:
                     for name in Bintool.get_tool_list():

@@ -320,7 +320,7 @@ class TestBintool(unittest.TestCase):
                 if result is not True and result is not None:
                     result_fname, _ = result
                     self.assertTrue(os.path.exists(result_fname))
-                    data = tools.ReadFile(result_fname)
+                    data = tools.read_file(result_fname)
                     self.assertEqual(expected, data)
                     os.remove(result_fname)
@@ -88,8 +88,8 @@ class Bintoollz4(bintool.Bintool):
             bytes: Compressed data
         """
         with tempfile.NamedTemporaryFile(prefix='comp.tmp',
-                                         dir=tools.GetOutputDir()) as tmp:
-            tools.WriteFile(tmp.name, indata)
+                                         dir=tools.get_output_dir()) as tmp:
+            tools.write_file(tmp.name, indata)
             args = ['--no-frame-crc', '-B4', '-5', '-c', tmp.name]
             return self.run_cmd(*args, binary=True)

@@ -103,8 +103,8 @@ class Bintoollz4(bintool.Bintool):
             bytes: Decompressed data
         """
         with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
-                                         dir=tools.GetOutputDir()) as inf:
-            tools.WriteFile(inf.name, indata)
+                                         dir=tools.get_output_dir()) as inf:
+            tools.write_file(inf.name, indata)
             args = ['-cd', inf.name]
             return self.run_cmd(*args, binary=True)
@@ -65,13 +65,13 @@ class Bintoollzma_alone(bintool.Bintool):
             bytes: Compressed data
         """
         with tempfile.NamedTemporaryFile(prefix='comp.tmp',
-                                         dir=tools.GetOutputDir()) as inf:
-            tools.WriteFile(inf.name, indata)
+                                         dir=tools.get_output_dir()) as inf:
+            tools.write_file(inf.name, indata)
             with tempfile.NamedTemporaryFile(prefix='compo.otmp',
-                                             dir=tools.GetOutputDir()) as outf:
+                                             dir=tools.get_output_dir()) as outf:
                 args = ['e', inf.name, outf.name, '-lc1', '-lp0', '-pb0', '-d8']
                 self.run_cmd(*args, binary=True)
-                return tools.ReadFile(outf.name)
+                return tools.read_file(outf.name)

     def decompress(self, indata):
         """Decompress data with lzma_alone

@@ -83,13 +83,13 @@ class Bintoollzma_alone(bintool.Bintool):
             bytes: Decompressed data
         """
         with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
-                                         dir=tools.GetOutputDir()) as inf:
-            tools.WriteFile(inf.name, indata)
+                                         dir=tools.get_output_dir()) as inf:
+            tools.write_file(inf.name, indata)
             with tempfile.NamedTemporaryFile(prefix='compo.otmp',
-                                             dir=tools.GetOutputDir()) as outf:
+                                             dir=tools.get_output_dir()) as outf:
                 args = ['d', inf.name, outf.name]
                 self.run_cmd(*args, binary=True)
-                return tools.ReadFile(outf.name, binary=True)
+                return tools.read_file(outf.name, binary=True)

     def fetch(self, method):
         """Fetch handler for lzma_alone
@@ -189,9 +189,9 @@ def _pack_string(instr):
     Returns:
         String with required padding (at least one 0x00 byte) at the end
     """
-    val = tools.ToBytes(instr)
+    val = tools.to_bytes(instr)
     pad_len = align_int(len(val) + 1, FILENAME_ALIGN)
-    return val + tools.GetBytes(0, pad_len - len(val))
+    return val + tools.get_bytes(0, pad_len - len(val))


 class CbfsFile(object):

@@ -371,7 +371,7 @@ class CbfsFile(object):
                 FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN,
                 self.compress, self.memlen)
         elif self.ftype == TYPE_EMPTY:
-            data = tools.GetBytes(self.erase_byte, self.size)
+            data = tools.get_bytes(self.erase_byte, self.size)
         else:
             raise ValueError('Unknown type %#x when writing\n' % self.ftype)
         if attr:

@@ -388,7 +388,7 @@ class CbfsFile(object):
             # possible.
             raise ValueError("Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x" %
                              (self.name, self.cbfs_offset, offset))
-        pad = tools.GetBytes(pad_byte, pad_len)
+        pad = tools.get_bytes(pad_byte, pad_len)
         hdr_len += pad_len

         # This is the offset of the start of the file's data,

@@ -414,7 +414,7 @@ class CbfsWriter(object):
     Usage is something like:

         cbw = CbfsWriter(size)
-        cbw.add_file_raw('u-boot', tools.ReadFile('u-boot.bin'))
+        cbw.add_file_raw('u-boot', tools.read_file('u-boot.bin'))
         ...
         data, cbfs_offset = cbw.get_data_and_offset()

@@ -482,7 +482,7 @@ class CbfsWriter(object):
         if fd.tell() > offset:
             raise ValueError('No space for data before offset %#x (current offset %#x)' %
                              (offset, fd.tell()))
-        fd.write(tools.GetBytes(self._erase_byte, offset - fd.tell()))
+        fd.write(tools.get_bytes(self._erase_byte, offset - fd.tell()))

     def _pad_to(self, fd, offset):
         """Write out pad bytes and/or an empty file until a given offset
@@ -36,7 +36,7 @@ class TestCbfs(unittest.TestCase):
     def setUpClass(cls):
         # Create a temporary directory for test files
         cls._indir = tempfile.mkdtemp(prefix='cbfs_util.')
-        tools.SetInputDirs([cls._indir])
+        tools.set_input_dirs([cls._indir])

         # Set up some useful data files
         TestCbfs._make_input_file('u-boot.bin', U_BOOT_DATA)

@@ -45,7 +45,7 @@ class TestCbfs(unittest.TestCase):

         # Set up a temporary output directory, used by the tools library when
         # compressing files
-        tools.PrepareOutputDir(None)
+        tools.prepare_output_dir(None)

         cls.cbfstool = bintool.Bintool.create('cbfstool')
         cls.have_cbfstool = cls.cbfstool.is_present()

@@ -58,7 +58,7 @@ class TestCbfs(unittest.TestCase):
         if cls._indir:
             shutil.rmtree(cls._indir)
         cls._indir = None
-        tools.FinaliseOutputDir()
+        tools.finalise_output_dir()

     @classmethod
     def _make_input_file(cls, fname, contents):

@@ -71,7 +71,7 @@ class TestCbfs(unittest.TestCase):
             Full pathname of file created
         """
         pathname = os.path.join(cls._indir, fname)
-        tools.WriteFile(pathname, contents)
+        tools.write_file(pathname, contents)
         return pathname

     def _check_hdr(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86):

@@ -176,12 +176,12 @@ class TestCbfs(unittest.TestCase):
             base = [(1 << 32) - size + b for b in base]
         self.cbfstool.add_raw(
             cbfs_fname, 'u-boot',
-            tools.GetInputFilename(compress and 'compress' or 'u-boot.bin'),
+            tools.get_input_filename(compress and 'compress' or 'u-boot.bin'),
             compress[0] if compress else None,
             base[0] if base else None)
         self.cbfstool.add_raw(
             cbfs_fname, 'u-boot-dtb',
-            tools.GetInputFilename(compress and 'compress' or 'u-boot.dtb'),
+            tools.get_input_filename(compress and 'compress' or 'u-boot.dtb'),
             compress[1] if compress else None,
             base[1] if base else None)
         return cbfs_fname

@@ -198,10 +198,10 @@ class TestCbfs(unittest.TestCase):
         """
         if not self.have_cbfstool or not self.have_lz4:
             return
-        expect = tools.ReadFile(cbfstool_fname)
+        expect = tools.read_file(cbfstool_fname)
         if expect != data:
-            tools.WriteFile('/tmp/expect', expect)
-            tools.WriteFile('/tmp/actual', data)
+            tools.write_file('/tmp/expect', expect)
+            tools.write_file('/tmp/actual', data)
             print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff')
             self.fail('cbfstool produced a different result')

@@ -482,7 +482,7 @@ class TestCbfs(unittest.TestCase):

         size = 0xb0
         cbw = CbfsWriter(size)
-        cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname))
+        cbw.add_file_stage('u-boot', tools.read_file(elf_fname))

         data = cbw.get_data()
         cbfs = self._check_hdr(data, size)
@@ -99,9 +99,9 @@ def _ReadMissingBlobHelp():
     return result

 def _ShowBlobHelp(path, text):
-    tout.Warning('\n%s:' % path)
+    tout.warning('\n%s:' % path)
     for line in text.splitlines():
-        tout.Warning(' %s' % line)
+        tout.warning(' %s' % line)

 def _ShowHelpForMissingBlobs(missing_list):
     """Show help for each missing blob to help the user take action

@@ -258,15 +258,15 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
             raise ValueError('Must specify exactly one entry path to write with -f')
         entry = image.FindEntryPath(entry_paths[0])
         data = entry.ReadData(decomp, alt_format)
-        tools.WriteFile(output_fname, data)
-        tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
+        tools.write_file(output_fname, data)
+        tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
         return

     # Otherwise we will output to a path given by the entry path of each entry.
     # This means that entries will appear in subdirectories if they are part of
     # a sub-section.
     einfos = image.GetListEntries(entry_paths)[0]
-    tout.Notice('%d entries match and will be written' % len(einfos))
+    tout.notice('%d entries match and will be written' % len(einfos))
     for einfo in einfos:
         entry = einfo.entry
         data = entry.ReadData(decomp, alt_format)

@@ -279,9 +279,9 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
         if fname and not os.path.exists(fname):
             os.makedirs(fname)
         fname = os.path.join(fname, 'root')
-        tout.Notice("Write entry '%s' size %x to '%s'" %
+        tout.notice("Write entry '%s' size %x to '%s'" %
                     (entry.GetPath(), len(data), fname))
-        tools.WriteFile(fname, data)
+        tools.write_file(fname, data)
     return einfos

@@ -328,7 +328,7 @@ def AfterReplace(image, allow_resize, write_map):
             of the entries), False to raise an exception
         write_map: True to write a map file
     """
-    tout.Info('Processing image')
+    tout.info('Processing image')
    ProcessImage(image, update_fdt=True, write_map=write_map,
                 get_contents=False, allow_resize=allow_resize)

@@ -336,7 +336,7 @@ def AfterReplace(image, allow_resize, write_map):
 def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True,
                       write_map=False):
     BeforeReplace(image, allow_resize)
-    tout.Info('Writing data to %s' % entry.GetPath())
+    tout.info('Writing data to %s' % entry.GetPath())
     ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
     AfterReplace(image, allow_resize=allow_resize, write_map=write_map)

@@ -361,7 +361,7 @@ def WriteEntry(image_fname, entry_path, data, do_compress=True,
     Returns:
         Image object that was updated
     """
-    tout.Info("Write entry '%s', file '%s'" % (entry_path, image_fname))
+    tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
     image = Image.FromFile(image_fname)
     entry = image.FindEntryPath(entry_path)
     WriteEntryToImage(image, entry, data, do_compress=do_compress,

@@ -398,8 +398,8 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
         if len(entry_paths) != 1:
             raise ValueError('Must specify exactly one entry path to write with -f')
         entry = image.FindEntryPath(entry_paths[0])
-        data = tools.ReadFile(input_fname)
-        tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
+        data = tools.read_file(input_fname)
+        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
         WriteEntryToImage(image, entry, data, do_compress=do_compress,
                           allow_resize=allow_resize, write_map=write_map)
         return

@@ -408,7 +408,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
     # This means that files must appear in subdirectories if they are part of
     # a sub-section.
     einfos = image.GetListEntries(entry_paths)[0]
-    tout.Notice("Replacing %d matching entries in image '%s'" %
+    tout.notice("Replacing %d matching entries in image '%s'" %
                 (len(einfos), image_fname))

     BeforeReplace(image, allow_resize)

@@ -416,19 +416,19 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
     for einfo in einfos:
         entry = einfo.entry
         if entry.GetEntries():
-            tout.Info("Skipping section entry '%s'" % entry.GetPath())
+            tout.info("Skipping section entry '%s'" % entry.GetPath())
             continue

         path = entry.GetPath()[1:]
         fname = os.path.join(indir, path)

         if os.path.exists(fname):
-            tout.Notice("Write entry '%s' from file '%s'" %
+            tout.notice("Write entry '%s' from file '%s'" %
                         (entry.GetPath(), fname))
-            data = tools.ReadFile(fname)
+            data = tools.read_file(fname)
             ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
         else:
-            tout.Warning("Skipping entry '%s' from missing file '%s'" %
+            tout.warning("Skipping entry '%s' from missing file '%s'" %
                          (entry.GetPath(), fname))

     AfterReplace(image, allow_resize=allow_resize, write_map=write_map)

@@ -468,8 +468,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
     # output into a file in our output directly. Then scan it for use
     # in binman.
     dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
-    fname = tools.GetOutputFilename('u-boot.dtb.out')
-    tools.WriteFile(fname, tools.ReadFile(dtb_fname))
+    fname = tools.get_output_filename('u-boot.dtb.out')
+    tools.write_file(fname, tools.read_file(dtb_fname))
     dtb = fdt.FdtScan(fname)

     node = _FindBinmanNode(dtb)

@@ -488,7 +488,7 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
         else:
             skip.append(name)
     images = new_images
-    tout.Notice('Skipping images: %s' % ', '.join(skip))
+    tout.notice('Skipping images: %s' % ', '.join(skip))

     state.Prepare(images, dtb)

@@ -574,7 +574,7 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
         if sizes_ok:
             break
         image.ResetForPack()
-    tout.Info('Pack completed after %d pass(es)' % (pack_pass + 1))
+    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
     if not sizes_ok:
         image.Raise('Entries changed size after packing (tried %s passes)' %
                     passes)

@@ -585,20 +585,20 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
     missing_list = []
     image.CheckMissing(missing_list)
     if missing_list:
-        tout.Warning("Image '%s' is missing external blobs and is non-functional: %s" %
+        tout.warning("Image '%s' is missing external blobs and is non-functional: %s" %
                      (image.name, ' '.join([e.name for e in missing_list])))
         _ShowHelpForMissingBlobs(missing_list)
     faked_list = []
     image.CheckFakedBlobs(faked_list)
     if faked_list:
-        tout.Warning(
+        tout.warning(
             "Image '%s' has faked external blobs and is non-functional: %s" %
             (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
                                    for e in faked_list])))
     missing_bintool_list = []
     image.check_missing_bintools(missing_bintool_list)
     if missing_bintool_list:
-        tout.Warning(
+        tout.warning(
             "Image '%s' has missing bintools and is non-functional: %s" %
             (image.name, ' '.join([os.path.basename(bintool.name)
                                    for bintool in missing_bintool_list])))

@@ -618,7 +618,7 @@ def Binman(args):
     global state

     if args.full_help:
-        tools.PrintFullHelp(
+        tools.print_full_help(
             os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README.rst')
         )
         return 0

@@ -629,8 +629,8 @@ def Binman(args):

     if args.cmd in ['ls', 'extract', 'replace', 'tool']:
         try:
-            tout.Init(args.verbosity)
-            tools.PrepareOutputDir(None)
+            tout.init(args.verbosity)
+            tools.prepare_output_dir(None)
             if args.cmd == 'ls':
                 ListEntries(args.image, args.paths)

@@ -644,7 +644,7 @@ def Binman(args):
                 allow_resize=not args.fix_size, write_map=args.map)

             if args.cmd == 'tool':
-                tools.SetToolPaths(args.toolpath)
+                tools.set_tool_paths(args.toolpath)
                 if args.list:
                     bintool.Bintool.list_all()
                 elif args.fetch:

@@ -658,7 +658,7 @@ def Binman(args):
         except:
             raise
         finally:
-            tools.FinaliseOutputDir()
+            tools.finalise_output_dir()
         return 0

     elf_params = None

@@ -682,7 +682,7 @@ def Binman(args):
         args.indir.append(board_pathname)

     try:
-        tout.Init(args.verbosity)
+        tout.init(args.verbosity)
         elf.debug = args.debug
         cbfs_util.VERBOSE = args.verbosity > 2
         state.use_fake_dtb = args.fake_dtb

@@ -694,9 +694,9 @@ def Binman(args):
         # runtime.
         use_expanded = not args.no_expanded
         try:
-            tools.SetInputDirs(args.indir)
-            tools.PrepareOutputDir(args.outdir, args.preserve)
-            tools.SetToolPaths(args.toolpath)
+            tools.set_input_dirs(args.indir)
+            tools.prepare_output_dir(args.outdir, args.preserve)
+            tools.set_tool_paths(args.toolpath)
             state.SetEntryArgs(args.entry_arg)
             state.SetThreads(args.threads)

@@ -717,20 +717,20 @@ def Binman(args):

             # Write the updated FDTs to our output files
             for dtb_item in state.GetAllFdts():
-                tools.WriteFile(dtb_item._fname, dtb_item.GetContents())
+                tools.write_file(dtb_item._fname, dtb_item.GetContents())

             if elf_params:
                 data = state.GetFdtForEtype('u-boot-dtb').GetContents()
                 elf.UpdateFile(*elf_params, data)

             if invalid:
-                tout.Warning("\nSome images are invalid")
+                tout.warning("\nSome images are invalid")

             # Use this to debug the time take to pack the image
             #state.TimingShow()
         finally:
-            tools.FinaliseOutputDir()
+            tools.finalise_output_dir()
     finally:
-        tout.Uninit()
+        tout.uninit()

     return 0
@@ -54,7 +54,7 @@ def GetSymbols(fname, patterns):
       key: Name of symbol
         value: Hex value of symbol
     """
-    stdout = tools.Run('objdump', '-t', fname)
+    stdout = tools.run('objdump', '-t', fname)
     lines = stdout.splitlines()
     if patterns:
         re_syms = re.compile('|'.join(patterns))

@@ -154,7 +154,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section):
         entry: Entry to process
         section: Section which can be used to lookup symbol values
     """
-    fname = tools.GetInputFilename(elf_fname)
+    fname = tools.get_input_filename(elf_fname)
     syms = GetSymbols(fname, ['image', 'binman'])
     if not syms:
         return

@@ -185,7 +185,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section):
                     value = -1
                 pack_string = pack_string.lower()
                 value_bytes = struct.pack(pack_string, value)
-                tout.Debug('%s:\n insert %s, offset %x, value %x, length %d' %
+                tout.debug('%s:\n insert %s, offset %x, value %x, length %d' %
                            (msg, name, offset, value, len(value_bytes)))
                 entry.data = (entry.data[:offset] + value_bytes +
                               entry.data[offset + sym.size:])

@@ -282,10 +282,10 @@ SECTIONS
     # text section at the start
     # -m32: Build for 32-bit x86
     # -T...: Specifies the link script, which sets the start address
-    cc, args = tools.GetTargetCompileTool('cc')
+    cc, args = tools.get_target_compile_tool('cc')
     args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T',
              lds_file, '-o', elf_fname, s_file]
-    stdout = command.Output(cc, *args)
+    stdout = command.output(cc, *args)
     shutil.rmtree(outdir)

 def DecodeElf(data, location):

@@ -350,7 +350,7 @@ def DecodeElf(data, location):
                      mem_end - data_start)

 def UpdateFile(infile, outfile, start_sym, end_sym, insert):
-    tout.Notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" %
+    tout.notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" %
                 (outfile, len(insert), len(insert), start_sym, end_sym))
     syms = GetSymbolFileOffset(infile, [start_sym, end_sym])
     if len(syms) != 2:

@@ -363,9 +363,9 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert):
         raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" %
                          (infile, len(insert), len(insert), size, size))

-    data = tools.ReadFile(infile)
+    data = tools.read_file(infile)
     newdata = data[:syms[start_sym].offset]
-    newdata += insert + tools.GetBytes(0, size - len(insert))
+    newdata += insert + tools.get_bytes(0, size - len(insert))
     newdata += data[syms[end_sym].offset:]
-    tools.WriteFile(outfile, newdata)
-    tout.Info('Written to offset %#x' % syms[start_sym].offset)
+    tools.write_file(outfile, newdata)
+    tout.info('Written to offset %#x' % syms[start_sym].offset)
@@ -27,7 +27,7 @@ class FakeEntry:
     """
     def __init__(self, contents_size):
         self.contents_size = contents_size
-        self.data = tools.GetBytes(ord('a'), contents_size)
+        self.data = tools.get_bytes(ord('a'), contents_size)

     def GetPath(self):
         return 'entry_path'

@@ -72,7 +72,7 @@ def BuildElfTestFiles(target_dir):
     if 'MAKEFLAGS' in os.environ:
         del os.environ['MAKEFLAGS']
     try:
-        tools.Run('make', '-C', target_dir, '-f',
+        tools.run('make', '-C', target_dir, '-f',
                   os.path.join(testdir, 'Makefile'), 'SRC=%s/' % testdir)
     except ValueError as e:
         # The test system seems to suppress this in a strange way

@@ -83,7 +83,7 @@ class TestElf(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
         cls._indir = tempfile.mkdtemp(prefix='elf.')
-        tools.SetInputDirs(['.'])
+        tools.set_input_dirs(['.'])
         BuildElfTestFiles(cls._indir)

     @classmethod

@@ -166,13 +166,13 @@ class TestElf(unittest.TestCase):
         section = FakeSection(sym_value=None)
         elf_fname = self.ElfTestFile('u_boot_binman_syms')
         syms = elf.LookupAndWriteSymbols(elf_fname, entry, section)
-        self.assertEqual(tools.GetBytes(255, 20) + tools.GetBytes(ord('a'), 4),
+        self.assertEqual(tools.get_bytes(255, 20) + tools.get_bytes(ord('a'), 4),
                          entry.data)

     def testDebug(self):
         """Check that enabling debug in the elf module produced debug output"""
         try:
-            tout.Init(tout.DEBUG)
+            tout.init(tout.DEBUG)
             entry = FakeEntry(20)
             section = FakeSection()
             elf_fname = self.ElfTestFile('u_boot_binman_syms')

@@ -180,7 +180,7 @@ class TestElf(unittest.TestCase):
             syms = elf.LookupAndWriteSymbols(elf_fname, entry, section)
             self.assertTrue(len(stdout.getvalue()) > 0)
         finally:
-            tout.Init(tout.WARNING)
+            tout.init(tout.WARNING)

     def testMakeElf(self):
         """Test for the MakeElf function"""

@@ -193,9 +193,9 @@ class TestElf(unittest.TestCase):
         # Make an Elf file and then convert it to a fkat binary file. This
         # should produce the original data.
         elf.MakeElf(elf_fname, expected_text, expected_data)
-        objcopy, args = tools.GetTargetCompileTool('objcopy')
+        objcopy, args = tools.get_target_compile_tool('objcopy')
         args += ['-O', 'binary', elf_fname, bin_fname]
-        stdout = command.Output(objcopy, *args)
+        stdout = command.output(objcopy, *args)
         with open(bin_fname, 'rb') as fd:
             data = fd.read()
         self.assertEqual(expected_text + expected_data, data)

@@ -210,7 +210,7 @@ class TestElf(unittest.TestCase):
         expected_data = b'wxyz'
         elf_fname = os.path.join(outdir, 'elf')
         elf.MakeElf(elf_fname, expected_text, expected_data)
-        data = tools.ReadFile(elf_fname)
+        data = tools.read_file(elf_fname)

         load = 0xfef20000
         entry = load + 2

@@ -231,7 +231,7 @@ class TestElf(unittest.TestCase):
         offset = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
         start = offset['embed_start'].offset
         end = offset['embed_end'].offset
-        data = tools.ReadFile(fname)
+        data = tools.read_file(fname)
         embed_data = data[start:end]
         expect = struct.pack('<III', 0x1234, 0x5678, 0)
         self.assertEqual(expect, embed_data)
@ -14,7 +14,7 @@ from binman import bintool
|
||||
from binman import comp_util
|
||||
from dtoc import fdt_util
|
||||
from patman import tools
|
||||
from patman.tools import ToHex, ToHexSize
|
||||
from patman.tools import to_hex, to_hex_size
|
||||
from patman import tout
|
||||
|
||||
modules = {}
|
||||
@ -244,7 +244,7 @@ class Entry(object):
|
||||
self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
|
||||
|
||||
self.align = fdt_util.GetInt(self._node, 'align')
|
||||
if tools.NotPowerOfTwo(self.align):
|
||||
if tools.not_power_of_two(self.align):
|
||||
raise ValueError("Node '%s': Alignment %s must be a power of two" %
|
||||
(self._node.path, self.align))
|
||||
if self.section and self.align is None:
|
||||
@ -252,7 +252,7 @@ class Entry(object):
|
||||
self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
|
||||
self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
|
||||
self.align_size = fdt_util.GetInt(self._node, 'align-size')
|
||||
if tools.NotPowerOfTwo(self.align_size):
|
||||
if tools.not_power_of_two(self.align_size):
|
||||
self.Raise("Alignment size %s must be a power of two" %
|
||||
self.align_size)
|
||||
self.align_end = fdt_util.GetInt(self._node, 'align-end')
|
||||
@ -397,12 +397,12 @@ class Entry(object):
|
||||
|
||||
# Don't let the data shrink. Pad it if necessary
|
||||
if size_ok and new_size < self.contents_size:
|
||||
-            data += tools.GetBytes(0, self.contents_size - new_size)
+            data += tools.get_bytes(0, self.contents_size - new_size)

         if not size_ok:
-            tout.Debug("Entry '%s' size change from %s to %s" % (
-                self._node.path, ToHex(self.contents_size),
-                ToHex(new_size)))
+            tout.debug("Entry '%s' size change from %s to %s" % (
+                self._node.path, to_hex(self.contents_size),
+                to_hex(new_size)))
         self.SetContents(data)
         return size_ok

@@ -419,8 +419,8 @@ class Entry(object):
     def ResetForPack(self):
         """Reset offset/size fields so that packing can be done again"""
         self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
-                    (ToHex(self.offset), ToHex(self.orig_offset),
-                     ToHex(self.size), ToHex(self.orig_size)))
+                    (to_hex(self.offset), to_hex(self.orig_offset),
+                     to_hex(self.size), to_hex(self.orig_size)))
         self.pre_reset_size = self.size
         self.offset = self.orig_offset
         self.size = self.orig_size
@@ -444,20 +444,20 @@ class Entry(object):
             New section offset pointer (after this entry)
         """
         self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
-                    (ToHex(self.offset), ToHex(self.size),
+                    (to_hex(self.offset), to_hex(self.size),
                      self.contents_size))
         if self.offset is None:
             if self.offset_unset:
                 self.Raise('No offset set with offset-unset: should another '
                            'entry provide this correct offset?')
-            self.offset = tools.Align(offset, self.align)
+            self.offset = tools.align(offset, self.align)
         needed = self.pad_before + self.contents_size + self.pad_after
-        needed = tools.Align(needed, self.align_size)
+        needed = tools.align(needed, self.align_size)
         size = self.size
         if not size:
             size = needed
         new_offset = self.offset + size
-        aligned_offset = tools.Align(new_offset, self.align_end)
+        aligned_offset = tools.align(new_offset, self.align_end)
         if aligned_offset != new_offset:
             size = aligned_offset - self.offset
             new_offset = aligned_offset
@@ -471,10 +471,10 @@ class Entry(object):
         # Check that the alignment is correct. It could be wrong if the
         # and offset or size values were provided (i.e. not calculated), but
         # conflict with the provided alignment values
-        if self.size != tools.Align(self.size, self.align_size):
+        if self.size != tools.align(self.size, self.align_size):
             self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
                        (self.size, self.size, self.align_size, self.align_size))
-        if self.offset != tools.Align(self.offset, self.align):
+        if self.offset != tools.align(self.offset, self.align):
             self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
                        (self.offset, self.offset, self.align, self.align))
         self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
@@ -489,12 +489,12 @@ class Entry(object):
     def Info(self, msg):
         """Convenience function to log info referencing a node"""
         tag = "Info '%s'" % self._node.path
-        tout.Detail('%30s: %s' % (tag, msg))
+        tout.detail('%30s: %s' % (tag, msg))

     def Detail(self, msg):
         """Convenience function to log detail referencing a node"""
         tag = "Node '%s'" % self._node.path
-        tout.Detail('%30s: %s' % (tag, msg))
+        tout.detail('%30s: %s' % (tag, msg))

     def GetEntryArgsOrProps(self, props, required=False):
         """Return the values of a set of properties
@@ -541,7 +541,7 @@ class Entry(object):
             bytes content of the entry, excluding any padding. If the entry is
                 compressed, the compressed data is returned
         """
-        self.Detail('GetData: size %s' % ToHexSize(self.data))
+        self.Detail('GetData: size %s' % to_hex_size(self.data))
         return self.data

     def GetPaddedData(self, data=None):
@@ -841,7 +841,7 @@ features to produce new behaviours.
         """
         # Use True here so that we get an uncompressed section to work from,
         # although compressed sections are currently not supported
-        tout.Debug("ReadChildData section '%s', entry '%s'" %
+        tout.debug("ReadChildData section '%s', entry '%s'" %
                    (self.section.GetPath(), self.GetPath()))
         data = self.section.ReadChildData(self, decomp, alt_format)
         return data
@@ -991,7 +991,7 @@ features to produce new behaviours.
             fname (str): Filename of faked file
         """
         if self.allow_fake and not pathlib.Path(fname).is_file():
-            outfname = tools.GetOutputFilename(os.path.basename(fname))
+            outfname = tools.get_output_filename(os.path.basename(fname))
             with open(outfname, "wb") as out:
                 out.truncate(1024)
             self.faked = True
@@ -1076,7 +1076,7 @@ features to produce new behaviours.
         Returns:
             True to use this entry type, False to use the original one
         """
-        tout.Info("Node '%s': etype '%s': %s selected" %
+        tout.info("Node '%s': etype '%s': %s selected" %
                   (node.path, etype, new_etype))
         return True
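The hunks above are part of patman's mechanical CamelCase to snake_case
rename. For orientation, here is a cheat-sheet of the renamed helpers as
they are used after this commit (illustrative snippet, not part of the
diff):

    from patman import tools, tout

    data = tools.get_bytes(0, 16)         # was tools.GetBytes(): 16 zero bytes
    offset = tools.align(0x123, 0x100)    # was tools.Align(): rounds up to 0x200
    tout.debug('offset is %#x' % offset)  # was tout.Debug()
    tout.detail('fine-grained message')   # was tout.Detail()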
@@ -17,10 +17,10 @@ from patman import tools

 class TestEntry(unittest.TestCase):
     def setUp(self):
-        tools.PrepareOutputDir(None)
+        tools.prepare_output_dir(None)

     def tearDown(self):
-        tools.FinaliseOutputDir()
+        tools.finalise_output_dir()

     def GetNode(self):
         binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
@@ -181,7 +181,7 @@ class Entry_atf_fip(Entry_section):
         self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0)
         self._fip_flags = fdt_util.GetInt64(self._node, 'fip-hdr-flags', 0)
         self._fip_align = fdt_util.GetInt(self._node, 'fip-align', 1)
-        if tools.NotPowerOfTwo(self._fip_align):
+        if tools.not_power_of_two(self._fip_align):
             raise ValueError("Node '%s': FIP alignment %s must be a power of two" %
                              (self._node.path, self._fip_align))
         self.ReadEntries()
@@ -37,7 +37,7 @@ class Entry_blob(Entry):

     def ObtainContents(self):
         self._filename = self.GetDefaultFilename()
-        self._pathname = tools.GetInputFilename(self._filename,
+        self._pathname = tools.get_input_filename(self._filename,
             self.external and self.section.GetAllowMissing())
         # Allow the file to be missing
         if not self._pathname:
@@ -68,7 +68,7 @@ class Entry_blob(Entry):
             bytes: Data read
         """
         state.TimingStart('read')
-        indata = tools.ReadFile(pathname)
+        indata = tools.read_file(pathname)
         state.TimingAccum('read')
         state.TimingStart('compress')
         data = self.CompressData(indata)
@@ -38,7 +38,7 @@ class Entry_blob_ext_list(Entry_blob):
         pathnames = []
         for fname in self._filenames:
             fname = self.check_fake_fname(fname)
-            pathname = tools.GetInputFilename(
+            pathname = tools.get_input_filename(
                 fname, self.external and self.section.GetAllowMissing())
             # Allow the file to be missing
             if not pathname:
@@ -140,7 +140,7 @@ class Entry_fdtmap(Entry):
         fdt.pack()
         outfdt = Fdt.FromData(fdt.as_bytearray())
         data = outfdt.GetContents()
-        data = FDTMAP_MAGIC + tools.GetBytes(0, 8) + data
+        data = FDTMAP_MAGIC + tools.get_bytes(0, 8) + data
         return data

     def ObtainContents(self):
@@ -47,7 +47,7 @@ class Entry_files(Entry_section):
                                      'require-matches')

     def ExpandEntries(self):
-        files = tools.GetInputFilenameGlob(self._pattern)
+        files = tools.get_input_filename_glob(self._pattern)
         if self._require_matches and not files:
             self.Raise("Pattern '%s' matched no files" % self._pattern)
         for fname in files:
@@ -31,5 +31,5 @@ class Entry_fill(Entry):
         self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)

     def ObtainContents(self):
-        self.SetContents(tools.GetBytes(self.fill_value, self.size))
+        self.SetContents(tools.get_bytes(self.fill_value, self.size))
         return True
@@ -9,11 +9,12 @@ from collections import defaultdict, OrderedDict
 import libfdt

 from binman.entry import Entry, EntryArg
+from binman.etype.section import Entry_section
 from dtoc import fdt_util
 from dtoc.fdt import Fdt
 from patman import tools

-class Entry_fit(Entry):
+class Entry_fit(Entry_section):
     """Flat Image Tree (FIT)

     This calls mkimage to create a FIT (U-Boot Flat Image Tree) based on the
@@ -112,15 +113,15 @@ class Entry_fit(Entry):
         """
         Members:
             _fit: FIT file being built
-            _fit_sections: dict:
+            _entries: dict from Entry_section:
                 key: relative path to entry Node (from the base of the FIT)
                 value: Entry_section object comprising the contents of this
                     node
         """
         super().__init__(section, etype, node)
         self._fit = None
-        self._fit_sections = {}
         self._fit_props = {}

         for pname, prop in self._node.props.items():
             if pname.startswith('fit,'):
                 self._fit_props[pname] = prop
@@ -185,7 +186,7 @@ class Entry_fit(Entry):
             # 'data' property later.
             entry = Entry.Create(self.section, node, etype='section')
             entry.ReadNode()
-            self._fit_sections[rel_path] = entry
+            self._entries[rel_path] = entry

         for subnode in node.subnodes:
             if has_images and not (subnode.name.startswith('hash') or
@@ -200,19 +201,19 @@ class Entry_fit(Entry):
                     for seq, fdt_fname in enumerate(self._fdts):
                         node_name = subnode.name[1:].replace('SEQ',
                                                              str(seq + 1))
-                        fname = tools.GetInputFilename(fdt_fname + '.dtb')
+                        fname = tools.get_input_filename(fdt_fname + '.dtb')
                         with fsw.add_node(node_name):
                             for pname, prop in subnode.props.items():
                                 val = prop.bytes.replace(
-                                    b'NAME', tools.ToBytes(fdt_fname))
+                                    b'NAME', tools.to_bytes(fdt_fname))
                                 val = val.replace(
-                                    b'SEQ', tools.ToBytes(str(seq + 1)))
+                                    b'SEQ', tools.to_bytes(str(seq + 1)))
                                 fsw.property(pname, val)

                             # Add data for 'fdt' nodes (but not 'config')
                             if depth == 1 and in_images:
                                 fsw.property('data',
-                                             tools.ReadFile(fname))
+                                             tools.read_file(fname))
                 else:
                     if self._fdts is None:
                         if self._fit_list_prop:
@@ -237,19 +238,25 @@ class Entry_fit(Entry):
         self._fdt = Fdt.FromData(fdt.as_bytearray())
         self._fdt.Scan()

-    def ObtainContents(self):
-        """Obtain the contents of the FIT
+    def BuildSectionData(self, required):
+        """Build FIT entry contents

         This adds the 'data' properties to the input ITB (Image-tree Binary)
         then runs mkimage to process it.
+
+        Args:
+            required: True if the data must be present, False if it is OK to
+                return None
+
+        Returns:
+            Contents of the section (bytes)
         """
-        # self._BuildInput() either returns bytes or raises an exception.
         data = self._BuildInput(self._fdt)
         uniq = self.GetUniqueName()
-        input_fname = tools.GetOutputFilename('%s.itb' % uniq)
-        output_fname = tools.GetOutputFilename('%s.fit' % uniq)
-        tools.WriteFile(input_fname, data)
-        tools.WriteFile(output_fname, data)
+        input_fname = tools.get_output_filename('%s.itb' % uniq)
+        output_fname = tools.get_output_filename('%s.fit' % uniq)
+        tools.write_file(input_fname, data)
+        tools.write_file(output_fname, data)

         args = {}
         ext_offset = self._fit_props.get('fit,external-offset')
@@ -259,14 +266,12 @@ class Entry_fit(Entry):
                 'pad': fdt_util.fdt32_to_cpu(ext_offset.value)
             }
         if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
-                            **args) is not None:
-            self.SetContents(tools.ReadFile(output_fname))
-        else:
+                            **args) is None:
             # Bintool is missing; just use empty data as the output
             self.record_missing_bintool(self.mkimage)
-            self.SetContents(tools.GetBytes(0, 1024))
+            return tools.get_bytes(0, 1024)

-        return True
+        return tools.read_file(output_fname)

     def _BuildInput(self, fdt):
         """Finish the FIT by adding the 'data' properties to it
@@ -277,12 +282,8 @@ class Entry_fit(Entry):
         Returns:
             New fdt contents (bytes)
         """
-        for path, section in self._fit_sections.items():
+        for path, section in self._entries.items():
             node = fdt.GetNode(path)
-            # Entry_section.ObtainContents() either returns True or
-            # raises an exception.
-            section.ObtainContents()
-            section.Pack(0)
             data = section.GetData()
             node.AddData('data', data)

@@ -290,20 +291,16 @@ class Entry_fit(Entry):
         data = fdt.GetContents()
         return data

-    def CheckMissing(self, missing_list):
-        """Check if any entries in this FIT have missing external blobs
-
-        If there are missing blobs, the entries are added to the list
-
-        Args:
-            missing_list: List of Entry objects to be added to
-        """
-        for path, section in self._fit_sections.items():
-            section.CheckMissing(missing_list)
-
-    def SetAllowMissing(self, allow_missing):
-        for section in self._fit_sections.values():
-            section.SetAllowMissing(allow_missing)
-
     def AddBintools(self, tools):
+        super().AddBintools(tools)
         self.mkimage = self.AddBintool(tools, 'mkimage')

+    def AddMissingProperties(self, have_image_pos):
+        # We don't want to interfere with any hash properties in the FIT, so
+        # disable this for now.
+        pass
+
+    def SetCalculatedProperties(self):
+        # We don't want to interfere with any hash properties in the FIT, so
+        # disable this for now.
+        pass
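The fit hunks above, and the gbb, intel-ifwi, mkimage and vblock hunks
below, all follow the same missing-bintool pattern: if running the tool
returns None it is not installed, so the entry records that and
substitutes placeholder bytes. A minimal sketch of that contract
(illustrative only; build_or_fake is a made-up name, not part of the
commit):

    def build_or_fake(self, bintool, output_fname, fake_size=1024):
        # Return real output if the bintool ran, dummy bytes otherwise
        if bintool.run(output_fname=output_fname) is None:
            self.record_missing_bintool(bintool)
            return tools.get_bytes(0, fake_size)
        return tools.read_file(output_fname)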
@@ -8,7 +8,7 @@
 from binman.entry import Entry
 from binman import fmap_util
 from patman import tools
-from patman.tools import ToHexSize
+from patman.tools import to_hex_size
 from patman import tout


@@ -46,8 +46,8 @@ class Entry_fmap(Entry):
         """
         def _AddEntries(areas, entry):
             entries = entry.GetEntries()
-            tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
-                       (entry.GetPath(), entry.etype, ToHexSize(entries)))
+            tout.debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
+                       (entry.GetPath(), entry.etype, to_hex_size(entries)))
             if entries and entry.etype != 'cbfs':
                 # Create an area for the section, which encompasses all entries
                 # within it
@@ -70,14 +70,14 @@ class Entry_gbb(Entry):

     def ObtainContents(self):
         gbb = 'gbb.bin'
-        fname = tools.GetOutputFilename(gbb)
+        fname = tools.get_output_filename(gbb)
         if not self.size:
             self.Raise('GBB must have a fixed size')
         gbb_size = self.size
         bmpfv_size = gbb_size - 0x2180
         if bmpfv_size < 0:
             self.Raise('GBB is too small (minimum 0x2180 bytes)')
-        keydir = tools.GetInputFilename(self.keydir)
+        keydir = tools.get_input_filename(self.keydir)

         stdout = self.futility.gbb_create(
             fname, [0x100, 0x1000, bmpfv_size, 0x1000])
@@ -88,14 +88,14 @@ class Entry_gbb(Entry):
                 rootkey='%s/root_key.vbpubk' % keydir,
                 recoverykey='%s/recovery_key.vbpubk' % keydir,
                 flags=self.gbb_flags,
-                bmpfv=tools.GetInputFilename(self.bmpblk))
+                bmpfv=tools.get_input_filename(self.bmpblk))

         if stdout is not None:
-            self.SetContents(tools.ReadFile(fname))
+            self.SetContents(tools.read_file(fname))
         else:
             # Bintool is missing; just use the required amount of zero data
             self.record_missing_bintool(self.futility)
-            self.SetContents(tools.GetBytes(0, gbb_size))
+            self.SetContents(tools.get_bytes(0, gbb_size))

         return True
@@ -58,11 +58,11 @@ class Entry_intel_ifwi(Entry_blob_ext):
         # Create the IFWI file if needed
         if self._convert_fit:
             inname = self._pathname
-            outname = tools.GetOutputFilename('ifwi.bin')
+            outname = tools.get_output_filename('ifwi.bin')
             if self.ifwitool.create_ifwi(inname, outname) is None:
                 # Bintool is missing; just create a zeroed ifwi.bin
                 self.record_missing_bintool(self.ifwitool)
-                self.SetContents(tools.GetBytes(0, 1024))
+                self.SetContents(tools.get_bytes(0, 1024))

             self._filename = 'ifwi.bin'
             self._pathname = outname
@@ -74,15 +74,15 @@ class Entry_intel_ifwi(Entry_blob_ext):
         if self.ifwitool.delete_subpart(outname, 'OBBP') is None:
             # Bintool is missing; just use zero data
             self.record_missing_bintool(self.ifwitool)
-            self.SetContents(tools.GetBytes(0, 1024))
+            self.SetContents(tools.get_bytes(0, 1024))
             return True

         for entry in self._ifwi_entries.values():
             # First get the input data and put it in a file
             data = entry.GetPaddedData()
             uniq = self.GetUniqueName()
-            input_fname = tools.GetOutputFilename('input.%s' % uniq)
-            tools.WriteFile(input_fname, data)
+            input_fname = tools.get_output_filename('input.%s' % uniq)
+            tools.write_file(input_fname, data)

             # At this point we know that ifwitool is present, so we don't need
             # to check for None here
@@ -107,7 +107,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
         After that we delete the OBBP sub-partition and add each of the files
         that we want in the IFWI file, one for each sub-entry of the IWFI node.
         """
-        self._pathname = tools.GetInputFilename(self._filename,
+        self._pathname = tools.get_input_filename(self._filename,
                                                 self.section.GetAllowMissing())
         # Allow the file to be missing
         if not self._pathname:
@@ -48,12 +48,12 @@ class Entry_mkimage(Entry):
                 return False
             data += entry.GetData()
         uniq = self.GetUniqueName()
-        input_fname = tools.GetOutputFilename('mkimage.%s' % uniq)
-        tools.WriteFile(input_fname, data)
-        output_fname = tools.GetOutputFilename('mkimage-out.%s' % uniq)
+        input_fname = tools.get_output_filename('mkimage.%s' % uniq)
+        tools.write_file(input_fname, data)
+        output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
         if self.mkimage.run_cmd('-d', input_fname, *self._args,
                                 output_fname) is not None:
-            self.SetContents(tools.ReadFile(output_fname))
+            self.SetContents(tools.read_file(output_fname))
         else:
             # Bintool is missing; just use the input data as the output
             self.record_missing_bintool(self.mkimage)
@@ -19,7 +19,7 @@ from binman import state
 from dtoc import fdt_util
 from patman import tools
 from patman import tout
-from patman.tools import ToHexSize
+from patman.tools import to_hex_size


 class Entry_section(Entry):
@@ -269,19 +269,19 @@ class Entry_section(Entry):
         data = bytearray()
         # Handle padding before the entry
         if entry.pad_before:
-            data += tools.GetBytes(self._pad_byte, entry.pad_before)
+            data += tools.get_bytes(self._pad_byte, entry.pad_before)

         # Add in the actual entry data
         data += entry_data

         # Handle padding after the entry
         if entry.pad_after:
-            data += tools.GetBytes(self._pad_byte, entry.pad_after)
+            data += tools.get_bytes(self._pad_byte, entry.pad_after)

         if entry.size:
-            data += tools.GetBytes(pad_byte, entry.size - len(data))
+            data += tools.get_bytes(pad_byte, entry.size - len(data))

-        self.Detail('GetPaddedDataForEntry: size %s' % ToHexSize(self.data))
+        self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data))

         return data

@@ -316,7 +316,7 @@ class Entry_section(Entry):
             # Handle empty space before the entry
             pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
             if pad > 0:
-                section_data += tools.GetBytes(self._pad_byte, pad)
+                section_data += tools.get_bytes(self._pad_byte, pad)

             # Add in the actual entry data
             section_data += data
@@ -709,14 +709,14 @@ class Entry_section(Entry):
         if not size:
             data = self.GetPaddedData(self.data)
             size = len(data)
-            size = tools.Align(size, self.align_size)
+            size = tools.align(size, self.align_size)

         if self.size and contents_size > self.size:
             self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" %
                         (contents_size, contents_size, self.size, self.size))
         if not self.size:
             self.size = size
-        if self.size != tools.Align(self.size, self.align_size):
+        if self.size != tools.align(self.size, self.align_size):
             self._Raise("Size %#x (%d) does not match align-size %#x (%d)" %
                         (self.size, self.size, self.align_size,
                          self.align_size))
@@ -757,28 +757,28 @@ class Entry_section(Entry):
         return self._sort

     def ReadData(self, decomp=True, alt_format=None):
-        tout.Info("ReadData path='%s'" % self.GetPath())
+        tout.info("ReadData path='%s'" % self.GetPath())
         parent_data = self.section.ReadData(True, alt_format)
         offset = self.offset - self.section._skip_at_start
         data = parent_data[offset:offset + self.size]
-        tout.Info(
+        tout.info(
             '%s: Reading data from offset %#x-%#x (real %#x), size %#x, got %#x' %
             (self.GetPath(), self.offset, self.offset + self.size, offset,
              self.size, len(data)))
         return data

     def ReadChildData(self, child, decomp=True, alt_format=None):
-        tout.Debug(f"ReadChildData for child '{child.GetPath()}'")
+        tout.debug(f"ReadChildData for child '{child.GetPath()}'")
         parent_data = self.ReadData(True, alt_format)
         offset = child.offset - self._skip_at_start
-        tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
+        tout.debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
                    (child.GetPath(), child.offset, self._skip_at_start, offset))
         data = parent_data[offset:offset + child.size]
         if decomp:
             indata = data
             data = comp_util.decompress(indata, child.compress)
             if child.uncomp_size:
-                tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
+                tout.info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                           (child.GetPath(), len(indata), child.compress,
                            len(data)))
         if alt_format:
@@ -840,6 +840,7 @@ class Entry_section(Entry):
         Args:
             missing_list: List of Bintool objects to be added to
         """
+        super().check_missing_bintools(missing_list)
         for entry in self._entries.values():
             entry.check_missing_bintools(missing_list)
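For readers new to binman, the padding scheme touched in
GetPaddedDataForEntry() above has four steps: optional padding before the
entry, the entry data itself, optional padding after it, and a final fill
out to the fixed entry size. A worked sketch with made-up values (not part
of the diff):

    pad_byte = 0xff
    entry_data = b'\x01\x02'
    pad_before, pad_after, entry_size = 2, 1, 8

    data = bytes([pad_byte] * pad_before)   # padding before the entry
    data += entry_data                      # the entry itself
    data += bytes([pad_byte] * pad_after)   # padding after the entry
    data += bytes([pad_byte] * (entry_size - len(data)))  # fill to size
    assert len(data) == entry_size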
@@ -60,14 +60,14 @@ class Entry_text(Entry):
         super().__init__(section, etype, node)
         value = fdt_util.GetString(self._node, 'text')
         if value:
-            value = tools.ToBytes(value)
+            value = tools.to_bytes(value)
         else:
             label, = self.GetEntryArgsOrProps([EntryArg('text-label', str)])
             self.text_label = label
             if self.text_label:
                 value, = self.GetEntryArgsOrProps([EntryArg(self.text_label,
                                                             str)])
-                value = tools.ToBytes(value) if value is not None else value
+                value = tools.to_bytes(value) if value is not None else value
         self.value = value

     def ObtainContents(self):
@@ -27,9 +27,9 @@ class Entry_u_boot_elf(Entry_blob):
     def ReadBlobContents(self):
         if self._strip:
             uniq = self.GetUniqueName()
-            out_fname = tools.GetOutputFilename('%s.stripped' % uniq)
-            tools.WriteFile(out_fname, tools.ReadFile(self._pathname))
-            tools.Run('strip', out_fname)
+            out_fname = tools.get_output_filename('%s.stripped' % uniq)
+            tools.write_file(out_fname, tools.read_file(self._pathname))
+            tools.run('strip', out_fname)
             self._pathname = out_fname
         super().ReadBlobContents()
         return True
@@ -27,7 +27,7 @@ class Entry_u_boot_env(Entry_blob):
         self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)

     def ReadBlobContents(self):
-        indata = tools.ReadFile(self._pathname)
+        indata = tools.read_file(self._pathname)
         data = b''
         for line in indata.splitlines():
             data += line + b'\0'
@@ -35,7 +35,7 @@ class Entry_u_boot_env(Entry_blob):
         pad = self.size - len(data) - 5
         if pad < 0:
             self.Raise("'u-boot-env' entry too small to hold data (need %#x more bytes)" % -pad)
-        data += tools.GetBytes(self.fill_value, pad)
+        data += tools.get_bytes(self.fill_value, pad)
         crc = zlib.crc32(data)
         buf = struct.pack('<I', crc) + b'\x01' + data
         self.SetContents(buf)
@@ -36,9 +36,9 @@ class Entry_u_boot_spl_bss_pad(Entry_blob):
         super().__init__(section, etype, node)

     def ObtainContents(self):
-        fname = tools.GetInputFilename('spl/u-boot-spl')
+        fname = tools.get_input_filename('spl/u-boot-spl')
         bss_size = elf.GetSymbolAddress(fname, '__bss_size')
         if not bss_size:
             self.Raise('Expected __bss_size symbol in spl/u-boot-spl')
-        self.SetContents(tools.GetBytes(0, bss_size))
+        self.SetContents(tools.get_bytes(0, bss_size))
         return True
@@ -39,7 +39,7 @@ class Entry_u_boot_spl_expanded(Entry_blob_phase):
     @classmethod
     def UseExpanded(cls, node, etype, new_etype):
         val = state.GetEntryArgBool('spl-dtb')
-        tout.DoOutput(tout.INFO if val else tout.DETAIL,
+        tout.do_output(tout.INFO if val else tout.DETAIL,
                       "Node '%s': etype '%s': %s %sselected" %
                       (node.path, etype, new_etype, '' if val else 'not '))
         return val
@@ -36,9 +36,9 @@ class Entry_u_boot_tpl_bss_pad(Entry_blob):
         super().__init__(section, etype, node)

     def ObtainContents(self):
-        fname = tools.GetInputFilename('tpl/u-boot-tpl')
+        fname = tools.get_input_filename('tpl/u-boot-tpl')
         bss_size = elf.GetSymbolAddress(fname, '__bss_size')
         if not bss_size:
             self.Raise('Expected __bss_size symbol in tpl/u-boot-tpl')
-        self.SetContents(tools.GetBytes(0, bss_size))
+        self.SetContents(tools.get_bytes(0, bss_size))
         return True
@@ -39,7 +39,7 @@ class Entry_u_boot_tpl_expanded(Entry_blob_phase):
     @classmethod
     def UseExpanded(cls, node, etype, new_etype):
         val = state.GetEntryArgBool('tpl-dtb')
-        tout.DoOutput(tout.INFO if val else tout.DETAIL,
+        tout.do_output(tout.INFO if val else tout.DETAIL,
                       "Node '%s': etype '%s': %s %sselected" %
                       (node.path, etype, new_etype, '' if val else 'not '))
         return val
@@ -92,8 +92,8 @@ class Entry_u_boot_ucode(Entry_blob):
             return True

         # Write it out to a file
-        self._pathname = tools.GetOutputFilename('u-boot-ucode.bin')
-        tools.WriteFile(self._pathname, fdt_entry.ucode_data)
+        self._pathname = tools.get_output_filename('u-boot-ucode.bin')
+        tools.write_file(self._pathname, fdt_entry.ucode_data)

         self.ReadBlobContents()
@@ -38,7 +38,7 @@ class Entry_u_boot_with_ucode_ptr(Entry_blob):

     def ProcessFdt(self, fdt):
         # Figure out where to put the microcode pointer
-        fname = tools.GetInputFilename(self.elf_fname)
+        fname = tools.get_input_filename(self.elf_fname)
         sym = elf.GetSymbolAddress(fname, '_dt_ucode_base_size')
         if sym:
             self.target_offset = sym
@@ -65,9 +65,9 @@ class Entry_vblock(Entry_collection):
             return None

         uniq = self.GetUniqueName()
-        output_fname = tools.GetOutputFilename('vblock.%s' % uniq)
-        input_fname = tools.GetOutputFilename('input.%s' % uniq)
-        tools.WriteFile(input_fname, input_data)
+        output_fname = tools.get_output_filename('vblock.%s' % uniq)
+        input_fname = tools.get_output_filename('input.%s' % uniq)
+        tools.write_file(input_fname, input_data)
         prefix = self.keydir + '/'
         stdout = self.futility.sign_firmware(
             vblock=output_fname,
@@ -78,11 +78,11 @@ class Entry_vblock(Entry_collection):
             kernelkey=prefix + self.kernelkey,
             flags=f'{self.preamble_flags}')
         if stdout is not None:
-            data = tools.ReadFile(output_fname)
+            data = tools.read_file(output_fname)
         else:
             # Bintool is missing; just use 4KB of zero data
             self.record_missing_bintool(self.futility)
-            data = tools.GetBytes(0, 4096)
+            data = tools.get_bytes(0, 4096)
         return data

     def ObtainContents(self):
@@ -19,11 +19,11 @@ class TestFdt(unittest.TestCase):
     def setUpClass(self):
         self._binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
         self._indir = tempfile.mkdtemp(prefix='binmant.')
-        tools.PrepareOutputDir(self._indir, True)
+        tools.prepare_output_dir(self._indir, True)

     @classmethod
     def tearDownClass(self):
-        tools._FinaliseForTest()
+        tools._finalise_for_test()

     def TestFile(self, fname):
         return os.path.join(self._binman_dir, 'test', fname)
@@ -248,7 +248,7 @@ class FipEntry:
         self.flags = flags
         self.fip_type = None
         self.data = None
-        self.valid = uuid != tools.GetBytes(0, UUID_LEN)
+        self.valid = uuid != tools.get_bytes(0, UUID_LEN)
         if self.valid:
             # Look up the friendly name
             matches = {val for (key, val) in FIP_TYPES.items()
@@ -309,7 +309,7 @@ class FipWriter:
     Usage is something like:

         fip = FipWriter(size)
-        fip.add_entry('scp-fwu-cfg', tools.ReadFile('something.bin'))
+        fip.add_entry('scp-fwu-cfg', tools.read_file('something.bin'))
         ...
         data = cbw.get_data()

@@ -354,7 +354,7 @@ class FipWriter:
         offset += ENTRY_SIZE # terminating entry

         for fent in self._fip_entries:
-            offset = tools.Align(offset, self._align)
+            offset = tools.align(offset, self._align)
             fent.offset = offset
             offset += fent.size

@@ -443,7 +443,7 @@ def parse_macros(srcdir):
     re_uuid = re.compile('0x[0-9a-fA-F]{2}')
     re_comment = re.compile(r'^/\* (.*) \*/$')
     fname = os.path.join(srcdir, 'include/tools_share/firmware_image_package.h')
-    data = tools.ReadFile(fname, binary=False)
+    data = tools.read_file(fname, binary=False)
     macros = collections.OrderedDict()
     comment = None
     for linenum, line in enumerate(data.splitlines()):
@@ -489,7 +489,7 @@ def parse_names(srcdir):
     re_data = re.compile(r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"',
                          re.S)
     fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c')
-    data = tools.ReadFile(fname, binary=False)
+    data = tools.read_file(fname, binary=False)

     # Example entry:
     #   {
@@ -574,21 +574,21 @@ def parse_atf_source(srcdir, dstfile, oldfile):
         raise ValueError(
             f"Expected file '{readme_fname}' - try using -s to specify the "
             'arm-trusted-firmware directory')
-    readme = tools.ReadFile(readme_fname, binary=False)
+    readme = tools.read_file(readme_fname, binary=False)
     first_line = 'Trusted Firmware-A'
     if readme.splitlines()[0] != first_line:
         raise ValueError(f"'{readme_fname}' does not start with '{first_line}'")
     macros = parse_macros(srcdir)
     names = parse_names(srcdir)
     output = create_code_output(macros, names)
-    orig = tools.ReadFile(oldfile, binary=False)
+    orig = tools.read_file(oldfile, binary=False)
     re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S)
     mat = re_fip_list.match(orig)
     new_code = mat.group(1) + '\n' + output + mat.group(2) if mat else output
     if new_code == orig:
         print(f"Existing code in '{oldfile}' is up-to-date")
     else:
-        tools.WriteFile(dstfile, new_code, binary=False)
+        tools.write_file(dstfile, new_code, binary=False)
         print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}')
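The tools.align() call above is what packs each FIP entry at the next
aligned offset. A tiny worked example (illustrative values, not part of
the diff):

    from patman import tools

    align = 0x10
    offset = 0x68                        # end of the previous entry
    offset = tools.align(offset, align)  # -> 0x70, the next aligned boundary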
@@ -35,14 +35,14 @@ class TestFip(unittest.TestCase):
     def setUp(self):
         # Create a temporary directory for test files
         self._indir = tempfile.mkdtemp(prefix='fip_util.')
-        tools.SetInputDirs([self._indir])
+        tools.set_input_dirs([self._indir])

         # Set up a temporary output directory, used by the tools library when
         # compressing files
-        tools.PrepareOutputDir(None)
+        tools.prepare_output_dir(None)

         self.src_file = os.path.join(self._indir, 'orig.py')
-        self.outname = tools.GetOutputFilename('out.py')
+        self.outname = tools.get_output_filename('out.py')
         self.args = ['-D', '-s', self._indir, '-o', self.outname]
         self.readme = os.path.join(self._indir, 'readme.rst')
         self.macro_dir = os.path.join(self._indir, 'include/tools_share')
@@ -78,25 +78,25 @@ toc_entry_t toc_entries[] = {

     def setup_readme(self):
         """Set up the readme.txt file"""
-        tools.WriteFile(self.readme, 'Trusted Firmware-A\n==================',
+        tools.write_file(self.readme, 'Trusted Firmware-A\n==================',
                         binary=False)

     def setup_macro(self, data=macro_contents):
         """Set up the tbbr_config.c file"""
         os.makedirs(self.macro_dir)
-        tools.WriteFile(self.macro_fname, data, binary=False)
+        tools.write_file(self.macro_fname, data, binary=False)

     def setup_name(self, data=name_contents):
         """Set up the firmware_image_package.h file"""
         os.makedirs(self.name_dir)
-        tools.WriteFile(self.name_fname, data, binary=False)
+        tools.write_file(self.name_fname, data, binary=False)

     def tearDown(self):
         """Remove the temporary input directory and its contents"""
         if self._indir:
             shutil.rmtree(self._indir)
         self._indir = None
-        tools.FinaliseOutputDir()
+        tools.finalise_output_dir()

     def test_no_readme(self):
         """Test handling of a missing readme.rst"""
@@ -106,7 +106,7 @@ toc_entry_t toc_entries[] = {

     def test_invalid_readme(self):
         """Test that an invalid readme.rst is detected"""
-        tools.WriteFile(self.readme, 'blah', binary=False)
+        tools.write_file(self.readme, 'blah', binary=False)
         with self.assertRaises(Exception) as err:
             fip_util.main(self.args, self.src_file)
         self.assertIn('does not start with', str(err.exception))
@@ -228,7 +228,7 @@ toc_entry_t toc_entries[] = {
         self.setup_name()

         # Check generating the file when changes are needed
-        tools.WriteFile(self.src_file, '''
+        tools.write_file(self.src_file, '''

 # This is taken from tbbr_config.c in ARM Trusted Firmware
 FIP_TYPE_LIST = [
@@ -244,7 +244,7 @@ blah de blah
         self.assertIn('Needs update', stdout.getvalue())

         # Check generating the file when no changes are needed
-        tools.WriteFile(self.src_file, '''
+        tools.write_file(self.src_file, '''
 # This is taken from tbbr_config.c in ARM Trusted Firmware
 FIP_TYPE_LIST = [
     # ToC Entry UUIDs
@@ -268,7 +268,7 @@ blah blah''', binary=False)

         args = self.args.copy()
         args.remove('-D')
-        tools.WriteFile(self.src_file, '', binary=False)
+        tools.write_file(self.src_file, '', binary=False)
         with test_util.capture_sys_output():
             fip_util.main(args, self.src_file)

@@ -282,8 +282,8 @@ blah blah''', binary=False)
         fip.add_entry('tb-fw', tb_fw, 0)
         fip.add_entry(bytes(range(16)), tb_fw, 0)
         data = fip.get_data()
-        fname = tools.GetOutputFilename('data.fip')
-        tools.WriteFile(fname, data)
+        fname = tools.get_output_filename('data.fip')
+        tools.write_file(fname, data)
         result = FIPTOOL.info(fname)
         self.assertEqual(
             '''Firmware Updater NS_BL2U: offset=0xB0, size=0x7, cmdline="--fwu"
@@ -303,19 +303,19 @@ Trusted Boot Firmware BL2: offset=0xC0, size=0xE, cmdline="--tb-fw"
             FipReader: reader for the image
         """
         fwu = os.path.join(self._indir, 'fwu')
-        tools.WriteFile(fwu, self.fwu_data)
+        tools.write_file(fwu, self.fwu_data)

         tb_fw = os.path.join(self._indir, 'tb_fw')
-        tools.WriteFile(tb_fw, self.tb_fw_data)
+        tools.write_file(tb_fw, self.tb_fw_data)

         other_fw = os.path.join(self._indir, 'other_fw')
-        tools.WriteFile(other_fw, self.other_fw_data)
+        tools.write_file(other_fw, self.other_fw_data)

-        fname = tools.GetOutputFilename('data.fip')
+        fname = tools.get_output_filename('data.fip')
         uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788'
         FIPTOOL.create_new(fname, 8, 0x123, fwu, tb_fw, uuid, other_fw)

-        return fip_util.FipReader(tools.ReadFile(fname))
+        return fip_util.FipReader(tools.read_file(fname))

     @unittest.skipIf(not HAVE_FIPTOOL, 'No fiptool available')
     def test_fiptool_create(self):
@@ -70,7 +70,7 @@ def ConvertName(field_names, fields):
         value: value of that field (string for the ones we support)
     """
     name_index = field_names.index('name')
-    fields[name_index] = tools.ToBytes(NameToFmap(fields[name_index]))
+    fields[name_index] = tools.to_bytes(NameToFmap(fields[name_index]))

 def DecodeFmap(data):
     """Decode a flashmap into a header and list of areas
(File diff suppressed because it is too large)
@@ -111,7 +111,7 @@ class Image(section.Entry_section):
         Raises:
             ValueError if something goes wrong
         """
-        data = tools.ReadFile(fname)
+        data = tools.read_file(fname)
         size = len(data)

         # First look for an image header
@@ -128,8 +128,8 @@ class Image(section.Entry_section):
         dtb_size = probe_dtb.GetFdtObj().totalsize()
         fdtmap_data = data[pos:pos + dtb_size + fdtmap.FDTMAP_HDR_LEN]
         fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:]
-        out_fname = tools.GetOutputFilename('fdtmap.in.dtb')
-        tools.WriteFile(out_fname, fdt_data)
+        out_fname = tools.get_output_filename('fdtmap.in.dtb')
+        tools.write_file(out_fname, fdt_data)
         dtb = fdt.Fdt(out_fname)
         dtb.Scan()

@@ -174,12 +174,12 @@ class Image(section.Entry_section):

     def BuildImage(self):
         """Write the image to a file"""
-        fname = tools.GetOutputFilename(self._filename)
-        tout.Info("Writing image to '%s'" % fname)
+        fname = tools.get_output_filename(self._filename)
+        tout.info("Writing image to '%s'" % fname)
         with open(fname, 'wb') as fd:
             data = self.GetPaddedData()
             fd.write(data)
-        tout.Info("Wrote %#x bytes" % len(data))
+        tout.info("Wrote %#x bytes" % len(data))

     def WriteMap(self):
         """Write a map of the image to a .map file
@@ -188,7 +188,7 @@ class Image(section.Entry_section):
             Filename of map file written
         """
         filename = '%s.map' % self.image_name
-        fname = tools.GetOutputFilename(filename)
+        fname = tools.get_output_filename(filename)
         with open(fname, 'w') as fd:
             print('%8s %8s %8s  %s' % ('ImagePos', 'Offset', 'Size', 'Name'),
                   file=fd)
@@ -230,7 +230,7 @@ class Image(section.Entry_section):
         return entry

     def ReadData(self, decomp=True, alt_format=None):
-        tout.Debug("Image '%s' ReadData(), size=%#x" %
+        tout.debug("Image '%s' ReadData(), size=%#x" %
                    (self.GetPath(), len(self._data)))
         return self._data
@@ -84,14 +84,14 @@ def RunTests(debug, verbosity, processes, test_preserve_dirs, args, toolpath):

     # Run the entry tests first ,since these need to be the first to import the
     # 'entry' module.
-    test_util.RunTestSuites(
+    test_util.run_test_suites(
         result, debug, verbosity, test_preserve_dirs, processes, test_name,
         toolpath,
         [bintool_test.TestBintool, entry_test.TestEntry, ftest.TestFunctional,
          fdt_test.TestFdt, elf_test.TestElf, image_test.TestImage,
          cbfs_util_test.TestCbfs, fip_util_test.TestFip])

-    return test_util.ReportResult('binman', test_name, result)
+    return test_util.report_result('binman', test_name, result)

 def RunTestCoverage(toolpath):
     """Run the tests and check that we get 100% coverage"""
@@ -102,7 +102,7 @@ def RunTestCoverage(toolpath):
     if toolpath:
         for path in toolpath:
             extra_args += ' --toolpath %s' % path
-    test_util.RunTestCoverage('tools/binman/binman', None,
+    test_util.run_test_coverage('tools/binman/binman', None,
             ['*test*', '*main.py', 'tools/patman/*', 'tools/dtoc/*'],
             args.build_dir, all_set, extra_args or None)
@@ -138,8 +138,8 @@ def GetFdtContents(etype='u-boot-dtb'):
         data = GetFdtForEtype(etype).GetContents()
     else:
         fname = output_fdt_info[etype][1]
-        pathname = tools.GetInputFilename(fname)
-        data = tools.ReadFile(pathname)
+        pathname = tools.get_input_filename(fname)
+        data = tools.read_file(pathname)
     return pathname, data

 def UpdateFdtContents(etype, data):
@@ -154,7 +154,7 @@ def UpdateFdtContents(etype, data):
     """
     dtb, fname = output_fdt_info[etype]
     dtb_fname = dtb.GetFilename()
-    tools.WriteFile(dtb_fname, data)
+    tools.write_file(dtb_fname, data)
     dtb = fdt.FdtScan(dtb_fname)
     output_fdt_info[etype] = [dtb, fname]

@@ -170,16 +170,16 @@ def SetEntryArgs(args):
     global entry_args

     entry_args = {}
-    tout.Debug('Processing entry args:')
+    tout.debug('Processing entry args:')
     if args:
         for arg in args:
             m = re.match('([^=]*)=(.*)', arg)
             if not m:
                 raise ValueError("Invalid entry arguemnt '%s'" % arg)
             name, value = m.groups()
-            tout.Debug('   %20s = %s' % (name, value))
+            tout.debug('   %20s = %s' % (name, value))
             entry_args[name] = value
-    tout.Debug('Processing entry args done')
+    tout.debug('Processing entry args done')

 def GetEntryArg(name):
     """Get the value of an entry argument
@@ -235,12 +235,12 @@ def Prepare(images, dtb):
     else:
         fdt_set = {}
         for etype, fname in DTB_TYPE_FNAME.items():
-            infile = tools.GetInputFilename(fname, allow_missing=True)
+            infile = tools.get_input_filename(fname, allow_missing=True)
             if infile and os.path.exists(infile):
                 fname_dtb = fdt_util.EnsureCompiled(infile)
-                out_fname = tools.GetOutputFilename('%s.out' %
+                out_fname = tools.get_output_filename('%s.out' %
                         os.path.split(fname)[1])
-                tools.WriteFile(out_fname, tools.ReadFile(fname_dtb))
+                tools.write_file(out_fname, tools.read_file(fname_dtb))
                 other_dtb = fdt.FdtScan(out_fname)
                 output_fdt_info[etype] = [other_dtb, out_fname]

@@ -263,21 +263,21 @@ def PrepareFromLoadedData(image):
     """
     global output_fdt_info, main_dtb, fdt_path_prefix

-    tout.Info('Preparing device trees')
+    tout.info('Preparing device trees')
     output_fdt_info.clear()
     fdt_path_prefix = ''
     output_fdt_info['fdtmap'] = [image.fdtmap_dtb, 'u-boot.dtb']
     main_dtb = None
-    tout.Info("   Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name)
+    tout.info("   Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name)
     for etype, value in image.GetFdts().items():
         entry, fname = value
-        out_fname = tools.GetOutputFilename('%s.dtb' % entry.etype)
-        tout.Info("   Found device tree type '%s' at '%s' path '%s'" %
+        out_fname = tools.get_output_filename('%s.dtb' % entry.etype)
+        tout.info("   Found device tree type '%s' at '%s' path '%s'" %
                   (etype, out_fname, entry.GetPath()))
         entry._filename = entry.GetDefaultFilename()
         data = entry.ReadData()

-        tools.WriteFile(out_fname, data)
+        tools.write_file(out_fname, data)
         dtb = fdt.Fdt(out_fname)
         dtb.Scan()
         image_node = dtb.GetNode('/binman')
@@ -285,7 +285,7 @@ def PrepareFromLoadedData(image):
             image_node = dtb.GetNode('/binman/%s' % image.image_node)
         fdt_path_prefix = image_node.path
         output_fdt_info[etype] = [dtb, None]
-    tout.Info("   FDT path prefix '%s'" % fdt_path_prefix)
+    tout.info("   FDT path prefix '%s'" % fdt_path_prefix)


 def GetAllFdts():
@@ -384,7 +384,7 @@ def SetInt(node, prop, value, for_repack=False):
         for_repack: True is this property is only needed for repacking
     """
     for n in GetUpdateNodes(node, for_repack):
-        tout.Detail("File %s: Update node '%s' prop '%s' to %#x" %
+        tout.detail("File %s: Update node '%s' prop '%s' to %#x" %
                     (n.GetFdt().name, n.path, prop, value))
         n.SetInt(prop, value)

@@ -529,7 +529,7 @@ def GetVersion(path=OUR_PATH):
     """
     version_fname = os.path.join(path, 'version')
    if os.path.exists(version_fname):
-        version = tools.ReadFile(version_fname, binary=False)
+        version = tools.read_file(version_fname, binary=False)
     else:
         version = '(unreleased)'
     return version
tools/binman/test/220_fit_subentry_bintool.dts (new file, 39 lines)
@@ -0,0 +1,39 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+	#address-cells = <1>;
+	#size-cells = <1>;
+
+	binman {
+		fit {
+			description = "test-desc";
+			#address-cells = <1>;
+
+			images {
+				test {
+					description = "Something using a bintool";
+					type = "kernel";
+					arch = "arm";
+					os = "linux";
+					compression = "gzip";
+					load = <00000000>;
+					entry = <00000000>;
+
+					gbb {
+						size = <0x2180>;
+					};
+				};
+			};
+
+			configurations {
+				default = "conf-1";
+				conf-1 {
+					description = "Boot bintool output";
+					kernel = "kernel";
+				};
+			};
+		};
+	};
+};
@ -22,7 +22,7 @@ from buildman import toolchain
|
||||
from patman import command
|
||||
from patman import gitutil
|
||||
from patman import terminal
|
||||
from patman.terminal import Print
|
||||
from patman.terminal import tprint
|
||||
|
||||
# This indicates an new int or hex Kconfig property with no default
|
||||
# It hangs the build since the 'conf' tool cannot proceed without valid input.
|
||||
@ -442,7 +442,7 @@ class Builder:
|
||||
"""
|
||||
self.commit = commit
|
||||
if checkout and self.checkout:
|
||||
gitutil.Checkout(commit.hash)
|
||||
gitutil.checkout(commit.hash)
|
||||
|
||||
def Make(self, commit, brd, stage, cwd, *args, **kwargs):
|
||||
"""Run make
|
||||
@ -453,7 +453,7 @@ class Builder:
|
||||
stage: Stage that we are at (mrproper, config, build)
|
||||
cwd: Directory where make should be run
|
||||
args: Arguments to pass to make
|
||||
kwargs: Arguments to pass to command.RunPipe()
|
||||
kwargs: Arguments to pass to command.run_pipe()
|
||||
"""
|
||||
|
||||
def check_output(stream, data):
|
||||
@ -476,7 +476,7 @@ class Builder:
|
||||
self._restarting_config = False
|
||||
self._terminated = False
|
||||
cmd = [self.gnu_make] + list(args)
|
||||
result = command.RunPipe([cmd], capture=True, capture_stderr=True,
|
||||
result = command.run_pipe([cmd], capture=True, capture_stderr=True,
|
||||
cwd=cwd, raise_on_error=False, infile='/dev/null',
|
||||
output_func=check_output, **kwargs)
|
||||
|
||||
@ -508,7 +508,7 @@ class Builder:
|
||||
if result.already_done:
|
||||
self.already_done += 1
|
||||
if self._verbose:
|
||||
terminal.PrintClear()
|
||||
terminal.print_clear()
|
||||
boards_selected = {target : result.brd}
|
||||
self.ResetResultSummary(boards_selected)
|
||||
self.ProduceResultSummary(result.commit_upto, self.commits,
|
||||
@ -518,14 +518,14 @@ class Builder:
|
||||
|
||||
# Display separate counts for ok, warned and fail
|
||||
ok = self.upto - self.warned - self.fail
|
||||
line = '\r' + self.col.Color(self.col.GREEN, '%5d' % ok)
|
||||
line += self.col.Color(self.col.YELLOW, '%5d' % self.warned)
|
||||
line += self.col.Color(self.col.RED, '%5d' % self.fail)
|
||||
line = '\r' + self.col.build(self.col.GREEN, '%5d' % ok)
|
||||
line += self.col.build(self.col.YELLOW, '%5d' % self.warned)
|
||||
line += self.col.build(self.col.RED, '%5d' % self.fail)
|
||||
|
||||
line += ' /%-5d ' % self.count
|
||||
remaining = self.count - self.upto
|
||||
if remaining:
|
||||
line += self.col.Color(self.col.MAGENTA, ' -%-5d ' % remaining)
|
||||
line += self.col.build(self.col.MAGENTA, ' -%-5d ' % remaining)
|
||||
else:
|
||||
line += ' ' * 8
|
||||
|
||||
@ -535,8 +535,8 @@ class Builder:
|
||||
line += '%s : ' % self._complete_delay
|
||||
|
||||
line += target
|
||||
terminal.PrintClear()
|
||||
Print(line, newline=False, limit_to_line=True)
|
||||
terminal.print_clear()
|
||||
tprint(line, newline=False, limit_to_line=True)
|
||||
|
||||
def _GetOutputDir(self, commit_upto):
|
||||
"""Get the name of the output directory for a commit number
|
||||
@ -666,7 +666,7 @@ class Builder:
|
||||
if line.strip():
|
||||
size, type, name = line[:-1].split()
|
||||
except:
|
||||
Print("Invalid line in file '%s': '%s'" % (fname, line[:-1]))
|
||||
tprint("Invalid line in file '%s': '%s'" % (fname, line[:-1]))
|
||||
continue
|
||||
if type in 'tTdDbB':
|
||||
# function names begin with '.' on 64-bit powerpc
|
||||
@ -933,9 +933,9 @@ class Builder:
|
||||
arch = board_dict[target].arch
|
||||
else:
|
||||
arch = 'unknown'
|
||||
str = self.col.Color(color, ' ' + target)
|
||||
str = self.col.build(color, ' ' + target)
|
||||
if not arch in done_arch:
|
||||
str = ' %s %s' % (self.col.Color(color, char), str)
|
||||
str = ' %s %s' % (self.col.build(color, char), str)
|
||||
done_arch[arch] = True
|
||||
if not arch in arch_list:
|
||||
arch_list[arch] = str
|
||||
@ -947,7 +947,7 @@ class Builder:
|
||||
color = self.col.RED if num > 0 else self.col.GREEN
|
||||
if num == 0:
|
||||
return '0'
|
||||
return self.col.Color(color, str(num))
|
||||
return self.col.build(color, str(num))
|
||||
|
||||
def ResetResultSummary(self, board_selected):
|
||||
"""Reset the results summary ready for use.
|
||||
@ -1009,16 +1009,16 @@ class Builder:
|
||||
return
|
||||
args = [self.ColourNum(x) for x in args]
|
||||
indent = ' ' * 15
|
||||
Print('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' %
|
||||
tuple([indent, self.col.Color(self.col.YELLOW, fname)] + args))
|
||||
Print('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new',
|
||||
tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' %
|
||||
tuple([indent, self.col.build(self.col.YELLOW, fname)] + args))
|
||||
tprint('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new',
|
||||
'delta'))
|
||||
for diff, name in delta:
|
||||
if diff:
|
||||
color = self.col.RED if diff > 0 else self.col.GREEN
|
||||
msg = '%s %-38s %7s %7s %+7d' % (indent, name,
|
||||
old.get(name, '-'), new.get(name,'-'), diff)
|
||||
Print(msg, colour=color)
|
||||
tprint(msg, colour=color)
|
||||
|
||||
|
||||
def PrintSizeDetail(self, target_list, show_bloat):
|
||||
@ -1043,12 +1043,12 @@ class Builder:
|
||||
color = self.col.RED if diff > 0 else self.col.GREEN
|
||||
msg = ' %s %+d' % (name, diff)
|
||||
if not printed_target:
|
||||
Print('%10s %-15s:' % ('', result['_target']),
|
||||
tprint('%10s %-15s:' % ('', result['_target']),
|
||||
newline=False)
|
||||
printed_target = True
|
||||
Print(msg, colour=color, newline=False)
|
||||
tprint(msg, colour=color, newline=False)
|
||||
if printed_target:
|
||||
Print()
|
||||
tprint()
|
||||
if show_bloat:
|
||||
target = result['_target']
|
||||
outcome = result['_outcome']
|
||||
@ -1153,13 +1153,13 @@ class Builder:
|
||||
color = self.col.RED if avg_diff > 0 else self.col.GREEN
|
||||
msg = ' %s %+1.1f' % (name, avg_diff)
|
||||
if not printed_arch:
|
||||
Print('%10s: (for %d/%d boards)' % (arch, count,
|
||||
tprint('%10s: (for %d/%d boards)' % (arch, count,
|
||||
arch_count[arch]), newline=False)
|
||||
printed_arch = True
|
||||
Print(msg, colour=color, newline=False)
|
||||
tprint(msg, colour=color, newline=False)
|
||||
|
||||
if printed_arch:
|
||||
Print()
|
||||
tprint()
|
||||
if show_detail:
|
||||
self.PrintSizeDetail(target_list, show_bloat)
|
||||
|
||||
@ -1304,7 +1304,7 @@ class Builder:
|
||||
col = self.col.RED
|
||||
elif line[0] == 'c':
|
||||
col = self.col.YELLOW
|
||||
Print(' ' + line, newline=True, colour=col)
|
||||
tprint(' ' + line, newline=True, colour=col)
|
||||
|
||||
def _OutputErrLines(err_lines, colour):
|
||||
"""Output the line of error/warning lines, if not empty
|
||||
@ -1324,14 +1324,14 @@ class Builder:
|
||||
names = [board.target for board in line.boards]
|
||||
board_str = ' '.join(names) if names else ''
|
||||
if board_str:
|
||||
out = self.col.Color(colour, line.char + '(')
|
||||
out += self.col.Color(self.col.MAGENTA, board_str,
|
||||
out = self.col.build(colour, line.char + '(')
|
||||
out += self.col.build(self.col.MAGENTA, board_str,
|
||||
bright=False)
|
||||
out += self.col.Color(colour, ') %s' % line.errline)
|
||||
out += self.col.build(colour, ') %s' % line.errline)
|
||||
else:
|
||||
out = self.col.Color(colour, line.char + line.errline)
|
||||
out = self.col.build(colour, line.char + line.errline)
|
||||
out_list.append(out)
|
||||
Print('\n'.join(out_list))
|
||||
tprint('\n'.join(out_list))
|
||||
self._error_lines += 1
|
||||
|
||||
|
||||
@ -1385,7 +1385,7 @@ class Builder:
|
||||
self.AddOutcome(board_selected, arch_list, unknown_boards, '?',
|
||||
self.col.MAGENTA)
|
||||
for arch, target_list in arch_list.items():
|
||||
Print('%10s: %s' % (arch, target_list))
|
||||
tprint('%10s: %s' % (arch, target_list))
|
||||
self._error_lines += 1
|
||||
_OutputErrLines(better_err, colour=self.col.GREEN)
|
||||
_OutputErrLines(worse_err, colour=self.col.RED)
|
||||
@ -1515,13 +1515,13 @@ class Builder:
|
||||
_AddConfig(lines, 'all', all_plus, all_minus, all_change)
|
||||
#arch_summary[target] = '\n'.join(lines)
|
||||
if lines:
|
||||
Print('%s:' % arch)
|
||||
tprint('%s:' % arch)
|
||||
_OutputConfigInfo(lines)
|
||||
|
||||
for lines, targets in lines_by_target.items():
|
||||
if not lines:
|
||||
continue
|
||||
Print('%s :' % ' '.join(sorted(targets)))
|
||||
tprint('%s :' % ' '.join(sorted(targets)))
|
||||
_OutputConfigInfo(lines.split('\n'))
|
||||
|
||||
|
||||
@ -1540,7 +1540,7 @@ class Builder:
|
||||
if not board in board_dict:
|
||||
not_built.append(board)
|
||||
if not_built:
|
||||
Print("Boards not built (%d): %s" % (len(not_built),
|
||||
tprint("Boards not built (%d): %s" % (len(not_built),
|
||||
', '.join(not_built)))
|
||||
|
||||
def ProduceResultSummary(self, commit_upto, commits, board_selected):
|
||||
@ -1553,7 +1553,7 @@ class Builder:
|
||||
if commits:
|
||||
msg = '%02d: %s' % (commit_upto + 1,
|
||||
commits[commit_upto].subject)
|
||||
Print(msg, colour=self.col.BLUE)
|
||||
tprint(msg, colour=self.col.BLUE)
|
||||
self.PrintResultSummary(board_selected, board_dict,
|
||||
err_lines if self._show_errors else [], err_line_boards,
|
||||
warn_lines if self._show_errors else [], warn_line_boards,
|
||||
@ -1578,7 +1578,7 @@ class Builder:
|
||||
for commit_upto in range(0, self.commit_count, self._step):
|
||||
self.ProduceResultSummary(commit_upto, commits, board_selected)
|
||||
if not self._error_lines:
|
||||
Print('(no errors to report)', colour=self.col.GREEN)
|
||||
tprint('(no errors to report)', colour=self.col.GREEN)
|
||||
|
||||
|
||||
def SetupBuild(self, board_selected, commits):
|
||||
@ -1629,10 +1629,10 @@ class Builder:
|
||||
if os.path.isdir(git_dir):
|
||||
# This is a clone of the src_dir repo, we can keep using
|
||||
# it but need to fetch from src_dir.
|
||||
Print('\rFetching repo for thread %d' % thread_num,
|
||||
+tprint('\rFetching repo for thread %d' % thread_num,
newline=False)
-gitutil.Fetch(git_dir, thread_dir)
-terminal.PrintClear()
+gitutil.fetch(git_dir, thread_dir)
+terminal.print_clear()
elif os.path.isfile(git_dir):
# This is a worktree of the src_dir repo, we don't need to
# create it again or update it in any way.
@@ -1643,15 +1643,15 @@ class Builder:
raise ValueError('Git dir %s exists, but is not a file '
'or a directory.' % git_dir)
elif setup_git == 'worktree':
-Print('\rChecking out worktree for thread %d' % thread_num,
+tprint('\rChecking out worktree for thread %d' % thread_num,
newline=False)
-gitutil.AddWorktree(src_dir, thread_dir)
-terminal.PrintClear()
+gitutil.add_worktree(src_dir, thread_dir)
+terminal.print_clear()
elif setup_git == 'clone' or setup_git == True:
-Print('\rCloning repo for thread %d' % thread_num,
+tprint('\rCloning repo for thread %d' % thread_num,
newline=False)
-gitutil.Clone(src_dir, thread_dir)
-terminal.PrintClear()
+gitutil.clone(src_dir, thread_dir)
+terminal.print_clear()
else:
raise ValueError("Can't setup git repo with %s." % setup_git)

@@ -1670,12 +1670,12 @@ class Builder:
builderthread.Mkdir(self._working_dir)
if setup_git and self.git_dir:
src_dir = os.path.abspath(self.git_dir)
-if gitutil.CheckWorktreeIsAvailable(src_dir):
+if gitutil.check_worktree_is_available(src_dir):
setup_git = 'worktree'
# If we previously added a worktree but the directory for it
# got deleted, we need to prune its files from the repo so
# that we can check out another in its place.
-gitutil.PruneWorktrees(src_dir)
+gitutil.prune_worktrees(src_dir)
else:
setup_git = 'clone'

@@ -1717,11 +1717,11 @@ class Builder:
"""
to_remove = self._GetOutputSpaceRemovals()
if to_remove:
-Print('Removing %d old build directories...' % len(to_remove),
+tprint('Removing %d old build directories...' % len(to_remove),
newline=False)
for dirname in to_remove:
shutil.rmtree(dirname)
-terminal.PrintClear()
+terminal.print_clear()

def BuildBoards(self, commits, board_selected, keep_outputs, verbose):
"""Build all commits for a list of boards
@@ -1747,7 +1747,7 @@ class Builder:
self._PrepareWorkingSpace(min(self.num_threads, len(board_selected)),
commits is not None)
self._PrepareOutputSpace()
-Print('\rStarting build...', newline=False)
+tprint('\rStarting build...', newline=False)
self.SetupBuild(board_selected, commits)
self.ProcessResult(None)
self.thread_exceptions = []
@@ -1774,7 +1774,7 @@ class Builder:

# Wait until we have processed all output
self.out_queue.join()
-Print()
+tprint()

msg = 'Completed: %d total built' % self.count
if self.already_done:
@@ -1789,9 +1789,9 @@ class Builder:
duration = duration - timedelta(microseconds=duration.microseconds)
rate = float(self.count) / duration.total_seconds()
msg += ', duration %s, rate %1.2f' % (duration, rate)
-Print(msg)
+tprint(msg)
if self.thread_exceptions:
-Print('Failed: %d thread exceptions' % len(self.thread_exceptions),
+tprint('Failed: %d thread exceptions' % len(self.thread_exceptions),
colour=self.col.RED)

return (self.fail, self.warned, self.thread_exceptions)
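Taken together, the builder.py hunks swap patman's Print()/PrintClear() helpers for tprint()/print_clear(). A minimal sketch of the transient status-line idiom they implement, assuming only what the diff itself shows (tprint(..., newline=False) suppresses the trailing newline and terminal.print_clear() erases the current line); the show_progress() helper and the sleep are illustrative, not part of the patch:

    import time

    from patman import terminal
    from patman.terminal import tprint

    def show_progress(items):
        for seq, name in enumerate(items):
            # '\r' rewinds to column 0 so each status line overwrites the last
            tprint('\rProcessing %d: %s' % (seq, name), newline=False)
            time.sleep(0.1)  # stand-in for the real per-item work
            terminal.print_clear()  # leave a clean line for normal output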
@@ -122,7 +122,7 @@ class BuilderThread(threading.Thread):
config - called to configure for a board
build - the main make invocation - it does the build
args: A list of arguments to pass to 'make'
-kwargs: A list of keyword arguments to pass to command.RunPipe()
+kwargs: A list of keyword arguments to pass to command.run_pipe()

Returns:
CommandResult object
@@ -219,7 +219,7 @@ class BuilderThread(threading.Thread):
commit = self.builder.commits[commit_upto]
if self.builder.checkout:
git_dir = os.path.join(work_dir, '.git')
-gitutil.Checkout(commit.hash, git_dir, work_dir,
+gitutil.checkout(commit.hash, git_dir, work_dir,
force=True)
else:
commit = 'current'
@@ -375,7 +375,7 @@ class BuilderThread(threading.Thread):
lines = []
for fname in BASE_ELF_FILENAMES:
cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname]
-nm_result = command.RunPipe([cmd], capture=True,
+nm_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
if nm_result.stdout:
@@ -385,7 +385,7 @@ class BuilderThread(threading.Thread):
print(nm_result.stdout, end=' ', file=fd)

cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname]
-dump_result = command.RunPipe([cmd], capture=True,
+dump_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
rodata_size = ''
@@ -400,7 +400,7 @@ class BuilderThread(threading.Thread):
rodata_size = fields[2]

cmd = ['%ssize' % self.toolchain.cross, fname]
-size_result = command.RunPipe([cmd], capture=True,
+size_result = command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
if size_result.stdout:
@@ -411,7 +411,7 @@ class BuilderThread(threading.Thread):
cmd = ['%sobjcopy' % self.toolchain.cross, '-O', 'binary',
'-j', '.rodata.default_environment',
'env/built-in.o', 'uboot.env']
-command.RunPipe([cmd], capture=True,
+command.run_pipe([cmd], capture=True,
capture_stderr=True, cwd=result.out_dir,
raise_on_error=False, env=env)
ubootenv = os.path.join(result.out_dir, 'uboot.env')
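These hunks all funnel cross-tool invocations through the renamed command.run_pipe(). A hedged sketch of the call pattern used above, with a host nm and an arbitrary binary substituted for the real cross-prefixed tools; the CommandResult fields (return_code, stdout) match the command.py hunks later in this patch, and raise_on_error=False tolerates tools that fail:

    from patman import command

    result = command.run_pipe([['nm', '--size-sort', '/usr/bin/true']],
                              capture=True, capture_stderr=True,
                              raise_on_error=False)
    if result.return_code == 0 and result.stdout:
        print(result.stdout)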
@@ -18,7 +18,7 @@ from patman import gitutil
from patman import patchstream
from patman import terminal
from patman import tools
-from patman.terminal import Print
+from patman.terminal import tprint

def GetPlural(count):
"""Returns a plural 's' if count is not 1"""
@@ -73,7 +73,7 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
if commits:
for upto in range(0, len(series.commits), options.step):
commit = series.commits[upto]
-print(' ', col.Color(col.YELLOW, commit.hash[:8], bright=False), end=' ')
+print(' ', col.build(col.YELLOW, commit.hash[:8], bright=False), end=' ')
print(commit.subject)
print()
for arg in why_selected:
@@ -85,7 +85,7 @@ def ShowActions(series, why_selected, boards_selected, builder, options,
len(why_selected['all'])))
if board_warnings:
for warning in board_warnings:
-print(col.Color(col.YELLOW, warning))
+print(col.build(col.YELLOW, warning))

def ShowToolchainPrefix(boards, toolchains):
"""Show information about a the tool chain used by one or more boards
@@ -135,12 +135,12 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
global builder

if options.full_help:
-tools.PrintFullHelp(
+tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
)
return 0

-gitutil.Setup()
+gitutil.setup()
col = terminal.Color()

options.git_dir = os.path.join(options.git, '.git')
@@ -152,14 +152,14 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if options.fetch_arch:
if options.fetch_arch == 'list':
sorted_list = toolchains.ListArchs()
-print(col.Color(col.BLUE, 'Available architectures: %s\n' %
+print(col.build(col.BLUE, 'Available architectures: %s\n' %
' '.join(sorted_list)))
return 0
else:
fetch_arch = options.fetch_arch
if fetch_arch == 'all':
fetch_arch = ','.join(toolchains.ListArchs())
-print(col.Color(col.CYAN, '\nDownloading toolchains: %s' %
+print(col.build(col.CYAN, '\nDownloading toolchains: %s' %
fetch_arch))
for arch in fetch_arch.split(','):
print()
@@ -177,11 +177,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
return 0

if options.incremental:
-print(col.Color(col.RED,
+print(col.build(col.RED,
'Warning: -I has been removed. See documentation'))
if not options.output_dir:
if options.work_in_output:
-sys.exit(col.Color(col.RED, '-w requires that you specify -o'))
+sys.exit(col.build(col.RED, '-w requires that you specify -o'))
options.output_dir = '..'

# Work out what subset of the boards we are building
@@ -218,12 +218,12 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
requested_boards)
selected = boards.GetSelected()
if not len(selected):
-sys.exit(col.Color(col.RED, 'No matching boards found'))
+sys.exit(col.build(col.RED, 'No matching boards found'))

if options.print_prefix:
err = ShowToolchainPrefix(boards, toolchains)
if err:
-sys.exit(col.Color(col.RED, err))
+sys.exit(col.build(col.RED, err))
return 0

# Work out how many commits to build. We want to build everything on the
@@ -236,30 +236,30 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
count = 1
else:
if has_range:
-count, msg = gitutil.CountCommitsInRange(options.git_dir,
+count, msg = gitutil.count_commits_in_range(options.git_dir,
options.branch)
else:
-count, msg = gitutil.CountCommitsInBranch(options.git_dir,
+count, msg = gitutil.count_commits_in_branch(options.git_dir,
options.branch)
if count is None:
-sys.exit(col.Color(col.RED, msg))
+sys.exit(col.build(col.RED, msg))
elif count == 0:
-sys.exit(col.Color(col.RED, "Range '%s' has no commits" %
+sys.exit(col.build(col.RED, "Range '%s' has no commits" %
options.branch))
if msg:
-print(col.Color(col.YELLOW, msg))
+print(col.build(col.YELLOW, msg))
count += 1 # Build upstream commit also

if not count:
str = ("No commits found to process in branch '%s': "
"set branch's upstream or use -c flag" % options.branch)
-sys.exit(col.Color(col.RED, str))
+sys.exit(col.build(col.RED, str))
if options.work_in_output:
if len(selected) != 1:
-sys.exit(col.Color(col.RED,
+sys.exit(col.build(col.RED,
'-w can only be used with a single board'))
if count != 1:
-sys.exit(col.Color(col.RED,
+sys.exit(col.build(col.RED,
'-w can only be used with a single commit'))

# Read the metadata from the commits. First look at the upstream commit,
@@ -276,9 +276,9 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if has_range:
range_expr = options.branch
else:
-range_expr = gitutil.GetRangeInBranch(options.git_dir,
+range_expr = gitutil.get_range_in_branch(options.git_dir,
options.branch)
-upstream_commit = gitutil.GetUpstream(options.git_dir,
+upstream_commit = gitutil.get_upstream(options.git_dir,
options.branch)
series = patchstream.get_metadata_for_list(upstream_commit,
options.git_dir, 1, series=None, allow_overwrite=True)
@@ -307,7 +307,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
if not options.step:
options.step = len(series.commits) - 1

-gnu_make = command.Output(os.path.join(options.git,
+gnu_make = command.output(os.path.join(options.git,
'scripts/show-gnu-make'), raise_on_error=False).rstrip()
if not gnu_make:
sys.exit('GNU Make not found')
@@ -362,7 +362,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
else:
commits = None

-Print(GetActionSummary(options.summary, commits, board_selected,
+tprint(GetActionSummary(options.summary, commits, board_selected,
options))

# We can't show function sizes without board details at present
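control.py replaces every col.Color() call with col.build(), which only constructs the colour-wrapped string; printing or exiting with it is a separate step, as the hunks above show. A small sketch under that assumption:

    import sys

    from patman import terminal

    col = terminal.Color()
    print(col.build(col.YELLOW, 'Warning: something needs attention'))
    # The same string can feed sys.exit(), as buildman does on fatal errors:
    sys.exit(col.build(col.RED, 'No matching boards found'))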
@@ -205,8 +205,8 @@ class TestFunctional(unittest.TestCase):
self._test_branch = TEST_BRANCH

# Avoid sending any output and clear all terminal output
-terminal.SetPrintTestMode()
-terminal.GetPrintTestLines()
+terminal.set_print_test_mode()
+terminal.get_print_test_lines()

def tearDown(self):
shutil.rmtree(self._base_dir)
@@ -217,7 +217,7 @@ class TestFunctional(unittest.TestCase):
self._toolchains.Add('gcc', test=False)

def _RunBuildman(self, *args):
-return command.RunPipe([[self._buildman_pathname] + list(args)],
+return command.run_pipe([[self._buildman_pathname] + list(args)],
capture=True, capture_stderr=True)

def _RunControl(self, *args, boards=None, clean_dir=False,
@@ -267,11 +267,11 @@ class TestFunctional(unittest.TestCase):
def testGitSetup(self):
"""Test gitutils.Setup(), from outside the module itself"""
command.test_result = command.CommandResult(return_code=1)
-gitutil.Setup()
+gitutil.setup()
self.assertEqual(gitutil.use_no_decorate, False)

command.test_result = command.CommandResult(return_code=0)
-gitutil.Setup()
+gitutil.setup()
self.assertEqual(gitutil.use_no_decorate, True)

def _HandleCommandGitLog(self, args):
@@ -407,7 +407,7 @@ class TestFunctional(unittest.TestCase):
stage: Stage that we are at (mrproper, config, build)
cwd: Directory where make should be run
args: Arguments to pass to make
-kwargs: Arguments to pass to command.RunPipe()
+kwargs: Arguments to pass to command.run_pipe()
"""
self._make_calls += 1
if stage == 'mrproper':
@@ -422,7 +422,7 @@ class TestFunctional(unittest.TestCase):
if arg.startswith('O='):
out_dir = arg[2:]
fname = os.path.join(cwd or '', out_dir, 'u-boot')
-tools.WriteFile(fname, b'U-Boot')
+tools.write_file(fname, b'U-Boot')
if type(commit) is not str:
stderr = self._error.get((brd.target, commit.sequence))
if stderr:
@@ -438,7 +438,7 @@ class TestFunctional(unittest.TestCase):
print(len(lines))
for line in lines:
print(line)
-#self.print_lines(terminal.GetPrintTestLines())
+#self.print_lines(terminal.get_print_test_lines())

def testNoBoards(self):
"""Test that buildman aborts when there are no boards"""
@@ -450,7 +450,7 @@ class TestFunctional(unittest.TestCase):
"""Very simple test to invoke buildman on the current source"""
self.setupToolchains();
self._RunControl('-o', self._output_dir)
-lines = terminal.GetPrintTestLines()
+lines = terminal.get_print_test_lines()
self.assertIn('Building current source for %d boards' % len(boards),
lines[0].text)

@@ -463,7 +463,7 @@ class TestFunctional(unittest.TestCase):
"""Test that missing toolchains are detected"""
self.setupToolchains();
ret_code = self._RunControl('-b', TEST_BRANCH, '-o', self._output_dir)
-lines = terminal.GetPrintTestLines()
+lines = terminal.get_print_test_lines()

# Buildman always builds the upstream commit as well
self.assertIn('Building %d commits for %d boards' %
@@ -41,12 +41,12 @@ def RunTests(skip_net_tests, verboose, args):

# Run the entry tests first ,since these need to be the first to import the
# 'entry' module.
-test_util.RunTestSuites(
+test_util.run_test_suites(
result, False, verboose, False, None, test_name, [],
[test.TestBuild, func_test.TestFunctional,
'buildman.toolchain', 'patman.gitutil'])

-return test_util.ReportResult('buildman', test_name, result)
+return test_util.report_result('buildman', test_name, result)

options, args = cmdline.ParseArgs()
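Buildman's test runner now goes through test_util.run_test_suites() and report_result(). A sketch of the same pattern with the positional arguments labelled as the dtoc hunk later in this patch spells them out (result, debug, verbosity, test_preserve_dirs, processes, test_name, toolpath, class_and_module_list); the single-module suite list here is hypothetical:

    import unittest

    from patman import test_util

    result = unittest.TestResult()
    test_util.run_test_suites(
        result, False, False, False, None, None, [],
        ['buildman.toolchain'])  # modules/classes to collect tests from
    test_util.report_result('buildman', None, result)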
@@ -148,7 +148,7 @@ class TestBuild(unittest.TestCase):
self.toolchains.Add('gcc', test=False)

# Avoid sending any output
-terminal.SetPrintTestMode()
+terminal.set_print_test_mode()
self._col = terminal.Color()

self.base_dir = tempfile.mkdtemp()
@@ -182,10 +182,10 @@ class TestBuild(unittest.TestCase):
col.YELLOW if outcome == OUTCOME_WARN else col.RED)
expect = '%10s: ' % arch
# TODO(sjg@chromium.org): If plus is '', we shouldn't need this
-expect += ' ' + col.Color(expected_colour, plus)
+expect += ' ' + col.build(expected_colour, plus)
expect += ' '
for board in boards:
-expect += col.Color(expected_colour, ' %s' % board)
+expect += col.build(expected_colour, ' %s' % board)
self.assertEqual(text, expect)

def _SetupTest(self, echo_lines=False, threads=1, **kwdisplay_args):
@@ -209,7 +209,7 @@ class TestBuild(unittest.TestCase):
# associated with each. This calls our Make() to inject the fake output.
build.BuildBoards(self.commits, board_selected, keep_outputs=False,
verbose=False)
-lines = terminal.GetPrintTestLines()
+lines = terminal.get_print_test_lines()
count = 0
for line in lines:
if line.text.strip():
@@ -221,8 +221,8 @@ class TestBuild(unittest.TestCase):
build.SetDisplayOptions(**kwdisplay_args);
build.ShowSummary(self.commits, board_selected)
if echo_lines:
-terminal.EchoPrintTestLines()
-return iter(terminal.GetPrintTestLines())
+terminal.echo_print_test_lines()
+return iter(terminal.get_print_test_lines())

def _CheckOutput(self, lines, list_error_boards=False,
filter_dtb_warnings=False,
@@ -254,12 +254,12 @@ class TestBuild(unittest.TestCase):
new_lines = []
for line in lines:
if boards:
-expect = self._col.Color(colour, prefix + '(')
-expect += self._col.Color(self._col.MAGENTA, boards,
+expect = self._col.build(colour, prefix + '(')
+expect += self._col.build(self._col.MAGENTA, boards,
bright=False)
-expect += self._col.Color(colour, ') %s' % line)
+expect += self._col.build(colour, ') %s' % line)
else:
-expect = self._col.Color(colour, prefix + line)
+expect = self._col.build(colour, prefix + line)
new_lines.append(expect)
return '\n'.join(new_lines)

@@ -317,12 +317,12 @@ class TestBuild(unittest.TestCase):
self.assertEqual(next(lines).text, '04: %s' % commits[3][1])
if filter_migration_warnings:
expect = '%10s: ' % 'powerpc'
-expect += ' ' + col.Color(col.GREEN, '')
+expect += ' ' + col.build(col.GREEN, '')
expect += ' '
-expect += col.Color(col.GREEN, ' %s' % 'board2')
-expect += ' ' + col.Color(col.YELLOW, 'w+')
+expect += col.build(col.GREEN, ' %s' % 'board2')
+expect += ' ' + col.build(col.YELLOW, 'w+')
expect += ' '
-expect += col.Color(col.YELLOW, ' %s' % 'board3')
+expect += col.build(col.YELLOW, ' %s' % 'board3')
self.assertEqual(next(lines).text, expect)
else:
self.assertSummary(next(lines).text, 'powerpc', 'w+',
@@ -607,7 +607,7 @@ class TestBuild(unittest.TestCase):

def testPrepareOutputSpace(self):
def _Touch(fname):
-tools.WriteFile(os.path.join(base_dir, fname), b'')
+tools.write_file(os.path.join(base_dir, fname), b'')

base_dir = tempfile.mkdtemp()
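Several test hunks above switch to tools.write_file() for creating fixture files. A tiny sketch of the write/read pair under the signatures visible in this patch (bytes by default, text when binary=False); the temporary path is illustrative:

    import os
    import tempfile

    from patman import tools

    fname = os.path.join(tempfile.mkdtemp(), 'u-boot')
    tools.write_file(fname, b'U-Boot')           # bytes, as in Make() above
    assert tools.read_file(fname, False) == 'U-Boot'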
@@ -99,7 +99,7 @@ class Toolchain:
else:
self.priority = priority
if test:
-result = command.RunPipe([cmd], capture=True, env=env,
+result = command.run_pipe([cmd], capture=True, env=env,
raise_on_error=False)
self.ok = result.return_code == 0
if verbose:
@@ -201,11 +201,11 @@ class Toolchain:
# We'll use MakeArgs() to provide this
pass
elif full_path:
-env[b'CROSS_COMPILE'] = tools.ToBytes(
+env[b'CROSS_COMPILE'] = tools.to_bytes(
wrapper + os.path.join(self.path, self.cross))
else:
-env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross)
-env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH']
+env[b'CROSS_COMPILE'] = tools.to_bytes(wrapper + self.cross)
+env[b'PATH'] = tools.to_bytes(self.path) + b':' + env[b'PATH']

env[b'LC_ALL'] = b'C'

@@ -381,7 +381,7 @@ class Toolchains:
def List(self):
"""List out the selected toolchains for each architecture"""
col = terminal.Color()
-print(col.Color(col.BLUE, 'List of available toolchains (%d):' %
+print(col.build(col.BLUE, 'List of available toolchains (%d):' %
len(self.toolchains)))
if len(self.toolchains):
for key, value in sorted(self.toolchains.items()):
@@ -494,7 +494,7 @@ class Toolchains:
else
URL containing this toolchain, if avaialble, else None
"""
-arch = command.OutputOneLine('uname', '-m')
+arch = command.output_one_line('uname', '-m')
if arch == 'aarch64':
arch = 'arm64'
base = 'https://www.kernel.org/pub/tools/crosstool/files/bin'
@@ -504,7 +504,7 @@ class Toolchains:
url = '%s/%s/%s/' % (base, arch, version)
print('Checking: %s' % url)
response = urllib.request.urlopen(url)
-html = tools.ToString(response.read())
+html = tools.to_string(response.read())
parser = MyHTMLParser(fetch_arch)
parser.feed(html)
if fetch_arch == 'list':
@@ -525,7 +525,7 @@ class Toolchains:
Directory name of the first entry in the archive, without the
trailing /
"""
-stdout = command.Output('tar', 'xvfJ', fname, '-C', dest)
+stdout = command.output('tar', 'xvfJ', fname, '-C', dest)
dirs = stdout.splitlines()[1].split('/')[:2]
return '/'.join(dirs)

@@ -559,7 +559,7 @@ class Toolchains:
"""
# Fist get the URL for this architecture
col = terminal.Color()
-print(col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch))
+print(col.build(col.BLUE, "Downloading toolchain for arch '%s'" % arch))
url = self.LocateArchUrl(arch)
if not url:
print(("Cannot find toolchain for arch '%s' - use 'list' to list" %
@@ -571,10 +571,10 @@ class Toolchains:
os.mkdir(dest)

# Download the tar file for this toolchain and unpack it
-tarfile, tmpdir = tools.Download(url, '.buildman')
+tarfile, tmpdir = tools.download(url, '.buildman')
if not tarfile:
return 1
-print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
+print(col.build(col.GREEN, 'Unpacking to: %s' % dest), end=' ')
sys.stdout.flush()
path = self.Unpack(tarfile, dest)
os.remove(tarfile)
@@ -582,14 +582,14 @@ class Toolchains:
print()

# Check that the toolchain works
-print(col.Color(col.GREEN, 'Testing'))
+print(col.build(col.GREEN, 'Testing'))
dirpath = os.path.join(dest, path)
compiler_fname_list = self.ScanPath(dirpath, True)
if not compiler_fname_list:
print('Could not locate C compiler - fetch failed.')
return 1
if len(compiler_fname_list) != 1:
-print(col.Color(col.RED, 'Warning, ambiguous toolchains: %s' %
+print(col.build(col.RED, 'Warning, ambiguous toolchains: %s' %
', '.join(compiler_fname_list)))
toolchain = Toolchain(compiler_fname_list[0], True, True)
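The toolchain hunks lean on the renamed one-line helper for quick probes such as the host-architecture lookup in LocateArchUrl(). A sketch of that probe, with the aarch64-to-arm64 alias copied from the diff:

    from patman import command

    arch = command.output_one_line('uname', '-m')
    if arch == 'aarch64':
        arch = 'arm64'  # kernel.org publishes arm64 crosstool binaries under this name
    print('host arch for toolchain download:', arch)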
@@ -396,7 +396,7 @@ class Node:
prop_name: Name of property
"""
self.props[prop_name] = Prop(self, None, prop_name,
-tools.GetBytes(0, 4))
+tools.get_bytes(0, 4))

def AddEmptyProp(self, prop_name, len):
"""Add a property with a fixed data size, for filling in later

@@ -408,7 +408,7 @@ class Node:
prop_name: Name of property
len: Length of data in property
"""
-value = tools.GetBytes(0, len)
+value = tools.get_bytes(0, len)
self.props[prop_name] = Prop(self, None, prop_name, value)

def _CheckProp(self, prop_name):
@@ -75,29 +75,29 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
dts_input = os.path.join(tmpdir, 'source.dts')
dtb_output = os.path.join(tmpdir, 'source.dtb')
else:
-dts_input = tools.GetOutputFilename('source.dts')
-dtb_output = tools.GetOutputFilename('source.dtb')
+dts_input = tools.get_output_filename('source.dts')
+dtb_output = tools.get_output_filename('source.dtb')

search_paths = [os.path.join(os.getcwd(), 'include')]
root, _ = os.path.splitext(fname)
-cc, args = tools.GetTargetCompileTool('cc')
+cc, args = tools.get_target_compile_tool('cc')
args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__']
args += ['-Ulinux']
for path in search_paths:
args.extend(['-I', path])
args += ['-o', dts_input, fname]
-command.Run(cc, *args)
+command.run(cc, *args)

# If we don't have a directory, put it in the tools tempdir
search_list = []
for path in search_paths:
search_list.extend(['-i', path])
-dtc, args = tools.GetTargetCompileTool('dtc')
+dtc, args = tools.get_target_compile_tool('dtc')
args += ['-I', 'dts', '-o', dtb_output, '-O', 'dtb',
'-W', 'no-unit_address_vs_reg']
args.extend(search_list)
args.append(dts_input)
-command.Run(dtc, *args, capture_stderr=capture_stderr)
+command.run(dtc, *args, capture_stderr=capture_stderr)
return dtb_output

def GetInt(node, propname, default=None):
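EnsureCompiled() above is a two-stage pipeline: the C preprocessor expands the .dts, then dtc compiles the result. A rough standalone sketch of the same flow using the renamed helpers; the hard-coded filenames stand in for the tools output directory:

    from patman import command, tools

    cc, args = tools.get_target_compile_tool('cc')
    args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__',
             '-Ulinux', '-o', 'source.pre.dts', 'test.dts']
    command.run(cc, *args)  # preprocess the device tree source

    dtc, args = tools.get_target_compile_tool('dtc')
    args += ['-I', 'dts', '-O', 'dtb', '-o', 'source.dtb', 'source.pre.dts']
    command.run(dtc, *args)  # compile the expanded source to a binary dtb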
@@ -55,17 +55,17 @@ def run_tests(processes, args):

test_dtoc.setup()

-test_util.RunTestSuites(
+test_util.run_test_suites(
result, debug=True, verbosity=1, test_preserve_dirs=False,
processes=processes, test_name=test_name, toolpath=[],
class_and_module_list=[test_dtoc.TestDtoc,test_src_scan.TestSrcScan])

-return test_util.ReportResult('binman', test_name, result)
+return test_util.report_result('binman', test_name, result)

def RunTestCoverage():
"""Run the tests and check that we get 100% coverage"""
sys.argv = [sys.argv[0]]
-test_util.RunTestCoverage('tools/dtoc/dtoc', '/main.py',
+test_util.run_test_coverage('tools/dtoc/dtoc', '/main.py',
['tools/patman/*.py', '*/fdt*', '*test*'], args.build_dir)
@@ -112,12 +112,12 @@ class TestDtoc(unittest.TestCase):
"""Tests for dtoc"""
@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)
cls.maxDiff = None

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

@staticmethod
def _write_python_string(fname, data):
@@ -218,7 +218,7 @@ class TestDtoc(unittest.TestCase):
def test_empty_file(self):
"""Test output from a device tree file with no nodes"""
dtb_file = get_dtb_file('dtoc_test_empty.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')

# Run this one without saved_scan to complete test coverage
dtb_platdata.run_steps(['struct'], dtb_file, False, output, [], None,
@@ -801,7 +801,7 @@ DM_DEVICE_INST(test0) = {
def test_simple(self):
"""Test output from some simple nodes with various types of data"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -822,14 +822,14 @@ DM_DEVICE_INST(test0) = {

# Try the 'all' command
self.run_test(['all'], dtb_file, output)
-data = tools.ReadFile(output, binary=False)
+data = tools.read_file(output, binary=False)
self._check_strings(
self.decl_text + self.platdata_text + self.struct_text, data)

def test_driver_alias(self):
"""Test output from a device tree file with a driver alias"""
dtb_file = get_dtb_file('dtoc_test_driver_alias.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -875,7 +875,7 @@ U_BOOT_DRVINFO(gpios_at_0) = {
def test_invalid_driver(self):
"""Test output from a device tree file with an invalid driver"""
dtb_file = get_dtb_file('dtoc_test_invalid_driver.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with test_util.capture_sys_output() as _:
dtb_platdata.run_steps(
['struct'], dtb_file, False, output, [], None, False,
@@ -918,7 +918,7 @@ U_BOOT_DRVINFO(spl_test) = {
def test_phandle(self):
"""Test output from a node containing a phandle reference"""
dtb_file = get_dtb_file('dtoc_test_phandle.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1013,7 +1013,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_single(self):
"""Test output from a node containing a phandle reference"""
dtb_file = get_dtb_file('dtoc_test_phandle_single.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1029,7 +1029,7 @@ struct dtd_target {
def test_phandle_reorder(self):
"""Test that phandle targets are generated before their references"""
dtb_file = get_dtb_file('dtoc_test_phandle_reorder.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['platdata'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1071,7 +1071,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_cd_gpio(self):
"""Test that phandle targets are generated when unsing cd-gpios"""
dtb_file = get_dtb_file('dtoc_test_phandle_cd_gpios.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
dtb_platdata.run_steps(
['platdata'], dtb_file, False, output, [], None, False,
warning_disabled=True, scan=copy_scan())
@@ -1157,7 +1157,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a node containing an invalid phandle fails"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad.dts',
capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot parse 'clocks' in node 'phandle-source'",
@@ -1167,7 +1167,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a phandle target missing its #*-cells property"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad2.dts',
capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'phandle-target' has no cells property",
@@ -1176,7 +1176,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_addresses64(self):
"""Test output from a node with a 'reg' property with na=2, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr64.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1245,7 +1245,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32(self):
"""Test output from a node with a 'reg' property with na=1, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr32.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1299,7 +1299,7 @@ U_BOOT_DRVINFO(test2) = {
def test_addresses64_32(self):
"""Test output from a node with a 'reg' property with na=2, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr64_32.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1368,7 +1368,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32_64(self):
"""Test output from a node with a 'reg' property with na=1, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr32_64.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1438,7 +1438,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid type generates an error"""
# Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg.dts', capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'spl-test' reg property is not an int",
@@ -1448,7 +1448,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid cell count is detected"""
# Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg2.dts', capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn(
@@ -1458,7 +1458,7 @@ U_BOOT_DRVINFO(test3) = {
def test_add_prop(self):
"""Test that a subequent node can add a new property to a struct"""
dtb_file = get_dtb_file('dtoc_test_add_prop.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)
with open(output) as infile:
data = infile.read()
@@ -1523,9 +1523,9 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_multi_to_file(self):
"""Test output of multiple pieces to a single file"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['all'], dtb_file, output)
-data = tools.ReadFile(output, binary=False)
+data = tools.read_file(output, binary=False)
self._check_strings(
self.decl_text + self.platdata_text + self.struct_text, data)

@@ -1539,7 +1539,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_bad_command(self):
"""Test running dtoc with an invalid command"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['invalid-cmd'], dtb_file, output)
self.assertIn(
@@ -1557,12 +1557,12 @@ U_BOOT_DRVINFO(spl_test2) = {

def check_output_dirs(self, instantiate):
# Remove the directory so that files from other tests are not there
-tools._RemoveOutputDir()
-tools.PrepareOutputDir(None)
+tools._remove_output_dir()
+tools.prepare_output_dir(None)

# This should create the .dts and .dtb in the output directory
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-outdir = tools.GetOutputDir()
+outdir = tools.get_output_dir()
fnames = glob.glob(outdir + '/*')
self.assertEqual(2, len(fnames))

@@ -1606,7 +1606,7 @@ U_BOOT_DRVINFO(spl_test2) = {
Scanner: scanner to use
"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')

# Take a copy before messing with it
scan = copy_scan()
@@ -1694,7 +1694,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read(self):
"""Test obtaining aliases"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output)

scan = plat._scan
@@ -1716,7 +1716,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad(self):
"""Test invalid alias property name"""
dtb_file = get_dtb_file('dtoc_test_alias_bad.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot decode alias 'i2c4-'", str(exc.exception))
@@ -1728,7 +1728,7 @@ U_BOOT_DRVINFO(spl_test2) = {
# node (/does/not/exist)
dtb_file = get_dtb_file('dtoc_test_alias_bad_path.dts', True)

-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Alias 'i2c4' path '/does/not/exist' not found",
@@ -1737,7 +1737,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad_uclass(self):
"""Test alias for a uclass that doesn't exist"""
dtb_file = get_dtb_file('dtoc_test_alias_bad_uc.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with test_util.capture_sys_output() as (stdout, _):
plat = self.run_test(['struct'], dtb_file, output)
self.assertEqual("Could not find uclass for alias 'other1'",
@@ -1746,7 +1746,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_sequence(self):
"""Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output)

scan = plat._scan
@@ -1762,7 +1762,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_process_root(self):
"""Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_simple.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')

# Take a copy before messing with it
scan = copy_scan()
@@ -1781,7 +1781,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_simple_inst(self):
"""Test output from some simple nodes with instantiate enabled"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')

self.run_test(['decl'], dtb_file, output, True)
with open(output) as infile:
@@ -1804,7 +1804,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_inst_no_hdr(self):
"""Test dealing with a struct tsssshat has no header"""
dtb_file = get_dtb_file('dtoc_test_inst.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')

# Run it once to set everything up
plat = self.run_test(['decl'], dtb_file, output, True)
@@ -1824,7 +1824,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_missing_props(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noprops.dts', capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output)
self.assertIn("Parent node '/i2c@0' has no properties - do you need",
@@ -1833,13 +1833,13 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_single_reg(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_single_reg.dts')
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output)

def test_missing_parent(self):
"""Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noparent.dts', capture_stderr=True)
-output = tools.GetOutputFilename('output')
+output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc:
self.run_test(['device'], dtb_file, output, instantiate=True)
self.assertIn("Node '/i2c@0/spl-test/pmic@9' requires parent node "
@@ -74,11 +74,11 @@ class TestFdt(unittest.TestCase):
"""
@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@@ -152,11 +152,11 @@ class TestNode(unittest.TestCase):

@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@@ -294,11 +294,11 @@ class TestProp(unittest.TestCase):

@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@@ -370,7 +370,7 @@ class TestProp(unittest.TestCase):
"""Tests the GetEmpty() function for the various supported types"""
self.assertEqual(True, fdt.Prop.GetEmpty(Type.BOOL))
self.assertEqual(chr(0), fdt.Prop.GetEmpty(Type.BYTE))
-self.assertEqual(tools.GetBytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
+self.assertEqual(tools.get_bytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
self.assertEqual('', fdt.Prop.GetEmpty(Type.STRING))

def testGetOffset(self):
@@ -501,7 +501,7 @@ class TestProp(unittest.TestCase):
self.node.AddString('string', val)
self.dtb.Sync(auto_resize=True)
data = self.fdt.getprop(self.node.Offset(), 'string')
-self.assertEqual(tools.ToBytes(val) + b'\0', data)
+self.assertEqual(tools.to_bytes(val) + b'\0', data)

self.fdt.pack()
self.node.SetString('string', val + 'x')
@@ -511,24 +511,24 @@ class TestProp(unittest.TestCase):
self.node.SetString('string', val[:-1])

prop = self.node.props['string']
-prop.SetData(tools.ToBytes(val))
+prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'string')
-self.assertEqual(tools.ToBytes(val), data)
+self.assertEqual(tools.to_bytes(val), data)

self.node.AddEmptyProp('empty', 5)
self.dtb.Sync(auto_resize=True)
prop = self.node.props['empty']
-prop.SetData(tools.ToBytes(val))
+prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'empty')
-self.assertEqual(tools.ToBytes(val), data)
+self.assertEqual(tools.to_bytes(val), data)

self.node.SetData('empty', b'123')
self.assertEqual(b'123', prop.bytes)

# Trying adding a lot of data at once
-self.node.AddData('data', tools.GetBytes(65, 20000))
+self.node.AddData('data', tools.get_bytes(65, 20000))
self.dtb.Sync(auto_resize=True)

def testFromData(self):
@@ -562,7 +562,7 @@ class TestProp(unittest.TestCase):

def testGetFilename(self):
"""Test the dtb filename can be provided"""
-self.assertEqual(tools.GetOutputFilename('source.dtb'),
+self.assertEqual(tools.get_output_filename('source.dtb'),
self.dtb.GetFilename())

@@ -575,11 +575,11 @@ class TestFdtUtil(unittest.TestCase):
"""
@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@@ -715,7 +715,7 @@ class TestFdtUtil(unittest.TestCase):

def RunTestCoverage():
"""Run the tests and check that we get 100% coverage"""
-test_util.RunTestCoverage('tools/dtoc/test_fdt.py', None,
+test_util.run_test_coverage('tools/dtoc/test_fdt.py', None,
['tools/patman/*.py', '*test_fdt.py'], options.build_dir)
@@ -43,11 +43,11 @@ class TestSrcScan(unittest.TestCase):
"""Tests for src_scan"""
@classmethod
def setUpClass(cls):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)

@classmethod
def tearDownClass(cls):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()

def test_simple(self):
"""Simple test of scanning drivers"""
@@ -113,7 +113,7 @@ class TestSrcScan(unittest.TestCase):
pathname = os.path.join(indir, fname)
dirname = os.path.dirname(pathname)
os.makedirs(dirname, exist_ok=True)
-tools.WriteFile(pathname, '', binary=False)
+tools.write_file(pathname, '', binary=False)
fname_list.append(pathname)

try:
@@ -142,7 +142,7 @@ class TestSrcScan(unittest.TestCase):
def test_scan(self):
"""Test scanning of a driver"""
fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c')
-buff = tools.ReadFile(fname, False)
+buff = tools.read_file(fname, False)
scan = src_scan.Scanner(None, None)
scan._parse_driver(fname, buff)
self.assertIn('i2c_tegra', scan._drivers)
@@ -374,8 +374,8 @@ struct another_struct {

def test_struct_scan_errors(self):
"""Test scanning a header file with an invalid unicode file"""
-output = tools.GetOutputFilename('output.h')
-tools.WriteFile(output, b'struct this is a test \x81 of bad unicode')
+output = tools.get_output_filename('output.h')
+tools.write_file(output, b'struct this is a test \x81 of bad unicode')

scan = src_scan.Scanner(None, None)
with test_util.capture_sys_output() as (stdout, _):
@@ -20,8 +20,8 @@ RE_FILE = re.compile(r'#(\d+): (FILE: ([^:]*):(\d+):)?')
RE_NOTE = re.compile(r'NOTE: (.*)')

-def FindCheckPatch():
-top_level = gitutil.GetTopLevel()
+def find_check_patch():
+top_level = gitutil.get_top_level()
try_list = [
os.getcwd(),
os.path.join(os.getcwd(), '..', '..'),
@@ -47,7 +47,7 @@ def FindCheckPatch():
'~/bin directory or use --no-check')

-def CheckPatchParseOneMessage(message):
+def check_patch_parse_one_message(message):
"""Parse one checkpatch message

Args:
@@ -114,7 +114,7 @@ def CheckPatchParseOneMessage(message):
return item

-def CheckPatchParse(checkpatch_output, verbose=False):
+def check_patch_parse(checkpatch_output, verbose=False):
"""Parse checkpatch.pl output

Args:
@@ -179,14 +179,14 @@ def CheckPatchParse(checkpatch_output, verbose=False):
elif re_bad.match(message):
result.ok = False
else:
-problem = CheckPatchParseOneMessage(message)
+problem = check_patch_parse_one_message(message)
if problem:
result.problems.append(problem)

return result

-def CheckPatch(fname, verbose=False, show_types=False):
+def check_patch(fname, verbose=False, show_types=False):
"""Run checkpatch.pl on a file and parse the results.

Args:
@@ -209,16 +209,16 @@ def CheckPatch(fname, verbose=False, show_types=False):
lines: Number of lines
stdout: Full output of checkpatch
"""
-chk = FindCheckPatch()
+chk = find_check_patch()
args = [chk, '--no-tree']
if show_types:
args.append('--show-types')
-output = command.Output(*args, fname, raise_on_error=False)
+output = command.output(*args, fname, raise_on_error=False)

-return CheckPatchParse(output, verbose)
+return check_patch_parse(output, verbose)

-def GetWarningMsg(col, msg_type, fname, line, msg):
+def get_warning_msg(col, msg_type, fname, line, msg):
'''Create a message for a given file/line

Args:
@@ -228,33 +228,33 @@ def GetWarningMsg(col, msg_type, fname, line, msg):
msg: Message to report
'''
if msg_type == 'warning':
-msg_type = col.Color(col.YELLOW, msg_type)
+msg_type = col.build(col.YELLOW, msg_type)
elif msg_type == 'error':
-msg_type = col.Color(col.RED, msg_type)
+msg_type = col.build(col.RED, msg_type)
elif msg_type == 'check':
-msg_type = col.Color(col.MAGENTA, msg_type)
+msg_type = col.build(col.MAGENTA, msg_type)
line_str = '' if line is None else '%d' % line
return '%s:%s: %s: %s\n' % (fname, line_str, msg_type, msg)

-def CheckPatches(verbose, args):
+def check_patches(verbose, args):
'''Run the checkpatch.pl script on each patch'''
error_count, warning_count, check_count = 0, 0, 0
col = terminal.Color()

for fname in args:
-result = CheckPatch(fname, verbose)
+result = check_patch(fname, verbose)
if not result.ok:
error_count += result.errors
warning_count += result.warnings
check_count += result.checks
print('%d errors, %d warnings, %d checks for %s:' % (result.errors,
-result.warnings, result.checks, col.Color(col.BLUE, fname)))
+result.warnings, result.checks, col.build(col.BLUE, fname)))
if (len(result.problems) != result.errors + result.warnings +
result.checks):
print("Internal error: some problems lost")
for item in result.problems:
sys.stderr.write(
-GetWarningMsg(col, item.get('type', '<unknown>'),
+get_warning_msg(col, item.get('type', '<unknown>'),
item.get('file', '<unknown>'),
item.get('line', 0), item.get('msg', 'message')))
print
@@ -266,6 +266,6 @@ def CheckPatches(verbose, args):
color = col.YELLOW
if error_count:
color = col.RED
-print(col.Color(color, str % (error_count, warning_count, check_count)))
+print(col.build(color, str % (error_count, warning_count, check_count)))
return False
return True
@@ -32,7 +32,7 @@ class CommandResult:
self.return_code = return_code
self.exception = exception

-def ToOutput(self, binary):
+def to_output(self, binary):
if not binary:
self.stdout = self.stdout.decode('utf-8')
self.stderr = self.stderr.decode('utf-8')
@@ -43,11 +43,11 @@ class CommandResult:
# This permits interception of RunPipe for test purposes. If it is set to
# a function, then that function is called with the pipe list being
# executed. Otherwise, it is assumed to be a CommandResult object, and is
-# returned as the result for every RunPipe() call.
+# returned as the result for every run_pipe() call.
# When this value is None, commands are executed as normal.
test_result = None

-def RunPipe(pipe_list, infile=None, outfile=None,
+def run_pipe(pipe_list, infile=None, outfile=None,
capture=False, capture_stderr=False, oneline=False,
raise_on_error=True, cwd=None, binary=False,
output_func=None, **kwargs):
@@ -104,11 +104,11 @@ def RunPipe(pipe_list, infile=None, outfile=None,
if raise_on_error:
raise Exception("Error running '%s': %s" % (user_pipestr, str))
result.return_code = 255
-return result.ToOutput(binary)
+return result.to_output(binary)

if capture:
result.stdout, result.stderr, result.combined = (
-last_pipe.CommunicateFilter(output_func))
+last_pipe.communicate_filter(output_func))
if result.stdout and oneline:
result.output = result.stdout.rstrip(b'\r\n')
result.return_code = last_pipe.wait()
@@ -116,13 +116,13 @@ def RunPipe(pipe_list, infile=None, outfile=None,
result.return_code = os.waitpid(last_pipe.pid, 0)[1]
if raise_on_error and result.return_code:
raise Exception("Error running '%s'" % user_pipestr)
-return result.ToOutput(binary)
+return result.to_output(binary)

-def Output(*cmd, **kwargs):
+def output(*cmd, **kwargs):
kwargs['raise_on_error'] = kwargs.get('raise_on_error', True)
-return RunPipe([cmd], capture=True, **kwargs).stdout
+return run_pipe([cmd], capture=True, **kwargs).stdout

-def OutputOneLine(*cmd, **kwargs):
+def output_one_line(*cmd, **kwargs):
"""Run a command and output it as a single-line string

The command us expected to produce a single line of output
@@ -131,15 +131,15 @@ def OutputOneLine(*cmd, **kwargs):
String containing output of command
"""
raise_on_error = kwargs.pop('raise_on_error', True)
-result = RunPipe([cmd], capture=True, oneline=True,
+result = run_pipe([cmd], capture=True, oneline=True,
raise_on_error=raise_on_error, **kwargs).stdout.strip()
return result

-def Run(*cmd, **kwargs):
-return RunPipe([cmd], **kwargs).stdout
+def run(*cmd, **kwargs):
+return run_pipe([cmd], **kwargs).stdout

-def RunList(cmd):
-return RunPipe([cmd], capture=True).stdout
+def run_list(cmd):
+return run_pipe([cmd], capture=True).stdout

-def StopAll():
+def stop_all():
cros_subprocess.stay_alive = False
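As the hunks show, output(), output_one_line(), run() and run_list() are thin shims over run_pipe(). A quick sketch of the two capturing helpers under the signatures visible above (echo is just a convenient example command):

    from patman import command

    out = command.output('echo', 'hello')            # full captured stdout
    line = command.output_one_line('echo', 'hello')  # stripped single line
    assert line == 'hello'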
@@ -44,7 +44,7 @@ class Commit:
def __str__(self):
return self.subject

-def AddChange(self, version, info):
+def add_change(self, version, info):
"""Add a new change line to the change list for a version.

Args:
@@ -55,7 +55,7 @@ class Commit:
self.changes[version] = []
self.changes[version].append(info)

-def CheckTags(self):
+def check_tags(self):
"""Create a list of subject tags in the commit

Subject tags look like this:
@@ -78,7 +78,7 @@ class Commit:
str = m.group(2)
return None

-def AddCc(self, cc_list):
+def add_cc(self, cc_list):
"""Add a list of people to Cc when we send this patch.

Args:
@@ -86,7 +86,7 @@ class Commit:
"""
self.cc_list += cc_list

-def CheckDuplicateSignoff(self, signoff):
+def check_duplicate_signoff(self, signoff):
"""Check a list of signoffs we have send for this patch

Args:
@@ -99,7 +99,7 @@ class Commit:
self.signoff_set.add(signoff)
return True

-def AddRtag(self, rtag_type, who):
+def add_rtag(self, rtag_type, who):
"""Add a response tag to a commit

Args:
@@ -18,7 +18,7 @@ from patman import terminal
 
 def setup():
     """Do required setup before doing anything"""
-    gitutil.Setup()
+    gitutil.setup()
 
 def prepare_patches(col, branch, count, start, end, ignore_binary, signoff):
     """Figure out what patches to generate, then generate them
@@ -45,17 +45,17 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff):
     """
     if count == -1:
         # Work out how many patches to send if we can
-        count = (gitutil.CountCommitsToBranch(branch) - start)
+        count = (gitutil.count_commits_to_branch(branch) - start)
 
     if not count:
         str = 'No commits found to process - please use -c flag, or run:\n' \
              ' git branch --set-upstream-to remote/branch'
-        sys.exit(col.Color(col.RED, str))
+        sys.exit(col.build(col.RED, str))
 
     # Read the metadata from the commits
     to_do = count - end
     series = patchstream.get_metadata(branch, start, to_do)
-    cover_fname, patch_files = gitutil.CreatePatches(
+    cover_fname, patch_files = gitutil.create_patches(
         branch, start, to_do, ignore_binary, series, signoff)
 
     # Fix up the patch files to our liking, and insert the cover letter
@@ -86,7 +86,7 @@ def check_patches(series, patch_files, run_checkpatch, verbose):
 
     # Check the patches, and run them through 'git am' just to be sure
     if run_checkpatch:
-        ok = checkpatch.CheckPatches(verbose, patch_files)
+        ok = checkpatch.check_patches(verbose, patch_files)
     else:
         ok = True
     return ok
@@ -138,18 +138,18 @@ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go,
     # Email the patches out (giving the user time to check / cancel)
     cmd = ''
     if its_a_go:
-        cmd = gitutil.EmailPatches(
+        cmd = gitutil.email_patches(
             series, cover_fname, patch_files, dry_run, not ignore_bad_tags,
             cc_file, in_reply_to=in_reply_to, thread=thread,
             smtp_server=smtp_server)
     else:
-        print(col.Color(col.RED, "Not sending emails due to errors/warnings"))
+        print(col.build(col.RED, "Not sending emails due to errors/warnings"))
 
     # For a dry run, just show our actions as a sanity check
     if dry_run:
         series.ShowActions(patch_files, cmd, process_tags)
         if not its_a_go:
-            print(col.Color(col.RED, "Email would not be sent"))
+            print(col.build(col.RED, "Email would not be sent"))
 
     os.remove(cc_file)
 
@@ -167,7 +167,7 @@ def send(args):
     ok = check_patches(series, patch_files, args.check_patch,
                        args.verbose)
 
-    ok = ok and gitutil.CheckSuppressCCConfig()
+    ok = ok and gitutil.check_suppress_cc_config()
 
     its_a_go = ok or args.ignore_errors
     email_patches(
@@ -204,7 +204,7 @@ def patchwork_status(branch, count, start, end, dest_branch, force,
     """
     if count == -1:
         # Work out how many patches to send if we can
-        count = (gitutil.CountCommitsToBranch(branch) - start)
+        count = (gitutil.count_commits_to_branch(branch) - start)
 
     series = patchstream.get_metadata(branch, start, count - end)
     warnings = 0
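Note: taken together, the renamed helpers preserve the send flow. A sketch of the pipeline under the new names (the branch name and flag values are hypothetical; the call shapes match prepare_patches() and check_patches() above, and the first return value appears to be the series, as the functional tests below suggest):

    from patman import control, terminal

    col = terminal.Color()
    series, cover_fname, patch_files = control.prepare_patches(
        col, branch='my-branch', count=-1, start=0, end=0,
        ignore_binary=False, signoff=True)
    ok = control.check_patches(series, patch_files, run_checkpatch=True,
                               verbose=False)
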
@@ -49,7 +49,7 @@ class Popen(subprocess.Popen):
     to us as soon as it is produced, rather than waiting for the end of a
     line.
 
-    Use CommunicateFilter() to handle output from the subprocess.
+    Use communicate_filter() to handle output from the subprocess.
 
     """
 
@@ -100,7 +100,7 @@ class Popen(subprocess.Popen):
         if kwargs:
             raise ValueError("Unit tests do not test extra args - please add tests")
 
-    def ConvertData(self, data):
+    def convert_data(self, data):
         """Convert stdout/stderr data to the correct format for output
 
         Args:
@@ -113,7 +113,7 @@ class Popen(subprocess.Popen):
             return b''
         return data
 
-    def CommunicateFilter(self, output):
+    def communicate_filter(self, output):
         """Interact with process: Read data from stdout and stderr.
 
         This method runs until end-of-file is reached, then waits for the
@@ -122,7 +122,7 @@ class Popen(subprocess.Popen):
         The output function is sent all output from the subprocess and must be
         defined like this:
 
-            def Output([self,] stream, data)
+            def output([self,] stream, data)
         Args:
             stream: the stream the output was received on, which will be
                 sys.stdout or sys.stderr.
@@ -236,9 +236,9 @@ class Popen(subprocess.Popen):
             self.terminate()
 
         # All data exchanged. Translate lists into strings.
-        stdout = self.ConvertData(stdout)
-        stderr = self.ConvertData(stderr)
-        combined = self.ConvertData(combined)
+        stdout = self.convert_data(stdout)
+        stderr = self.convert_data(stderr)
+        combined = self.convert_data(combined)
 
         # Translate newlines, if requested. We cannot let the file
         # object do the translation: It is based on stdio, which is
@@ -281,7 +281,7 @@ class TestSubprocess(unittest.TestCase):
             self.stdin_read_pipe = pipe[0]
             self._stdin_write_pipe = os.fdopen(pipe[1], 'w')
 
-        def Output(self, stream, data):
+        def output(self, stream, data):
             """Output handler for Popen. Stores the data for later comparison"""
             if stream == sys.stdout:
                 self.stdout_data += data
@@ -294,7 +294,7 @@ class TestSubprocess(unittest.TestCase):
             self._stdin_write_pipe.write(self._input_to_send + '\r\n')
             self._stdin_write_pipe.flush()
 
-    def _BasicCheck(self, plist, oper):
+    def _basic_check(self, plist, oper):
         """Basic checks that the output looks sane."""
         self.assertEqual(plist[0], oper.stdout_data)
         self.assertEqual(plist[1], oper.stderr_data)
@@ -306,15 +306,15 @@ class TestSubprocess(unittest.TestCase):
     def test_simple(self):
         """Simple redirection: Get process list"""
         oper = TestSubprocess.MyOperation()
-        plist = Popen(['ps']).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen(['ps']).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
 
     def test_stderr(self):
         """Check stdout and stderr"""
         oper = TestSubprocess.MyOperation()
         cmd = 'echo fred >/dev/stderr && false || echo bad'
-        plist = Popen([cmd], shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen([cmd], shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(plist [0], 'bad\r\n')
         self.assertEqual(plist [1], 'fred\r\n')
 
@@ -323,8 +323,8 @@ class TestSubprocess(unittest.TestCase):
         oper = TestSubprocess.MyOperation()
         cmd = 'echo test >/dev/stderr'
         self.assertRaises(OSError, Popen, [cmd], shell=False)
-        plist = Popen([cmd], shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen([cmd], shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(len(plist [0]), 0)
         self.assertEqual(plist [1], 'test\r\n')
 
@@ -332,8 +332,8 @@ class TestSubprocess(unittest.TestCase):
         """Check with and without shell works using list arguments"""
         oper = TestSubprocess.MyOperation()
         cmd = ['echo', 'test', '>/dev/stderr']
-        plist = Popen(cmd, shell=False).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen(cmd, shell=False).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(plist [0], ' '.join(cmd[1:]) + '\r\n')
         self.assertEqual(len(plist [1]), 0)
 
@@ -341,16 +341,17 @@ class TestSubprocess(unittest.TestCase):
 
         # this should be interpreted as 'echo' with the other args dropped
         cmd = ['echo', 'test', '>/dev/stderr']
-        plist = Popen(cmd, shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen(cmd, shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(plist [0], '\r\n')
 
     def test_cwd(self):
         """Check we can change directory"""
         for shell in (False, True):
             oper = TestSubprocess.MyOperation()
-            plist = Popen('pwd', shell=shell, cwd='/tmp').CommunicateFilter(oper.Output)
-            self._BasicCheck(plist, oper)
+            plist = Popen('pwd', shell=shell, cwd='/tmp').communicate_filter(
+                oper.output)
+            self._basic_check(plist, oper)
             self.assertEqual(plist [0], '/tmp\r\n')
 
     def test_env(self):
@@ -361,8 +362,8 @@ class TestSubprocess(unittest.TestCase):
             if add:
                 env ['FRED'] = 'fred'
             cmd = 'echo $FRED'
-            plist = Popen(cmd, shell=True, env=env).CommunicateFilter(oper.Output)
-            self._BasicCheck(plist, oper)
+            plist = Popen(cmd, shell=True, env=env).communicate_filter(oper.output)
+            self._basic_check(plist, oper)
             self.assertEqual(plist [0], add and 'fred\r\n' or '\r\n')
 
     def test_extra_args(self):
@@ -380,8 +381,8 @@ class TestSubprocess(unittest.TestCase):
         prompt = 'What is your name?: '
         cmd = 'echo -n "%s"; read name; echo Hello $name' % prompt
         plist = Popen([cmd], stdin=oper.stdin_read_pipe,
-                      shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+                      shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(len(plist [1]), 0)
         self.assertEqual(plist [0], prompt + 'Hello Flash\r\r\n')
 
@@ -393,16 +394,16 @@ class TestSubprocess(unittest.TestCase):
         both_cmds = ''
         for fd in (1, 2):
             both_cmds += cmd % (fd, fd, fd, fd, fd)
-        plist = Popen(both_cmds, shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+        plist = Popen(both_cmds, shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
         self.assertEqual(plist [0], 'terminal 1\r\n')
         self.assertEqual(plist [1], 'terminal 2\r\n')
 
         # Now try with PIPE and make sure it is not a terminal
         oper = TestSubprocess.MyOperation()
         plist = Popen(both_cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                      shell=True).CommunicateFilter(oper.Output)
-        self._BasicCheck(plist, oper)
+                      shell=True).communicate_filter(oper.output)
+        self._basic_check(plist, oper)
        self.assertEqual(plist [0], 'not 1\n')
        self.assertEqual(plist [1], 'not 2\n')
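Note: the communicate_filter() callback contract is unchanged apart from the names; it receives the stream and the data, as the docstring in the hunks above states. A small sketch (the module path is an assumption):

    import sys

    from patman.cros_subprocess import Popen

    def output(stream, data):
        # stream is sys.stdout or sys.stderr, per the docstring above
        prefix = 'out' if stream == sys.stdout else 'err'
        sys.stdout.write('[%s] %s' % (prefix, data))

    # plist[0]/plist[1] hold the captured stdout/stderr, as the tests check
    plist = Popen(['echo', 'hello']).communicate_filter(output)
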
@@ -45,7 +45,7 @@ class TestFunctional(unittest.TestCase):
 
     def tearDown(self):
         shutil.rmtree(self.tmpdir)
-        terminal.SetPrintTestMode(False)
+        terminal.set_print_test_mode(False)
 
     @staticmethod
     def _get_path(fname):
@@ -114,7 +114,7 @@ class TestFunctional(unittest.TestCase):
 
         return cover_fname, fname_list
 
-    def testBasic(self):
+    def test_basic(self):
         """Tests the basic flow of patman
 
         This creates a series from some hard-coded patches build from a simple
@@ -208,7 +208,7 @@ class TestFunctional(unittest.TestCase):
             cc_file = series.MakeCcFile(process_tags, cover_fname,
                                         not ignore_bad_tags, add_maintainers,
                                         None)
-            cmd = gitutil.EmailPatches(
+            cmd = gitutil.email_patches(
                 series, cover_fname, args, dry_run, not ignore_bad_tags,
                 cc_file, in_reply_to=in_reply_to, thread=None)
             series.ShowActions(args, cmd, process_tags)
@@ -338,7 +338,7 @@ Changes in v2:
             text (str): Text to put into the file
         """
         path = os.path.join(self.gitdir, fname)
-        tools.WriteFile(path, text, binary=False)
+        tools.write_file(path, text, binary=False)
         index = self.repo.index
         index.add(fname)
         author = pygit2.Signature('Test user', 'test@email.com')
@@ -455,7 +455,7 @@ complicated as possible''')
         repo.branches.local.create('base', base_target)
         return repo
 
-    def testBranch(self):
+    def test_branch(self):
         """Test creating patches from a branch"""
         repo = self.make_git_tree()
         target = repo.lookup_reference('refs/heads/first')
@@ -466,7 +466,7 @@ complicated as possible''')
             os.chdir(self.gitdir)
 
             # Check that it can detect the current branch
-            self.assertEqual(2, gitutil.CountCommitsToBranch(None))
+            self.assertEqual(2, gitutil.count_commits_to_branch(None))
             col = terminal.Color()
             with capture_sys_output() as _:
                 _, cover_fname, patch_files = control.prepare_patches(
@@ -476,7 +476,7 @@ complicated as possible''')
             self.assertEqual(2, len(patch_files))
 
             # Check that it can detect a different branch
-            self.assertEqual(3, gitutil.CountCommitsToBranch('second'))
+            self.assertEqual(3, gitutil.count_commits_to_branch('second'))
             with capture_sys_output() as _:
                 _, cover_fname, patch_files = control.prepare_patches(
                     col, branch='second', count=-1, start=0, end=0,
@@ -494,7 +494,7 @@ complicated as possible''')
         finally:
             os.chdir(orig_dir)
 
-    def testTags(self):
+    def test_tags(self):
         """Test collection of tags in a patchstream"""
         text = '''This is a patch
 
@@ -508,7 +508,7 @@ Tested-by: %s
                           'Reviewed-by': {self.joe, self.mary},
                           'Tested-by': {self.leb}})
 
-    def testInvalidTag(self):
+    def test_invalid_tag(self):
         """Test invalid tag in a patchstream"""
         text = '''This is a patch
 
@@ -519,7 +519,7 @@ Serie-version: 2
         self.assertEqual("Line 3: Invalid tag = 'Serie-version: 2'",
                          str(exc.exception))
 
-    def testMissingEnd(self):
+    def test_missing_end(self):
         """Test a missing END tag"""
         text = '''This is a patch
 
@@ -532,7 +532,7 @@ Signed-off-by: Fred
         self.assertEqual(["Missing 'END' in section 'cover'"],
                          pstrm.commit.warn)
 
-    def testMissingBlankLine(self):
+    def test_missing_blank_line(self):
         """Test a missing blank line after a tag"""
         text = '''This is a patch
 
@@ -545,7 +545,7 @@ Signed-off-by: Fred
         self.assertEqual(["Missing 'blank line' in section 'Series-changes'"],
                          pstrm.commit.warn)
 
-    def testInvalidCommitTag(self):
+    def test_invalid_commit_tag(self):
         """Test an invalid Commit-xxx tag"""
         text = '''This is a patch
 
@@ -554,7 +554,7 @@ Commit-fred: testing
         pstrm = PatchStream.process_text(text)
         self.assertEqual(["Line 3: Ignoring Commit-fred"], pstrm.commit.warn)
 
-    def testSelfTest(self):
+    def test_self_test(self):
         """Test a tested by tag by this user"""
         test_line = 'Tested-by: %s@napier.com' % os.getenv('USER')
         text = '''This is a patch
@@ -564,7 +564,7 @@ Commit-fred: testing
         pstrm = PatchStream.process_text(text)
         self.assertEqual(["Ignoring '%s'" % test_line], pstrm.commit.warn)
 
-    def testSpaceBeforeTab(self):
+    def test_space_before_tab(self):
         """Test a space before a tab"""
         text = '''This is a patch
 
@@ -573,7 +573,7 @@ Commit-fred: testing
         pstrm = PatchStream.process_text(text)
         self.assertEqual(["Line 3/0 has space before tab"], pstrm.commit.warn)
 
-    def testLinesAfterTest(self):
+    def test_lines_after_test(self):
         """Test detecting lines after TEST= line"""
         text = '''This is a patch
 
@@ -584,7 +584,7 @@ here
         pstrm = PatchStream.process_text(text)
         self.assertEqual(["Found 2 lines after TEST="], pstrm.commit.warn)
 
-    def testBlankLineAtEnd(self):
+    def test_blank_line_at_end(self):
         """Test detecting a blank line at the end of a file"""
         text = '''This is a patch
 
@@ -611,7 +611,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
             ["Found possible blank line(s) at end of file 'lib/fdtdec.c'"],
             pstrm.commit.warn)
 
-    def testNoUpstream(self):
+    def test_no_upstream(self):
         """Test CountCommitsToBranch when there is no upstream"""
         repo = self.make_git_tree()
         target = repo.lookup_reference('refs/heads/base')
@@ -622,7 +622,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
             orig_dir = os.getcwd()
             os.chdir(self.gitdir)
             with self.assertRaises(ValueError) as exc:
-                gitutil.CountCommitsToBranch(None)
+                gitutil.count_commits_to_branch(None)
             self.assertIn(
                 "Failed to determine upstream: fatal: no upstream configured for branch 'base'",
                 str(exc.exception))
@@ -648,7 +648,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
                     {'id': '1', 'name': 'Some patch'}]}
             raise ValueError('Fake Patchwork does not understand: %s' % subpath)
 
-    def testStatusMismatch(self):
+    def test_status_mismatch(self):
         """Test Patchwork patches not matching the series"""
         series = Series()
 
@@ -657,7 +657,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
         self.assertIn('Warning: Patchwork reports 1 patches, series has 0',
                       err.getvalue())
 
-    def testStatusReadPatch(self):
+    def test_status_read_patch(self):
         """Test handling a single patch in Patchwork"""
         series = Series()
         series.commits = [Commit('abcd')]
@@ -669,7 +669,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
         self.assertEqual('1', patch.id)
         self.assertEqual('Some patch', patch.raw_subject)
 
-    def testParseSubject(self):
+    def test_parse_subject(self):
         """Test parsing of the patch subject"""
         patch = status.Patch('1')
 
@@ -731,7 +731,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
         self.assertEqual('RESEND', patch.prefix)
         self.assertEqual(None, patch.version)
 
-    def testCompareSeries(self):
+    def test_compare_series(self):
         """Test operation of compare_with_series()"""
         commit1 = Commit('abcd')
         commit1.subject = 'Subject 1'
@@ -833,7 +833,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
                 return patch.comments
             raise ValueError('Fake Patchwork does not understand: %s' % subpath)
 
-    def testFindNewResponses(self):
+    def test_find_new_responses(self):
         """Test operation of find_new_responses()"""
         commit1 = Commit('abcd')
         commit1.subject = 'Subject 1'
@@ -907,10 +907,10 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
 
         series = Series()
         series.commits = [commit1, commit2]
-        terminal.SetPrintTestMode()
+        terminal.set_print_test_mode()
         status.check_patchwork_status(series, '1234', None, None, False, False,
                                       None, self._fake_patchwork2)
-        lines = iter(terminal.GetPrintTestLines())
+        lines = iter(terminal.get_print_test_lines())
         col = terminal.Color()
         self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE),
                          next(lines))
@@ -971,7 +971,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
                 return patch.comments
             raise ValueError('Fake Patchwork does not understand: %s' % subpath)
 
-    def testCreateBranch(self):
+    def test_create_branch(self):
         """Test operation of create_branch()"""
         repo = self.make_git_tree()
         branch = 'first'
@@ -1021,11 +1021,11 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
         # 4 responses added from patchwork into new branch 'first2'
         # <unittest.result.TestResult run=8 errors=0 failures=0>
 
-        terminal.SetPrintTestMode()
+        terminal.set_print_test_mode()
         status.check_patchwork_status(series, '1234', branch, dest_branch,
                                       False, False, None, self._fake_patchwork3,
                                       repo)
-        lines = terminal.GetPrintTestLines()
+        lines = terminal.get_print_test_lines()
         self.assertEqual(12, len(lines))
         self.assertEqual(
             "4 responses added from patchwork into new branch 'first2'",
@@ -1058,7 +1058,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c
         self.assertEqual('Reviewed-by: %s' % self.mary, next(lines))
         self.assertEqual('Tested-by: %s' % self.leb, next(lines))
 
-    def testParseSnippets(self):
+    def test_parse_snippets(self):
         """Test parsing of review snippets"""
         text = '''Hi Fred,
 
@@ -1088,7 +1088,7 @@ Even more
 
 And another comment
 
-> @@ -153,8 +143,13 @@ def CheckPatch(fname, show_types=False):
+> @@ -153,8 +143,13 @@ def check_patch(fname, show_types=False):
 > further down on the file
 > and more code
 > +Addition here
@@ -1131,7 +1131,7 @@ line8
             '> Code line 7', '> Code line 8', '> Code line 9',
             'And another comment'],
            ['> File: file.c',
-            '> Line: 153 / 143: def CheckPatch(fname, show_types=False):',
+            '> Line: 153 / 143: def check_patch(fname, show_types=False):',
             '> and more code', '> +Addition here', '> +Another addition here',
             '> codey', '> more codey', 'and another thing in same file'],
            ['> File: file.c', '> Line: 253 / 243',
@@ -1141,7 +1141,7 @@ line8
             'line2', 'line3', 'line4', 'line5', 'line6', 'line7', 'line8']],
             pstrm.snippets)
 
-    def testReviewSnippets(self):
+    def test_review_snippets(self):
         """Test showing of review snippets"""
         def _to_submitter(who):
             m_who = re.match('(.*) <(.*)>', who)
@@ -1196,7 +1196,7 @@ On some date Fred wrote:
 > +    def __str__(self):
 > +        return self.subject
 > +
-> def AddChange(self, version, info):
+> def add_change(self, version, info):
 >     """Add a new change line to the change list for a version.
 >
 A comment
@@ -1223,10 +1223,10 @@ Reviewed-by: %s
 
         series = Series()
         series.commits = [commit1, commit2]
-        terminal.SetPrintTestMode()
+        terminal.set_print_test_mode()
         status.check_patchwork_status(series, '1234', None, None, False, True,
                                       None, self._fake_patchwork2)
-        lines = iter(terminal.GetPrintTestLines())
+        lines = iter(terminal.get_print_test_lines())
         col = terminal.Color()
         self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE),
                          next(lines))
@@ -1280,7 +1280,7 @@ Reviewed-by: %s
         self.assertEqual(terminal.PrintLine(
             ' > +', col.MAGENTA), next(lines))
         self.assertEqual(
-            terminal.PrintLine(' > def AddChange(self, version, info):',
+            terminal.PrintLine(' > def add_change(self, version, info):',
                                col.MAGENTA),
             next(lines))
         self.assertEqual(terminal.PrintLine(
@@ -1296,7 +1296,7 @@ Reviewed-by: %s
             '4 new responses available in patchwork (use -d to write them to a new branch)',
             None), next(lines))
 
-    def testInsertTags(self):
+    def test_insert_tags(self):
         """Test inserting of review tags"""
         msg = '''first line
 second line.'''
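Note: most of these tests follow one pattern: feed text through PatchStream.process_text() and inspect the warnings collected on the commit. A condensed sketch of that pattern (the import path is an assumption):

    from patman.patchstream import PatchStream

    text = 'This is a patch\n\nCommit-fred: testing\n'
    pstrm = PatchStream.process_text(text)
    # Expect ["Line 3: Ignoring Commit-fred"], as test_invalid_commit_tag checks
    print(pstrm.commit.warn)
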
@@ -6,7 +6,7 @@ import os
 
 from patman import command
 
-def FindGetMaintainer(try_list):
+def find_get_maintainer(try_list):
     """Look for the get_maintainer.pl script.
 
     Args:
@@ -23,7 +23,7 @@ def FindGetMaintainer(try_list):
 
     return None
 
-def GetMaintainer(dir_list, fname, verbose=False):
+def get_maintainer(dir_list, fname, verbose=False):
     """Run get_maintainer.pl on a file if we find it.
 
     We look for get_maintainer.pl in the 'scripts' directory at the top of
@@ -37,12 +37,12 @@ def GetMaintainer(dir_list, fname, verbose=False):
     Returns:
         A list of email addresses to CC to.
     """
-    get_maintainer = FindGetMaintainer(dir_list)
+    get_maintainer = find_get_maintainer(dir_list)
     if not get_maintainer:
         if verbose:
             print("WARNING: Couldn't find get_maintainer.pl")
         return []
 
-    stdout = command.Output(get_maintainer, '--norolestats', fname)
+    stdout = command.output(get_maintainer, '--norolestats', fname)
     lines = stdout.splitlines()
     return [ x.replace('"', '') for x in lines ]
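Note: a usage sketch for the renamed pair, mirroring how series.py calls it further down (the patch filename is hypothetical):

    import os

    from patman import get_maintainer, gitutil

    dir_list = [os.path.join(gitutil.get_top_level(), 'scripts')]
    ccs = get_maintainer.get_maintainer(dir_list, '0001-example.patch',
                                        verbose=True)
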
@@ -12,10 +12,10 @@ from patman import settings
 from patman import terminal
 from patman import tools
 
-# True to use --no-decorate - we check this in Setup()
+# True to use --no-decorate - we check this in setup()
 use_no_decorate = True
 
-def LogCmd(commit_range, git_dir=None, oneline=False, reverse=False,
+def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False,
            count=None):
     """Create a command to perform a 'git log'
 
@@ -49,7 +49,7 @@ def LogCmd(commit_range, git_dir=None, oneline=False, reverse=False,
         cmd.append('--')
     return cmd
 
-def CountCommitsToBranch(branch):
+def count_commits_to_branch(branch):
     """Returns number of commits between HEAD and the tracking branch.
 
     This looks back to the tracking branch and works out the number of commits
@@ -62,12 +62,12 @@ def CountCommitsToBranch(branch):
         Number of patches that exist on top of the branch
     """
     if branch:
-        us, msg = GetUpstream('.git', branch)
+        us, msg = get_upstream('.git', branch)
         rev_range = '%s..%s' % (us, branch)
     else:
         rev_range = '@{upstream}..'
-    pipe = [LogCmd(rev_range, oneline=True)]
-    result = command.RunPipe(pipe, capture=True, capture_stderr=True,
+    pipe = [log_cmd(rev_range, oneline=True)]
+    result = command.run_pipe(pipe, capture=True, capture_stderr=True,
                              oneline=True, raise_on_error=False)
     if result.return_code:
         raise ValueError('Failed to determine upstream: %s' %
@@ -75,7 +75,7 @@ def CountCommitsToBranch(branch):
     patch_count = len(result.stdout.splitlines())
     return patch_count
 
-def NameRevision(commit_hash):
+def name_revision(commit_hash):
     """Gets the revision name for a commit
 
     Args:
@@ -85,13 +85,13 @@ def NameRevision(commit_hash):
         Name of revision, if any, else None
     """
     pipe = ['git', 'name-rev', commit_hash]
-    stdout = command.RunPipe([pipe], capture=True, oneline=True).stdout
+    stdout = command.run_pipe([pipe], capture=True, oneline=True).stdout
 
     # We expect a commit, a space, then a revision name
     name = stdout.split(' ')[1].strip()
     return name
 
-def GuessUpstream(git_dir, branch):
+def guess_upstream(git_dir, branch):
     """Tries to guess the upstream for a branch
 
     This lists out top commits on a branch and tries to find a suitable
@@ -107,21 +107,21 @@ def GuessUpstream(git_dir, branch):
         Name of upstream branch (e.g. 'upstream/master') or None if none
         Warning/error message, or None if none
     """
-    pipe = [LogCmd(branch, git_dir=git_dir, oneline=True, count=100)]
-    result = command.RunPipe(pipe, capture=True, capture_stderr=True,
+    pipe = [log_cmd(branch, git_dir=git_dir, oneline=True, count=100)]
+    result = command.run_pipe(pipe, capture=True, capture_stderr=True,
                              raise_on_error=False)
     if result.return_code:
         return None, "Branch '%s' not found" % branch
     for line in result.stdout.splitlines()[1:]:
         commit_hash = line.split(' ')[0]
-        name = NameRevision(commit_hash)
+        name = name_revision(commit_hash)
         if '~' not in name and '^' not in name:
             if name.startswith('remotes/'):
                 name = name[8:]
             return name, "Guessing upstream as '%s'" % name
     return None, "Cannot find a suitable upstream for branch '%s'" % branch
 
-def GetUpstream(git_dir, branch):
+def get_upstream(git_dir, branch):
     """Returns the name of the upstream for a branch
 
     Args:
@@ -134,12 +134,12 @@ def GetUpstream(git_dir, branch):
         Warning/error message, or None if none
     """
     try:
-        remote = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
+        remote = command.output_one_line('git', '--git-dir', git_dir, 'config',
                                        'branch.%s.remote' % branch)
-        merge = command.OutputOneLine('git', '--git-dir', git_dir, 'config',
+        merge = command.output_one_line('git', '--git-dir', git_dir, 'config',
                                       'branch.%s.merge' % branch)
     except:
-        upstream, msg = GuessUpstream(git_dir, branch)
+        upstream, msg = guess_upstream(git_dir, branch)
         return upstream, msg
 
     if remote == '.':
@@ -152,7 +152,7 @@ def GetUpstream(git_dir, branch):
            "'%s' remote='%s', merge='%s'" % (branch, remote, merge))
 
 
-def GetRangeInBranch(git_dir, branch, include_upstream=False):
+def get_range_in_branch(git_dir, branch, include_upstream=False):
     """Returns an expression for the commits in the given branch.
 
     Args:
@@ -162,13 +162,13 @@ def GetRangeInBranch(git_dir, branch, include_upstream=False):
         Expression in the form 'upstream..branch' which can be used to
         access the commits. If the branch does not exist, returns None.
     """
-    upstream, msg = GetUpstream(git_dir, branch)
+    upstream, msg = get_upstream(git_dir, branch)
     if not upstream:
         return None, msg
     rstr = '%s%s..%s' % (upstream, '~' if include_upstream else '', branch)
     return rstr, msg
 
-def CountCommitsInRange(git_dir, range_expr):
+def count_commits_in_range(git_dir, range_expr):
     """Returns the number of commits in the given range.
 
     Args:
@@ -178,15 +178,15 @@ def CountCommitsInRange(git_dir, range_expr):
         Number of patches that exist in the supplied range or None if none
         were found
     """
-    pipe = [LogCmd(range_expr, git_dir=git_dir, oneline=True)]
-    result = command.RunPipe(pipe, capture=True, capture_stderr=True,
+    pipe = [log_cmd(range_expr, git_dir=git_dir, oneline=True)]
+    result = command.run_pipe(pipe, capture=True, capture_stderr=True,
                              raise_on_error=False)
     if result.return_code:
         return None, "Range '%s' not found or is invalid" % range_expr
     patch_count = len(result.stdout.splitlines())
     return patch_count, None
 
-def CountCommitsInBranch(git_dir, branch, include_upstream=False):
+def count_commits_in_branch(git_dir, branch, include_upstream=False):
     """Returns the number of commits in the given branch.
 
     Args:
@@ -196,12 +196,12 @@ def CountCommitsInBranch(git_dir, branch, include_upstream=False):
         Number of patches that exist on top of the branch, or None if the
         branch does not exist.
     """
-    range_expr, msg = GetRangeInBranch(git_dir, branch, include_upstream)
+    range_expr, msg = get_range_in_branch(git_dir, branch, include_upstream)
     if not range_expr:
         return None, msg
-    return CountCommitsInRange(git_dir, range_expr)
+    return count_commits_in_range(git_dir, range_expr)
 
-def CountCommits(commit_range):
+def count_commits(commit_range):
     """Returns the number of commits in the given range.
 
     Args:
@@ -209,13 +209,13 @@ def CountCommits(commit_range):
     Return:
         Number of patches that exist on top of the branch
     """
-    pipe = [LogCmd(commit_range, oneline=True),
+    pipe = [log_cmd(commit_range, oneline=True),
            ['wc', '-l']]
-    stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout
+    stdout = command.run_pipe(pipe, capture=True, oneline=True).stdout
     patch_count = int(stdout)
     return patch_count
 
-def Checkout(commit_hash, git_dir=None, work_tree=None, force=False):
+def checkout(commit_hash, git_dir=None, work_tree=None, force=False):
     """Checkout the selected commit for this build
 
     Args:
@@ -230,24 +230,24 @@ def Checkout(commit_hash, git_dir=None, work_tree=None, force=False):
     if force:
         pipe.append('-f')
     pipe.append(commit_hash)
-    result = command.RunPipe([pipe], capture=True, raise_on_error=False,
+    result = command.run_pipe([pipe], capture=True, raise_on_error=False,
                              capture_stderr=True)
     if result.return_code != 0:
         raise OSError('git checkout (%s): %s' % (pipe, result.stderr))
 
-def Clone(git_dir, output_dir):
+def clone(git_dir, output_dir):
     """Checkout the selected commit for this build
 
     Args:
         commit_hash: Commit hash to check out
     """
     pipe = ['git', 'clone', git_dir, '.']
-    result = command.RunPipe([pipe], capture=True, cwd=output_dir,
+    result = command.run_pipe([pipe], capture=True, cwd=output_dir,
                              capture_stderr=True)
     if result.return_code != 0:
         raise OSError('git clone: %s' % result.stderr)
 
-def Fetch(git_dir=None, work_tree=None):
+def fetch(git_dir=None, work_tree=None):
     """Fetch from the origin repo
 
     Args:
@@ -259,11 +259,11 @@ def Fetch(git_dir=None, work_tree=None):
     if work_tree:
         pipe.extend(['--work-tree', work_tree])
     pipe.append('fetch')
-    result = command.RunPipe([pipe], capture=True, capture_stderr=True)
+    result = command.run_pipe([pipe], capture=True, capture_stderr=True)
     if result.return_code != 0:
         raise OSError('git fetch: %s' % result.stderr)
 
-def CheckWorktreeIsAvailable(git_dir):
+def check_worktree_is_available(git_dir):
     """Check if git-worktree functionality is available
 
     Args:
@@ -273,11 +273,11 @@ def CheckWorktreeIsAvailable(git_dir):
         True if git-worktree commands will work, False otherwise.
     """
     pipe = ['git', '--git-dir', git_dir, 'worktree', 'list']
-    result = command.RunPipe([pipe], capture=True, capture_stderr=True,
+    result = command.run_pipe([pipe], capture=True, capture_stderr=True,
                              raise_on_error=False)
     return result.return_code == 0
 
-def AddWorktree(git_dir, output_dir, commit_hash=None):
+def add_worktree(git_dir, output_dir, commit_hash=None):
     """Create and checkout a new git worktree for this build
 
     Args:
@@ -289,23 +289,23 @@ def AddWorktree(git_dir, output_dir, commit_hash=None):
     pipe = ['git', '--git-dir', git_dir, 'worktree', 'add', '.', '--detach']
     if commit_hash:
         pipe.append(commit_hash)
-    result = command.RunPipe([pipe], capture=True, cwd=output_dir,
+    result = command.run_pipe([pipe], capture=True, cwd=output_dir,
                              capture_stderr=True)
     if result.return_code != 0:
         raise OSError('git worktree add: %s' % result.stderr)
 
-def PruneWorktrees(git_dir):
+def prune_worktrees(git_dir):
     """Remove administrative files for deleted worktrees
 
     Args:
         git_dir: The repository whose deleted worktrees should be pruned
     """
     pipe = ['git', '--git-dir', git_dir, 'worktree', 'prune']
-    result = command.RunPipe([pipe], capture=True, capture_stderr=True)
+    result = command.run_pipe([pipe], capture=True, capture_stderr=True)
     if result.return_code != 0:
         raise OSError('git worktree prune: %s' % result.stderr)
 
-def CreatePatches(branch, start, count, ignore_binary, series, signoff = True):
+def create_patches(branch, start, count, ignore_binary, series, signoff = True):
     """Create a series of patches from the top of the current branch.
 
     The patch files are written to the current directory using
@@ -336,7 +336,7 @@ def CreatePatches(branch, start, count, ignore_binary, series, signoff = True):
     brname = branch or 'HEAD'
     cmd += ['%s~%d..%s~%d' % (brname, start + count, brname, start)]
 
-    stdout = command.RunList(cmd)
+    stdout = command.run_list(cmd)
     files = stdout.splitlines()
 
     # We have an extra file if there is a cover letter
@@ -345,7 +345,7 @@ def CreatePatches(branch, start, count, ignore_binary, series, signoff = True):
     else:
         return None, files
 
-def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True):
+def build_email_list(in_list, tag=None, alias=None, warn_on_error=True):
     """Build a list of email addresses based on an input list.
 
     Takes a list of email addresses and aliases, and turns this into a list
@@ -371,18 +371,18 @@ def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True):
     >>> alias['mary'] = ['Mary Poppins <m.poppins@cloud.net>']
     >>> alias['boys'] = ['fred', ' john']
     >>> alias['all'] = ['fred ', 'john', ' mary ']
-    >>> BuildEmailList(['john', 'mary'], None, alias)
+    >>> build_email_list(['john', 'mary'], None, alias)
     ['j.bloggs@napier.co.nz', 'Mary Poppins <m.poppins@cloud.net>']
-    >>> BuildEmailList(['john', 'mary'], '--to', alias)
+    >>> build_email_list(['john', 'mary'], '--to', alias)
     ['--to "j.bloggs@napier.co.nz"', \
'--to "Mary Poppins <m.poppins@cloud.net>"']
-    >>> BuildEmailList(['john', 'mary'], 'Cc', alias)
+    >>> build_email_list(['john', 'mary'], 'Cc', alias)
     ['Cc j.bloggs@napier.co.nz', 'Cc Mary Poppins <m.poppins@cloud.net>']
     """
     quote = '"' if tag and tag[0] == '-' else ''
     raw = []
     for item in in_list:
-        raw += LookupEmail(item, alias, warn_on_error=warn_on_error)
+        raw += lookup_email(item, alias, warn_on_error=warn_on_error)
     result = []
     for item in raw:
         if not item in result:
@@ -391,20 +391,20 @@ def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True):
         return ['%s %s%s%s' % (tag, quote, email, quote) for email in result]
     return result
 
-def CheckSuppressCCConfig():
+def check_suppress_cc_config():
     """Check if sendemail.suppresscc is configured correctly.
 
     Returns:
         True if the option is configured correctly, False otherwise.
     """
-    suppresscc = command.OutputOneLine('git', 'config', 'sendemail.suppresscc',
+    suppresscc = command.output_one_line('git', 'config', 'sendemail.suppresscc',
                                        raise_on_error=False)
 
     # Other settings should be fine.
     if suppresscc == 'all' or suppresscc == 'cccmd':
         col = terminal.Color()
 
-        print((col.Color(col.RED, "error") +
+        print((col.build(col.RED, "error") +
               ": git config sendemail.suppresscc set to %s\n" % (suppresscc)) +
              " patman needs --cc-cmd to be run to set the cc list.\n" +
              " Please run:\n" +
@@ -416,7 +416,7 @@ def CheckSuppressCCConfig():
 
     return True
 
-def EmailPatches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
+def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
                  self_only=False, alias=None, in_reply_to=None, thread=False,
                  smtp_server=None):
     """Email a patch series.
@@ -453,20 +453,20 @@ def EmailPatches(series, cover_fname, args, dry_run, warn_on_error, cc_fname,
     >>> series = {}
     >>> series['to'] = ['fred']
     >>> series['cc'] = ['mary']
-    >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
+    >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
            False, alias)
     'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"m.poppins@cloud.net" --cc-cmd "./patman send --cc-cmd cc-fname" cover p1 p2'
-    >>> EmailPatches(series, None, ['p1'], True, True, 'cc-fname', False, \
+    >>> email_patches(series, None, ['p1'], True, True, 'cc-fname', False, \
            alias)
     'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"m.poppins@cloud.net" --cc-cmd "./patman send --cc-cmd cc-fname" p1'
     >>> series['cc'] = ['all']
-    >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
+    >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
            True, alias)
     'git send-email --annotate --to "this-is-me@me.com" --cc-cmd "./patman \
send --cc-cmd cc-fname" cover p1 p2'
-    >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
+    >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \
           False, alias)
     'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \
"f.bloggs@napier.co.nz" --cc "j.bloggs@napier.co.nz" --cc \
@@ -475,9 +475,9 @@ send --cc-cmd cc-fname" cover p1 p2'
     # Restore argv[0] since we clobbered it.
     >>> sys.argv[0] = _old_argv0
     """
-    to = BuildEmailList(series.get('to'), '--to', alias, warn_on_error)
+    to = build_email_list(series.get('to'), '--to', alias, warn_on_error)
     if not to:
-        git_config_to = command.Output('git', 'config', 'sendemail.to',
+        git_config_to = command.output('git', 'config', 'sendemail.to',
                                       raise_on_error=False)
         if not git_config_to:
             print("No recipient.\n"
@@ -486,10 +486,10 @@ send --cc-cmd cc-fname" cover p1 p2'
                  "Or do something like this\n"
                  "git config sendemail.to u-boot@lists.denx.de")
             return
-    cc = BuildEmailList(list(set(series.get('cc')) - set(series.get('to'))),
+    cc = build_email_list(list(set(series.get('cc')) - set(series.get('to'))),
                        '--cc', alias, warn_on_error)
     if self_only:
-        to = BuildEmailList([os.getenv('USER')], '--to', alias, warn_on_error)
+        to = build_email_list([os.getenv('USER')], '--to', alias, warn_on_error)
         cc = []
     cmd = ['git', 'send-email', '--annotate']
     if smtp_server:
@@ -511,7 +511,7 @@ send --cc-cmd cc-fname" cover p1 p2'
     return cmdstr
 
 
-def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0):
+def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0):
     """If an email address is an alias, look it up and return the full name
 
     TODO: Why not just use git's own alias feature?
@@ -538,25 +538,25 @@ def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0):
     >>> alias['all'] = ['fred ', 'john', ' mary ']
     >>> alias['loop'] = ['other', 'john', ' mary ']
     >>> alias['other'] = ['loop', 'john', ' mary ']
-    >>> LookupEmail('mary', alias)
+    >>> lookup_email('mary', alias)
     ['m.poppins@cloud.net']
-    >>> LookupEmail('arthur.wellesley@howe.ro.uk', alias)
+    >>> lookup_email('arthur.wellesley@howe.ro.uk', alias)
     ['arthur.wellesley@howe.ro.uk']
-    >>> LookupEmail('boys', alias)
+    >>> lookup_email('boys', alias)
     ['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz']
-    >>> LookupEmail('all', alias)
+    >>> lookup_email('all', alias)
     ['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz', 'm.poppins@cloud.net']
-    >>> LookupEmail('odd', alias)
+    >>> lookup_email('odd', alias)
     Alias 'odd' not found
     []
-    >>> LookupEmail('loop', alias)
+    >>> lookup_email('loop', alias)
     Traceback (most recent call last):
     ...
    OSError: Recursive email alias at 'other'
-    >>> LookupEmail('odd', alias, warn_on_error=False)
+    >>> lookup_email('odd', alias, warn_on_error=False)
     []
     >>> # In this case the loop part will effectively be ignored.
-    >>> LookupEmail('loop', alias, warn_on_error=False)
+    >>> lookup_email('loop', alias, warn_on_error=False)
     Recursive email alias at 'other'
     Recursive email alias at 'john'
     Recursive email alias at 'mary'
@@ -577,24 +577,24 @@ def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0):
         if warn_on_error:
             raise OSError(msg)
         else:
-            print(col.Color(col.RED, msg))
+            print(col.build(col.RED, msg))
         return out_list
 
     if lookup_name:
         if not lookup_name in alias:
             msg = "Alias '%s' not found" % lookup_name
             if warn_on_error:
-                print(col.Color(col.RED, msg))
+                print(col.build(col.RED, msg))
             return out_list
         for item in alias[lookup_name]:
-            todo = LookupEmail(item, alias, warn_on_error, level + 1)
+            todo = lookup_email(item, alias, warn_on_error, level + 1)
             for new_item in todo:
                 if not new_item in out_list:
                     out_list.append(new_item)
 
     return out_list
 
-def GetTopLevel():
+def get_top_level():
     """Return name of top-level directory for this git repo.
 
     Returns:
@@ -603,18 +603,18 @@ def GetTopLevel():
     This test makes sure that we are running tests in the right subdir
 
     >>> os.path.realpath(os.path.dirname(__file__)) == \
-            os.path.join(GetTopLevel(), 'tools', 'patman')
+            os.path.join(get_top_level(), 'tools', 'patman')
     True
     """
-    return command.OutputOneLine('git', 'rev-parse', '--show-toplevel')
+    return command.output_one_line('git', 'rev-parse', '--show-toplevel')
 
-def GetAliasFile():
+def get_alias_file():
     """Gets the name of the git alias file.
 
     Returns:
         Filename of git alias file, or None if none
     """
-    fname = command.OutputOneLine('git', 'config', 'sendemail.aliasesfile',
+    fname = command.output_one_line('git', 'config', 'sendemail.aliasesfile',
                                  raise_on_error=False)
     if not fname:
         return None
@@ -623,56 +623,56 @@ def GetAliasFile():
     if os.path.isabs(fname):
         return fname
 
-    return os.path.join(GetTopLevel(), fname)
+    return os.path.join(get_top_level(), fname)
 
-def GetDefaultUserName():
+def get_default_user_name():
     """Gets the user.name from .gitconfig file.
 
     Returns:
         User name found in .gitconfig file, or None if none
     """
-    uname = command.OutputOneLine('git', 'config', '--global', 'user.name')
+    uname = command.output_one_line('git', 'config', '--global', 'user.name')
     return uname
 
-def GetDefaultUserEmail():
+def get_default_user_email():
    """Gets the user.email from the global .gitconfig file.
 
     Returns:
         User's email found in .gitconfig file, or None if none
     """
-    uemail = command.OutputOneLine('git', 'config', '--global', 'user.email')
+    uemail = command.output_one_line('git', 'config', '--global', 'user.email')
     return uemail
 
-def GetDefaultSubjectPrefix():
+def get_default_subject_prefix():
     """Gets the format.subjectprefix from local .git/config file.
 
     Returns:
         Subject prefix found in local .git/config file, or None if none
     """
-    sub_prefix = command.OutputOneLine('git', 'config', 'format.subjectprefix',
+    sub_prefix = command.output_one_line('git', 'config', 'format.subjectprefix',
                                       raise_on_error=False)
 
     return sub_prefix
 
-def Setup():
+def setup():
     """Set up git utils, by reading the alias files."""
     # Check for a git alias file also
     global use_no_decorate
 
-    alias_fname = GetAliasFile()
+    alias_fname = get_alias_file()
     if alias_fname:
         settings.ReadGitAliases(alias_fname)
-    cmd = LogCmd(None, count=0)
-    use_no_decorate = (command.RunPipe([cmd], raise_on_error=False)
+    cmd = log_cmd(None, count=0)
+    use_no_decorate = (command.run_pipe([cmd], raise_on_error=False)
                        .return_code == 0)
 
-def GetHead():
+def get_head():
     """Get the hash of the current HEAD
 
     Returns:
         Hash of HEAD
     """
-    return command.OutputOneLine('git', 'show', '-s', '--pretty=format:%H')
+    return command.output_one_line('git', 'show', '-s', '--pretty=format:%H')
 
 if __name__ == "__main__":
     import doctest
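Note: the log_cmd()/run_pipe() combination recurs throughout this file. A stand-alone sketch of the same pattern count_commits_to_branch() uses above:

    from patman import command, gitutil

    pipe = [gitutil.log_cmd('@{upstream}..', oneline=True)]
    result = command.run_pipe(pipe, capture=True, capture_stderr=True,
                              oneline=True, raise_on_error=False)
    if not result.return_code:
        print('%d commit(s) on top of upstream' %
              len(result.stdout.splitlines()))
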
@@ -42,7 +42,7 @@ parser.add_argument('-e', '--end', type=int, default=0,
                     help='Commits to skip at end of patch list')
 parser.add_argument('-D', '--debug', action='store_true',
                     help='Enabling debugging (provides a full traceback on error)')
-parser.add_argument('-p', '--project', default=project.DetectProject(),
+parser.add_argument('-p', '--project', default=project.detect_project(),
                     help="Project name; affects default option values and "
                     "aliases [default: %(default)s]")
 parser.add_argument('-P', '--patchwork-url',
@@ -135,12 +135,12 @@ if args.cmd == 'test':
     from patman import func_test
 
     result = unittest.TestResult()
-    test_util.RunTestSuites(
+    test_util.run_test_suites(
         result, False, False, False, None, None, None,
         [test_checkpatch.TestPatch, func_test.TestFunctional,
          'gitutil', 'settings', 'terminal'])
 
-    sys.exit(test_util.ReportResult('patman', args.testname, result))
+    sys.exit(test_util.report_result('patman', args.testname, result))
 
 # Process commits, produce patches files, check them, email them
 elif args.cmd == 'send':
@@ -159,7 +159,7 @@ elif args.cmd == 'send':
         fd.close()
 
 elif args.full_help:
-    tools.PrintFullHelp(
+    tools.print_full_help(
         os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
     )
 
@@ -177,7 +177,7 @@ elif args.cmd == 'status':
                               args.dest_branch, args.force,
                               args.show_comments, args.patchwork_url)
     except Exception as e:
-        terminal.Print('patman: %s: %s' % (type(e).__name__, e),
+        terminal.tprint('patman: %s: %s' % (type(e).__name__, e),
                        colour=terminal.Color.RED)
         if args.debug:
             print()

@@ -180,7 +180,7 @@ class PatchStream:
             who (str): Person who gave that rtag, e.g.
                 'Fred Bloggs <fred@bloggs.org>'
         """
-        self.commit.AddRtag(rtag_type, who)
+        self.commit.add_rtag(rtag_type, who)
 
     def _close_commit(self):
         """Save the current commit into our commit list, and reset our state"""
@@ -230,7 +230,7 @@ class PatchStream:
         elif self.in_change == 'Cover':
             self.series.AddChange(self.change_version, None, change)
         elif self.in_change == 'Commit':
-            self.commit.AddChange(self.change_version, change)
+            self.commit.add_change(self.change_version, change)
         self.change_lines = []
 
     def _finalise_snippet(self):
@@ -494,14 +494,14 @@ class PatchStream:
                   who.find(os.getenv('USER') + '@') != -1):
                 self._add_warn("Ignoring '%s'" % line)
             elif rtag_type == 'Patch-cc':
-                self.commit.AddCc(who.split(','))
+                self.commit.add_cc(who.split(','))
             else:
                 out = [line]
 
         # Suppress duplicate signoffs
         elif signoff_match:
             if (self.is_log or not self.commit or
-                self.commit.CheckDuplicateSignoff(signoff_match.group(1))):
+                self.commit.check_duplicate_signoff(signoff_match.group(1))):
                 out = [line]
 
         # Well that means this is an ordinary line
@@ -698,9 +698,9 @@ def get_list(commit_range, git_dir=None, count=None):
     Returns
         str: String containing the contents of the git log
     """
-    params = gitutil.LogCmd(commit_range, reverse=True, count=count,
+    params = gitutil.log_cmd(commit_range, reverse=True, count=count,
                             git_dir=git_dir)
-    return command.RunPipe([params], capture=True).stdout
+    return command.run_pipe([params], capture=True).stdout
 
 def get_metadata_for_list(commit_range, git_dir=None, count=None,
                           series=None, allow_overwrite=False):

@@ -6,7 +6,7 @@ import os.path
 
 from patman import gitutil
 
-def DetectProject():
+def detect_project():
     """Autodetect the name of the current project.
 
     This looks for signature files/directories that are unlikely to exist except
@@ -16,7 +16,7 @@ def DetectProject():
         The name of the project, like "linux" or "u-boot". Returns "unknown"
         if we can't detect the project.
     """
-    top_level = gitutil.GetTopLevel()
+    top_level = gitutil.get_top_level()
 
     if os.path.exists(os.path.join(top_level, "include", "u-boot")):
         return "u-boot"
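Note: for completeness, the renamed entry point in use (the output depends on the tree it runs in):

    from patman import project

    name = project.detect_project()   # 'u-boot', 'linux' or 'unknown'
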
@@ -94,7 +94,7 @@ class Series(dict):
         Args:
             commit: Commit object to add
         """
-        commit.CheckTags()
+        commit.check_tags()
         self.commits.append(commit)
 
     def ShowActions(self, args, cmd, process_tags):
@@ -105,8 +105,8 @@ class Series(dict):
             cmd: The git command we would have run
             process_tags: Process tags as if they were aliases
         """
-        to_set = set(gitutil.BuildEmailList(self.to));
-        cc_set = set(gitutil.BuildEmailList(self.cc));
+        to_set = set(gitutil.build_email_list(self.to));
+        cc_set = set(gitutil.build_email_list(self.cc));
 
         col = terminal.Color()
         print('Dry run, so not doing much. But I would do this:')
@@ -118,11 +118,11 @@ class Series(dict):
         # TODO: Colour the patches according to whether they passed checks
         for upto in range(len(args)):
             commit = self.commits[upto]
-            print(col.Color(col.GREEN, ' %s' % args[upto]))
+            print(col.build(col.GREEN, ' %s' % args[upto]))
             cc_list = list(self._generated_cc[commit.patch])
             for email in sorted(set(cc_list) - to_set - cc_set):
                 if email == None:
-                    email = col.Color(col.YELLOW, "<alias '%s' not found>"
+                    email = col.build(col.YELLOW, "<alias '%s' not found>"
                                       % tag)
                 if email:
                     print(' Cc: ', email)
@@ -136,7 +136,7 @@ class Series(dict):
         print('Postfix:\t ', self.get('postfix'))
         if self.cover:
             print('Cover: %d lines' % len(self.cover))
-            cover_cc = gitutil.BuildEmailList(self.get('cover_cc', ''))
+            cover_cc = gitutil.build_email_list(self.get('cover_cc', ''))
             all_ccs = itertools.chain(cover_cc, *self._generated_cc.values())
             for email in sorted(set(all_ccs) - to_set - cc_set):
                 print(' Cc: ', email)
@@ -227,13 +227,13 @@ class Series(dict):
             else:
                 if version > 1:
                     str = 'Change log missing for v%d' % version
-                    print(col.Color(col.RED, str))
+                    print(col.build(col.RED, str))
         for version in changes_copy:
             str = 'Change log for unknown version v%d' % version
-            print(col.Color(col.RED, str))
+            print(col.build(col.RED, str))
         elif self.changes:
             str = 'Change log exists, but no version is set'
-            print(col.Color(col.RED, str))
+            print(col.build(col.RED, str))
 
     def MakeCcFile(self, process_tags, cover_fname, warn_on_error,
                    add_maintainers, limit):
@@ -261,17 +261,17 @@ class Series(dict):
         for commit in self.commits:
             cc = []
             if process_tags:
-                cc += gitutil.BuildEmailList(commit.tags,
+                cc += gitutil.build_email_list(commit.tags,
                                              warn_on_error=warn_on_error)
-            cc += gitutil.BuildEmailList(commit.cc_list,
+            cc += gitutil.build_email_list(commit.cc_list,
                                          warn_on_error=warn_on_error)
             if type(add_maintainers) == type(cc):
                 cc += add_maintainers
             elif add_maintainers:
-                dir_list = [os.path.join(gitutil.GetTopLevel(), 'scripts')]
-                cc += get_maintainer.GetMaintainer(dir_list, commit.patch)
+                dir_list = [os.path.join(gitutil.get_top_level(), 'scripts')]
+                cc += get_maintainer.get_maintainer(dir_list, commit.patch)
             for x in set(cc) & set(settings.bounces):
-                print(col.Color(col.YELLOW, 'Skipping "%s"' % x))
+                print(col.build(col.YELLOW, 'Skipping "%s"' % x))
             cc = list(set(cc) - set(settings.bounces))
             if limit is not None:
                 cc = cc[:limit]
@@ -280,7 +280,7 @@ class Series(dict):
             self._generated_cc[commit.patch] = cc
 
         if cover_fname:
-            cover_cc = gitutil.BuildEmailList(self.get('cover_cc', ''))
+            cover_cc = gitutil.build_email_list(self.get('cover_cc', ''))
             cover_cc = list(set(cover_cc + all_ccs))
             if limit is not None:
                 cover_cc = cover_cc[:limit]
@@ -309,7 +309,7 @@ class Series(dict):
         Return:
             Patch string, like 'RFC PATCH v5' or just 'PATCH'
         """
-        git_prefix = gitutil.GetDefaultSubjectPrefix()
+        git_prefix = gitutil.get_default_subject_prefix()
         if git_prefix:
             git_prefix = '%s][' % git_prefix
         else:

@@ -198,11 +198,11 @@ def CreatePatmanConfigFile(gitutil, config_fname):
     Returns:
         None
     """
-    name = gitutil.GetDefaultUserName()
+    name = gitutil.get_default_user_name()
     if name == None:
         name = raw_input("Enter name: ")
 
-    email = gitutil.GetDefaultUserEmail()
+    email = gitutil.get_default_user_email()
 
     if email == None:
         email = raw_input("Enter email: ")
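Note: the settings code pulls its defaults from the renamed gitutil helpers; a sketch:

    from patman import gitutil

    name = gitutil.get_default_user_name()     # git config --global user.name
    email = gitutil.get_default_user_email()   # git config --global user.email
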
@@ -245,7 +245,7 @@ def collect_patches(series, series_id, url, rest_api=call_rest_api):
     count = len(patch_dict)
     num_commits = len(series.commits)
     if count != num_commits:
-        tout.Warning('Warning: Patchwork reports %d patches, series has %d' %
+        tout.warning('Warning: Patchwork reports %d patches, series has %d' %
                      (count, num_commits))

     patches = []
@@ -257,7 +257,7 @@ def collect_patches(series, series_id, url, rest_api=call_rest_api):
         patch.parse_subject(pw_patch['name'])
         patches.append(patch)
     if warn_count > 1:
-        tout.Warning(' (total of %d warnings)' % warn_count)
+        tout.warning(' (total of %d warnings)' % warn_count)

     # Sort patches by patch number
     patches = sorted(patches, key=lambda x: x.seq)
@@ -338,9 +338,9 @@ def show_responses(rtags, indent, is_new):
     for tag in sorted(rtags.keys()):
         people = rtags[tag]
         for who in sorted(people):
-            terminal.Print(indent + '%s %s: ' % ('+' if is_new else ' ', tag),
+            terminal.tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag),
                             newline=False, colour=col.GREEN, bright=is_new)
-            terminal.Print(who, colour=col.WHITE, bright=is_new)
+            terminal.tprint(who, colour=col.WHITE, bright=is_new)
             count += 1
     return count

@@ -437,7 +437,7 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,

     patch_for_commit, _, warnings = compare_with_series(series, patches)
     for warn in warnings:
-        tout.Warning(warn)
+        tout.warning(warn)

     patch_list = [patch_for_commit.get(c) for c in range(len(series.commits))]

@@ -455,7 +455,7 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,
         patch = patch_for_commit.get(seq)
         if not patch:
             continue
-        terminal.Print('%3d %s' % (patch.seq, patch.subject[:50]),
+        terminal.tprint('%3d %s' % (patch.seq, patch.subject[:50]),
                         colour=col.BLUE)
         cmt = series.commits[seq]
         base_rtags = cmt.rtags
@@ -466,15 +466,15 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,
         num_to_add += show_responses(new_rtags, indent, True)
         if show_comments:
             for review in review_list[seq]:
-                terminal.Print('Review: %s' % review.meta, colour=col.RED)
+                terminal.tprint('Review: %s' % review.meta, colour=col.RED)
                 for snippet in review.snippets:
                     for line in snippet:
                         quoted = line.startswith('>')
-                        terminal.Print(' %s' % line,
+                        terminal.tprint(' %s' % line,
                                         colour=col.MAGENTA if quoted else None)
-                    terminal.Print()
+                    terminal.tprint()

-    terminal.Print("%d new response%s available in patchwork%s" %
+    terminal.tprint("%d new response%s available in patchwork%s" %
                     (num_to_add, 's' if num_to_add != 1 else '',
                      '' if dest_branch
                      else ' (use -d to write them to a new branch)'))
@@ -482,6 +482,6 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force,
     if dest_branch:
         num_added = create_branch(series, new_rtag_list, branch,
                                   dest_branch, force, test_repo)
-        terminal.Print(
+        terminal.tprint(
             "%d response%s added from patchwork into new branch '%s'" %
             (num_added, 's' if num_added != 1 else '', dest_branch))
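For reference, this is how the renamed terminal API reads at a call site, a minimal sketch with an assumed import path (the tprint()/Color() signatures match the hunks above):

    from patman import terminal  # import path assumed for this series

    col = terminal.Color()
    # Print a tag without a newline, then finish the line in a second call
    terminal.tprint('Reviewed-by: ', newline=False, colour=col.GREEN)
    terminal.tprint('Fred Bloggs <fred@example.com>', colour=col.WHITE)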
@@ -51,7 +51,7 @@ class PrintLine:
                 (self.newline, self.colour, self.bright, self.text))


-def CalcAsciiLen(text):
+def calc_ascii_len(text):
     """Calculate the length of a string, ignoring any ANSI sequences

     When displayed on a terminal, ANSI sequences don't take any space, so we
@@ -64,44 +64,44 @@ def CalcAsciiLen(text):
         Length of text, after skipping ANSI sequences

     >>> col = Color(COLOR_ALWAYS)
-    >>> text = col.Color(Color.RED, 'abc')
+    >>> text = col.build(Color.RED, 'abc')
     >>> len(text)
     14
-    >>> CalcAsciiLen(text)
+    >>> calc_ascii_len(text)
     3
     >>>
     >>> text += 'def'
-    >>> CalcAsciiLen(text)
+    >>> calc_ascii_len(text)
     6
-    >>> text += col.Color(Color.RED, 'abc')
-    >>> CalcAsciiLen(text)
+    >>> text += col.build(Color.RED, 'abc')
+    >>> calc_ascii_len(text)
     9
     """
     result = ansi_escape.sub('', text)
     return len(result)

-def TrimAsciiLen(text, size):
+def trim_ascii_len(text, size):
     """Trim a string containing ANSI sequences to the given ASCII length

     The string is trimmed with ANSI sequences being ignored for the length
     calculation.

     >>> col = Color(COLOR_ALWAYS)
-    >>> text = col.Color(Color.RED, 'abc')
+    >>> text = col.build(Color.RED, 'abc')
     >>> len(text)
     14
-    >>> CalcAsciiLen(TrimAsciiLen(text, 4))
+    >>> calc_ascii_len(trim_ascii_len(text, 4))
     3
-    >>> CalcAsciiLen(TrimAsciiLen(text, 2))
+    >>> calc_ascii_len(trim_ascii_len(text, 2))
     2
     >>> text += 'def'
-    >>> CalcAsciiLen(TrimAsciiLen(text, 4))
+    >>> calc_ascii_len(trim_ascii_len(text, 4))
     4
-    >>> text += col.Color(Color.RED, 'ghi')
-    >>> CalcAsciiLen(TrimAsciiLen(text, 7))
+    >>> text += col.build(Color.RED, 'ghi')
+    >>> calc_ascii_len(trim_ascii_len(text, 7))
     7
     """
-    if CalcAsciiLen(text) < size:
+    if calc_ascii_len(text) < size:
         return text
     pos = 0
     out = ''
@@ -130,7 +130,7 @@ def TrimAsciiLen(text, size):
     return out


-def Print(text='', newline=True, colour=None, limit_to_line=False, bright=True):
+def tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True):
     """Handle a line of output to the terminal.

     In test mode this is recorded in a list. Otherwise it is output to the
@@ -148,18 +148,18 @@ def Print(text='', newline=True, colour=None, limit_to_line=False, bright=True):
     else:
         if colour:
             col = Color()
-            text = col.Color(colour, text, bright=bright)
+            text = col.build(colour, text, bright=bright)
         if newline:
             print(text)
             last_print_len = None
         else:
             if limit_to_line:
                 cols = shutil.get_terminal_size().columns
-                text = TrimAsciiLen(text, cols)
+                text = trim_ascii_len(text, cols)
             print(text, end='', flush=True)
-            last_print_len = CalcAsciiLen(text)
+            last_print_len = calc_ascii_len(text)

-def PrintClear():
+def print_clear():
     """Clear a previously line that was printed with no newline"""
     global last_print_len

@@ -167,15 +167,15 @@ def PrintClear():
         print('\r%s\r' % (' '* last_print_len), end='', flush=True)
         last_print_len = None

-def SetPrintTestMode(enable=True):
+def set_print_test_mode(enable=True):
     """Go into test mode, where all printing is recorded"""
     global print_test_mode

     print_test_mode = enable
-    GetPrintTestLines()
+    get_print_test_lines()

-def GetPrintTestLines():
-    """Get a list of all lines output through Print()
+def get_print_test_lines():
+    """Get a list of all lines output through tprint()

     Returns:
         A list of PrintLine objects
@@ -186,12 +186,12 @@ def GetPrintTestLines():
     print_test_list = []
     return ret

-def EchoPrintTestLines():
+def echo_print_test_lines():
     """Print out the text lines collected"""
     for line in print_test_list:
         if line.colour:
             col = Color()
-            print(col.Color(line.colour, line.text), end='')
+            print(col.build(line.colour, line.text), end='')
         else:
             print(line.text, end='')
         if line.newline:
@@ -221,7 +221,7 @@ class Color(object):
         except:
             self._enabled = False

-    def Start(self, color, bright=True):
+    def start(self, color, bright=True):
         """Returns a start color code.

         Args:
@@ -236,7 +236,7 @@ class Color(object):
             return base % (color + 30)
         return ''

-    def Stop(self):
+    def stop(self):
         """Returns a stop color code.

         Returns:
@@ -247,7 +247,7 @@ class Color(object):
             return self.RESET
         return ''

-    def Color(self, color, text, bright=True):
+    def build(self, color, text, bright=True):
         """Returns text with conditionally added color escape sequences.

         Keyword arguments:
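The doctests above turn on one property worth seeing in isolation: ANSI colour sequences count towards len() but not towards the printed width. A self-contained sketch of the calc_ascii_len() idea (the regex is an assumption; patman's own ansi_escape pattern is not shown in this hunk):

    import re

    ansi_escape = re.compile(r'\x1b\[[0-9;]*m')   # assumed pattern

    def calc_ascii_len(text):
        # Strip ANSI sequences before measuring, as the diff above does
        return len(ansi_escape.sub('', text))

    text = '\x1b[1;31m' + 'abc' + '\x1b[0m'   # 'abc' in bright red
    assert len(text) == 14                    # matches the doctest above
    assert calc_ascii_len(text) == 3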
@@ -82,13 +82,13 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
         return inname

     def run_checkpatch(self):
-        return checkpatch.CheckPatch(self.get_patch(), show_types=True)
+        return checkpatch.check_patch(self.get_patch(), show_types=True)


 class TestPatch(unittest.TestCase):
     """Test the u_boot_line() function in checkpatch.pl"""

-    def testBasic(self):
+    def test_basic(self):
         """Test basic filter operation"""
         data='''

@@ -164,7 +164,7 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
         os.remove(inname)
         os.remove(expname)

-    def GetData(self, data_type):
+    def get_data(self, data_type):
         data='''From 4924887af52713cabea78420eff03badea8f0035 Mon Sep 17 00:00:00 2001
 From: Simon Glass <sjg@chromium.org>
 Date: Thu, 7 Apr 2011 10:14:41 -0700
@@ -284,18 +284,18 @@ index 0000000..2234c87
         print('not implemented')
         return data % (signoff, license, tab, indent, tab)

-    def SetupData(self, data_type):
+    def setup_data(self, data_type):
         inhandle, inname = tempfile.mkstemp()
         infd = os.fdopen(inhandle, 'w')
-        data = self.GetData(data_type)
+        data = self.get_data(data_type)
         infd.write(data)
         infd.close()
         return inname

-    def testGood(self):
+    def test_good(self):
         """Test checkpatch operation"""
-        inf = self.SetupData('good')
-        result = checkpatch.CheckPatch(inf)
+        inf = self.setup_data('good')
+        result = checkpatch.check_patch(inf)
         self.assertEqual(result.ok, True)
         self.assertEqual(result.problems, [])
         self.assertEqual(result.errors, 0)
@@ -304,9 +304,9 @@ index 0000000..2234c87
         self.assertEqual(result.lines, 62)
         os.remove(inf)

-    def testNoSignoff(self):
-        inf = self.SetupData('no-signoff')
-        result = checkpatch.CheckPatch(inf)
+    def test_no_signoff(self):
+        inf = self.setup_data('no-signoff')
+        result = checkpatch.check_patch(inf)
         self.assertEqual(result.ok, False)
         self.assertEqual(len(result.problems), 1)
         self.assertEqual(result.errors, 1)
@@ -315,9 +315,9 @@ index 0000000..2234c87
         self.assertEqual(result.lines, 62)
         os.remove(inf)

-    def testNoLicense(self):
-        inf = self.SetupData('no-license')
-        result = checkpatch.CheckPatch(inf)
+    def test_no_license(self):
+        inf = self.setup_data('no-license')
+        result = checkpatch.check_patch(inf)
         self.assertEqual(result.ok, False)
         self.assertEqual(len(result.problems), 1)
         self.assertEqual(result.errors, 0)
@@ -326,9 +326,9 @@ index 0000000..2234c87
         self.assertEqual(result.lines, 62)
         os.remove(inf)

-    def testSpaces(self):
-        inf = self.SetupData('spaces')
-        result = checkpatch.CheckPatch(inf)
+    def test_spaces(self):
+        inf = self.setup_data('spaces')
+        result = checkpatch.check_patch(inf)
         self.assertEqual(result.ok, False)
         self.assertEqual(len(result.problems), 3)
         self.assertEqual(result.errors, 0)
@@ -337,9 +337,9 @@ index 0000000..2234c87
         self.assertEqual(result.lines, 62)
         os.remove(inf)

-    def testIndent(self):
-        inf = self.SetupData('indent')
-        result = checkpatch.CheckPatch(inf)
+    def test_indent(self):
+        inf = self.setup_data('indent')
+        result = checkpatch.check_patch(inf)
         self.assertEqual(result.ok, False)
         self.assertEqual(len(result.problems), 1)
         self.assertEqual(result.errors, 0)
@@ -348,7 +348,7 @@ index 0000000..2234c87
         self.assertEqual(result.lines, 62)
         os.remove(inf)

-    def checkSingleMessage(self, pm, msg, pmtype = 'warning'):
+    def check_single_message(self, pm, msg, pmtype = 'warning'):
         """Helper function to run checkpatch and check the result

         Args:
@@ -366,50 +366,50 @@ index 0000000..2234c87
         self.assertEqual(len(result.problems), 1)
         self.assertIn(msg, result.problems[0]['cptype'])

-    def testUclass(self):
+    def test_uclass(self):
         """Test for possible new uclass"""
         pm = PatchMaker()
         pm.add_line('include/dm/uclass-id.h', 'UCLASS_WIBBLE,')
-        self.checkSingleMessage(pm, 'NEW_UCLASS')
+        self.check_single_message(pm, 'NEW_UCLASS')

-    def testLivetree(self):
+    def test_livetree(self):
         """Test for using the livetree API"""
         pm = PatchMaker()
         pm.add_line('common/main.c', 'fdtdec_do_something()')
-        self.checkSingleMessage(pm, 'LIVETREE')
+        self.check_single_message(pm, 'LIVETREE')

-    def testNewCommand(self):
+    def test_new_command(self):
         """Test for adding a new command"""
         pm = PatchMaker()
         pm.add_line('common/main.c', 'do_wibble(struct cmd_tbl *cmd_tbl)')
-        self.checkSingleMessage(pm, 'CMD_TEST')
+        self.check_single_message(pm, 'CMD_TEST')

-    def testPreferIf(self):
+    def test_prefer_if(self):
         """Test for using #ifdef"""
         pm = PatchMaker()
         pm.add_line('common/main.c', '#ifdef CONFIG_YELLOW')
         pm.add_line('common/init.h', '#ifdef CONFIG_YELLOW')
         pm.add_line('fred.dtsi', '#ifdef CONFIG_YELLOW')
-        self.checkSingleMessage(pm, "PREFER_IF")
+        self.check_single_message(pm, "PREFER_IF")

-    def testCommandUseDefconfig(self):
+    def test_command_use_defconfig(self):
         """Test for enabling/disabling commands using preprocesor"""
         pm = PatchMaker()
         pm.add_line('common/main.c', '#undef CONFIG_CMD_WHICH')
-        self.checkSingleMessage(pm, 'DEFINE_CONFIG_CMD', 'error')
+        self.check_single_message(pm, 'DEFINE_CONFIG_CMD', 'error')

-    def testBarredIncludeInHdr(self):
+    def test_barred_include_in_hdr(self):
         """Test for using a barred include in a header file"""
         pm = PatchMaker()
         #pm.add_line('include/myfile.h', '#include <common.h>')
         pm.add_line('include/myfile.h', '#include <dm.h>')
-        self.checkSingleMessage(pm, 'BARRED_INCLUDE_IN_HDR', 'error')
+        self.check_single_message(pm, 'BARRED_INCLUDE_IN_HDR', 'error')

-    def testConfigIsEnabledConfig(self):
+    def test_config_is_enabled_config(self):
         """Test for accidental CONFIG_IS_ENABLED(CONFIG_*) calls"""
         pm = PatchMaker()
         pm.add_line('common/main.c', 'if (CONFIG_IS_ENABLED(CONFIG_CLK))')
-        self.checkSingleMessage(pm, 'CONFIG_IS_ENABLED_CONFIG', 'error')
+        self.check_single_message(pm, 'CONFIG_IS_ENABLED_CONFIG', 'error')

     def check_struct(self, auto, suffix, warning):
         """Check one of the warnings for struct naming
@@ -423,17 +423,17 @@ index 0000000..2234c87
         pm.add_line('common/main.c', '.%s = sizeof(struct(fred)),' % auto)
         pm.add_line('common/main.c', '.%s = sizeof(struct(mary%s)),' %
                     (auto, suffix))
-        self.checkSingleMessage(
+        self.check_single_message(
             pm, warning, "struct 'fred' should have a %s suffix" % suffix)

-    def testDmDriverAuto(self):
+    def test_dm_driver_auto(self):
         """Check for the correct suffix on 'struct driver' auto members"""
         self.check_struct('priv_auto', '_priv', 'PRIV_AUTO')
         self.check_struct('plat_auto', '_plat', 'PLAT_AUTO')
         self.check_struct('per_child_auto', '_priv', 'CHILD_PRIV_AUTO')
         self.check_struct('per_child_plat_auto', '_plat', 'CHILD_PLAT_AUTO')

-    def testDmUclassAuto(self):
+    def test_dm_uclass_auto(self):
         """Check for the correct suffix on 'struct uclass' auto members"""
         # Some of these are omitted since they match those from struct driver
         self.check_struct('per_device_auto', '_priv', 'DEVICE_PRIV_AUTO')
@@ -443,11 +443,11 @@ index 0000000..2234c87
         """Check one of the checks for strn(cpy|cat)"""
         pm = PatchMaker()
         pm.add_line('common/main.c', "strn%s(foo, bar, sizeof(foo));" % func)
-        self.checkSingleMessage(pm, "STRL",
+        self.check_single_message(pm, "STRL",
             "strl%s is preferred over strn%s because it always produces a nul-terminated string\n"
             % (func, func))

-    def testStrl(self):
+    def test_strl(self):
         """Check for uses of strn(cat|cpy)"""
         self.check_strl("cat");
         self.check_strl("cpy");
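A side note on why renaming testGood to test_good is safe: unittest collects any method whose name starts with the default prefix 'test', so both spellings are discovered and only the style changes. A quick self-contained check:

    import unittest

    class NamingDemo(unittest.TestCase):
        def testOldStyle(self):       # discovered: starts with 'test'
            self.assertTrue(True)

        def test_new_style(self):     # discovered as well
            self.assertTrue(True)

    names = unittest.defaultTestLoader.getTestCaseNames(NamingDemo)
    assert names == ['testOldStyle', 'test_new_style']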
@@ -23,7 +23,7 @@ except:
     use_concurrent = False


-def RunTestCoverage(prog, filter_fname, exclude_list, build_dir, required=None,
+def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None,
                       extra_args=None):
     """Run tests and check that we get 100% coverage

@@ -61,7 +61,7 @@ def RunTestCoverage(prog, filter_fname, exclude_list, build_dir, required=None,
               '--omit "%s" %s %s %s -P1' % (prefix, ','.join(glob_list),
                                             prog, extra_args or '', test_cmd))
     os.system(cmd)
-    stdout = command.Output('python3-coverage', 'report')
+    stdout = command.output('python3-coverage', 'report')
     lines = stdout.splitlines()
     if required:
         # Convert '/path/to/name.py' just the module name 'name'
@@ -102,7 +102,7 @@ def capture_sys_output():
         sys.stdout, sys.stderr = old_out, old_err


-def ReportResult(toolname:str, test_name: str, result: unittest.TestResult):
+def report_result(toolname:str, test_name: str, result: unittest.TestResult):
     """Report the results from a suite of tests

     Args:
@@ -139,8 +139,8 @@ def ReportResult(toolname:str, test_name: str, result: unittest.TestResult):
     return 0


-def RunTestSuites(result, debug, verbosity, test_preserve_dirs, processes,
-                  test_name, toolpath, class_and_module_list):
+def run_test_suites(result, debug, verbosity, test_preserve_dirs, processes,
+                    test_name, toolpath, class_and_module_list):
     """Run a series of test suites and collect the results

     Args:
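capture_sys_output() itself is only partly visible here; the restore line in the hunk above suggests the usual swap-and-restore shape. A sketch of that shape, as an assumption about code this diff does not show in full:

    import contextlib
    import io
    import sys

    @contextlib.contextmanager
    def capture_sys_output():
        # Swap stdout/stderr for in-memory buffers, then restore them
        old_out, old_err = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = io.StringIO(), io.StringIO()
            yield sys.stdout, sys.stderr
        finally:
            sys.stdout, sys.stderr = old_out, old_err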
@@ -23,7 +23,7 @@ preserve_outdir = False
 # Path to the Chrome OS chroot, if we know it
 chroot_path = None

-# Search paths to use for Filename(), used to find files
+# Search paths to use for filename(), used to find files
 search_paths = []

 tool_search_paths = []
@@ -36,7 +36,7 @@ packages = {
 # List of paths to use when looking for an input file
 indir = []

-def PrepareOutputDir(dirname, preserve=False):
+def prepare_output_dir(dirname, preserve=False):
     """Select an output directory, ensuring it exists.

     This either creates a temporary directory or checks that the one supplied
@@ -64,27 +64,27 @@ def PrepareOutputDir(dirname, preserve=False):
             except OSError as err:
                 raise CmdError("Cannot make output directory '%s': '%s'" %
                                (outdir, err.strerror))
-        tout.Debug("Using output directory '%s'" % outdir)
+        tout.debug("Using output directory '%s'" % outdir)
     else:
         outdir = tempfile.mkdtemp(prefix='binman.')
-        tout.Debug("Using temporary directory '%s'" % outdir)
+        tout.debug("Using temporary directory '%s'" % outdir)

-def _RemoveOutputDir():
+def _remove_output_dir():
     global outdir

     shutil.rmtree(outdir)
-    tout.Debug("Deleted temporary directory '%s'" % outdir)
+    tout.debug("Deleted temporary directory '%s'" % outdir)
     outdir = None

-def FinaliseOutputDir():
+def finalise_output_dir():
     global outdir, preserve_outdir

     """Tidy up: delete output directory if temporary and not preserved."""
     if outdir and not preserve_outdir:
-        _RemoveOutputDir()
+        _remove_output_dir()
         outdir = None

-def GetOutputFilename(fname):
+def get_output_filename(fname):
     """Return a filename within the output directory.

     Args:
@@ -95,7 +95,7 @@ def GetOutputFilename(fname):
     """
     return os.path.join(outdir, fname)

-def GetOutputDir():
+def get_output_dir():
     """Return the current output directory

     Returns:
@@ -103,15 +103,15 @@ def GetOutputDir():
     """
     return outdir

-def _FinaliseForTest():
+def _finalise_for_test():
     """Remove the output directory (for use by tests)"""
     global outdir

     if outdir:
-        _RemoveOutputDir()
+        _remove_output_dir()
         outdir = None

-def SetInputDirs(dirname):
+def set_input_dirs(dirname):
     """Add a list of input directories, where input files are kept.

     Args:
@@ -121,9 +121,9 @@ def SetInputDirs(dirname):
     global indir

     indir = dirname
-    tout.Debug("Using input directories %s" % indir)
+    tout.debug("Using input directories %s" % indir)

-def GetInputFilename(fname, allow_missing=False):
+def get_input_filename(fname, allow_missing=False):
     """Return a filename for use as input.

     Args:
@@ -150,7 +150,7 @@ def GetInputFilename(fname, allow_missing=False):
     raise ValueError("Filename '%s' not found in input path (%s) (cwd='%s')" %
                      (fname, ','.join(indir), os.getcwd()))

-def GetInputFilenameGlob(pattern):
+def get_input_filename_glob(pattern):
     """Return a list of filenames for use as input.

     Args:
@@ -167,26 +167,26 @@ def GetInputFilenameGlob(pattern):
     files += glob.glob(pathname)
     return sorted(files)

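A sketch of how the directory helpers above pair up at a call site (import path assumed; behaviour as shown in the hunks):

    from patman import tools  # import path assumed for this series

    tools.prepare_output_dir(None)   # no dirname: makes a 'binman.*' tempdir
    fname = tools.get_output_filename('image.bin')
    tools.finalise_output_dir()      # temporary and not preserved: deleted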
-def Align(pos, align):
+def align(pos, align):
     if align:
         mask = align - 1
         pos = (pos + mask) & ~mask
     return pos

-def NotPowerOfTwo(num):
+def not_power_of_two(num):
     return num and (num & (num - 1))

-def SetToolPaths(toolpaths):
+def set_tool_paths(toolpaths):
     """Set the path to search for tools

     Args:
-        toolpaths: List of paths to search for tools executed by Run()
+        toolpaths: List of paths to search for tools executed by run()
     """
     global tool_search_paths

     tool_search_paths = toolpaths

-def PathHasFile(path_spec, fname):
+def path_has_file(path_spec, fname):
     """Check if a given filename is in the PATH

     Args:
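align() rounds a position up to the next multiple of a power-of-two alignment by adding alignment-1 and masking off the low bits. A standalone copy to make the arithmetic concrete:

    def align(pos, alignment):
        # e.g. align(0x123, 0x100): 0x123 + 0xff = 0x222, & ~0xff -> 0x200
        if alignment:
            mask = alignment - 1
            pos = (pos + mask) & ~mask
        return pos

    assert align(0x123, 0x100) == 0x200
    assert align(0x200, 0x100) == 0x200   # already aligned: unchanged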
@@ -201,7 +201,7 @@ def PathHasFile(path_spec, fname):
             return True
     return False

-def GetHostCompileTool(name):
+def get_host_compile_tool(name):
     """Get the host-specific version for a compile tool

     This checks the environment variables that specify which version of
@@ -244,7 +244,7 @@ def GetHostCompileTool(name):
         return host_name, extra_args
     return name, []

-def GetTargetCompileTool(name, cross_compile=None):
+def get_target_compile_tool(name, cross_compile=None):
     """Get the target-specific version for a compile tool

     This first checks the environment variables that specify which
@@ -298,7 +298,7 @@ def GetTargetCompileTool(name, cross_compile=None):
         target_name = cross_compile + name
     elif name == 'ld':
         try:
-            if Run(cross_compile + 'ld.bfd', '-v'):
+            if run(cross_compile + 'ld.bfd', '-v'):
                 target_name = cross_compile + 'ld.bfd'
         except:
             target_name = cross_compile + 'ld'
@@ -353,14 +353,14 @@ def run_result(name, *args, **kwargs):
         raise_on_error = kwargs.get('raise_on_error', True)
         env = get_env_with_path()
         if for_target:
-            name, extra_args = GetTargetCompileTool(name)
+            name, extra_args = get_target_compile_tool(name)
             args = tuple(extra_args) + args
         elif for_host:
-            name, extra_args = GetHostCompileTool(name)
+            name, extra_args = get_host_compile_tool(name)
             args = tuple(extra_args) + args
         name = os.path.expanduser(name)  # Expand paths containing ~
         all_args = (name,) + args
-        result = command.RunPipe([all_args], capture=True, capture_stderr=True,
+        result = command.run_pipe([all_args], capture=True, capture_stderr=True,
                                  env=env, raise_on_error=False, binary=binary)
         if result.return_code:
             if raise_on_error:
@@ -369,7 +369,7 @@ def run_result(name, *args, **kwargs):
                                result.stderr or result.stdout))
         return result
     except ValueError:
-        if env and not PathHasFile(env['PATH'], name):
+        if env and not path_has_file(env['PATH'], name):
             msg = "Please install tool '%s'" % name
             package = packages.get(name)
             if package:
@@ -380,7 +380,7 @@ def run_result(name, *args, **kwargs):
 def tool_find(name):
     """Search the current path for a tool

-    This uses both PATH and any value from SetToolPaths() to search for a tool
+    This uses both PATH and any value from set_tool_paths() to search for a tool

     Args:
         name (str): Name of tool to locate
@@ -400,7 +400,7 @@ def tool_find(name):
         if os.path.isfile(fname) and os.access(fname, os.X_OK):
             return fname

-def Run(name, *args, **kwargs):
+def run(name, *args, **kwargs):
     """Run a tool with some arguments

     This runs a 'tool', which is a program used by binman to process files and
@@ -421,7 +421,7 @@ def Run(name, *args, **kwargs):
     if result is not None:
         return result.stdout

-def Filename(fname):
+def filename(fname):
     """Resolve a file path to an absolute path.

     If fname starts with ##/ and chroot is available, ##/ gets replaced with
@@ -455,7 +455,7 @@ def Filename(fname):
     # If not found, just return the standard, unchanged path
     return fname

-def ReadFile(fname, binary=True):
+def read_file(fname, binary=True):
     """Read and return the contents of a file.

     Args:
@@ -464,13 +464,13 @@ def ReadFile(fname, binary=True):
     Returns:
         data read from file, as a string.
     """
-    with open(Filename(fname), binary and 'rb' or 'r') as fd:
+    with open(filename(fname), binary and 'rb' or 'r') as fd:
         data = fd.read()
     #self._out.Info("Read file '%s' size %d (%#0x)" %
     #(fname, len(data), len(data)))
     return data

-def WriteFile(fname, data, binary=True):
+def write_file(fname, data, binary=True):
     """Write data into a file.

     Args:
@@ -479,10 +479,10 @@ def WriteFile(fname, data, binary=True):
     """
     #self._out.Info("Write file '%s' size %d (%#0x)" %
     #(fname, len(data), len(data)))
-    with open(Filename(fname), binary and 'wb' or 'w') as fd:
+    with open(filename(fname), binary and 'wb' or 'w') as fd:
         fd.write(data)

-def GetBytes(byte, size):
+def get_bytes(byte, size):
     """Get a string of bytes of a given size

     Args:
@@ -494,7 +494,7 @@ def GetBytes(byte, size):
     """
     return bytes([byte]) * size

-def ToBytes(string):
+def to_bytes(string):
     """Convert a str type into a bytes type

     Args:
@@ -505,7 +505,7 @@ def ToBytes(string):
     """
     return string.encode('utf-8')

-def ToString(bval):
+def to_string(bval):
     """Convert a bytes type into a str type

     Args:
@@ -517,7 +517,7 @@ def ToString(bval):
     """
     return bval.decode('utf-8')

-def ToHex(val):
+def to_hex(val):
     """Convert an integer value (or None) to a string

     Returns:
@@ -525,7 +525,7 @@ def ToHex(val):
     """
     return 'None' if val is None else '%#x' % val

-def ToHexSize(val):
+def to_hex_size(val):
     """Return the size of an object in hex

     Returns:
@@ -533,7 +533,7 @@ def ToHexSize(val):
     """
     return 'None' if val is None else '%#x' % len(val)

-def PrintFullHelp(fname):
+def print_full_help(fname):
     """Print the full help message for a tool using an appropriate pager.

     Args:
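The to_hex()/to_hex_size() one-liners are easy to sanity-check in isolation (standalone copies of the expressions above):

    def to_hex(val):
        return 'None' if val is None else '%#x' % val

    def to_hex_size(val):
        return 'None' if val is None else '%#x' % len(val)

    assert to_hex(None) == 'None'
    assert to_hex(1024) == '0x400'
    assert to_hex_size(b'\x00' * 16) == '0x10'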
@@ -545,9 +545,9 @@ def PrintFullHelp(fname):
     pager = [lesspath] if lesspath else None
     if not pager:
         pager = ['more']
-    command.Run(*pager, fname)
+    command.run(*pager, fname)

-def Download(url, tmpdir_pattern='.patman'):
+def download(url, tmpdir_pattern='.patman'):
     """Download a file to a temporary directory

     Args: