From 3609e1dc5f4d4c238dcd23e045ba6223310feffa Mon Sep 17 00:00:00 2001
From: Rasmus Villemoes
Date: Mon, 10 Jan 2022 14:34:41 +0100
Subject: [PATCH 01/27] dts: automatically build necessary .dtb files

When building for a custom board, it is quite common to maintain a
private branch which includes some defconfig and .dts files. But
hooking up those .dts files requires modifying a file "belonging" to
upstream U-Boot, the arch/*/dts/Makefile. Forward-porting that branch
to a newer upstream then often results in a conflict which, while
trivial to resolve by hand, makes it harder to have a CI do "try to
build our board against latest upstream".

The .config usually includes information on precisely which .dtb(s)
are needed, so to avoid having to modify the Makefile, simply add the
files in (SPL_)OF_LIST to dtb-y.

A technicality is that (SPL_)OF_LIST is not always defined, so rework
the Kconfig symbols so that (SPL_)OF_LIST is always defined (when
(SPL_)OF_CONTROL), but only prompted for in the cases which used to be
their "depends on".

nios2 and microblaze already have something like this in their
dts/Makefile, and the rationale in commit 41f59f68539 is similar to
the above. So this simply generalizes existing practice. Follow-up
patches could remove the logic in those two makefiles, just as there
is potential for moving some common boilerplate from all the
arch/*/dts/Makefile files to the new scripts/Makefile.dts.

Signed-off-by: Rasmus Villemoes
Reviewed-by: Simon Glass
---
 arch/arc/dts/Makefile        | 2 ++
 arch/arm/dts/Makefile        | 2 ++
 arch/m68k/dts/Makefile       | 2 ++
 arch/microblaze/dts/Makefile | 2 ++
 arch/mips/dts/Makefile       | 2 ++
 arch/nds32/dts/Makefile      | 2 ++
 arch/nios2/dts/Makefile      | 2 ++
 arch/powerpc/dts/Makefile    | 2 ++
 arch/riscv/dts/Makefile      | 2 ++
 arch/sandbox/dts/Makefile    | 2 ++
 arch/sh/dts/Makefile         | 2 ++
 arch/x86/dts/Makefile        | 2 ++
 arch/xtensa/dts/Makefile     | 2 ++
 dts/Kconfig                  | 8 ++++----
 scripts/Makefile.dts         | 3 +++
 15 files changed, 33 insertions(+), 4 deletions(-)
 create mode 100644 scripts/Makefile.dts

diff --git a/arch/arc/dts/Makefile b/arch/arc/dts/Makefile
index 515fe1fe535..532a8131c59 100644
--- a/arch/arc/dts/Makefile
+++ b/arch/arc/dts/Makefile
@@ -8,6 +8,8 @@ dtb-$(CONFIG_TARGET_EMSDP) += emsdp.dtb
 dtb-$(CONFIG_TARGET_HSDK) += hsdk.dtb hsdk-4xd.dtb
 dtb-$(CONFIG_TARGET_IOT_DEVKIT) += iot_devkit.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/arm/dts/Makefile b/arch/arm/dts/Makefile
index 5a5706918ae..c1cec726cf0 100644
--- a/arch/arm/dts/Makefile
+++ b/arch/arm/dts/Makefile
@@ -1205,6 +1205,8 @@ dtb-$(CONFIG_TARGET_EA_LPC3250DEVKITV2) += lpc3250-ea3250.dtb
 
 dtb-$(CONFIG_ARCH_QEMU) += qemu-arm.dtb qemu-arm64.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here
diff --git a/arch/m68k/dts/Makefile b/arch/m68k/dts/Makefile
index fdd435bc345..7988522eb98 100644
--- a/arch/m68k/dts/Makefile
+++ b/arch/m68k/dts/Makefile
@@ -18,6 +18,8 @@ dtb-$(CONFIG_TARGET_M5373EVB) += M5373EVB.dtb
 dtb-$(CONFIG_TARGET_AMCORE) += amcore.dtb
 dtb-$(CONFIG_TARGET_STMARK2) += stmark2.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/microblaze/dts/Makefile b/arch/microblaze/dts/Makefile
index 4690dc1b9f0..427a8f9aaca 100644
--- a/arch/microblaze/dts/Makefile
+++ b/arch/microblaze/dts/Makefile
@@ -2,6 +2,8 @@
 
 dtb-y += $(shell echo $(CONFIG_DEFAULT_DEVICE_TREE)).dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
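Illustrative aside (not part of the patch): the one-line scripts/Makefile.dts
added at the end of this patch turns the quoted list in CONFIG_(SPL_)OF_LIST
into .dtb build targets. A minimal stand-alone sketch of that expansion, with
a hypothetical board list (in a real build the value comes from .config and
$(SPL_) is the usual Kbuild prefix, empty or "SPL_"):

# demo.mk -- hypothetical values, only to show the expansion
CONFIG_OF_LIST := "myboard myboard-rev2"
SPL_ :=

dtb-y += $(patsubst %,%.dtb,$(subst ",,$(CONFIG_$(SPL_)OF_LIST)))

demo:
	@echo 'dtb-y = $(dtb-y)'   # prints: dtb-y = myboard.dtb myboard-rev2.dtb

So a board kept on a private branch only needs its defconfig to set OF_LIST;
no arch/*/dts/Makefile edit is required for its .dtb to be built.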
diff --git a/arch/mips/dts/Makefile b/arch/mips/dts/Makefile
index 215283cfa05..95144b24dcf 100644
--- a/arch/mips/dts/Makefile
+++ b/arch/mips/dts/Makefile
@@ -34,6 +34,8 @@ dtb-$(CONFIG_SOC_JR2) += jr2_pcb110.dtb jr2_pcb111.dtb serval2_pcb112.dtb
 dtb-$(CONFIG_SOC_SERVALT) += servalt_pcb116.dtb
 dtb-$(CONFIG_SOC_SERVAL) += serval_pcb105.dtb serval_pcb106.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here
diff --git a/arch/nds32/dts/Makefile b/arch/nds32/dts/Makefile
index a8e23ad9ad8..5a09e3b45b3 100644
--- a/arch/nds32/dts/Makefile
+++ b/arch/nds32/dts/Makefile
@@ -2,6 +2,8 @@
 dtb-$(CONFIG_TARGET_ADP_AG101P) += ag101p.dtb
 dtb-$(CONFIG_TARGET_ADP_AE3XX) += ae3xx.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/nios2/dts/Makefile b/arch/nios2/dts/Makefile
index 0014acfdfb7..2b29fa90f6c 100644
--- a/arch/nios2/dts/Makefile
+++ b/arch/nios2/dts/Makefile
@@ -2,6 +2,8 @@
 
 dtb-y += $(CONFIG_DEFAULT_DEVICE_TREE:"%"=%).dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/powerpc/dts/Makefile b/arch/powerpc/dts/Makefile
index 66d22ae8a45..a4b0d7ddc4f 100644
--- a/arch/powerpc/dts/Makefile
+++ b/arch/powerpc/dts/Makefile
@@ -30,6 +30,8 @@ dtb-$(CONFIG_TARGET_TUXX1) += kmtuxa1.dtb
 dtb-$(CONFIG_TARGET_MCR3000) += mcr3000.dtb
 dtb-$(CONFIG_TARGET_GAZERBEAM) += gazerbeam.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here
diff --git a/arch/riscv/dts/Makefile b/arch/riscv/dts/Makefile
index 90d3f35e6e3..5c15a0f303a 100644
--- a/arch/riscv/dts/Makefile
+++ b/arch/riscv/dts/Makefile
@@ -8,6 +8,8 @@ dtb-$(CONFIG_TARGET_SIFIVE_UNLEASHED) += hifive-unleashed-a00.dtb
 dtb-$(CONFIG_TARGET_SIFIVE_UNMATCHED) += hifive-unmatched-a00.dtb
 dtb-$(CONFIG_TARGET_SIPEED_MAIX) += k210-maix-bit.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/sandbox/dts/Makefile b/arch/sandbox/dts/Makefile
index 3e5dc67d53e..6cbc9bbcaa1 100644
--- a/arch/sandbox/dts/Makefile
+++ b/arch/sandbox/dts/Makefile
@@ -8,6 +8,8 @@ endif
 dtb-$(CONFIG_UT_DM) += test.dtb
 dtb-$(CONFIG_CMD_EXTENSION) += overlay0.dtbo overlay1.dtbo
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p 0x1000
diff --git a/arch/sh/dts/Makefile b/arch/sh/dts/Makefile
index e423bfd5664..144fd3e7d22 100644
--- a/arch/sh/dts/Makefile
+++ b/arch/sh/dts/Makefile
@@ -1,5 +1,7 @@
 dtb-y += sh7751-r2dplus.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 # Add any required device tree compiler flags here
diff --git a/arch/x86/dts/Makefile b/arch/x86/dts/Makefile
index 5c8c05ec499..cd77f4c4e81 100644
--- a/arch/x86/dts/Makefile
+++ b/arch/x86/dts/Makefile
@@ -22,6 +22,8 @@ dtb-y += bayleybay.dtb \
 	slimbootloader.dtb \
 	baytrail_som-db5800-som-6867.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS += -R 4 -p $(if $(CONFIG_EFI_APP),0x8000,0x1000)
diff --git a/arch/xtensa/dts/Makefile b/arch/xtensa/dts/Makefile
index 06ee25d2da9..fbbdefaf2cf 100644
--- a/arch/xtensa/dts/Makefile
+++ b/arch/xtensa/dts/Makefile
@@ -2,6 +2,8 @@
 
 dtb-$(CONFIG_XTFPGA) += ml605.dtb ml605_nommu.dtb kc705.dtb kc705_nommu.dtb
 
+include $(srctree)/scripts/Makefile.dts
+
 targets += $(dtb-y)
 
 DTC_FLAGS +=
diff --git a/dts/Kconfig b/dts/Kconfig
index 4de1a70efce..b19912d10dd 100644
--- a/dts/Kconfig
+++ b/dts/Kconfig
@@ -157,8 +157,8 @@ config DEVICE_TREE_INCLUDES
 	  .dtsi files that will also be used.
 
 config OF_LIST
-	string "List of device tree files to include for DT control"
-	depends on SPL_LOAD_FIT || MULTI_DTB_FIT
+	string "List of device tree files to include for DT control" if SPL_LOAD_FIT || MULTI_DTB_FIT
+	depends on OF_CONTROL
 	default DEFAULT_DEVICE_TREE
 	help
 	  This option specifies a list of device tree files to use for DT
@@ -264,8 +264,8 @@ config SPL_MULTI_DTB_FIT
 	  capabilities, pad configurations).
 
 config SPL_OF_LIST
-	string "List of device tree files to include for DT control in SPL"
-	depends on SPL_MULTI_DTB_FIT
+	string "List of device tree files to include for DT control in SPL" if SPL_MULTI_DTB_FIT
+	depends on SPL_OF_CONTROL
 	default OF_LIST
 	help
 	  This option specifies a list of device tree files to use for DT
diff --git a/scripts/Makefile.dts b/scripts/Makefile.dts
new file mode 100644
index 00000000000..2561025da82
--- /dev/null
+++ b/scripts/Makefile.dts
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: GPL-2.0+
+
+dtb-y += $(patsubst %,%.dtb,$(subst ",,$(CONFIG_$(SPL_)OF_LIST)))

From ebc87d0dafd4ca7859c3b7d2b38430bc2669ee85 Mon Sep 17 00:00:00 2001
From: Heinrich Schuchardt
Date: Fri, 28 Jan 2022 10:08:32 +0100
Subject: [PATCH 02/27] sandbox: fix build failure with musl and SDL
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

sdl.c is compiled against the SDL library. Trying to redefine wchar_t
with -fshort-wchar is not necessary and leads to build failures when
compiling against musl.

Cc: Milan P. Stanić
Signed-off-by: Heinrich Schuchardt
Reviewed-by: Simon Glass
---
 arch/sandbox/Makefile     |  7 +++++++
 arch/sandbox/cpu/Makefile | 11 ++++++++---
 2 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/arch/sandbox/Makefile b/arch/sandbox/Makefile
index f6cf859f249..a335f8acfde 100644
--- a/arch/sandbox/Makefile
+++ b/arch/sandbox/Makefile
@@ -4,3 +4,10 @@ head-y := arch/sandbox/cpu/start.o arch/sandbox/cpu/os.o
 head-$(CONFIG_SANDBOX_SDL) += arch/sandbox/cpu/sdl.o
 libs-y += arch/sandbox/cpu/
 libs-y += arch/sandbox/lib/
+
+# sdl.c fails to compile with -fshort-wchar using musl.
+cmd_cc_sdl.o = $(CC) $(filter-out -nostdinc -fshort-wchar, \
+	$(patsubst -I%,-idirafter%,$(c_flags))) -fno-lto -c -o $@ $<
+
+$(obj)/sdl.o: $(src)/sdl.c FORCE
+	$(call if_changed_dep,cc_sdl.o)
diff --git a/arch/sandbox/cpu/Makefile b/arch/sandbox/cpu/Makefile
index de7fe7f3918..7c5c52652f5 100644
--- a/arch/sandbox/cpu/Makefile
+++ b/arch/sandbox/cpu/Makefile
@@ -7,7 +7,7 @@ obj-y := cache.o cpu.o state.o
 extra-y := start.o os.o
 
-extra-$(CONFIG_SANDBOX_SDL) += sdl.o
+extra-$(CONFIG_SANDBOX_SDL) += sdl.o
 
 obj-$(CONFIG_SPL_BUILD) += spl.o
 obj-$(CONFIG_ETH_SANDBOX_RAW) += eth-raw-os.o
 
@@ -19,8 +19,6 @@ cmd_cc_os.o = $(CC) $(filter-out -nostdinc, \
 
 $(obj)/os.o: $(src)/os.c FORCE
 	$(call if_changed_dep,cc_os.o)
 
-$(obj)/sdl.o: $(src)/sdl.c FORCE
-	$(call if_changed_dep,cc_os.o)
 # eth-raw-os.c is built in the system env, so needs standard includes
 # CFLAGS_REMOVE_eth-raw-os.o cannot be used to drop header include path
@@ -30,3 +28,10 @@ cmd_cc_eth-raw-os.o = $(CC) $(filter-out -nostdinc, \
 
 $(obj)/eth-raw-os.o: $(src)/eth-raw-os.c FORCE
 	$(call if_changed_dep,cc_eth-raw-os.o)
+
+# sdl.c fails to build with -fshort-wchar using musl
+cmd_cc_sdl.o = $(CC) $(filter-out -nostdinc -fshort-wchar, \
+	$(patsubst -I%,-idirafter%,$(c_flags))) -fno-lto -c -o $@ $<
+
+$(obj)/sdl.o: $(src)/sdl.c FORCE
+	$(call if_changed_dep,cc_sdl.o)

From 82ee8bfe519307b4175bb0f751da73c8555a0a25 Mon Sep 17 00:00:00 2001
From: Sughosh Ganu
Date: Sun, 30 Jan 2022 00:51:08 +0530
Subject: [PATCH 03/27] dm: Use parentheses for the device_get_ops macro argument

Use parentheses for the device_get_ops macro argument. This prevents
errors when using an expression for the parameter.

Signed-off-by: Sughosh Ganu
Reviewed-by: Simon Glass
---
 include/dm/device.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/include/dm/device.h b/include/dm/device.h
index 435a1114f1c..cb52a0997c8 100644
--- a/include/dm/device.h
+++ b/include/dm/device.h
@@ -212,7 +212,7 @@ struct udevice_rt {
 #define DM_MAX_SEQ_STR 3
 
 /* Returns the operations for a device */
-#define device_get_ops(dev) (dev->driver->ops)
+#define device_get_ops(dev) ((dev)->driver->ops)
 
 #if CONFIG_IS_ENABLED(OF_PLATDATA_RT)
 u32 dev_get_flags(const struct udevice *dev);

From c1aa66e75dbfcacab1fbca0e3e19c09e08d932d5 Mon Sep 17 00:00:00 2001
From: Simon Glass
Date: Sat, 29 Jan 2022 14:14:04 -0700
Subject: [PATCH 04/27] patman: Convert camel case in tools.py

Convert this file to snake case and update all files which use it.
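Illustrative aside on the device_get_ops() change in PATCH 03 above
(simplified stand-in types, not the real U-Boot headers): the extra
parentheses matter as soon as a caller passes an expression rather than a
plain variable.

struct driver { const void *ops; };
struct udevice { struct driver *driver; };

/* Old form: #define device_get_ops(dev) (dev->driver->ops)
 * With an expression argument, device_get_ops(cond ? a : b) expands to
 * (cond ? a : b->driver->ops), i.e. '->' binds to 'b' only.  Parenthesising
 * the argument, as the patch does, dereferences the whole expression.
 */
#define device_get_ops(dev) ((dev)->driver->ops)

static const void *pick_ops(int cond, struct udevice *a, struct udevice *b)
{
	return device_get_ops(cond ? a : b);
}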
Signed-off-by: Simon Glass --- tools/binman/bintool.py | 10 +- tools/binman/bintool_test.py | 34 +-- tools/binman/btool/lz4.py | 8 +- tools/binman/btool/lzma_alone.py | 16 +- tools/binman/cbfs_util.py | 12 +- tools/binman/cbfs_util_test.py | 20 +- tools/binman/control.py | 30 +- tools/binman/elf.py | 12 +- tools/binman/elf_test.py | 14 +- tools/binman/entry.py | 32 +- tools/binman/entry_test.py | 4 +- tools/binman/etype/atf_fip.py | 2 +- tools/binman/etype/blob.py | 4 +- tools/binman/etype/blob_ext_list.py | 2 +- tools/binman/etype/fdtmap.py | 2 +- tools/binman/etype/files.py | 2 +- tools/binman/etype/fill.py | 2 +- tools/binman/etype/fit.py | 20 +- tools/binman/etype/fmap.py | 4 +- tools/binman/etype/gbb.py | 10 +- tools/binman/etype/intel_ifwi.py | 12 +- tools/binman/etype/mkimage.py | 8 +- tools/binman/etype/section.py | 16 +- tools/binman/etype/text.py | 4 +- tools/binman/etype/u_boot_elf.py | 6 +- tools/binman/etype/u_boot_env.py | 4 +- tools/binman/etype/u_boot_spl_bss_pad.py | 4 +- tools/binman/etype/u_boot_tpl_bss_pad.py | 4 +- tools/binman/etype/u_boot_ucode.py | 4 +- tools/binman/etype/u_boot_with_ucode_ptr.py | 2 +- tools/binman/etype/vblock.py | 10 +- tools/binman/fdt_test.py | 4 +- tools/binman/fip_util.py | 16 +- tools/binman/fip_util_test.py | 36 +-- tools/binman/fmap_util.py | 2 +- tools/binman/ftest.py | 320 ++++++++++---------- tools/binman/image.py | 10 +- tools/binman/state.py | 18 +- tools/buildman/control.py | 2 +- tools/buildman/func_test.py | 2 +- tools/buildman/test.py | 2 +- tools/buildman/toolchain.py | 10 +- tools/dtoc/fdt.py | 4 +- tools/dtoc/fdt_util.py | 8 +- tools/dtoc/test_dtoc.py | 76 ++--- tools/dtoc/test_fdt.py | 32 +- tools/dtoc/test_src_scan.py | 12 +- tools/patman/func_test.py | 2 +- tools/patman/main.py | 2 +- tools/patman/tools.py | 74 ++--- 50 files changed, 473 insertions(+), 473 deletions(-) diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py index e2e5660d167..068d766c507 100644 --- a/tools/binman/bintool.py +++ b/tools/binman/bintool.py @@ -327,9 +327,9 @@ class Bintool: """ tmpdir = tempfile.mkdtemp(prefix='binmanf.') print(f"- clone git repo '{git_repo}' to '{tmpdir}'") - tools.Run('git', 'clone', '--depth', '1', git_repo, tmpdir) + tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir) print(f"- build target '{make_target}'") - tools.Run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}', + tools.run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}', make_target) fname = os.path.join(tmpdir, bintool_path) if not os.path.exists(fname): @@ -349,8 +349,8 @@ class Bintool: str: Filename of fetched file to copy to a suitable directory str: Name of temp directory to remove, or None """ - fname, tmpdir = tools.Download(url) - tools.Run('chmod', 'a+x', fname) + fname, tmpdir = tools.download(url) + tools.run('chmod', 'a+x', fname) return fname, tmpdir @classmethod @@ -384,7 +384,7 @@ class Bintool: """ args = ['sudo', 'apt', 'install', '-y', package] print('- %s' % ' '.join(args)) - tools.Run(*args) + tools.run(*args) return True @staticmethod diff --git a/tools/binman/bintool_test.py b/tools/binman/bintool_test.py index 3d6bcdab9d1..7efb8391db2 100644 --- a/tools/binman/bintool_test.py +++ b/tools/binman/bintool_test.py @@ -80,7 +80,7 @@ class TestBintool(unittest.TestCase): Args: fake_download (function): Function to call instead of - tools.Download() + tools.download() method (bintool.FETCH_...: Fetch method to use Returns: @@ -88,7 +88,7 @@ class TestBintool(unittest.TestCase): """ btest = 
Bintool.create('_testing') col = terminal.Color() - with unittest.mock.patch.object(tools, 'Download', + with unittest.mock.patch.object(tools, 'download', side_effect=fake_download): with test_util.capture_sys_output() as (stdout, _): btest.fetch_tool(method, col, False) @@ -97,7 +97,7 @@ class TestBintool(unittest.TestCase): def test_fetch_url_err(self): """Test an error while fetching a tool from a URL""" def fail_download(url): - """Take the tools.Download() function by raising an exception""" + """Take the tools.download() function by raising an exception""" raise urllib.error.URLError('my error') stdout = self.check_fetch_url(fail_download, bintool.FETCH_ANY) @@ -114,7 +114,7 @@ class TestBintool(unittest.TestCase): def test_fetch_method(self): """Test fetching using a particular method""" def fail_download(url): - """Take the tools.Download() function by raising an exception""" + """Take the tools.download() function by raising an exception""" raise urllib.error.URLError('my error') stdout = self.check_fetch_url(fail_download, bintool.FETCH_BIN) @@ -123,11 +123,11 @@ class TestBintool(unittest.TestCase): def test_fetch_pass_fail(self): """Test fetching multiple tools with some passing and some failing""" def handle_download(_): - """Take the tools.Download() function by writing a file""" + """Take the tools.download() function by writing a file""" if self.seq: raise urllib.error.URLError('not found') self.seq += 1 - tools.WriteFile(fname, expected) + tools.write_file(fname, expected) return fname, dirname expected = b'this is a test' @@ -140,12 +140,12 @@ class TestBintool(unittest.TestCase): self.seq = 0 with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir): - with unittest.mock.patch.object(tools, 'Download', + with unittest.mock.patch.object(tools, 'download', side_effect=handle_download): with test_util.capture_sys_output() as (stdout, _): Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2) self.assertTrue(os.path.exists(dest_fname)) - data = tools.ReadFile(dest_fname) + data = tools.read_file(dest_fname) self.assertEqual(expected, data) lines = stdout.getvalue().splitlines() @@ -245,14 +245,14 @@ class TestBintool(unittest.TestCase): tmpdir = cmd[2] self.fname = os.path.join(tmpdir, 'pathname') if write_file: - tools.WriteFile(self.fname, b'hello') + tools.write_file(self.fname, b'hello') btest = Bintool.create('_testing') col = terminal.Color() self.fname = None with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', self._indir): - with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run): + with unittest.mock.patch.object(tools, 'run', side_effect=fake_run): with test_util.capture_sys_output() as (stdout, _): btest.fetch_tool(bintool.FETCH_BUILD, col, False) fname = os.path.join(self._indir, '_testing') @@ -275,7 +275,7 @@ class TestBintool(unittest.TestCase): btest = Bintool.create('_testing') btest.install = True col = terminal.Color() - with unittest.mock.patch.object(tools, 'Run', return_value=None): + with unittest.mock.patch.object(tools, 'run', return_value=None): with test_util.capture_sys_output() as _: result = btest.fetch_tool(bintool.FETCH_BIN, col, False) self.assertEqual(bintool.FETCHED, result) @@ -292,8 +292,8 @@ class TestBintool(unittest.TestCase): def test_all_bintools(self): """Test that all bintools can handle all available fetch types""" def handle_download(_): - """Take the tools.Download() function by writing a file""" - tools.WriteFile(fname, expected) + """Take the tools.download() function by writing a file""" + 
tools.write_file(fname, expected) return fname, dirname def fake_run(*cmd): @@ -301,15 +301,15 @@ class TestBintool(unittest.TestCase): # See Bintool.build_from_git() tmpdir = cmd[2] self.fname = os.path.join(tmpdir, 'pathname') - tools.WriteFile(self.fname, b'hello') + tools.write_file(self.fname, b'hello') expected = b'this is a test' dirname = os.path.join(self._indir, 'download_dir') os.mkdir(dirname) fname = os.path.join(dirname, 'downloaded') - with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run): - with unittest.mock.patch.object(tools, 'Download', + with unittest.mock.patch.object(tools, 'run', side_effect=fake_run): + with unittest.mock.patch.object(tools, 'download', side_effect=handle_download): with test_util.capture_sys_output() as _: for name in Bintool.get_tool_list(): @@ -320,7 +320,7 @@ class TestBintool(unittest.TestCase): if result is not True and result is not None: result_fname, _ = result self.assertTrue(os.path.exists(result_fname)) - data = tools.ReadFile(result_fname) + data = tools.read_file(result_fname) self.assertEqual(expected, data) os.remove(result_fname) diff --git a/tools/binman/btool/lz4.py b/tools/binman/btool/lz4.py index d165f52da92..f09c5c8904b 100644 --- a/tools/binman/btool/lz4.py +++ b/tools/binman/btool/lz4.py @@ -88,8 +88,8 @@ class Bintoollz4(bintool.Bintool): bytes: Compressed data """ with tempfile.NamedTemporaryFile(prefix='comp.tmp', - dir=tools.GetOutputDir()) as tmp: - tools.WriteFile(tmp.name, indata) + dir=tools.get_output_dir()) as tmp: + tools.write_file(tmp.name, indata) args = ['--no-frame-crc', '-B4', '-5', '-c', tmp.name] return self.run_cmd(*args, binary=True) @@ -103,8 +103,8 @@ class Bintoollz4(bintool.Bintool): bytes: Decompressed data """ with tempfile.NamedTemporaryFile(prefix='decomp.tmp', - dir=tools.GetOutputDir()) as inf: - tools.WriteFile(inf.name, indata) + dir=tools.get_output_dir()) as inf: + tools.write_file(inf.name, indata) args = ['-cd', inf.name] return self.run_cmd(*args, binary=True) diff --git a/tools/binman/btool/lzma_alone.py b/tools/binman/btool/lzma_alone.py index d7c62dfd2a5..52a960fd2fa 100644 --- a/tools/binman/btool/lzma_alone.py +++ b/tools/binman/btool/lzma_alone.py @@ -65,13 +65,13 @@ class Bintoollzma_alone(bintool.Bintool): bytes: Compressed data """ with tempfile.NamedTemporaryFile(prefix='comp.tmp', - dir=tools.GetOutputDir()) as inf: - tools.WriteFile(inf.name, indata) + dir=tools.get_output_dir()) as inf: + tools.write_file(inf.name, indata) with tempfile.NamedTemporaryFile(prefix='compo.otmp', - dir=tools.GetOutputDir()) as outf: + dir=tools.get_output_dir()) as outf: args = ['e', inf.name, outf.name, '-lc1', '-lp0', '-pb0', '-d8'] self.run_cmd(*args, binary=True) - return tools.ReadFile(outf.name) + return tools.read_file(outf.name) def decompress(self, indata): """Decompress data with lzma_alone @@ -83,13 +83,13 @@ class Bintoollzma_alone(bintool.Bintool): bytes: Decompressed data """ with tempfile.NamedTemporaryFile(prefix='decomp.tmp', - dir=tools.GetOutputDir()) as inf: - tools.WriteFile(inf.name, indata) + dir=tools.get_output_dir()) as inf: + tools.write_file(inf.name, indata) with tempfile.NamedTemporaryFile(prefix='compo.otmp', - dir=tools.GetOutputDir()) as outf: + dir=tools.get_output_dir()) as outf: args = ['d', inf.name, outf.name] self.run_cmd(*args, binary=True) - return tools.ReadFile(outf.name, binary=True) + return tools.read_file(outf.name, binary=True) def fetch(self, method): """Fetch handler for lzma_alone diff --git a/tools/binman/cbfs_util.py 
b/tools/binman/cbfs_util.py index eea7868b16c..9cad03886f7 100644 --- a/tools/binman/cbfs_util.py +++ b/tools/binman/cbfs_util.py @@ -189,9 +189,9 @@ def _pack_string(instr): Returns: String with required padding (at least one 0x00 byte) at the end """ - val = tools.ToBytes(instr) + val = tools.to_bytes(instr) pad_len = align_int(len(val) + 1, FILENAME_ALIGN) - return val + tools.GetBytes(0, pad_len - len(val)) + return val + tools.get_bytes(0, pad_len - len(val)) class CbfsFile(object): @@ -371,7 +371,7 @@ class CbfsFile(object): FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN, self.compress, self.memlen) elif self.ftype == TYPE_EMPTY: - data = tools.GetBytes(self.erase_byte, self.size) + data = tools.get_bytes(self.erase_byte, self.size) else: raise ValueError('Unknown type %#x when writing\n' % self.ftype) if attr: @@ -388,7 +388,7 @@ class CbfsFile(object): # possible. raise ValueError("Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x" % (self.name, self.cbfs_offset, offset)) - pad = tools.GetBytes(pad_byte, pad_len) + pad = tools.get_bytes(pad_byte, pad_len) hdr_len += pad_len # This is the offset of the start of the file's data, @@ -414,7 +414,7 @@ class CbfsWriter(object): Usage is something like: cbw = CbfsWriter(size) - cbw.add_file_raw('u-boot', tools.ReadFile('u-boot.bin')) + cbw.add_file_raw('u-boot', tools.read_file('u-boot.bin')) ... data, cbfs_offset = cbw.get_data_and_offset() @@ -482,7 +482,7 @@ class CbfsWriter(object): if fd.tell() > offset: raise ValueError('No space for data before offset %#x (current offset %#x)' % (offset, fd.tell())) - fd.write(tools.GetBytes(self._erase_byte, offset - fd.tell())) + fd.write(tools.get_bytes(self._erase_byte, offset - fd.tell())) def _pad_to(self, fd, offset): """Write out pad bytes and/or an empty file until a given offset diff --git a/tools/binman/cbfs_util_test.py b/tools/binman/cbfs_util_test.py index 494f6145edb..f86b2951490 100755 --- a/tools/binman/cbfs_util_test.py +++ b/tools/binman/cbfs_util_test.py @@ -36,7 +36,7 @@ class TestCbfs(unittest.TestCase): def setUpClass(cls): # Create a temporary directory for test files cls._indir = tempfile.mkdtemp(prefix='cbfs_util.') - tools.SetInputDirs([cls._indir]) + tools.set_input_dirs([cls._indir]) # Set up some useful data files TestCbfs._make_input_file('u-boot.bin', U_BOOT_DATA) @@ -45,7 +45,7 @@ class TestCbfs(unittest.TestCase): # Set up a temporary output directory, used by the tools library when # compressing files - tools.PrepareOutputDir(None) + tools.prepare_output_dir(None) cls.cbfstool = bintool.Bintool.create('cbfstool') cls.have_cbfstool = cls.cbfstool.is_present() @@ -58,7 +58,7 @@ class TestCbfs(unittest.TestCase): if cls._indir: shutil.rmtree(cls._indir) cls._indir = None - tools.FinaliseOutputDir() + tools.finalise_output_dir() @classmethod def _make_input_file(cls, fname, contents): @@ -71,7 +71,7 @@ class TestCbfs(unittest.TestCase): Full pathname of file created """ pathname = os.path.join(cls._indir, fname) - tools.WriteFile(pathname, contents) + tools.write_file(pathname, contents) return pathname def _check_hdr(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86): @@ -176,12 +176,12 @@ class TestCbfs(unittest.TestCase): base = [(1 << 32) - size + b for b in base] self.cbfstool.add_raw( cbfs_fname, 'u-boot', - tools.GetInputFilename(compress and 'compress' or 'u-boot.bin'), + tools.get_input_filename(compress and 'compress' or 'u-boot.bin'), compress[0] if compress else None, base[0] if base else None) 
self.cbfstool.add_raw( cbfs_fname, 'u-boot-dtb', - tools.GetInputFilename(compress and 'compress' or 'u-boot.dtb'), + tools.get_input_filename(compress and 'compress' or 'u-boot.dtb'), compress[1] if compress else None, base[1] if base else None) return cbfs_fname @@ -198,10 +198,10 @@ class TestCbfs(unittest.TestCase): """ if not self.have_cbfstool or not self.have_lz4: return - expect = tools.ReadFile(cbfstool_fname) + expect = tools.read_file(cbfstool_fname) if expect != data: - tools.WriteFile('/tmp/expect', expect) - tools.WriteFile('/tmp/actual', data) + tools.write_file('/tmp/expect', expect) + tools.write_file('/tmp/actual', data) print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff') self.fail('cbfstool produced a different result') @@ -482,7 +482,7 @@ class TestCbfs(unittest.TestCase): size = 0xb0 cbw = CbfsWriter(size) - cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname)) + cbw.add_file_stage('u-boot', tools.read_file(elf_fname)) data = cbw.get_data() cbfs = self._check_hdr(data, size) diff --git a/tools/binman/control.py b/tools/binman/control.py index 2daad05b804..305f14bad31 100644 --- a/tools/binman/control.py +++ b/tools/binman/control.py @@ -258,7 +258,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths, raise ValueError('Must specify exactly one entry path to write with -f') entry = image.FindEntryPath(entry_paths[0]) data = entry.ReadData(decomp, alt_format) - tools.WriteFile(output_fname, data) + tools.write_file(output_fname, data) tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname)) return @@ -281,7 +281,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths, fname = os.path.join(fname, 'root') tout.Notice("Write entry '%s' size %x to '%s'" % (entry.GetPath(), len(data), fname)) - tools.WriteFile(fname, data) + tools.write_file(fname, data) return einfos @@ -398,7 +398,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths, if len(entry_paths) != 1: raise ValueError('Must specify exactly one entry path to write with -f') entry = image.FindEntryPath(entry_paths[0]) - data = tools.ReadFile(input_fname) + data = tools.read_file(input_fname) tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname)) WriteEntryToImage(image, entry, data, do_compress=do_compress, allow_resize=allow_resize, write_map=write_map) @@ -425,7 +425,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths, if os.path.exists(fname): tout.Notice("Write entry '%s' from file '%s'" % (entry.GetPath(), fname)) - data = tools.ReadFile(fname) + data = tools.read_file(fname) ReplaceOneEntry(image, entry, data, do_compress, allow_resize) else: tout.Warning("Skipping entry '%s' from missing file '%s'" % @@ -468,8 +468,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded): # output into a file in our output directly. Then scan it for use # in binman. 
dtb_fname = fdt_util.EnsureCompiled(dtb_fname) - fname = tools.GetOutputFilename('u-boot.dtb.out') - tools.WriteFile(fname, tools.ReadFile(dtb_fname)) + fname = tools.get_output_filename('u-boot.dtb.out') + tools.write_file(fname, tools.read_file(dtb_fname)) dtb = fdt.FdtScan(fname) node = _FindBinmanNode(dtb) @@ -618,7 +618,7 @@ def Binman(args): global state if args.full_help: - tools.PrintFullHelp( + tools.print_full_help( os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README.rst') ) return 0 @@ -630,7 +630,7 @@ def Binman(args): if args.cmd in ['ls', 'extract', 'replace', 'tool']: try: tout.Init(args.verbosity) - tools.PrepareOutputDir(None) + tools.prepare_output_dir(None) if args.cmd == 'ls': ListEntries(args.image, args.paths) @@ -644,7 +644,7 @@ def Binman(args): allow_resize=not args.fix_size, write_map=args.map) if args.cmd == 'tool': - tools.SetToolPaths(args.toolpath) + tools.set_tool_paths(args.toolpath) if args.list: bintool.Bintool.list_all() elif args.fetch: @@ -658,7 +658,7 @@ def Binman(args): except: raise finally: - tools.FinaliseOutputDir() + tools.finalise_output_dir() return 0 elf_params = None @@ -694,9 +694,9 @@ def Binman(args): # runtime. use_expanded = not args.no_expanded try: - tools.SetInputDirs(args.indir) - tools.PrepareOutputDir(args.outdir, args.preserve) - tools.SetToolPaths(args.toolpath) + tools.set_input_dirs(args.indir) + tools.prepare_output_dir(args.outdir, args.preserve) + tools.set_tool_paths(args.toolpath) state.SetEntryArgs(args.entry_arg) state.SetThreads(args.threads) @@ -717,7 +717,7 @@ def Binman(args): # Write the updated FDTs to our output files for dtb_item in state.GetAllFdts(): - tools.WriteFile(dtb_item._fname, dtb_item.GetContents()) + tools.write_file(dtb_item._fname, dtb_item.GetContents()) if elf_params: data = state.GetFdtForEtype('u-boot-dtb').GetContents() @@ -729,7 +729,7 @@ def Binman(args): # Use this to debug the time take to pack the image #state.TimingShow() finally: - tools.FinaliseOutputDir() + tools.finalise_output_dir() finally: tout.Uninit() diff --git a/tools/binman/elf.py b/tools/binman/elf.py index de2bb4651fa..d22a0d4bf5c 100644 --- a/tools/binman/elf.py +++ b/tools/binman/elf.py @@ -54,7 +54,7 @@ def GetSymbols(fname, patterns): key: Name of symbol value: Hex value of symbol """ - stdout = tools.Run('objdump', '-t', fname) + stdout = tools.run('objdump', '-t', fname) lines = stdout.splitlines() if patterns: re_syms = re.compile('|'.join(patterns)) @@ -154,7 +154,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section): entry: Entry to process section: Section which can be used to lookup symbol values """ - fname = tools.GetInputFilename(elf_fname) + fname = tools.get_input_filename(elf_fname) syms = GetSymbols(fname, ['image', 'binman']) if not syms: return @@ -282,7 +282,7 @@ SECTIONS # text section at the start # -m32: Build for 32-bit x86 # -T...: Specifies the link script, which sets the start address - cc, args = tools.GetTargetCompileTool('cc') + cc, args = tools.get_target_compile_tool('cc') args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T', lds_file, '-o', elf_fname, s_file] stdout = command.Output(cc, *args) @@ -363,9 +363,9 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert): raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" % (infile, len(insert), len(insert), size, size)) - data = tools.ReadFile(infile) + data = tools.read_file(infile) newdata = data[:syms[start_sym].offset] - newdata += insert + tools.GetBytes(0, size 
- len(insert)) + newdata += insert + tools.get_bytes(0, size - len(insert)) newdata += data[syms[end_sym].offset:] - tools.WriteFile(outfile, newdata) + tools.write_file(outfile, newdata) tout.Info('Written to offset %#x' % syms[start_sym].offset) diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py index f7272584878..b531062f4a9 100644 --- a/tools/binman/elf_test.py +++ b/tools/binman/elf_test.py @@ -27,7 +27,7 @@ class FakeEntry: """ def __init__(self, contents_size): self.contents_size = contents_size - self.data = tools.GetBytes(ord('a'), contents_size) + self.data = tools.get_bytes(ord('a'), contents_size) def GetPath(self): return 'entry_path' @@ -72,7 +72,7 @@ def BuildElfTestFiles(target_dir): if 'MAKEFLAGS' in os.environ: del os.environ['MAKEFLAGS'] try: - tools.Run('make', '-C', target_dir, '-f', + tools.run('make', '-C', target_dir, '-f', os.path.join(testdir, 'Makefile'), 'SRC=%s/' % testdir) except ValueError as e: # The test system seems to suppress this in a strange way @@ -83,7 +83,7 @@ class TestElf(unittest.TestCase): @classmethod def setUpClass(cls): cls._indir = tempfile.mkdtemp(prefix='elf.') - tools.SetInputDirs(['.']) + tools.set_input_dirs(['.']) BuildElfTestFiles(cls._indir) @classmethod @@ -166,7 +166,7 @@ class TestElf(unittest.TestCase): section = FakeSection(sym_value=None) elf_fname = self.ElfTestFile('u_boot_binman_syms') syms = elf.LookupAndWriteSymbols(elf_fname, entry, section) - self.assertEqual(tools.GetBytes(255, 20) + tools.GetBytes(ord('a'), 4), + self.assertEqual(tools.get_bytes(255, 20) + tools.get_bytes(ord('a'), 4), entry.data) def testDebug(self): @@ -193,7 +193,7 @@ class TestElf(unittest.TestCase): # Make an Elf file and then convert it to a fkat binary file. This # should produce the original data. 
elf.MakeElf(elf_fname, expected_text, expected_data) - objcopy, args = tools.GetTargetCompileTool('objcopy') + objcopy, args = tools.get_target_compile_tool('objcopy') args += ['-O', 'binary', elf_fname, bin_fname] stdout = command.Output(objcopy, *args) with open(bin_fname, 'rb') as fd: @@ -210,7 +210,7 @@ class TestElf(unittest.TestCase): expected_data = b'wxyz' elf_fname = os.path.join(outdir, 'elf') elf.MakeElf(elf_fname, expected_text, expected_data) - data = tools.ReadFile(elf_fname) + data = tools.read_file(elf_fname) load = 0xfef20000 entry = load + 2 @@ -231,7 +231,7 @@ class TestElf(unittest.TestCase): offset = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end']) start = offset['embed_start'].offset end = offset['embed_end'].offset - data = tools.ReadFile(fname) + data = tools.read_file(fname) embed_data = data[start:end] expect = struct.pack('%s, size %s->%s' % - (ToHex(self.offset), ToHex(self.orig_offset), - ToHex(self.size), ToHex(self.orig_size))) + (to_hex(self.offset), to_hex(self.orig_offset), + to_hex(self.size), to_hex(self.orig_size))) self.pre_reset_size = self.size self.offset = self.orig_offset self.size = self.orig_size @@ -444,20 +444,20 @@ class Entry(object): New section offset pointer (after this entry) """ self.Detail('Packing: offset=%s, size=%s, content_size=%x' % - (ToHex(self.offset), ToHex(self.size), + (to_hex(self.offset), to_hex(self.size), self.contents_size)) if self.offset is None: if self.offset_unset: self.Raise('No offset set with offset-unset: should another ' 'entry provide this correct offset?') - self.offset = tools.Align(offset, self.align) + self.offset = tools.align(offset, self.align) needed = self.pad_before + self.contents_size + self.pad_after - needed = tools.Align(needed, self.align_size) + needed = tools.align(needed, self.align_size) size = self.size if not size: size = needed new_offset = self.offset + size - aligned_offset = tools.Align(new_offset, self.align_end) + aligned_offset = tools.align(new_offset, self.align_end) if aligned_offset != new_offset: size = aligned_offset - self.offset new_offset = aligned_offset @@ -471,10 +471,10 @@ class Entry(object): # Check that the alignment is correct. It could be wrong if the # and offset or size values were provided (i.e. not calculated), but # conflict with the provided alignment values - if self.size != tools.Align(self.size, self.align_size): + if self.size != tools.align(self.size, self.align_size): self.Raise("Size %#x (%d) does not match align-size %#x (%d)" % (self.size, self.size, self.align_size, self.align_size)) - if self.offset != tools.Align(self.offset, self.align): + if self.offset != tools.align(self.offset, self.align): self.Raise("Offset %#x (%d) does not match align %#x (%d)" % (self.offset, self.offset, self.align, self.align)) self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' % @@ -541,7 +541,7 @@ class Entry(object): bytes content of the entry, excluding any padding. If the entry is compressed, the compressed data is returned """ - self.Detail('GetData: size %s' % ToHexSize(self.data)) + self.Detail('GetData: size %s' % to_hex_size(self.data)) return self.data def GetPaddedData(self, data=None): @@ -991,7 +991,7 @@ features to produce new behaviours. 
fname (str): Filename of faked file """ if self.allow_fake and not pathlib.Path(fname).is_file(): - outfname = tools.GetOutputFilename(os.path.basename(fname)) + outfname = tools.get_output_filename(os.path.basename(fname)) with open(outfname, "wb") as out: out.truncate(1024) self.faked = True diff --git a/tools/binman/entry_test.py b/tools/binman/entry_test.py index 1b59c9056ec..7ed9b262bb4 100644 --- a/tools/binman/entry_test.py +++ b/tools/binman/entry_test.py @@ -17,10 +17,10 @@ from patman import tools class TestEntry(unittest.TestCase): def setUp(self): - tools.PrepareOutputDir(None) + tools.prepare_output_dir(None) def tearDown(self): - tools.FinaliseOutputDir() + tools.finalise_output_dir() def GetNode(self): binman_dir = os.path.dirname(os.path.realpath(sys.argv[0])) diff --git a/tools/binman/etype/atf_fip.py b/tools/binman/etype/atf_fip.py index 07e6c645b06..6ecd95b71f9 100644 --- a/tools/binman/etype/atf_fip.py +++ b/tools/binman/etype/atf_fip.py @@ -181,7 +181,7 @@ class Entry_atf_fip(Entry_section): self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0) self._fip_flags = fdt_util.GetInt64(self._node, 'fip-hdr-flags', 0) self._fip_align = fdt_util.GetInt(self._node, 'fip-align', 1) - if tools.NotPowerOfTwo(self._fip_align): + if tools.not_power_of_two(self._fip_align): raise ValueError("Node '%s': FIP alignment %s must be a power of two" % (self._node.path, self._fip_align)) self.ReadEntries() diff --git a/tools/binman/etype/blob.py b/tools/binman/etype/blob.py index 59728f368ec..25ec5d26c9b 100644 --- a/tools/binman/etype/blob.py +++ b/tools/binman/etype/blob.py @@ -37,7 +37,7 @@ class Entry_blob(Entry): def ObtainContents(self): self._filename = self.GetDefaultFilename() - self._pathname = tools.GetInputFilename(self._filename, + self._pathname = tools.get_input_filename(self._filename, self.external and self.section.GetAllowMissing()) # Allow the file to be missing if not self._pathname: @@ -68,7 +68,7 @@ class Entry_blob(Entry): bytes: Data read """ state.TimingStart('read') - indata = tools.ReadFile(pathname) + indata = tools.read_file(pathname) state.TimingAccum('read') state.TimingStart('compress') data = self.CompressData(indata) diff --git a/tools/binman/etype/blob_ext_list.py b/tools/binman/etype/blob_ext_list.py index 29c9092dc35..76ad32a1eea 100644 --- a/tools/binman/etype/blob_ext_list.py +++ b/tools/binman/etype/blob_ext_list.py @@ -38,7 +38,7 @@ class Entry_blob_ext_list(Entry_blob): pathnames = [] for fname in self._filenames: fname = self.check_fake_fname(fname) - pathname = tools.GetInputFilename( + pathname = tools.get_input_filename( fname, self.external and self.section.GetAllowMissing()) # Allow the file to be missing if not pathname: diff --git a/tools/binman/etype/fdtmap.py b/tools/binman/etype/fdtmap.py index aaaf2de4383..76e8dbe8187 100644 --- a/tools/binman/etype/fdtmap.py +++ b/tools/binman/etype/fdtmap.py @@ -140,7 +140,7 @@ class Entry_fdtmap(Entry): fdt.pack() outfdt = Fdt.FromData(fdt.as_bytearray()) data = outfdt.GetContents() - data = FDTMAP_MAGIC + tools.GetBytes(0, 8) + data + data = FDTMAP_MAGIC + tools.get_bytes(0, 8) + data return data def ObtainContents(self): diff --git a/tools/binman/etype/files.py b/tools/binman/etype/files.py index 927d0f071df..0650a69c550 100644 --- a/tools/binman/etype/files.py +++ b/tools/binman/etype/files.py @@ -47,7 +47,7 @@ class Entry_files(Entry_section): 'require-matches') def ExpandEntries(self): - files = tools.GetInputFilenameGlob(self._pattern) + files = tools.get_input_filename_glob(self._pattern) 
if self._require_matches and not files: self.Raise("Pattern '%s' matched no files" % self._pattern) for fname in files: diff --git a/tools/binman/etype/fill.py b/tools/binman/etype/fill.py index efb2d13e910..cd382799199 100644 --- a/tools/binman/etype/fill.py +++ b/tools/binman/etype/fill.py @@ -31,5 +31,5 @@ class Entry_fill(Entry): self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0) def ObtainContents(self): - self.SetContents(tools.GetBytes(self.fill_value, self.size)) + self.SetContents(tools.get_bytes(self.fill_value, self.size)) return True diff --git a/tools/binman/etype/fit.py b/tools/binman/etype/fit.py index 6ad4a686df5..954cbc3d855 100644 --- a/tools/binman/etype/fit.py +++ b/tools/binman/etype/fit.py @@ -200,19 +200,19 @@ class Entry_fit(Entry): for seq, fdt_fname in enumerate(self._fdts): node_name = subnode.name[1:].replace('SEQ', str(seq + 1)) - fname = tools.GetInputFilename(fdt_fname + '.dtb') + fname = tools.get_input_filename(fdt_fname + '.dtb') with fsw.add_node(node_name): for pname, prop in subnode.props.items(): val = prop.bytes.replace( - b'NAME', tools.ToBytes(fdt_fname)) + b'NAME', tools.to_bytes(fdt_fname)) val = val.replace( - b'SEQ', tools.ToBytes(str(seq + 1))) + b'SEQ', tools.to_bytes(str(seq + 1))) fsw.property(pname, val) # Add data for 'fdt' nodes (but not 'config') if depth == 1 and in_images: fsw.property('data', - tools.ReadFile(fname)) + tools.read_file(fname)) else: if self._fdts is None: if self._fit_list_prop: @@ -246,10 +246,10 @@ class Entry_fit(Entry): # self._BuildInput() either returns bytes or raises an exception. data = self._BuildInput(self._fdt) uniq = self.GetUniqueName() - input_fname = tools.GetOutputFilename('%s.itb' % uniq) - output_fname = tools.GetOutputFilename('%s.fit' % uniq) - tools.WriteFile(input_fname, data) - tools.WriteFile(output_fname, data) + input_fname = tools.get_output_filename('%s.itb' % uniq) + output_fname = tools.get_output_filename('%s.fit' % uniq) + tools.write_file(input_fname, data) + tools.write_file(output_fname, data) args = {} ext_offset = self._fit_props.get('fit,external-offset') @@ -260,11 +260,11 @@ class Entry_fit(Entry): } if self.mkimage.run(reset_timestamp=True, output_fname=output_fname, **args) is not None: - self.SetContents(tools.ReadFile(output_fname)) + self.SetContents(tools.read_file(output_fname)) else: # Bintool is missing; just use empty data as the output self.record_missing_bintool(self.mkimage) - self.SetContents(tools.GetBytes(0, 1024)) + self.SetContents(tools.get_bytes(0, 1024)) return True diff --git a/tools/binman/etype/fmap.py b/tools/binman/etype/fmap.py index cac99b60ebf..72b44a78693 100644 --- a/tools/binman/etype/fmap.py +++ b/tools/binman/etype/fmap.py @@ -8,7 +8,7 @@ from binman.entry import Entry from binman import fmap_util from patman import tools -from patman.tools import ToHexSize +from patman.tools import to_hex_size from patman import tout @@ -47,7 +47,7 @@ class Entry_fmap(Entry): def _AddEntries(areas, entry): entries = entry.GetEntries() tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" % - (entry.GetPath(), entry.etype, ToHexSize(entries))) + (entry.GetPath(), entry.etype, to_hex_size(entries))) if entries and entry.etype != 'cbfs': # Create an area for the section, which encompasses all entries # within it diff --git a/tools/binman/etype/gbb.py b/tools/binman/etype/gbb.py index ca8af1be424..e32fae27ce6 100644 --- a/tools/binman/etype/gbb.py +++ b/tools/binman/etype/gbb.py @@ -70,14 +70,14 @@ class Entry_gbb(Entry): def ObtainContents(self): 
gbb = 'gbb.bin' - fname = tools.GetOutputFilename(gbb) + fname = tools.get_output_filename(gbb) if not self.size: self.Raise('GBB must have a fixed size') gbb_size = self.size bmpfv_size = gbb_size - 0x2180 if bmpfv_size < 0: self.Raise('GBB is too small (minimum 0x2180 bytes)') - keydir = tools.GetInputFilename(self.keydir) + keydir = tools.get_input_filename(self.keydir) stdout = self.futility.gbb_create( fname, [0x100, 0x1000, bmpfv_size, 0x1000]) @@ -88,14 +88,14 @@ class Entry_gbb(Entry): rootkey='%s/root_key.vbpubk' % keydir, recoverykey='%s/recovery_key.vbpubk' % keydir, flags=self.gbb_flags, - bmpfv=tools.GetInputFilename(self.bmpblk)) + bmpfv=tools.get_input_filename(self.bmpblk)) if stdout is not None: - self.SetContents(tools.ReadFile(fname)) + self.SetContents(tools.read_file(fname)) else: # Bintool is missing; just use the required amount of zero data self.record_missing_bintool(self.futility) - self.SetContents(tools.GetBytes(0, gbb_size)) + self.SetContents(tools.get_bytes(0, gbb_size)) return True diff --git a/tools/binman/etype/intel_ifwi.py b/tools/binman/etype/intel_ifwi.py index ed14046ba6e..46bdf116e6a 100644 --- a/tools/binman/etype/intel_ifwi.py +++ b/tools/binman/etype/intel_ifwi.py @@ -58,11 +58,11 @@ class Entry_intel_ifwi(Entry_blob_ext): # Create the IFWI file if needed if self._convert_fit: inname = self._pathname - outname = tools.GetOutputFilename('ifwi.bin') + outname = tools.get_output_filename('ifwi.bin') if self.ifwitool.create_ifwi(inname, outname) is None: # Bintool is missing; just create a zeroed ifwi.bin self.record_missing_bintool(self.ifwitool) - self.SetContents(tools.GetBytes(0, 1024)) + self.SetContents(tools.get_bytes(0, 1024)) self._filename = 'ifwi.bin' self._pathname = outname @@ -74,15 +74,15 @@ class Entry_intel_ifwi(Entry_blob_ext): if self.ifwitool.delete_subpart(outname, 'OBBP') is None: # Bintool is missing; just use zero data self.record_missing_bintool(self.ifwitool) - self.SetContents(tools.GetBytes(0, 1024)) + self.SetContents(tools.get_bytes(0, 1024)) return True for entry in self._ifwi_entries.values(): # First get the input data and put it in a file data = entry.GetPaddedData() uniq = self.GetUniqueName() - input_fname = tools.GetOutputFilename('input.%s' % uniq) - tools.WriteFile(input_fname, data) + input_fname = tools.get_output_filename('input.%s' % uniq) + tools.write_file(input_fname, data) # At this point we know that ifwitool is present, so we don't need # to check for None here @@ -107,7 +107,7 @@ class Entry_intel_ifwi(Entry_blob_ext): After that we delete the OBBP sub-partition and add each of the files that we want in the IFWI file, one for each sub-entry of the IWFI node. 
""" - self._pathname = tools.GetInputFilename(self._filename, + self._pathname = tools.get_input_filename(self._filename, self.section.GetAllowMissing()) # Allow the file to be missing if not self._pathname: diff --git a/tools/binman/etype/mkimage.py b/tools/binman/etype/mkimage.py index 201ee4b5696..baa16f36f34 100644 --- a/tools/binman/etype/mkimage.py +++ b/tools/binman/etype/mkimage.py @@ -48,12 +48,12 @@ class Entry_mkimage(Entry): return False data += entry.GetData() uniq = self.GetUniqueName() - input_fname = tools.GetOutputFilename('mkimage.%s' % uniq) - tools.WriteFile(input_fname, data) - output_fname = tools.GetOutputFilename('mkimage-out.%s' % uniq) + input_fname = tools.get_output_filename('mkimage.%s' % uniq) + tools.write_file(input_fname, data) + output_fname = tools.get_output_filename('mkimage-out.%s' % uniq) if self.mkimage.run_cmd('-d', input_fname, *self._args, output_fname) is not None: - self.SetContents(tools.ReadFile(output_fname)) + self.SetContents(tools.read_file(output_fname)) else: # Bintool is missing; just use the input data as the output self.record_missing_bintool(self.mkimage) diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py index bb375e9063d..b3d73023949 100644 --- a/tools/binman/etype/section.py +++ b/tools/binman/etype/section.py @@ -19,7 +19,7 @@ from binman import state from dtoc import fdt_util from patman import tools from patman import tout -from patman.tools import ToHexSize +from patman.tools import to_hex_size class Entry_section(Entry): @@ -269,19 +269,19 @@ class Entry_section(Entry): data = bytearray() # Handle padding before the entry if entry.pad_before: - data += tools.GetBytes(self._pad_byte, entry.pad_before) + data += tools.get_bytes(self._pad_byte, entry.pad_before) # Add in the actual entry data data += entry_data # Handle padding after the entry if entry.pad_after: - data += tools.GetBytes(self._pad_byte, entry.pad_after) + data += tools.get_bytes(self._pad_byte, entry.pad_after) if entry.size: - data += tools.GetBytes(pad_byte, entry.size - len(data)) + data += tools.get_bytes(pad_byte, entry.size - len(data)) - self.Detail('GetPaddedDataForEntry: size %s' % ToHexSize(self.data)) + self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data)) return data @@ -316,7 +316,7 @@ class Entry_section(Entry): # Handle empty space before the entry pad = (entry.offset or 0) - self._skip_at_start - len(section_data) if pad > 0: - section_data += tools.GetBytes(self._pad_byte, pad) + section_data += tools.get_bytes(self._pad_byte, pad) # Add in the actual entry data section_data += data @@ -709,14 +709,14 @@ class Entry_section(Entry): if not size: data = self.GetPaddedData(self.data) size = len(data) - size = tools.Align(size, self.align_size) + size = tools.align(size, self.align_size) if self.size and contents_size > self.size: self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" % (contents_size, contents_size, self.size, self.size)) if not self.size: self.size = size - if self.size != tools.Align(self.size, self.align_size): + if self.size != tools.align(self.size, self.align_size): self._Raise("Size %#x (%d) does not match align-size %#x (%d)" % (self.size, self.size, self.align_size, self.align_size)) diff --git a/tools/binman/etype/text.py b/tools/binman/etype/text.py index 45dfcc401e4..c55e0233b1e 100644 --- a/tools/binman/etype/text.py +++ b/tools/binman/etype/text.py @@ -60,14 +60,14 @@ class Entry_text(Entry): super().__init__(section, etype, node) value = 
fdt_util.GetString(self._node, 'text') if value: - value = tools.ToBytes(value) + value = tools.to_bytes(value) else: label, = self.GetEntryArgsOrProps([EntryArg('text-label', str)]) self.text_label = label if self.text_label: value, = self.GetEntryArgsOrProps([EntryArg(self.text_label, str)]) - value = tools.ToBytes(value) if value is not None else value + value = tools.to_bytes(value) if value is not None else value self.value = value def ObtainContents(self): diff --git a/tools/binman/etype/u_boot_elf.py b/tools/binman/etype/u_boot_elf.py index 6614a75fafa..3ec774f38ad 100644 --- a/tools/binman/etype/u_boot_elf.py +++ b/tools/binman/etype/u_boot_elf.py @@ -27,9 +27,9 @@ class Entry_u_boot_elf(Entry_blob): def ReadBlobContents(self): if self._strip: uniq = self.GetUniqueName() - out_fname = tools.GetOutputFilename('%s.stripped' % uniq) - tools.WriteFile(out_fname, tools.ReadFile(self._pathname)) - tools.Run('strip', out_fname) + out_fname = tools.get_output_filename('%s.stripped' % uniq) + tools.write_file(out_fname, tools.read_file(self._pathname)) + tools.run('strip', out_fname) self._pathname = out_fname super().ReadBlobContents() return True diff --git a/tools/binman/etype/u_boot_env.py b/tools/binman/etype/u_boot_env.py index 1694c2a6eef..c38340b256e 100644 --- a/tools/binman/etype/u_boot_env.py +++ b/tools/binman/etype/u_boot_env.py @@ -27,7 +27,7 @@ class Entry_u_boot_env(Entry_blob): self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0) def ReadBlobContents(self): - indata = tools.ReadFile(self._pathname) + indata = tools.read_file(self._pathname) data = b'' for line in indata.splitlines(): data += line + b'\0' @@ -35,7 +35,7 @@ class Entry_u_boot_env(Entry_blob): pad = self.size - len(data) - 5 if pad < 0: self.Raise("'u-boot-env' entry too small to hold data (need %#x more bytes)" % -pad) - data += tools.GetBytes(self.fill_value, pad) + data += tools.get_bytes(self.fill_value, pad) crc = zlib.crc32(data) buf = struct.pack(' 00000000 00000008 main-section 00000000 00000004 u-boot @@ -2210,12 +2210,12 @@ class TestFunctional(unittest.TestCase): 0000002c 00000000 00000004 u-boot ''', map_data) self.assertEqual(data, - tools.GetBytes(0x26, 4) + U_BOOT_DATA + - tools.GetBytes(0x21, 12) + - tools.GetBytes(0x26, 4) + U_BOOT_DATA + - tools.GetBytes(0x61, 12) + - tools.GetBytes(0x26, 4) + U_BOOT_DATA + - tools.GetBytes(0x26, 8)) + tools.get_bytes(0x26, 4) + U_BOOT_DATA + + tools.get_bytes(0x21, 12) + + tools.get_bytes(0x26, 4) + U_BOOT_DATA + + tools.get_bytes(0x61, 12) + + tools.get_bytes(0x26, 4) + U_BOOT_DATA + + tools.get_bytes(0x26, 8)) def testCbfsRaw(self): """Test base handling of a Coreboot Filesystem (CBFS) @@ -2332,17 +2332,17 @@ class TestFunctional(unittest.TestCase): Args: data: Conents of output file """ - expected_desc = tools.ReadFile(self.TestFile('descriptor.bin')) + expected_desc = tools.read_file(self.TestFile('descriptor.bin')) if data[:0x1000] != expected_desc: self.fail('Expected descriptor binary at start of image') # We expect to find the TPL wil in subpart IBBP entry IBBL - image_fname = tools.GetOutputFilename('image.bin') - tpl_fname = tools.GetOutputFilename('tpl.out') + image_fname = tools.get_output_filename('image.bin') + tpl_fname = tools.get_output_filename('tpl.out') ifwitool = bintool.Bintool.create('ifwitool') ifwitool.extract(image_fname, 'IBBP', 'IBBL', tpl_fname) - tpl_data = tools.ReadFile(tpl_fname) + tpl_data = tools.read_file(tpl_fname) self.assertEqual(U_BOOT_TPL_DATA, tpl_data[:len(U_BOOT_TPL_DATA)]) def testPackX86RomIfwi(self): 
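Illustrative aside while reading this large rename: the helpers keep their
behaviour, only the camel-case names become snake_case. A minimal
out-of-tree sketch exercising the new names seen in the surrounding hunks
(assumes it is run from a U-Boot source tree so that tools/patman is
importable; the file name 'demo.bin' is made up):

import os
import sys

sys.path.insert(0, os.path.join(os.getcwd(), 'tools'))
from patman import tools

tools.prepare_output_dir(None)                 # was tools.PrepareOutputDir(None)
fname = tools.get_output_filename('demo.bin')  # was tools.GetOutputFilename()
tools.write_file(fname, b'hello')              # was tools.WriteFile()
print(tools.read_file(fname))                  # was tools.ReadFile()
tools.finalise_output_dir()                    # was tools.FinaliseOutputDir()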
@@ -2403,7 +2403,7 @@ class TestFunctional(unittest.TestCase): fdtmap_data = data[len(U_BOOT_DATA):] magic = fdtmap_data[:8] self.assertEqual(b'_FDTMAP_', magic) - self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16]) + self.assertEqual(tools.get_bytes(0, 8), fdtmap_data[8:16]) fdt_data = fdtmap_data[16:] dtb = fdt.Fdt.FromData(fdt_data) @@ -2668,7 +2668,7 @@ class TestFunctional(unittest.TestCase): """Test reading an image and accessing its FDT map""" self._CheckLz4() data = self.data = self._DoReadFileRealDtb('128_decode_image.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') orig_image = control.images['image'] image = Image.FromFile(image_fname) self.assertEqual(orig_image.GetEntries().keys(), @@ -2684,7 +2684,7 @@ class TestFunctional(unittest.TestCase): """Test accessing an image's FDT map without an image header""" self._CheckLz4() data = self._DoReadFileRealDtb('129_decode_image_nohdr.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') image = Image.FromFile(image_fname) self.assertTrue(isinstance(image, Image)) self.assertEqual('image', image.image_name[-5:]) @@ -2692,7 +2692,7 @@ class TestFunctional(unittest.TestCase): def testReadImageFail(self): """Test failing to read an image image's FDT map""" self._DoReadFile('005_simple.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') with self.assertRaises(ValueError) as e: image = Image.FromFile(image_fname) self.assertIn("Cannot find FDT map in image", str(e.exception)) @@ -2752,7 +2752,7 @@ class TestFunctional(unittest.TestCase): """ self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') image = Image.FromFile(image_fname) lines = image.GetListEntries(paths)[1] files = [line[0].strip() for line in lines[1:]] @@ -2798,7 +2798,7 @@ class TestFunctional(unittest.TestCase): """ self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') return control.ReadEntry(image_fname, entry_name, decomp) def testExtractSimple(self): @@ -2858,7 +2858,7 @@ class TestFunctional(unittest.TestCase): def testExtractBadFile(self): """Test extracting an invalid file""" fname = os.path.join(self._indir, 'badfile') - tools.WriteFile(fname, b'') + tools.write_file(fname, b'') with self.assertRaises(ValueError) as e: control.ReadEntry(fname, 'name') @@ -2874,17 +2874,17 @@ class TestFunctional(unittest.TestCase): '-f', fname) finally: shutil.rmtree(tmpdir) - data = tools.ReadFile(fname) + data = tools.read_file(fname) self.assertEqual(U_BOOT_DATA, data) def testExtractOneEntry(self): """Test extracting a single entry fron an image """ self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') fname = os.path.join(self._indir, 'output.extact') control.ExtractEntries(image_fname, fname, None, ['u-boot']) - data = tools.ReadFile(fname) + data = tools.read_file(fname) self.assertEqual(U_BOOT_DATA, data) def _CheckExtractOutput(self, decomp): @@ -2906,7 +2906,7 @@ class TestFunctional(unittest.TestCase): expect_size: Size of data to expect in file, or None to skip """ path = os.path.join(outdir, entry_path) - data = tools.ReadFile(path) + 
data = tools.read_file(path) os.remove(path) if expect_data: self.assertEqual(expect_data, data) @@ -2926,7 +2926,7 @@ class TestFunctional(unittest.TestCase): os.rmdir(path) self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') outdir = os.path.join(self._indir, 'extract') einfos = control.ExtractEntries(image_fname, None, outdir, [], decomp) @@ -2962,7 +2962,7 @@ class TestFunctional(unittest.TestCase): _CheckPresent('section/root', section.data) cbfs = section_entries['cbfs'] _CheckPresent('section/cbfs/root', cbfs.data) - data = tools.ReadFile(image_fname) + data = tools.read_file(image_fname) _CheckPresent('root', data) # There should be no files left. Remove all the directories to check. @@ -2987,7 +2987,7 @@ class TestFunctional(unittest.TestCase): """Test extracting some entries""" self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') outdir = os.path.join(self._indir, 'extract') einfos = control.ExtractEntries(image_fname, None, outdir, ['*cb*', '*head*']) @@ -3002,7 +3002,7 @@ class TestFunctional(unittest.TestCase): """Test extracting some entries""" self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') with self.assertRaises(ValueError) as e: control.ExtractEntries(image_fname, 'fname', None, []) self.assertIn('Must specify an entry path to write with -f', @@ -3012,7 +3012,7 @@ class TestFunctional(unittest.TestCase): """Test extracting some entries""" self._CheckLz4() self._DoReadFileRealDtb('130_list_fdtmap.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') with self.assertRaises(ValueError) as e: control.ExtractEntries(image_fname, 'fname', None, ['a', 'b']) self.assertIn('Must specify exactly one entry path to write with -f', @@ -3113,9 +3113,9 @@ class TestFunctional(unittest.TestCase): orig_dtb_data = entries['u-boot-dtb'].data orig_fdtmap_data = entries['fdtmap'].data - image_fname = tools.GetOutputFilename('image.bin') - updated_fname = tools.GetOutputFilename('image-updated.bin') - tools.WriteFile(updated_fname, tools.ReadFile(image_fname)) + image_fname = tools.get_output_filename('image.bin') + updated_fname = tools.get_output_filename('image-updated.bin') + tools.write_file(updated_fname, tools.read_file(image_fname)) image = control.WriteEntry(updated_fname, entry_name, data, decomp, allow_resize) data = control.ReadEntry(updated_fname, entry_name, decomp) @@ -3170,8 +3170,8 @@ class TestFunctional(unittest.TestCase): data = self._DoReadFileDtb('133_replace_multi.dts', use_real_dtb=True, update_dtb=True)[0] expected = b'x' * len(U_BOOT_DATA) - updated_fname = tools.GetOutputFilename('image-updated.bin') - tools.WriteFile(updated_fname, data) + updated_fname = tools.get_output_filename('image-updated.bin') + tools.write_file(updated_fname, data) entry_name = 'u-boot' control.WriteEntry(updated_fname, entry_name, expected, allow_resize=False) @@ -3182,9 +3182,9 @@ class TestFunctional(unittest.TestCase): self.assertEqual('/binman/image', state.fdt_path_prefix) # Now check we can write the first image - image_fname = tools.GetOutputFilename('first-image.bin') - updated_fname = tools.GetOutputFilename('first-updated.bin') - tools.WriteFile(updated_fname, 
tools.ReadFile(image_fname)) + image_fname = tools.get_output_filename('first-image.bin') + updated_fname = tools.get_output_filename('first-updated.bin') + tools.write_file(updated_fname, tools.read_file(image_fname)) entry_name = 'u-boot' control.WriteEntry(updated_fname, entry_name, expected, allow_resize=False) @@ -3348,8 +3348,8 @@ class TestFunctional(unittest.TestCase): self._CheckLz4() expected = b'x' * len(U_BOOT_DATA) data = self._DoReadFileRealDtb('142_replace_cbfs.dts') - updated_fname = tools.GetOutputFilename('image-updated.bin') - tools.WriteFile(updated_fname, data) + updated_fname = tools.get_output_filename('image-updated.bin') + tools.write_file(updated_fname, data) entry_name = 'section/cbfs/u-boot' control.WriteEntry(updated_fname, entry_name, expected, allow_resize=True) @@ -3361,8 +3361,8 @@ class TestFunctional(unittest.TestCase): self._CheckLz4() expected = U_BOOT_DATA + b'x' data = self._DoReadFileRealDtb('142_replace_cbfs.dts') - updated_fname = tools.GetOutputFilename('image-updated.bin') - tools.WriteFile(updated_fname, data) + updated_fname = tools.get_output_filename('image-updated.bin') + tools.write_file(updated_fname, data) entry_name = 'section/cbfs/u-boot' control.WriteEntry(updated_fname, entry_name, expected, allow_resize=True) @@ -3383,23 +3383,23 @@ class TestFunctional(unittest.TestCase): """ data = self._DoReadFileRealDtb('143_replace_all.dts') - updated_fname = tools.GetOutputFilename('image-updated.bin') - tools.WriteFile(updated_fname, data) + updated_fname = tools.get_output_filename('image-updated.bin') + tools.write_file(updated_fname, data) outdir = os.path.join(self._indir, 'extract') einfos = control.ExtractEntries(updated_fname, None, outdir, []) expected1 = b'x' + U_BOOT_DATA + b'y' u_boot_fname1 = os.path.join(outdir, 'u-boot') - tools.WriteFile(u_boot_fname1, expected1) + tools.write_file(u_boot_fname1, expected1) expected2 = b'a' + U_BOOT_DATA + b'b' u_boot_fname2 = os.path.join(outdir, 'u-boot2') - tools.WriteFile(u_boot_fname2, expected2) + tools.write_file(u_boot_fname2, expected2) expected_text = b'not the same text' text_fname = os.path.join(outdir, 'text') - tools.WriteFile(text_fname, expected_text) + tools.write_file(text_fname, expected_text) dtb_fname = os.path.join(outdir, 'u-boot-dtb') dtb = fdt.FdtScan(dtb_fname) @@ -3475,10 +3475,10 @@ class TestFunctional(unittest.TestCase): fname = os.path.join(tmpdir, 'update-u-boot.bin') expected = b'x' * len(U_BOOT_DATA) - tools.WriteFile(fname, expected) + tools.write_file(fname, expected) self._DoBinman('replace', '-i', updated_fname, 'u-boot', '-f', fname) - data = tools.ReadFile(updated_fname) + data = tools.read_file(updated_fname) self.assertEqual(expected, data[:len(expected)]) map_fname = os.path.join(tmpdir, 'image-updated.map') self.assertFalse(os.path.exists(map_fname)) @@ -3493,7 +3493,7 @@ class TestFunctional(unittest.TestCase): self._DoBinman('replace', '-i', updated_fname, '-I', outdir, 'u-boot2', 'text') - tools.PrepareOutputDir(None) + tools.prepare_output_dir(None) image = Image.FromFile(updated_fname) image.LoadData() entries = image.GetEntries() @@ -3531,7 +3531,7 @@ class TestFunctional(unittest.TestCase): fname = os.path.join(self._indir, 'update-u-boot.bin') expected = b'x' * len(U_BOOT_DATA) - tools.WriteFile(fname, expected) + tools.write_file(fname, expected) self._DoBinman('replace', '-i', updated_fname, 'u-boot', '-f', fname, '-m') @@ -3543,7 +3543,7 @@ class TestFunctional(unittest.TestCase): def testReplaceNoEntryPaths(self): """Test replacing an entry 
without an entry path""" self._DoReadFileRealDtb('143_replace_all.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') with self.assertRaises(ValueError) as e: control.ReplaceEntries(image_fname, 'fname', None, []) self.assertIn('Must specify an entry path to read with -f', @@ -3552,7 +3552,7 @@ class TestFunctional(unittest.TestCase): def testReplaceTooManyEntryPaths(self): """Test extracting some entries""" self._DoReadFileRealDtb('143_replace_all.dts') - image_fname = tools.GetOutputFilename('image.bin') + image_fname = tools.get_output_filename('image.bin') with self.assertRaises(ValueError) as e: control.ReplaceEntries(image_fname, 'fname', None, ['a', 'b']) self.assertIn('Must specify exactly one entry path to write with -f', @@ -3597,15 +3597,15 @@ class TestFunctional(unittest.TestCase): data = self._DoReadFile(dts) sym_values = struct.pack(' Date: Sat, 29 Jan 2022 14:14:05 -0700 Subject: [PATCH 05/27] patman: Convert camel case in command.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/binman/bintool.py | 2 +- tools/binman/elf.py | 2 +- tools/binman/elf_test.py | 2 +- tools/binman/ftest.py | 2 +- tools/buildman/builder.py | 4 +-- tools/buildman/builderthread.py | 10 +++---- tools/buildman/control.py | 2 +- tools/buildman/func_test.py | 4 +-- tools/buildman/toolchain.py | 6 ++--- tools/dtoc/fdt_util.py | 4 +-- tools/patman/checkpatch.py | 2 +- tools/patman/command.py | 28 ++++++++++---------- tools/patman/get_maintainer.py | 2 +- tools/patman/gitutil.py | 46 ++++++++++++++++----------------- tools/patman/patchstream.py | 2 +- tools/patman/test_util.py | 2 +- tools/patman/tools.py | 4 +-- tools/rmboard.py | 16 ++++++------ 18 files changed, 70 insertions(+), 70 deletions(-) diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py index 068d766c507..7a0c8163924 100644 --- a/tools/binman/bintool.py +++ b/tools/binman/bintool.py @@ -268,7 +268,7 @@ class Bintool: all_args = (name,) + args env = tools.get_env_with_path() tout.Detail(f"bintool: {' '.join(all_args)}") - result = command.RunPipe( + result = command.run_pipe( [all_args], capture=True, capture_stderr=True, env=env, raise_on_error=False, binary=binary) diff --git a/tools/binman/elf.py b/tools/binman/elf.py index d22a0d4bf5c..47e0a3f51cc 100644 --- a/tools/binman/elf.py +++ b/tools/binman/elf.py @@ -285,7 +285,7 @@ SECTIONS cc, args = tools.get_target_compile_tool('cc') args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T', lds_file, '-o', elf_fname, s_file] - stdout = command.Output(cc, *args) + stdout = command.output(cc, *args) shutil.rmtree(outdir) def DecodeElf(data, location): diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py index b531062f4a9..0f749ee2541 100644 --- a/tools/binman/elf_test.py +++ b/tools/binman/elf_test.py @@ -195,7 +195,7 @@ class TestElf(unittest.TestCase): elf.MakeElf(elf_fname, expected_text, expected_data) objcopy, args = tools.get_target_compile_tool('objcopy') args += ['-O', 'binary', elf_fname, bin_fname] - stdout = command.Output(objcopy, *args) + stdout = command.output(objcopy, *args) with open(bin_fname, 'rb') as fd: data = fd.read() self.assertEqual(expected_text + expected_data, data) diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index 21adf433fe3..f85581ccd42 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -282,7 +282,7 @@ class TestFunctional(unittest.TestCase): Arguments to pass, as a list of strings 
kwargs: Arguments to pass to Command.RunPipe() """ - result = command.RunPipe([[self._binman_pathname] + list(args)], + result = command.run_pipe([[self._binman_pathname] + list(args)], capture=True, capture_stderr=True, raise_on_error=False) if result.return_code and kwargs.get('raise_on_error', True): raise Exception("Error running '%s': %s" % (' '.join(args), diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index 720bbb2cf4d..94f843e2a94 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -453,7 +453,7 @@ class Builder: stage: Stage that we are at (mrproper, config, build) cwd: Directory where make should be run args: Arguments to pass to make - kwargs: Arguments to pass to command.RunPipe() + kwargs: Arguments to pass to command.run_pipe() """ def check_output(stream, data): @@ -476,7 +476,7 @@ class Builder: self._restarting_config = False self._terminated = False cmd = [self.gnu_make] + list(args) - result = command.RunPipe([cmd], capture=True, capture_stderr=True, + result = command.run_pipe([cmd], capture=True, capture_stderr=True, cwd=cwd, raise_on_error=False, infile='/dev/null', output_func=check_output, **kwargs) diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py index ecb285c0bfa..90099eee04f 100644 --- a/tools/buildman/builderthread.py +++ b/tools/buildman/builderthread.py @@ -122,7 +122,7 @@ class BuilderThread(threading.Thread): config - called to configure for a board build - the main make invocation - it does the build args: A list of arguments to pass to 'make' - kwargs: A list of keyword arguments to pass to command.RunPipe() + kwargs: A list of keyword arguments to pass to command.run_pipe() Returns: CommandResult object @@ -375,7 +375,7 @@ class BuilderThread(threading.Thread): lines = [] for fname in BASE_ELF_FILENAMES: cmd = ['%snm' % self.toolchain.cross, '--size-sort', fname] - nm_result = command.RunPipe([cmd], capture=True, + nm_result = command.run_pipe([cmd], capture=True, capture_stderr=True, cwd=result.out_dir, raise_on_error=False, env=env) if nm_result.stdout: @@ -385,7 +385,7 @@ class BuilderThread(threading.Thread): print(nm_result.stdout, end=' ', file=fd) cmd = ['%sobjdump' % self.toolchain.cross, '-h', fname] - dump_result = command.RunPipe([cmd], capture=True, + dump_result = command.run_pipe([cmd], capture=True, capture_stderr=True, cwd=result.out_dir, raise_on_error=False, env=env) rodata_size = '' @@ -400,7 +400,7 @@ class BuilderThread(threading.Thread): rodata_size = fields[2] cmd = ['%ssize' % self.toolchain.cross, fname] - size_result = command.RunPipe([cmd], capture=True, + size_result = command.run_pipe([cmd], capture=True, capture_stderr=True, cwd=result.out_dir, raise_on_error=False, env=env) if size_result.stdout: @@ -411,7 +411,7 @@ class BuilderThread(threading.Thread): cmd = ['%sobjcopy' % self.toolchain.cross, '-O', 'binary', '-j', '.rodata.default_environment', 'env/built-in.o', 'uboot.env'] - command.RunPipe([cmd], capture=True, + command.run_pipe([cmd], capture=True, capture_stderr=True, cwd=result.out_dir, raise_on_error=False, env=env) ubootenv = os.path.join(result.out_dir, 'uboot.env') diff --git a/tools/buildman/control.py b/tools/buildman/control.py index e8a531eed45..ebab126adf2 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -307,7 +307,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, if not options.step: options.step = len(series.commits) - 1 - gnu_make = command.Output(os.path.join(options.git, + 
gnu_make = command.output(os.path.join(options.git, 'scripts/show-gnu-make'), raise_on_error=False).rstrip() if not gnu_make: sys.exit('GNU Make not found') diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py index b92081863e6..4beca8aa7d0 100644 --- a/tools/buildman/func_test.py +++ b/tools/buildman/func_test.py @@ -217,7 +217,7 @@ class TestFunctional(unittest.TestCase): self._toolchains.Add('gcc', test=False) def _RunBuildman(self, *args): - return command.RunPipe([[self._buildman_pathname] + list(args)], + return command.run_pipe([[self._buildman_pathname] + list(args)], capture=True, capture_stderr=True) def _RunControl(self, *args, boards=None, clean_dir=False, @@ -407,7 +407,7 @@ class TestFunctional(unittest.TestCase): stage: Stage that we are at (mrproper, config, build) cwd: Directory where make should be run args: Arguments to pass to make - kwargs: Arguments to pass to command.RunPipe() + kwargs: Arguments to pass to command.run_pipe() """ self._make_calls += 1 if stage == 'mrproper': diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py index d88c155b5f9..3442d998abf 100644 --- a/tools/buildman/toolchain.py +++ b/tools/buildman/toolchain.py @@ -99,7 +99,7 @@ class Toolchain: else: self.priority = priority if test: - result = command.RunPipe([cmd], capture=True, env=env, + result = command.run_pipe([cmd], capture=True, env=env, raise_on_error=False) self.ok = result.return_code == 0 if verbose: @@ -494,7 +494,7 @@ class Toolchains: else URL containing this toolchain, if avaialble, else None """ - arch = command.OutputOneLine('uname', '-m') + arch = command.output_one_line('uname', '-m') if arch == 'aarch64': arch = 'arm64' base = 'https://www.kernel.org/pub/tools/crosstool/files/bin' @@ -525,7 +525,7 @@ class Toolchains: Directory name of the first entry in the archive, without the trailing / """ - stdout = command.Output('tar', 'xvfJ', fname, '-C', dest) + stdout = command.output('tar', 'xvfJ', fname, '-C', dest) dirs = stdout.splitlines()[1].split('/')[:2] return '/'.join(dirs) diff --git a/tools/dtoc/fdt_util.py b/tools/dtoc/fdt_util.py index 19e645daa0b..d59ea2fe62a 100644 --- a/tools/dtoc/fdt_util.py +++ b/tools/dtoc/fdt_util.py @@ -86,7 +86,7 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False): for path in search_paths: args.extend(['-I', path]) args += ['-o', dts_input, fname] - command.Run(cc, *args) + command.run(cc, *args) # If we don't have a directory, put it in the tools tempdir search_list = [] @@ -97,7 +97,7 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False): '-W', 'no-unit_address_vs_reg'] args.extend(search_list) args.append(dts_input) - command.Run(dtc, *args, capture_stderr=capture_stderr) + command.run(dtc, *args, capture_stderr=capture_stderr) return dtb_output def GetInt(node, propname, default=None): diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py index 8978df25c15..4677a7ae214 100644 --- a/tools/patman/checkpatch.py +++ b/tools/patman/checkpatch.py @@ -213,7 +213,7 @@ def CheckPatch(fname, verbose=False, show_types=False): args = [chk, '--no-tree'] if show_types: args.append('--show-types') - output = command.Output(*args, fname, raise_on_error=False) + output = command.output(*args, fname, raise_on_error=False) return CheckPatchParse(output, verbose) diff --git a/tools/patman/command.py b/tools/patman/command.py index d54b1e0efce..c848aa19879 100644 --- a/tools/patman/command.py +++ b/tools/patman/command.py @@ -32,7 +32,7 @@ class CommandResult: self.return_code = 
return_code self.exception = exception - def ToOutput(self, binary): + def to_output(self, binary): if not binary: self.stdout = self.stdout.decode('utf-8') self.stderr = self.stderr.decode('utf-8') @@ -43,11 +43,11 @@ class CommandResult: # This permits interception of RunPipe for test purposes. If it is set to # a function, then that function is called with the pipe list being # executed. Otherwise, it is assumed to be a CommandResult object, and is -# returned as the result for every RunPipe() call. +# returned as the result for every run_pipe() call. # When this value is None, commands are executed as normal. test_result = None -def RunPipe(pipe_list, infile=None, outfile=None, +def run_pipe(pipe_list, infile=None, outfile=None, capture=False, capture_stderr=False, oneline=False, raise_on_error=True, cwd=None, binary=False, output_func=None, **kwargs): @@ -104,7 +104,7 @@ def RunPipe(pipe_list, infile=None, outfile=None, if raise_on_error: raise Exception("Error running '%s': %s" % (user_pipestr, str)) result.return_code = 255 - return result.ToOutput(binary) + return result.to_output(binary) if capture: result.stdout, result.stderr, result.combined = ( @@ -116,13 +116,13 @@ def RunPipe(pipe_list, infile=None, outfile=None, result.return_code = os.waitpid(last_pipe.pid, 0)[1] if raise_on_error and result.return_code: raise Exception("Error running '%s'" % user_pipestr) - return result.ToOutput(binary) + return result.to_output(binary) -def Output(*cmd, **kwargs): +def output(*cmd, **kwargs): kwargs['raise_on_error'] = kwargs.get('raise_on_error', True) - return RunPipe([cmd], capture=True, **kwargs).stdout + return run_pipe([cmd], capture=True, **kwargs).stdout -def OutputOneLine(*cmd, **kwargs): +def output_one_line(*cmd, **kwargs): """Run a command and output it as a single-line string The command us expected to produce a single line of output @@ -131,15 +131,15 @@ def OutputOneLine(*cmd, **kwargs): String containing output of command """ raise_on_error = kwargs.pop('raise_on_error', True) - result = RunPipe([cmd], capture=True, oneline=True, + result = run_pipe([cmd], capture=True, oneline=True, raise_on_error=raise_on_error, **kwargs).stdout.strip() return result -def Run(*cmd, **kwargs): - return RunPipe([cmd], **kwargs).stdout +def run(*cmd, **kwargs): + return run_pipe([cmd], **kwargs).stdout -def RunList(cmd): - return RunPipe([cmd], capture=True).stdout +def run_list(cmd): + return run_pipe([cmd], capture=True).stdout -def StopAll(): +def stop_all(): cros_subprocess.stay_alive = False diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py index af4ba15bcdd..98ab82f78f5 100644 --- a/tools/patman/get_maintainer.py +++ b/tools/patman/get_maintainer.py @@ -43,6 +43,6 @@ def GetMaintainer(dir_list, fname, verbose=False): print("WARNING: Couldn't find get_maintainer.pl") return [] - stdout = command.Output(get_maintainer, '--norolestats', fname) + stdout = command.output(get_maintainer, '--norolestats', fname) lines = stdout.splitlines() return [ x.replace('"', '') for x in lines ] diff --git a/tools/patman/gitutil.py b/tools/patman/gitutil.py index e1ef96df22e..d06f052ffe9 100644 --- a/tools/patman/gitutil.py +++ b/tools/patman/gitutil.py @@ -67,7 +67,7 @@ def CountCommitsToBranch(branch): else: rev_range = '@{upstream}..' 
pipe = [LogCmd(rev_range, oneline=True)] - result = command.RunPipe(pipe, capture=True, capture_stderr=True, + result = command.run_pipe(pipe, capture=True, capture_stderr=True, oneline=True, raise_on_error=False) if result.return_code: raise ValueError('Failed to determine upstream: %s' % @@ -85,7 +85,7 @@ def NameRevision(commit_hash): Name of revision, if any, else None """ pipe = ['git', 'name-rev', commit_hash] - stdout = command.RunPipe([pipe], capture=True, oneline=True).stdout + stdout = command.run_pipe([pipe], capture=True, oneline=True).stdout # We expect a commit, a space, then a revision name name = stdout.split(' ')[1].strip() @@ -108,7 +108,7 @@ def GuessUpstream(git_dir, branch): Warning/error message, or None if none """ pipe = [LogCmd(branch, git_dir=git_dir, oneline=True, count=100)] - result = command.RunPipe(pipe, capture=True, capture_stderr=True, + result = command.run_pipe(pipe, capture=True, capture_stderr=True, raise_on_error=False) if result.return_code: return None, "Branch '%s' not found" % branch @@ -134,9 +134,9 @@ def GetUpstream(git_dir, branch): Warning/error message, or None if none """ try: - remote = command.OutputOneLine('git', '--git-dir', git_dir, 'config', + remote = command.output_one_line('git', '--git-dir', git_dir, 'config', 'branch.%s.remote' % branch) - merge = command.OutputOneLine('git', '--git-dir', git_dir, 'config', + merge = command.output_one_line('git', '--git-dir', git_dir, 'config', 'branch.%s.merge' % branch) except: upstream, msg = GuessUpstream(git_dir, branch) @@ -179,7 +179,7 @@ def CountCommitsInRange(git_dir, range_expr): were found """ pipe = [LogCmd(range_expr, git_dir=git_dir, oneline=True)] - result = command.RunPipe(pipe, capture=True, capture_stderr=True, + result = command.run_pipe(pipe, capture=True, capture_stderr=True, raise_on_error=False) if result.return_code: return None, "Range '%s' not found or is invalid" % range_expr @@ -211,7 +211,7 @@ def CountCommits(commit_range): """ pipe = [LogCmd(commit_range, oneline=True), ['wc', '-l']] - stdout = command.RunPipe(pipe, capture=True, oneline=True).stdout + stdout = command.run_pipe(pipe, capture=True, oneline=True).stdout patch_count = int(stdout) return patch_count @@ -230,7 +230,7 @@ def Checkout(commit_hash, git_dir=None, work_tree=None, force=False): if force: pipe.append('-f') pipe.append(commit_hash) - result = command.RunPipe([pipe], capture=True, raise_on_error=False, + result = command.run_pipe([pipe], capture=True, raise_on_error=False, capture_stderr=True) if result.return_code != 0: raise OSError('git checkout (%s): %s' % (pipe, result.stderr)) @@ -242,7 +242,7 @@ def Clone(git_dir, output_dir): commit_hash: Commit hash to check out """ pipe = ['git', 'clone', git_dir, '.'] - result = command.RunPipe([pipe], capture=True, cwd=output_dir, + result = command.run_pipe([pipe], capture=True, cwd=output_dir, capture_stderr=True) if result.return_code != 0: raise OSError('git clone: %s' % result.stderr) @@ -259,7 +259,7 @@ def Fetch(git_dir=None, work_tree=None): if work_tree: pipe.extend(['--work-tree', work_tree]) pipe.append('fetch') - result = command.RunPipe([pipe], capture=True, capture_stderr=True) + result = command.run_pipe([pipe], capture=True, capture_stderr=True) if result.return_code != 0: raise OSError('git fetch: %s' % result.stderr) @@ -273,7 +273,7 @@ def CheckWorktreeIsAvailable(git_dir): True if git-worktree commands will work, False otherwise. 
""" pipe = ['git', '--git-dir', git_dir, 'worktree', 'list'] - result = command.RunPipe([pipe], capture=True, capture_stderr=True, + result = command.run_pipe([pipe], capture=True, capture_stderr=True, raise_on_error=False) return result.return_code == 0 @@ -289,7 +289,7 @@ def AddWorktree(git_dir, output_dir, commit_hash=None): pipe = ['git', '--git-dir', git_dir, 'worktree', 'add', '.', '--detach'] if commit_hash: pipe.append(commit_hash) - result = command.RunPipe([pipe], capture=True, cwd=output_dir, + result = command.run_pipe([pipe], capture=True, cwd=output_dir, capture_stderr=True) if result.return_code != 0: raise OSError('git worktree add: %s' % result.stderr) @@ -301,7 +301,7 @@ def PruneWorktrees(git_dir): git_dir: The repository whose deleted worktrees should be pruned """ pipe = ['git', '--git-dir', git_dir, 'worktree', 'prune'] - result = command.RunPipe([pipe], capture=True, capture_stderr=True) + result = command.run_pipe([pipe], capture=True, capture_stderr=True) if result.return_code != 0: raise OSError('git worktree prune: %s' % result.stderr) @@ -336,7 +336,7 @@ def CreatePatches(branch, start, count, ignore_binary, series, signoff = True): brname = branch or 'HEAD' cmd += ['%s~%d..%s~%d' % (brname, start + count, brname, start)] - stdout = command.RunList(cmd) + stdout = command.run_list(cmd) files = stdout.splitlines() # We have an extra file if there is a cover letter @@ -397,7 +397,7 @@ def CheckSuppressCCConfig(): Returns: True if the option is configured correctly, False otherwise. """ - suppresscc = command.OutputOneLine('git', 'config', 'sendemail.suppresscc', + suppresscc = command.output_one_line('git', 'config', 'sendemail.suppresscc', raise_on_error=False) # Other settings should be fine. @@ -477,7 +477,7 @@ send --cc-cmd cc-fname" cover p1 p2' """ to = BuildEmailList(series.get('to'), '--to', alias, warn_on_error) if not to: - git_config_to = command.Output('git', 'config', 'sendemail.to', + git_config_to = command.output('git', 'config', 'sendemail.to', raise_on_error=False) if not git_config_to: print("No recipient.\n" @@ -606,7 +606,7 @@ def GetTopLevel(): os.path.join(GetTopLevel(), 'tools', 'patman') True """ - return command.OutputOneLine('git', 'rev-parse', '--show-toplevel') + return command.output_one_line('git', 'rev-parse', '--show-toplevel') def GetAliasFile(): """Gets the name of the git alias file. 
@@ -614,7 +614,7 @@ def GetAliasFile(): Returns: Filename of git alias file, or None if none """ - fname = command.OutputOneLine('git', 'config', 'sendemail.aliasesfile', + fname = command.output_one_line('git', 'config', 'sendemail.aliasesfile', raise_on_error=False) if not fname: return None @@ -631,7 +631,7 @@ def GetDefaultUserName(): Returns: User name found in .gitconfig file, or None if none """ - uname = command.OutputOneLine('git', 'config', '--global', 'user.name') + uname = command.output_one_line('git', 'config', '--global', 'user.name') return uname def GetDefaultUserEmail(): @@ -640,7 +640,7 @@ def GetDefaultUserEmail(): Returns: User's email found in .gitconfig file, or None if none """ - uemail = command.OutputOneLine('git', 'config', '--global', 'user.email') + uemail = command.output_one_line('git', 'config', '--global', 'user.email') return uemail def GetDefaultSubjectPrefix(): @@ -649,7 +649,7 @@ def GetDefaultSubjectPrefix(): Returns: Subject prefix found in local .git/config file, or None if none """ - sub_prefix = command.OutputOneLine('git', 'config', 'format.subjectprefix', + sub_prefix = command.output_one_line('git', 'config', 'format.subjectprefix', raise_on_error=False) return sub_prefix @@ -663,7 +663,7 @@ def Setup(): if alias_fname: settings.ReadGitAliases(alias_fname) cmd = LogCmd(None, count=0) - use_no_decorate = (command.RunPipe([cmd], raise_on_error=False) + use_no_decorate = (command.run_pipe([cmd], raise_on_error=False) .return_code == 0) def GetHead(): @@ -672,7 +672,7 @@ def GetHead(): Returns: Hash of HEAD """ - return command.OutputOneLine('git', 'show', '-s', '--pretty=format:%H') + return command.output_one_line('git', 'show', '-s', '--pretty=format:%H') if __name__ == "__main__": import doctest diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py index 1da9d53b650..d57d22a45f7 100644 --- a/tools/patman/patchstream.py +++ b/tools/patman/patchstream.py @@ -700,7 +700,7 @@ def get_list(commit_range, git_dir=None, count=None): """ params = gitutil.LogCmd(commit_range, reverse=True, count=count, git_dir=git_dir) - return command.RunPipe([params], capture=True).stdout + return command.run_pipe([params], capture=True).stdout def get_metadata_for_list(commit_range, git_dir=None, count=None, series=None, allow_overwrite=False): diff --git a/tools/patman/test_util.py b/tools/patman/test_util.py index 9654e7319c1..c3f15f8a4bc 100644 --- a/tools/patman/test_util.py +++ b/tools/patman/test_util.py @@ -61,7 +61,7 @@ def RunTestCoverage(prog, filter_fname, exclude_list, build_dir, required=None, '--omit "%s" %s %s %s -P1' % (prefix, ','.join(glob_list), prog, extra_args or '', test_cmd)) os.system(cmd) - stdout = command.Output('python3-coverage', 'report') + stdout = command.output('python3-coverage', 'report') lines = stdout.splitlines() if required: # Convert '/path/to/name.py' just the module name 'name' diff --git a/tools/patman/tools.py b/tools/patman/tools.py index 453f2a70167..35fade0f72c 100644 --- a/tools/patman/tools.py +++ b/tools/patman/tools.py @@ -360,7 +360,7 @@ def run_result(name, *args, **kwargs): args = tuple(extra_args) + args name = os.path.expanduser(name) # Expand paths containing ~ all_args = (name,) + args - result = command.RunPipe([all_args], capture=True, capture_stderr=True, + result = command.run_pipe([all_args], capture=True, capture_stderr=True, env=env, raise_on_error=False, binary=binary) if result.return_code: if raise_on_error: @@ -545,7 +545,7 @@ def print_full_help(fname): pager = [lesspath] if lesspath 
else None if not pager: pager = ['more'] - command.Run(*pager, fname) + command.run(*pager, fname) def download(url, tmpdir_pattern='.patman'): """Download a file to a temporary directory diff --git a/tools/rmboard.py b/tools/rmboard.py index de685638cf1..ae256321270 100755 --- a/tools/rmboard.py +++ b/tools/rmboard.py @@ -44,17 +44,17 @@ def rm_kconfig_include(path): path: Path to search for and remove """ cmd = ['git', 'grep', path] - stdout = command.RunPipe([cmd], capture=True, raise_on_error=False).stdout + stdout = command.run_pipe([cmd], capture=True, raise_on_error=False).stdout if not stdout: return fname = stdout.split(':')[0] print("Fixing up '%s' to remove reference to '%s'" % (fname, path)) cmd = ['sed', '-i', '\|%s|d' % path, fname] - stdout = command.RunPipe([cmd], capture=True).stdout + stdout = command.run_pipe([cmd], capture=True).stdout cmd = ['git', 'add', fname] - stdout = command.RunPipe([cmd], capture=True).stdout + stdout = command.run_pipe([cmd], capture=True).stdout def rm_board(board): """Create a commit which removes a single board @@ -69,7 +69,7 @@ def rm_board(board): # Find all MAINTAINERS and Kconfig files which mention the board cmd = ['git', 'grep', '-l', board] - stdout = command.RunPipe([cmd], capture=True).stdout + stdout = command.run_pipe([cmd], capture=True).stdout maintain = [] kconfig = [] for line in stdout.splitlines(): @@ -110,7 +110,7 @@ def rm_board(board): # which reference Kconfig files we want to remove for path in real: cmd = ['find', path] - stdout = (command.RunPipe([cmd], capture=True, raise_on_error=False). + stdout = (command.run_pipe([cmd], capture=True, raise_on_error=False). stdout) for fname in stdout.splitlines(): if fname.endswith('Kconfig'): @@ -118,7 +118,7 @@ def rm_board(board): # Remove unwanted files cmd = ['git', 'rm', '-r'] + real - stdout = command.RunPipe([cmd], capture=True).stdout + stdout = command.run_pipe([cmd], capture=True).stdout ## Change the messages as needed msg = '''arm: Remove %s board @@ -132,12 +132,12 @@ Remove it. # Create the commit cmd = ['git', 'commit', '-s', '-m', msg] - stdout = command.RunPipe([cmd], capture=True).stdout + stdout = command.run_pipe([cmd], capture=True).stdout # Check if the board is mentioned anywhere else. The user will need to deal # with this cmd = ['git', 'grep', '-il', board] - print(command.RunPipe([cmd], capture=True, raise_on_error=False).stdout) + print(command.run_pipe([cmd], capture=True, raise_on_error=False).stdout) print(' '.join(cmd)) for board in sys.argv[1:]: From ae5e9265509bcb4bed7a0a1c3da613419919681d Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:06 -0700 Subject: [PATCH 06/27] patman: Convert camel case in checkpatch.py Convert this file to snake case and update all files which use it. 
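A minimal usage sketch, not part of this patch, showing how a caller adapts to the renamed checkpatch API; the patch filename below is a placeholder:

    from patman import checkpatch

    # Hedged illustration only: run checkpatch.pl on one patch file and
    # print the parsed result; 'example.patch' is hypothetical.
    result = checkpatch.check_patch('example.patch', show_types=True)  # was CheckPatch()
    if not result.ok:
        print('%d errors, %d warnings' % (result.errors, result.warnings))
        for item in result.problems:
            print(item.get('type', ''), item.get('msg', ''))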
Signed-off-by: Simon Glass --- tools/patman/checkpatch.py | 22 +++++++++++----------- tools/patman/control.py | 2 +- tools/patman/test_checkpatch.py | 12 ++++++------ 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py index 4677a7ae214..e1321abd3c8 100644 --- a/tools/patman/checkpatch.py +++ b/tools/patman/checkpatch.py @@ -20,7 +20,7 @@ RE_FILE = re.compile(r'#(\d+): (FILE: ([^:]*):(\d+):)?') RE_NOTE = re.compile(r'NOTE: (.*)') -def FindCheckPatch(): +def find_check_patch(): top_level = gitutil.GetTopLevel() try_list = [ os.getcwd(), @@ -47,7 +47,7 @@ def FindCheckPatch(): '~/bin directory or use --no-check') -def CheckPatchParseOneMessage(message): +def check_patch_parse_one_message(message): """Parse one checkpatch message Args: @@ -114,7 +114,7 @@ def CheckPatchParseOneMessage(message): return item -def CheckPatchParse(checkpatch_output, verbose=False): +def check_patch_parse(checkpatch_output, verbose=False): """Parse checkpatch.pl output Args: @@ -179,14 +179,14 @@ def CheckPatchParse(checkpatch_output, verbose=False): elif re_bad.match(message): result.ok = False else: - problem = CheckPatchParseOneMessage(message) + problem = check_patch_parse_one_message(message) if problem: result.problems.append(problem) return result -def CheckPatch(fname, verbose=False, show_types=False): +def check_patch(fname, verbose=False, show_types=False): """Run checkpatch.pl on a file and parse the results. Args: @@ -209,16 +209,16 @@ def CheckPatch(fname, verbose=False, show_types=False): lines: Number of lines stdout: Full output of checkpatch """ - chk = FindCheckPatch() + chk = find_check_patch() args = [chk, '--no-tree'] if show_types: args.append('--show-types') output = command.output(*args, fname, raise_on_error=False) - return CheckPatchParse(output, verbose) + return check_patch_parse(output, verbose) -def GetWarningMsg(col, msg_type, fname, line, msg): +def get_warning_msg(col, msg_type, fname, line, msg): '''Create a message for a given file/line Args: @@ -236,13 +236,13 @@ def GetWarningMsg(col, msg_type, fname, line, msg): line_str = '' if line is None else '%d' % line return '%s:%s: %s: %s\n' % (fname, line_str, msg_type, msg) -def CheckPatches(verbose, args): +def check_patches(verbose, args): '''Run the checkpatch.pl script on each patch''' error_count, warning_count, check_count = 0, 0, 0 col = terminal.Color() for fname in args: - result = CheckPatch(fname, verbose) + result = check_patch(fname, verbose) if not result.ok: error_count += result.errors warning_count += result.warnings @@ -254,7 +254,7 @@ def CheckPatches(verbose, args): print("Internal error: some problems lost") for item in result.problems: sys.stderr.write( - GetWarningMsg(col, item.get('type', ''), + get_warning_msg(col, item.get('type', ''), item.get('file', ''), item.get('line', 0), item.get('msg', 'message'))) print diff --git a/tools/patman/control.py b/tools/patman/control.py index ee9717cbf62..a19b17170af 100644 --- a/tools/patman/control.py +++ b/tools/patman/control.py @@ -86,7 +86,7 @@ def check_patches(series, patch_files, run_checkpatch, verbose): # Check the patches, and run them through 'git am' just to be sure if run_checkpatch: - ok = checkpatch.CheckPatches(verbose, patch_files) + ok = checkpatch.check_patches(verbose, patch_files) else: ok = True return ok diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py index 56af5265cc8..cf6cb713b76 100644 --- a/tools/patman/test_checkpatch.py +++ 
b/tools/patman/test_checkpatch.py @@ -82,7 +82,7 @@ Signed-off-by: Simon Glass return inname def run_checkpatch(self): - return checkpatch.CheckPatch(self.get_patch(), show_types=True) + return checkpatch.check_patch(self.get_patch(), show_types=True) class TestPatch(unittest.TestCase): @@ -295,7 +295,7 @@ index 0000000..2234c87 def testGood(self): """Test checkpatch operation""" inf = self.SetupData('good') - result = checkpatch.CheckPatch(inf) + result = checkpatch.check_patch(inf) self.assertEqual(result.ok, True) self.assertEqual(result.problems, []) self.assertEqual(result.errors, 0) @@ -306,7 +306,7 @@ index 0000000..2234c87 def testNoSignoff(self): inf = self.SetupData('no-signoff') - result = checkpatch.CheckPatch(inf) + result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) self.assertEqual(result.errors, 1) @@ -317,7 +317,7 @@ index 0000000..2234c87 def testNoLicense(self): inf = self.SetupData('no-license') - result = checkpatch.CheckPatch(inf) + result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) self.assertEqual(result.errors, 0) @@ -328,7 +328,7 @@ index 0000000..2234c87 def testSpaces(self): inf = self.SetupData('spaces') - result = checkpatch.CheckPatch(inf) + result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 3) self.assertEqual(result.errors, 0) @@ -339,7 +339,7 @@ index 0000000..2234c87 def testIndent(self): inf = self.SetupData('indent') - result = checkpatch.CheckPatch(inf) + result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) self.assertEqual(result.errors, 0) From a3eeadfeb9f3aaa03c716d7aedbd98bb49c88172 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:07 -0700 Subject: [PATCH 07/27] patman: Convert camel case in commit.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/patman/commit.py | 10 +++++----- tools/patman/patchstream.py | 8 ++++---- tools/patman/series.py | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tools/patman/commit.py b/tools/patman/commit.py index 5bf2b940299..c331a3b1221 100644 --- a/tools/patman/commit.py +++ b/tools/patman/commit.py @@ -44,7 +44,7 @@ class Commit: def __str__(self): return self.subject - def AddChange(self, version, info): + def add_change(self, version, info): """Add a new change line to the change list for a version. Args: @@ -55,7 +55,7 @@ class Commit: self.changes[version] = [] self.changes[version].append(info) - def CheckTags(self): + def check_tags(self): """Create a list of subject tags in the commit Subject tags look like this: @@ -78,7 +78,7 @@ class Commit: str = m.group(2) return None - def AddCc(self, cc_list): + def add_cc(self, cc_list): """Add a list of people to Cc when we send this patch. 
Args: @@ -86,7 +86,7 @@ class Commit: """ self.cc_list += cc_list - def CheckDuplicateSignoff(self, signoff): + def check_duplicate_signoff(self, signoff): """Check a list of signoffs we have send for this patch Args: @@ -99,7 +99,7 @@ class Commit: self.signoff_set.add(signoff) return True - def AddRtag(self, rtag_type, who): + def add_rtag(self, rtag_type, who): """Add a response tag to a commit Args: diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py index d57d22a45f7..387e9deae33 100644 --- a/tools/patman/patchstream.py +++ b/tools/patman/patchstream.py @@ -180,7 +180,7 @@ class PatchStream: who (str): Person who gave that rtag, e.g. 'Fred Bloggs ' """ - self.commit.AddRtag(rtag_type, who) + self.commit.add_rtag(rtag_type, who) def _close_commit(self): """Save the current commit into our commit list, and reset our state""" @@ -230,7 +230,7 @@ class PatchStream: elif self.in_change == 'Cover': self.series.AddChange(self.change_version, None, change) elif self.in_change == 'Commit': - self.commit.AddChange(self.change_version, change) + self.commit.add_change(self.change_version, change) self.change_lines = [] def _finalise_snippet(self): @@ -494,14 +494,14 @@ class PatchStream: who.find(os.getenv('USER') + '@') != -1): self._add_warn("Ignoring '%s'" % line) elif rtag_type == 'Patch-cc': - self.commit.AddCc(who.split(',')) + self.commit.add_cc(who.split(',')) else: out = [line] # Suppress duplicate signoffs elif signoff_match: if (self.is_log or not self.commit or - self.commit.CheckDuplicateSignoff(signoff_match.group(1))): + self.commit.check_duplicate_signoff(signoff_match.group(1))): out = [line] # Well that means this is an ordinary line diff --git a/tools/patman/series.py b/tools/patman/series.py index da734d92cf3..98b4c9c9e7f 100644 --- a/tools/patman/series.py +++ b/tools/patman/series.py @@ -94,7 +94,7 @@ class Series(dict): Args: commit: Commit object to add """ - commit.CheckTags() + commit.check_tags() self.commits.append(commit) def ShowActions(self, args, cmd, process_tags): From 208f01b0f771c2724fa1404182189b7f624cc6e0 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:08 -0700 Subject: [PATCH 08/27] patman: Convert camel case in cros_subprocess.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/patman/command.py | 2 +- tools/patman/cros_subprocess.py | 59 +++++++++++++++++---------------- 2 files changed, 31 insertions(+), 30 deletions(-) diff --git a/tools/patman/command.py b/tools/patman/command.py index c848aa19879..24358784f26 100644 --- a/tools/patman/command.py +++ b/tools/patman/command.py @@ -108,7 +108,7 @@ def run_pipe(pipe_list, infile=None, outfile=None, if capture: result.stdout, result.stderr, result.combined = ( - last_pipe.CommunicateFilter(output_func)) + last_pipe.communicate_filter(output_func)) if result.stdout and oneline: result.output = result.stdout.rstrip(b'\r\n') result.return_code = last_pipe.wait() diff --git a/tools/patman/cros_subprocess.py b/tools/patman/cros_subprocess.py index 88a4693feff..f1b26087cfd 100644 --- a/tools/patman/cros_subprocess.py +++ b/tools/patman/cros_subprocess.py @@ -49,7 +49,7 @@ class Popen(subprocess.Popen): to us as soon as it is produced, rather than waiting for the end of a line. - Use CommunicateFilter() to handle output from the subprocess. + Use communicate_filter() to handle output from the subprocess. 
""" @@ -100,7 +100,7 @@ class Popen(subprocess.Popen): if kwargs: raise ValueError("Unit tests do not test extra args - please add tests") - def ConvertData(self, data): + def convert_data(self, data): """Convert stdout/stderr data to the correct format for output Args: @@ -113,7 +113,7 @@ class Popen(subprocess.Popen): return b'' return data - def CommunicateFilter(self, output): + def communicate_filter(self, output): """Interact with process: Read data from stdout and stderr. This method runs until end-of-file is reached, then waits for the @@ -122,7 +122,7 @@ class Popen(subprocess.Popen): The output function is sent all output from the subprocess and must be defined like this: - def Output([self,] stream, data) + def output([self,] stream, data) Args: stream: the stream the output was received on, which will be sys.stdout or sys.stderr. @@ -236,9 +236,9 @@ class Popen(subprocess.Popen): self.terminate() # All data exchanged. Translate lists into strings. - stdout = self.ConvertData(stdout) - stderr = self.ConvertData(stderr) - combined = self.ConvertData(combined) + stdout = self.convert_data(stdout) + stderr = self.convert_data(stderr) + combined = self.convert_data(combined) # Translate newlines, if requested. We cannot let the file # object do the translation: It is based on stdio, which is @@ -281,7 +281,7 @@ class TestSubprocess(unittest.TestCase): self.stdin_read_pipe = pipe[0] self._stdin_write_pipe = os.fdopen(pipe[1], 'w') - def Output(self, stream, data): + def output(self, stream, data): """Output handler for Popen. Stores the data for later comparison""" if stream == sys.stdout: self.stdout_data += data @@ -294,7 +294,7 @@ class TestSubprocess(unittest.TestCase): self._stdin_write_pipe.write(self._input_to_send + '\r\n') self._stdin_write_pipe.flush() - def _BasicCheck(self, plist, oper): + def _basic_check(self, plist, oper): """Basic checks that the output looks sane.""" self.assertEqual(plist[0], oper.stdout_data) self.assertEqual(plist[1], oper.stderr_data) @@ -306,15 +306,15 @@ class TestSubprocess(unittest.TestCase): def test_simple(self): """Simple redirection: Get process list""" oper = TestSubprocess.MyOperation() - plist = Popen(['ps']).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen(['ps']).communicate_filter(oper.output) + self._basic_check(plist, oper) def test_stderr(self): """Check stdout and stderr""" oper = TestSubprocess.MyOperation() cmd = 'echo fred >/dev/stderr && false || echo bad' - plist = Popen([cmd], shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen([cmd], shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], 'bad\r\n') self.assertEqual(plist [1], 'fred\r\n') @@ -323,8 +323,8 @@ class TestSubprocess(unittest.TestCase): oper = TestSubprocess.MyOperation() cmd = 'echo test >/dev/stderr' self.assertRaises(OSError, Popen, [cmd], shell=False) - plist = Popen([cmd], shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen([cmd], shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(len(plist [0]), 0) self.assertEqual(plist [1], 'test\r\n') @@ -332,8 +332,8 @@ class TestSubprocess(unittest.TestCase): """Check with and without shell works using list arguments""" oper = TestSubprocess.MyOperation() cmd = ['echo', 'test', '>/dev/stderr'] - plist = Popen(cmd, shell=False).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen(cmd, 
shell=False).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], ' '.join(cmd[1:]) + '\r\n') self.assertEqual(len(plist [1]), 0) @@ -341,16 +341,17 @@ class TestSubprocess(unittest.TestCase): # this should be interpreted as 'echo' with the other args dropped cmd = ['echo', 'test', '>/dev/stderr'] - plist = Popen(cmd, shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen(cmd, shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], '\r\n') def test_cwd(self): """Check we can change directory""" for shell in (False, True): oper = TestSubprocess.MyOperation() - plist = Popen('pwd', shell=shell, cwd='/tmp').CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen('pwd', shell=shell, cwd='/tmp').communicate_filter( + oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], '/tmp\r\n') def test_env(self): @@ -361,8 +362,8 @@ class TestSubprocess(unittest.TestCase): if add: env ['FRED'] = 'fred' cmd = 'echo $FRED' - plist = Popen(cmd, shell=True, env=env).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen(cmd, shell=True, env=env).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], add and 'fred\r\n' or '\r\n') def test_extra_args(self): @@ -380,8 +381,8 @@ class TestSubprocess(unittest.TestCase): prompt = 'What is your name?: ' cmd = 'echo -n "%s"; read name; echo Hello $name' % prompt plist = Popen([cmd], stdin=oper.stdin_read_pipe, - shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(len(plist [1]), 0) self.assertEqual(plist [0], prompt + 'Hello Flash\r\r\n') @@ -393,16 +394,16 @@ class TestSubprocess(unittest.TestCase): both_cmds = '' for fd in (1, 2): both_cmds += cmd % (fd, fd, fd, fd, fd) - plist = Popen(both_cmds, shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + plist = Popen(both_cmds, shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], 'terminal 1\r\n') self.assertEqual(plist [1], 'terminal 2\r\n') # Now try with PIPE and make sure it is not a terminal oper = TestSubprocess.MyOperation() plist = Popen(both_cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - shell=True).CommunicateFilter(oper.Output) - self._BasicCheck(plist, oper) + shell=True).communicate_filter(oper.output) + self._basic_check(plist, oper) self.assertEqual(plist [0], 'not 1\n') self.assertEqual(plist [1], 'not 2\n') From c3aaa05e34179f70d292dc8d5973d7066d734f29 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:09 -0700 Subject: [PATCH 09/27] patman: Convert camel case in func_test.py Convert this file to snake case and update all files which use it. 
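For reference, a self-contained sketch, not part of this patch, of why renaming the test methods is safe: unittest discovers any method whose name starts with 'test', so test_basic is collected exactly as testBasic was. The class below is invented for illustration only:

    import unittest

    class RenameDemo(unittest.TestCase):
        # Collected because the name starts with 'test'; the camelCase ->
        # snake_case rename does not change discovery.
        def test_basic(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()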
Signed-off-by: Simon Glass --- tools/patman/func_test.py | 50 +++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py index defa1cca716..89950c254b0 100644 --- a/tools/patman/func_test.py +++ b/tools/patman/func_test.py @@ -114,7 +114,7 @@ class TestFunctional(unittest.TestCase): return cover_fname, fname_list - def testBasic(self): + def test_basic(self): """Tests the basic flow of patman This creates a series from some hard-coded patches build from a simple @@ -455,7 +455,7 @@ complicated as possible''') repo.branches.local.create('base', base_target) return repo - def testBranch(self): + def test_branch(self): """Test creating patches from a branch""" repo = self.make_git_tree() target = repo.lookup_reference('refs/heads/first') @@ -494,7 +494,7 @@ complicated as possible''') finally: os.chdir(orig_dir) - def testTags(self): + def test_tags(self): """Test collection of tags in a patchstream""" text = '''This is a patch @@ -508,7 +508,7 @@ Tested-by: %s 'Reviewed-by': {self.joe, self.mary}, 'Tested-by': {self.leb}}) - def testInvalidTag(self): + def test_invalid_tag(self): """Test invalid tag in a patchstream""" text = '''This is a patch @@ -519,7 +519,7 @@ Serie-version: 2 self.assertEqual("Line 3: Invalid tag = 'Serie-version: 2'", str(exc.exception)) - def testMissingEnd(self): + def test_missing_end(self): """Test a missing END tag""" text = '''This is a patch @@ -532,7 +532,7 @@ Signed-off-by: Fred self.assertEqual(["Missing 'END' in section 'cover'"], pstrm.commit.warn) - def testMissingBlankLine(self): + def test_missing_blank_line(self): """Test a missing blank line after a tag""" text = '''This is a patch @@ -545,7 +545,7 @@ Signed-off-by: Fred self.assertEqual(["Missing 'blank line' in section 'Series-changes'"], pstrm.commit.warn) - def testInvalidCommitTag(self): + def test_invalid_commit_tag(self): """Test an invalid Commit-xxx tag""" text = '''This is a patch @@ -554,7 +554,7 @@ Commit-fred: testing pstrm = PatchStream.process_text(text) self.assertEqual(["Line 3: Ignoring Commit-fred"], pstrm.commit.warn) - def testSelfTest(self): + def test_self_test(self): """Test a tested by tag by this user""" test_line = 'Tested-by: %s@napier.com' % os.getenv('USER') text = '''This is a patch @@ -564,7 +564,7 @@ Commit-fred: testing pstrm = PatchStream.process_text(text) self.assertEqual(["Ignoring '%s'" % test_line], pstrm.commit.warn) - def testSpaceBeforeTab(self): + def test_space_before_tab(self): """Test a space before a tab""" text = '''This is a patch @@ -573,7 +573,7 @@ Commit-fred: testing pstrm = PatchStream.process_text(text) self.assertEqual(["Line 3/0 has space before tab"], pstrm.commit.warn) - def testLinesAfterTest(self): + def test_lines_after_test(self): """Test detecting lines after TEST= line""" text = '''This is a patch @@ -584,7 +584,7 @@ here pstrm = PatchStream.process_text(text) self.assertEqual(["Found 2 lines after TEST="], pstrm.commit.warn) - def testBlankLineAtEnd(self): + def test_blank_line_at_end(self): """Test detecting a blank line at the end of a file""" text = '''This is a patch @@ -611,7 +611,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c ["Found possible blank line(s) at end of file 'lib/fdtdec.c'"], pstrm.commit.warn) - def testNoUpstream(self): + def test_no_upstream(self): """Test CountCommitsToBranch when there is no upstream""" repo = self.make_git_tree() target = repo.lookup_reference('refs/heads/base') @@ 
-648,7 +648,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c {'id': '1', 'name': 'Some patch'}]} raise ValueError('Fake Patchwork does not understand: %s' % subpath) - def testStatusMismatch(self): + def test_status_mismatch(self): """Test Patchwork patches not matching the series""" series = Series() @@ -657,7 +657,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c self.assertIn('Warning: Patchwork reports 1 patches, series has 0', err.getvalue()) - def testStatusReadPatch(self): + def test_status_read_patch(self): """Test handling a single patch in Patchwork""" series = Series() series.commits = [Commit('abcd')] @@ -669,7 +669,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c self.assertEqual('1', patch.id) self.assertEqual('Some patch', patch.raw_subject) - def testParseSubject(self): + def test_parse_subject(self): """Test parsing of the patch subject""" patch = status.Patch('1') @@ -731,7 +731,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c self.assertEqual('RESEND', patch.prefix) self.assertEqual(None, patch.version) - def testCompareSeries(self): + def test_compare_series(self): """Test operation of compare_with_series()""" commit1 = Commit('abcd') commit1.subject = 'Subject 1' @@ -833,7 +833,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c return patch.comments raise ValueError('Fake Patchwork does not understand: %s' % subpath) - def testFindNewResponses(self): + def test_find_new_responses(self): """Test operation of find_new_responses()""" commit1 = Commit('abcd') commit1.subject = 'Subject 1' @@ -971,7 +971,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c return patch.comments raise ValueError('Fake Patchwork does not understand: %s' % subpath) - def testCreateBranch(self): + def test_create_branch(self): """Test operation of create_branch()""" repo = self.make_git_tree() branch = 'first' @@ -1058,7 +1058,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c self.assertEqual('Reviewed-by: %s' % self.mary, next(lines)) self.assertEqual('Tested-by: %s' % self.leb, next(lines)) - def testParseSnippets(self): + def test_parse_snippets(self): """Test parsing of review snippets""" text = '''Hi Fred, @@ -1088,7 +1088,7 @@ Even more And another comment -> @@ -153,8 +143,13 @@ def CheckPatch(fname, show_types=False): +> @@ -153,8 +143,13 @@ def check_patch(fname, show_types=False): > further down on the file > and more code > +Addition here @@ -1131,7 +1131,7 @@ line8 '> Code line 7', '> Code line 8', '> Code line 9', 'And another comment'], ['> File: file.c', - '> Line: 153 / 143: def CheckPatch(fname, show_types=False):', + '> Line: 153 / 143: def check_patch(fname, show_types=False):', '> and more code', '> +Addition here', '> +Another addition here', '> codey', '> more codey', 'and another thing in same file'], ['> File: file.c', '> Line: 253 / 243', @@ -1141,7 +1141,7 @@ line8 'line2', 'line3', 'line4', 'line5', 'line6', 'line7', 'line8']], pstrm.snippets) - def testReviewSnippets(self): + def test_review_snippets(self): """Test showing of review snippets""" def _to_submitter(who): m_who = re.match('(.*) <(.*)>', who) @@ -1196,7 +1196,7 @@ On some date Fred wrote: > + def __str__(self): > + return self.subject > + -> def AddChange(self, version, info): +> def add_change(self, version, info): > """Add a new change line to the change list for a version. 
> A comment @@ -1280,7 +1280,7 @@ Reviewed-by: %s self.assertEqual(terminal.PrintLine( ' > +', col.MAGENTA), next(lines)) self.assertEqual( - terminal.PrintLine(' > def AddChange(self, version, info):', + terminal.PrintLine(' > def add_change(self, version, info):', col.MAGENTA), next(lines)) self.assertEqual(terminal.PrintLine( @@ -1296,7 +1296,7 @@ Reviewed-by: %s '4 new responses available in patchwork (use -d to write them to a new branch)', None), next(lines)) - def testInsertTags(self): + def test_insert_tags(self): """Test inserting of review tags""" msg = '''first line second line.''' From 967af26b6aedc21ccb51273fbbbb898ad8c4305f Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:10 -0700 Subject: [PATCH 10/27] patman: Convert camel case in get_maintainer.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/patman/get_maintainer.py | 6 +++--- tools/patman/series.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/patman/get_maintainer.py b/tools/patman/get_maintainer.py index 98ab82f78f5..e1d15ff6ab5 100644 --- a/tools/patman/get_maintainer.py +++ b/tools/patman/get_maintainer.py @@ -6,7 +6,7 @@ import os from patman import command -def FindGetMaintainer(try_list): +def find_get_maintainer(try_list): """Look for the get_maintainer.pl script. Args: @@ -23,7 +23,7 @@ def FindGetMaintainer(try_list): return None -def GetMaintainer(dir_list, fname, verbose=False): +def get_maintainer(dir_list, fname, verbose=False): """Run get_maintainer.pl on a file if we find it. We look for get_maintainer.pl in the 'scripts' directory at the top of @@ -37,7 +37,7 @@ def GetMaintainer(dir_list, fname, verbose=False): Returns: A list of email addresses to CC to. """ - get_maintainer = FindGetMaintainer(dir_list) + get_maintainer = find_get_maintainer(dir_list) if not get_maintainer: if verbose: print("WARNING: Couldn't find get_maintainer.pl") diff --git a/tools/patman/series.py b/tools/patman/series.py index 98b4c9c9e7f..7f5e6001340 100644 --- a/tools/patman/series.py +++ b/tools/patman/series.py @@ -269,7 +269,7 @@ class Series(dict): cc += add_maintainers elif add_maintainers: dir_list = [os.path.join(gitutil.GetTopLevel(), 'scripts')] - cc += get_maintainer.GetMaintainer(dir_list, commit.patch) + cc += get_maintainer.get_maintainer(dir_list, commit.patch) for x in set(cc) & set(settings.bounces): print(col.Color(col.YELLOW, 'Skipping "%s"' % x)) cc = list(set(cc) - set(settings.bounces)) From 0157b187f45c00ffb3e85c7f5c33808454243608 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:11 -0700 Subject: [PATCH 11/27] patman: Convert camel case in gitutil.py Convert this file to snake case and update all files which use it. 
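A minimal sketch, not part of this patch, of a caller updated to the snake-case gitutil names; it assumes it is run from inside a git checkout with patman importable:

    from patman import gitutil

    gitutil.setup()                     # was gitutil.Setup()
    print(gitutil.get_top_level())      # was gitutil.GetTopLevel()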
Signed-off-by: Simon Glass --- tools/buildman/builder.py | 12 ++-- tools/buildman/builderthread.py | 2 +- tools/buildman/control.py | 10 +-- tools/buildman/func_test.py | 4 +- tools/patman/checkpatch.py | 2 +- tools/patman/control.py | 12 ++-- tools/patman/func_test.py | 8 +-- tools/patman/gitutil.py | 124 ++++++++++++++++---------------- tools/patman/patchstream.py | 2 +- tools/patman/project.py | 2 +- tools/patman/series.py | 16 ++--- tools/patman/settings.py | 4 +- 12 files changed, 99 insertions(+), 99 deletions(-) diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index 94f843e2a94..502ac7b274c 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -442,7 +442,7 @@ class Builder: """ self.commit = commit if checkout and self.checkout: - gitutil.Checkout(commit.hash) + gitutil.checkout(commit.hash) def Make(self, commit, brd, stage, cwd, *args, **kwargs): """Run make @@ -1631,7 +1631,7 @@ class Builder: # it but need to fetch from src_dir. Print('\rFetching repo for thread %d' % thread_num, newline=False) - gitutil.Fetch(git_dir, thread_dir) + gitutil.fetch(git_dir, thread_dir) terminal.PrintClear() elif os.path.isfile(git_dir): # This is a worktree of the src_dir repo, we don't need to @@ -1645,12 +1645,12 @@ class Builder: elif setup_git == 'worktree': Print('\rChecking out worktree for thread %d' % thread_num, newline=False) - gitutil.AddWorktree(src_dir, thread_dir) + gitutil.add_worktree(src_dir, thread_dir) terminal.PrintClear() elif setup_git == 'clone' or setup_git == True: Print('\rCloning repo for thread %d' % thread_num, newline=False) - gitutil.Clone(src_dir, thread_dir) + gitutil.clone(src_dir, thread_dir) terminal.PrintClear() else: raise ValueError("Can't setup git repo with %s." % setup_git) @@ -1670,12 +1670,12 @@ class Builder: builderthread.Mkdir(self._working_dir) if setup_git and self.git_dir: src_dir = os.path.abspath(self.git_dir) - if gitutil.CheckWorktreeIsAvailable(src_dir): + if gitutil.check_worktree_is_available(src_dir): setup_git = 'worktree' # If we previously added a worktree but the directory for it # got deleted, we need to prune its files from the repo so # that we can check out another in its place. 
- gitutil.PruneWorktrees(src_dir) + gitutil.prune_worktrees(src_dir) else: setup_git = 'clone' diff --git a/tools/buildman/builderthread.py b/tools/buildman/builderthread.py index 90099eee04f..7522ff62de6 100644 --- a/tools/buildman/builderthread.py +++ b/tools/buildman/builderthread.py @@ -219,7 +219,7 @@ class BuilderThread(threading.Thread): commit = self.builder.commits[commit_upto] if self.builder.checkout: git_dir = os.path.join(work_dir, '.git') - gitutil.Checkout(commit.hash, git_dir, work_dir, + gitutil.checkout(commit.hash, git_dir, work_dir, force=True) else: commit = 'current' diff --git a/tools/buildman/control.py b/tools/buildman/control.py index ebab126adf2..2c25fd70668 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -140,7 +140,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, ) return 0 - gitutil.Setup() + gitutil.setup() col = terminal.Color() options.git_dir = os.path.join(options.git, '.git') @@ -236,10 +236,10 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, count = 1 else: if has_range: - count, msg = gitutil.CountCommitsInRange(options.git_dir, + count, msg = gitutil.count_commits_in_range(options.git_dir, options.branch) else: - count, msg = gitutil.CountCommitsInBranch(options.git_dir, + count, msg = gitutil.count_commits_in_branch(options.git_dir, options.branch) if count is None: sys.exit(col.Color(col.RED, msg)) @@ -276,9 +276,9 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, if has_range: range_expr = options.branch else: - range_expr = gitutil.GetRangeInBranch(options.git_dir, + range_expr = gitutil.get_range_in_branch(options.git_dir, options.branch) - upstream_commit = gitutil.GetUpstream(options.git_dir, + upstream_commit = gitutil.get_upstream(options.git_dir, options.branch) series = patchstream.get_metadata_for_list(upstream_commit, options.git_dir, 1, series=None, allow_overwrite=True) diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py index 4beca8aa7d0..b48dd1671d9 100644 --- a/tools/buildman/func_test.py +++ b/tools/buildman/func_test.py @@ -267,11 +267,11 @@ class TestFunctional(unittest.TestCase): def testGitSetup(self): """Test gitutils.Setup(), from outside the module itself""" command.test_result = command.CommandResult(return_code=1) - gitutil.Setup() + gitutil.setup() self.assertEqual(gitutil.use_no_decorate, False) command.test_result = command.CommandResult(return_code=0) - gitutil.Setup() + gitutil.setup() self.assertEqual(gitutil.use_no_decorate, True) def _HandleCommandGitLog(self, args): diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py index e1321abd3c8..043419089a8 100644 --- a/tools/patman/checkpatch.py +++ b/tools/patman/checkpatch.py @@ -21,7 +21,7 @@ RE_NOTE = re.compile(r'NOTE: (.*)') def find_check_patch(): - top_level = gitutil.GetTopLevel() + top_level = gitutil.get_top_level() try_list = [ os.getcwd(), os.path.join(os.getcwd(), '..', '..'), diff --git a/tools/patman/control.py b/tools/patman/control.py index a19b17170af..cea4f3e770a 100644 --- a/tools/patman/control.py +++ b/tools/patman/control.py @@ -18,7 +18,7 @@ from patman import terminal def setup(): """Do required setup before doing anything""" - gitutil.Setup() + gitutil.setup() def prepare_patches(col, branch, count, start, end, ignore_binary, signoff): """Figure out what patches to generate, then generate them @@ -45,7 +45,7 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff): """ if 
count == -1: # Work out how many patches to send if we can - count = (gitutil.CountCommitsToBranch(branch) - start) + count = (gitutil.count_commits_to_branch(branch) - start) if not count: str = 'No commits found to process - please use -c flag, or run:\n' \ @@ -55,7 +55,7 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff): # Read the metadata from the commits to_do = count - end series = patchstream.get_metadata(branch, start, to_do) - cover_fname, patch_files = gitutil.CreatePatches( + cover_fname, patch_files = gitutil.create_patches( branch, start, to_do, ignore_binary, series, signoff) # Fix up the patch files to our liking, and insert the cover letter @@ -138,7 +138,7 @@ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go, # Email the patches out (giving the user time to check / cancel) cmd = '' if its_a_go: - cmd = gitutil.EmailPatches( + cmd = gitutil.email_patches( series, cover_fname, patch_files, dry_run, not ignore_bad_tags, cc_file, in_reply_to=in_reply_to, thread=thread, smtp_server=smtp_server) @@ -167,7 +167,7 @@ def send(args): ok = check_patches(series, patch_files, args.check_patch, args.verbose) - ok = ok and gitutil.CheckSuppressCCConfig() + ok = ok and gitutil.check_suppress_cc_config() its_a_go = ok or args.ignore_errors email_patches( @@ -204,7 +204,7 @@ def patchwork_status(branch, count, start, end, dest_branch, force, """ if count == -1: # Work out how many patches to send if we can - count = (gitutil.CountCommitsToBranch(branch) - start) + count = (gitutil.count_commits_to_branch(branch) - start) series = patchstream.get_metadata(branch, start, count - end) warnings = 0 diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py index 89950c254b0..9e869c58ff6 100644 --- a/tools/patman/func_test.py +++ b/tools/patman/func_test.py @@ -208,7 +208,7 @@ class TestFunctional(unittest.TestCase): cc_file = series.MakeCcFile(process_tags, cover_fname, not ignore_bad_tags, add_maintainers, None) - cmd = gitutil.EmailPatches( + cmd = gitutil.email_patches( series, cover_fname, args, dry_run, not ignore_bad_tags, cc_file, in_reply_to=in_reply_to, thread=None) series.ShowActions(args, cmd, process_tags) @@ -466,7 +466,7 @@ complicated as possible''') os.chdir(self.gitdir) # Check that it can detect the current branch - self.assertEqual(2, gitutil.CountCommitsToBranch(None)) + self.assertEqual(2, gitutil.count_commits_to_branch(None)) col = terminal.Color() with capture_sys_output() as _: _, cover_fname, patch_files = control.prepare_patches( @@ -476,7 +476,7 @@ complicated as possible''') self.assertEqual(2, len(patch_files)) # Check that it can detect a different branch - self.assertEqual(3, gitutil.CountCommitsToBranch('second')) + self.assertEqual(3, gitutil.count_commits_to_branch('second')) with capture_sys_output() as _: _, cover_fname, patch_files = control.prepare_patches( col, branch='second', count=-1, start=0, end=0, @@ -622,7 +622,7 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c orig_dir = os.getcwd() os.chdir(self.gitdir) with self.assertRaises(ValueError) as exc: - gitutil.CountCommitsToBranch(None) + gitutil.count_commits_to_branch(None) self.assertIn( "Failed to determine upstream: fatal: no upstream configured for branch 'base'", str(exc.exception)) diff --git a/tools/patman/gitutil.py b/tools/patman/gitutil.py index d06f052ffe9..86972973249 100644 --- a/tools/patman/gitutil.py +++ b/tools/patman/gitutil.py @@ -12,10 +12,10 @@ from patman import settings from patman 
import terminal from patman import tools -# True to use --no-decorate - we check this in Setup() +# True to use --no-decorate - we check this in setup() use_no_decorate = True -def LogCmd(commit_range, git_dir=None, oneline=False, reverse=False, +def log_cmd(commit_range, git_dir=None, oneline=False, reverse=False, count=None): """Create a command to perform a 'git log' @@ -49,7 +49,7 @@ def LogCmd(commit_range, git_dir=None, oneline=False, reverse=False, cmd.append('--') return cmd -def CountCommitsToBranch(branch): +def count_commits_to_branch(branch): """Returns number of commits between HEAD and the tracking branch. This looks back to the tracking branch and works out the number of commits @@ -62,11 +62,11 @@ def CountCommitsToBranch(branch): Number of patches that exist on top of the branch """ if branch: - us, msg = GetUpstream('.git', branch) + us, msg = get_upstream('.git', branch) rev_range = '%s..%s' % (us, branch) else: rev_range = '@{upstream}..' - pipe = [LogCmd(rev_range, oneline=True)] + pipe = [log_cmd(rev_range, oneline=True)] result = command.run_pipe(pipe, capture=True, capture_stderr=True, oneline=True, raise_on_error=False) if result.return_code: @@ -75,7 +75,7 @@ def CountCommitsToBranch(branch): patch_count = len(result.stdout.splitlines()) return patch_count -def NameRevision(commit_hash): +def name_revision(commit_hash): """Gets the revision name for a commit Args: @@ -91,7 +91,7 @@ def NameRevision(commit_hash): name = stdout.split(' ')[1].strip() return name -def GuessUpstream(git_dir, branch): +def guess_upstream(git_dir, branch): """Tries to guess the upstream for a branch This lists out top commits on a branch and tries to find a suitable @@ -107,21 +107,21 @@ def GuessUpstream(git_dir, branch): Name of upstream branch (e.g. 'upstream/master') or None if none Warning/error message, or None if none """ - pipe = [LogCmd(branch, git_dir=git_dir, oneline=True, count=100)] + pipe = [log_cmd(branch, git_dir=git_dir, oneline=True, count=100)] result = command.run_pipe(pipe, capture=True, capture_stderr=True, raise_on_error=False) if result.return_code: return None, "Branch '%s' not found" % branch for line in result.stdout.splitlines()[1:]: commit_hash = line.split(' ')[0] - name = NameRevision(commit_hash) + name = name_revision(commit_hash) if '~' not in name and '^' not in name: if name.startswith('remotes/'): name = name[8:] return name, "Guessing upstream as '%s'" % name return None, "Cannot find a suitable upstream for branch '%s'" % branch -def GetUpstream(git_dir, branch): +def get_upstream(git_dir, branch): """Returns the name of the upstream for a branch Args: @@ -139,7 +139,7 @@ def GetUpstream(git_dir, branch): merge = command.output_one_line('git', '--git-dir', git_dir, 'config', 'branch.%s.merge' % branch) except: - upstream, msg = GuessUpstream(git_dir, branch) + upstream, msg = guess_upstream(git_dir, branch) return upstream, msg if remote == '.': @@ -152,7 +152,7 @@ def GetUpstream(git_dir, branch): "'%s' remote='%s', merge='%s'" % (branch, remote, merge)) -def GetRangeInBranch(git_dir, branch, include_upstream=False): +def get_range_in_branch(git_dir, branch, include_upstream=False): """Returns an expression for the commits in the given branch. Args: @@ -162,13 +162,13 @@ def GetRangeInBranch(git_dir, branch, include_upstream=False): Expression in the form 'upstream..branch' which can be used to access the commits. If the branch does not exist, returns None. 
""" - upstream, msg = GetUpstream(git_dir, branch) + upstream, msg = get_upstream(git_dir, branch) if not upstream: return None, msg rstr = '%s%s..%s' % (upstream, '~' if include_upstream else '', branch) return rstr, msg -def CountCommitsInRange(git_dir, range_expr): +def count_commits_in_range(git_dir, range_expr): """Returns the number of commits in the given range. Args: @@ -178,7 +178,7 @@ def CountCommitsInRange(git_dir, range_expr): Number of patches that exist in the supplied range or None if none were found """ - pipe = [LogCmd(range_expr, git_dir=git_dir, oneline=True)] + pipe = [log_cmd(range_expr, git_dir=git_dir, oneline=True)] result = command.run_pipe(pipe, capture=True, capture_stderr=True, raise_on_error=False) if result.return_code: @@ -186,7 +186,7 @@ def CountCommitsInRange(git_dir, range_expr): patch_count = len(result.stdout.splitlines()) return patch_count, None -def CountCommitsInBranch(git_dir, branch, include_upstream=False): +def count_commits_in_branch(git_dir, branch, include_upstream=False): """Returns the number of commits in the given branch. Args: @@ -196,12 +196,12 @@ def CountCommitsInBranch(git_dir, branch, include_upstream=False): Number of patches that exist on top of the branch, or None if the branch does not exist. """ - range_expr, msg = GetRangeInBranch(git_dir, branch, include_upstream) + range_expr, msg = get_range_in_branch(git_dir, branch, include_upstream) if not range_expr: return None, msg - return CountCommitsInRange(git_dir, range_expr) + return count_commits_in_range(git_dir, range_expr) -def CountCommits(commit_range): +def count_commits(commit_range): """Returns the number of commits in the given range. Args: @@ -209,13 +209,13 @@ def CountCommits(commit_range): Return: Number of patches that exist on top of the branch """ - pipe = [LogCmd(commit_range, oneline=True), + pipe = [log_cmd(commit_range, oneline=True), ['wc', '-l']] stdout = command.run_pipe(pipe, capture=True, oneline=True).stdout patch_count = int(stdout) return patch_count -def Checkout(commit_hash, git_dir=None, work_tree=None, force=False): +def checkout(commit_hash, git_dir=None, work_tree=None, force=False): """Checkout the selected commit for this build Args: @@ -235,7 +235,7 @@ def Checkout(commit_hash, git_dir=None, work_tree=None, force=False): if result.return_code != 0: raise OSError('git checkout (%s): %s' % (pipe, result.stderr)) -def Clone(git_dir, output_dir): +def clone(git_dir, output_dir): """Checkout the selected commit for this build Args: @@ -247,7 +247,7 @@ def Clone(git_dir, output_dir): if result.return_code != 0: raise OSError('git clone: %s' % result.stderr) -def Fetch(git_dir=None, work_tree=None): +def fetch(git_dir=None, work_tree=None): """Fetch from the origin repo Args: @@ -263,7 +263,7 @@ def Fetch(git_dir=None, work_tree=None): if result.return_code != 0: raise OSError('git fetch: %s' % result.stderr) -def CheckWorktreeIsAvailable(git_dir): +def check_worktree_is_available(git_dir): """Check if git-worktree functionality is available Args: @@ -277,7 +277,7 @@ def CheckWorktreeIsAvailable(git_dir): raise_on_error=False) return result.return_code == 0 -def AddWorktree(git_dir, output_dir, commit_hash=None): +def add_worktree(git_dir, output_dir, commit_hash=None): """Create and checkout a new git worktree for this build Args: @@ -294,7 +294,7 @@ def AddWorktree(git_dir, output_dir, commit_hash=None): if result.return_code != 0: raise OSError('git worktree add: %s' % result.stderr) -def PruneWorktrees(git_dir): +def prune_worktrees(git_dir): 
"""Remove administrative files for deleted worktrees Args: @@ -305,7 +305,7 @@ def PruneWorktrees(git_dir): if result.return_code != 0: raise OSError('git worktree prune: %s' % result.stderr) -def CreatePatches(branch, start, count, ignore_binary, series, signoff = True): +def create_patches(branch, start, count, ignore_binary, series, signoff = True): """Create a series of patches from the top of the current branch. The patch files are written to the current directory using @@ -345,7 +345,7 @@ def CreatePatches(branch, start, count, ignore_binary, series, signoff = True): else: return None, files -def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True): +def build_email_list(in_list, tag=None, alias=None, warn_on_error=True): """Build a list of email addresses based on an input list. Takes a list of email addresses and aliases, and turns this into a list @@ -371,18 +371,18 @@ def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True): >>> alias['mary'] = ['Mary Poppins '] >>> alias['boys'] = ['fred', ' john'] >>> alias['all'] = ['fred ', 'john', ' mary '] - >>> BuildEmailList(['john', 'mary'], None, alias) + >>> build_email_list(['john', 'mary'], None, alias) ['j.bloggs@napier.co.nz', 'Mary Poppins '] - >>> BuildEmailList(['john', 'mary'], '--to', alias) + >>> build_email_list(['john', 'mary'], '--to', alias) ['--to "j.bloggs@napier.co.nz"', \ '--to "Mary Poppins "'] - >>> BuildEmailList(['john', 'mary'], 'Cc', alias) + >>> build_email_list(['john', 'mary'], 'Cc', alias) ['Cc j.bloggs@napier.co.nz', 'Cc Mary Poppins '] """ quote = '"' if tag and tag[0] == '-' else '' raw = [] for item in in_list: - raw += LookupEmail(item, alias, warn_on_error=warn_on_error) + raw += lookup_email(item, alias, warn_on_error=warn_on_error) result = [] for item in raw: if not item in result: @@ -391,7 +391,7 @@ def BuildEmailList(in_list, tag=None, alias=None, warn_on_error=True): return ['%s %s%s%s' % (tag, quote, email, quote) for email in result] return result -def CheckSuppressCCConfig(): +def check_suppress_cc_config(): """Check if sendemail.suppresscc is configured correctly. Returns: @@ -416,7 +416,7 @@ def CheckSuppressCCConfig(): return True -def EmailPatches(series, cover_fname, args, dry_run, warn_on_error, cc_fname, +def email_patches(series, cover_fname, args, dry_run, warn_on_error, cc_fname, self_only=False, alias=None, in_reply_to=None, thread=False, smtp_server=None): """Email a patch series. 
@@ -453,20 +453,20 @@ def EmailPatches(series, cover_fname, args, dry_run, warn_on_error, cc_fname, >>> series = {} >>> series['to'] = ['fred'] >>> series['cc'] = ['mary'] - >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ + >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ False, alias) 'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \ "m.poppins@cloud.net" --cc-cmd "./patman send --cc-cmd cc-fname" cover p1 p2' - >>> EmailPatches(series, None, ['p1'], True, True, 'cc-fname', False, \ + >>> email_patches(series, None, ['p1'], True, True, 'cc-fname', False, \ alias) 'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \ "m.poppins@cloud.net" --cc-cmd "./patman send --cc-cmd cc-fname" p1' >>> series['cc'] = ['all'] - >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ + >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ True, alias) 'git send-email --annotate --to "this-is-me@me.com" --cc-cmd "./patman \ send --cc-cmd cc-fname" cover p1 p2' - >>> EmailPatches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ + >>> email_patches(series, 'cover', ['p1', 'p2'], True, True, 'cc-fname', \ False, alias) 'git send-email --annotate --to "f.bloggs@napier.co.nz" --cc \ "f.bloggs@napier.co.nz" --cc "j.bloggs@napier.co.nz" --cc \ @@ -475,7 +475,7 @@ send --cc-cmd cc-fname" cover p1 p2' # Restore argv[0] since we clobbered it. >>> sys.argv[0] = _old_argv0 """ - to = BuildEmailList(series.get('to'), '--to', alias, warn_on_error) + to = build_email_list(series.get('to'), '--to', alias, warn_on_error) if not to: git_config_to = command.output('git', 'config', 'sendemail.to', raise_on_error=False) @@ -486,10 +486,10 @@ send --cc-cmd cc-fname" cover p1 p2' "Or do something like this\n" "git config sendemail.to u-boot@lists.denx.de") return - cc = BuildEmailList(list(set(series.get('cc')) - set(series.get('to'))), + cc = build_email_list(list(set(series.get('cc')) - set(series.get('to'))), '--cc', alias, warn_on_error) if self_only: - to = BuildEmailList([os.getenv('USER')], '--to', alias, warn_on_error) + to = build_email_list([os.getenv('USER')], '--to', alias, warn_on_error) cc = [] cmd = ['git', 'send-email', '--annotate'] if smtp_server: @@ -511,7 +511,7 @@ send --cc-cmd cc-fname" cover p1 p2' return cmdstr -def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0): +def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0): """If an email address is an alias, look it up and return the full name TODO: Why not just use git's own alias feature? @@ -538,25 +538,25 @@ def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0): >>> alias['all'] = ['fred ', 'john', ' mary '] >>> alias['loop'] = ['other', 'john', ' mary '] >>> alias['other'] = ['loop', 'john', ' mary '] - >>> LookupEmail('mary', alias) + >>> lookup_email('mary', alias) ['m.poppins@cloud.net'] - >>> LookupEmail('arthur.wellesley@howe.ro.uk', alias) + >>> lookup_email('arthur.wellesley@howe.ro.uk', alias) ['arthur.wellesley@howe.ro.uk'] - >>> LookupEmail('boys', alias) + >>> lookup_email('boys', alias) ['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz'] - >>> LookupEmail('all', alias) + >>> lookup_email('all', alias) ['f.bloggs@napier.co.nz', 'j.bloggs@napier.co.nz', 'm.poppins@cloud.net'] - >>> LookupEmail('odd', alias) + >>> lookup_email('odd', alias) Alias 'odd' not found [] - >>> LookupEmail('loop', alias) + >>> lookup_email('loop', alias) Traceback (most recent call last): ... 
OSError: Recursive email alias at 'other' - >>> LookupEmail('odd', alias, warn_on_error=False) + >>> lookup_email('odd', alias, warn_on_error=False) [] >>> # In this case the loop part will effectively be ignored. - >>> LookupEmail('loop', alias, warn_on_error=False) + >>> lookup_email('loop', alias, warn_on_error=False) Recursive email alias at 'other' Recursive email alias at 'john' Recursive email alias at 'mary' @@ -587,14 +587,14 @@ def LookupEmail(lookup_name, alias=None, warn_on_error=True, level=0): print(col.Color(col.RED, msg)) return out_list for item in alias[lookup_name]: - todo = LookupEmail(item, alias, warn_on_error, level + 1) + todo = lookup_email(item, alias, warn_on_error, level + 1) for new_item in todo: if not new_item in out_list: out_list.append(new_item) return out_list -def GetTopLevel(): +def get_top_level(): """Return name of top-level directory for this git repo. Returns: @@ -603,12 +603,12 @@ def GetTopLevel(): This test makes sure that we are running tests in the right subdir >>> os.path.realpath(os.path.dirname(__file__)) == \ - os.path.join(GetTopLevel(), 'tools', 'patman') + os.path.join(get_top_level(), 'tools', 'patman') True """ return command.output_one_line('git', 'rev-parse', '--show-toplevel') -def GetAliasFile(): +def get_alias_file(): """Gets the name of the git alias file. Returns: @@ -623,9 +623,9 @@ def GetAliasFile(): if os.path.isabs(fname): return fname - return os.path.join(GetTopLevel(), fname) + return os.path.join(get_top_level(), fname) -def GetDefaultUserName(): +def get_default_user_name(): """Gets the user.name from .gitconfig file. Returns: @@ -634,7 +634,7 @@ def GetDefaultUserName(): uname = command.output_one_line('git', 'config', '--global', 'user.name') return uname -def GetDefaultUserEmail(): +def get_default_user_email(): """Gets the user.email from the global .gitconfig file. Returns: @@ -643,7 +643,7 @@ def GetDefaultUserEmail(): uemail = command.output_one_line('git', 'config', '--global', 'user.email') return uemail -def GetDefaultSubjectPrefix(): +def get_default_subject_prefix(): """Gets the format.subjectprefix from local .git/config file. Returns: @@ -654,19 +654,19 @@ def GetDefaultSubjectPrefix(): return sub_prefix -def Setup(): +def setup(): """Set up git utils, by reading the alias files.""" # Check for a git alias file also global use_no_decorate - alias_fname = GetAliasFile() + alias_fname = get_alias_file() if alias_fname: settings.ReadGitAliases(alias_fname) - cmd = LogCmd(None, count=0) + cmd = log_cmd(None, count=0) use_no_decorate = (command.run_pipe([cmd], raise_on_error=False) .return_code == 0) -def GetHead(): +def get_head(): """Get the hash of the current HEAD Returns: diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py index 387e9deae33..9b32fd4790e 100644 --- a/tools/patman/patchstream.py +++ b/tools/patman/patchstream.py @@ -698,7 +698,7 @@ def get_list(commit_range, git_dir=None, count=None): Returns str: String containing the contents of the git log """ - params = gitutil.LogCmd(commit_range, reverse=True, count=count, + params = gitutil.log_cmd(commit_range, reverse=True, count=count, git_dir=git_dir) return command.run_pipe([params], capture=True).stdout diff --git a/tools/patman/project.py b/tools/patman/project.py index 2dfc303729b..641e2d68520 100644 --- a/tools/patman/project.py +++ b/tools/patman/project.py @@ -16,7 +16,7 @@ def DetectProject(): The name of the project, like "linux" or "u-boot". Returns "unknown" if we can't detect the project. 
""" - top_level = gitutil.GetTopLevel() + top_level = gitutil.get_top_level() if os.path.exists(os.path.join(top_level, "include", "u-boot")): return "u-boot" diff --git a/tools/patman/series.py b/tools/patman/series.py index 7f5e6001340..27dd3e1a7ed 100644 --- a/tools/patman/series.py +++ b/tools/patman/series.py @@ -105,8 +105,8 @@ class Series(dict): cmd: The git command we would have run process_tags: Process tags as if they were aliases """ - to_set = set(gitutil.BuildEmailList(self.to)); - cc_set = set(gitutil.BuildEmailList(self.cc)); + to_set = set(gitutil.build_email_list(self.to)); + cc_set = set(gitutil.build_email_list(self.cc)); col = terminal.Color() print('Dry run, so not doing much. But I would do this:') @@ -136,7 +136,7 @@ class Series(dict): print('Postfix:\t ', self.get('postfix')) if self.cover: print('Cover: %d lines' % len(self.cover)) - cover_cc = gitutil.BuildEmailList(self.get('cover_cc', '')) + cover_cc = gitutil.build_email_list(self.get('cover_cc', '')) all_ccs = itertools.chain(cover_cc, *self._generated_cc.values()) for email in sorted(set(all_ccs) - to_set - cc_set): print(' Cc: ', email) @@ -261,14 +261,14 @@ class Series(dict): for commit in self.commits: cc = [] if process_tags: - cc += gitutil.BuildEmailList(commit.tags, + cc += gitutil.build_email_list(commit.tags, warn_on_error=warn_on_error) - cc += gitutil.BuildEmailList(commit.cc_list, + cc += gitutil.build_email_list(commit.cc_list, warn_on_error=warn_on_error) if type(add_maintainers) == type(cc): cc += add_maintainers elif add_maintainers: - dir_list = [os.path.join(gitutil.GetTopLevel(), 'scripts')] + dir_list = [os.path.join(gitutil.get_top_level(), 'scripts')] cc += get_maintainer.get_maintainer(dir_list, commit.patch) for x in set(cc) & set(settings.bounces): print(col.Color(col.YELLOW, 'Skipping "%s"' % x)) @@ -280,7 +280,7 @@ class Series(dict): self._generated_cc[commit.patch] = cc if cover_fname: - cover_cc = gitutil.BuildEmailList(self.get('cover_cc', '')) + cover_cc = gitutil.build_email_list(self.get('cover_cc', '')) cover_cc = list(set(cover_cc + all_ccs)) if limit is not None: cover_cc = cover_cc[:limit] @@ -309,7 +309,7 @@ class Series(dict): Return: Patch string, like 'RFC PATCH v5' or just 'PATCH' """ - git_prefix = gitutil.GetDefaultSubjectPrefix() + git_prefix = gitutil.get_default_subject_prefix() if git_prefix: git_prefix = '%s][' % git_prefix else: diff --git a/tools/patman/settings.py b/tools/patman/settings.py index 13c1ee4f569..014bb376d8b 100644 --- a/tools/patman/settings.py +++ b/tools/patman/settings.py @@ -198,11 +198,11 @@ def CreatePatmanConfigFile(gitutil, config_fname): Returns: None """ - name = gitutil.GetDefaultUserName() + name = gitutil.get_default_user_name() if name == None: name = raw_input("Enter name: ") - email = gitutil.GetDefaultUserEmail() + email = gitutil.get_default_user_email() if email == None: email = raw_input("Enter email: ") From 642df431d51ea179a7473810bbfec0d81f8d37a8 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:12 -0700 Subject: [PATCH 12/27] patman: Convert camel case in project.py Convert this file to snake case and update all files which use it. 
Signed-off-by: Simon Glass --- tools/patman/main.py | 2 +- tools/patman/project.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/patman/main.py b/tools/patman/main.py index 63f0774783d..85073217b51 100755 --- a/tools/patman/main.py +++ b/tools/patman/main.py @@ -42,7 +42,7 @@ parser.add_argument('-e', '--end', type=int, default=0, help='Commits to skip at end of patch list') parser.add_argument('-D', '--debug', action='store_true', help='Enabling debugging (provides a full traceback on error)') -parser.add_argument('-p', '--project', default=project.DetectProject(), +parser.add_argument('-p', '--project', default=project.detect_project(), help="Project name; affects default option values and " "aliases [default: %(default)s]") parser.add_argument('-P', '--patchwork-url', diff --git a/tools/patman/project.py b/tools/patman/project.py index 641e2d68520..4459042b5d4 100644 --- a/tools/patman/project.py +++ b/tools/patman/project.py @@ -6,7 +6,7 @@ import os.path from patman import gitutil -def DetectProject(): +def detect_project(): """Autodetect the name of the current project. This looks for signature files/directories that are unlikely to exist except From ce31277160de2b8500f6a0e6fd284ef137e26628 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:13 -0700 Subject: [PATCH 13/27] patman: Convert camel case in test_checkpatch.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/patman/test_checkpatch.py | 68 ++++++++++++++++----------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py index cf6cb713b76..8960cd505f8 100644 --- a/tools/patman/test_checkpatch.py +++ b/tools/patman/test_checkpatch.py @@ -88,7 +88,7 @@ Signed-off-by: Simon Glass class TestPatch(unittest.TestCase): """Test the u_boot_line() function in checkpatch.pl""" - def testBasic(self): + def test_basic(self): """Test basic filter operation""" data=''' @@ -164,7 +164,7 @@ Signed-off-by: Simon Glass os.remove(inname) os.remove(expname) - def GetData(self, data_type): + def get_data(self, data_type): data='''From 4924887af52713cabea78420eff03badea8f0035 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Thu, 7 Apr 2011 10:14:41 -0700 @@ -284,17 +284,17 @@ index 0000000..2234c87 print('not implemented') return data % (signoff, license, tab, indent, tab) - def SetupData(self, data_type): + def setup_data(self, data_type): inhandle, inname = tempfile.mkstemp() infd = os.fdopen(inhandle, 'w') - data = self.GetData(data_type) + data = self.get_data(data_type) infd.write(data) infd.close() return inname - def testGood(self): + def test_good(self): """Test checkpatch operation""" - inf = self.SetupData('good') + inf = self.setup_data('good') result = checkpatch.check_patch(inf) self.assertEqual(result.ok, True) self.assertEqual(result.problems, []) @@ -304,8 +304,8 @@ index 0000000..2234c87 self.assertEqual(result.lines, 62) os.remove(inf) - def testNoSignoff(self): - inf = self.SetupData('no-signoff') + def test_no_signoff(self): + inf = self.setup_data('no-signoff') result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) @@ -315,8 +315,8 @@ index 0000000..2234c87 self.assertEqual(result.lines, 62) os.remove(inf) - def testNoLicense(self): - inf = self.SetupData('no-license') + def test_no_license(self): + inf = self.setup_data('no-license') result = checkpatch.check_patch(inf) 
self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) @@ -326,8 +326,8 @@ index 0000000..2234c87 self.assertEqual(result.lines, 62) os.remove(inf) - def testSpaces(self): - inf = self.SetupData('spaces') + def test_spaces(self): + inf = self.setup_data('spaces') result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 3) @@ -337,8 +337,8 @@ index 0000000..2234c87 self.assertEqual(result.lines, 62) os.remove(inf) - def testIndent(self): - inf = self.SetupData('indent') + def test_indent(self): + inf = self.setup_data('indent') result = checkpatch.check_patch(inf) self.assertEqual(result.ok, False) self.assertEqual(len(result.problems), 1) @@ -348,7 +348,7 @@ index 0000000..2234c87 self.assertEqual(result.lines, 62) os.remove(inf) - def checkSingleMessage(self, pm, msg, pmtype = 'warning'): + def check_single_message(self, pm, msg, pmtype = 'warning'): """Helper function to run checkpatch and check the result Args: @@ -366,50 +366,50 @@ index 0000000..2234c87 self.assertEqual(len(result.problems), 1) self.assertIn(msg, result.problems[0]['cptype']) - def testUclass(self): + def test_uclass(self): """Test for possible new uclass""" pm = PatchMaker() pm.add_line('include/dm/uclass-id.h', 'UCLASS_WIBBLE,') - self.checkSingleMessage(pm, 'NEW_UCLASS') + self.check_single_message(pm, 'NEW_UCLASS') - def testLivetree(self): + def test_livetree(self): """Test for using the livetree API""" pm = PatchMaker() pm.add_line('common/main.c', 'fdtdec_do_something()') - self.checkSingleMessage(pm, 'LIVETREE') + self.check_single_message(pm, 'LIVETREE') - def testNewCommand(self): + def test_new_command(self): """Test for adding a new command""" pm = PatchMaker() pm.add_line('common/main.c', 'do_wibble(struct cmd_tbl *cmd_tbl)') - self.checkSingleMessage(pm, 'CMD_TEST') + self.check_single_message(pm, 'CMD_TEST') - def testPreferIf(self): + def test_prefer_if(self): """Test for using #ifdef""" pm = PatchMaker() pm.add_line('common/main.c', '#ifdef CONFIG_YELLOW') pm.add_line('common/init.h', '#ifdef CONFIG_YELLOW') pm.add_line('fred.dtsi', '#ifdef CONFIG_YELLOW') - self.checkSingleMessage(pm, "PREFER_IF") + self.check_single_message(pm, "PREFER_IF") - def testCommandUseDefconfig(self): + def test_command_use_defconfig(self): """Test for enabling/disabling commands using preprocesor""" pm = PatchMaker() pm.add_line('common/main.c', '#undef CONFIG_CMD_WHICH') - self.checkSingleMessage(pm, 'DEFINE_CONFIG_CMD', 'error') + self.check_single_message(pm, 'DEFINE_CONFIG_CMD', 'error') - def testBarredIncludeInHdr(self): + def test_barred_include_in_hdr(self): """Test for using a barred include in a header file""" pm = PatchMaker() #pm.add_line('include/myfile.h', '#include ') pm.add_line('include/myfile.h', '#include ') - self.checkSingleMessage(pm, 'BARRED_INCLUDE_IN_HDR', 'error') + self.check_single_message(pm, 'BARRED_INCLUDE_IN_HDR', 'error') - def testConfigIsEnabledConfig(self): + def test_config_is_enabled_config(self): """Test for accidental CONFIG_IS_ENABLED(CONFIG_*) calls""" pm = PatchMaker() pm.add_line('common/main.c', 'if (CONFIG_IS_ENABLED(CONFIG_CLK))') - self.checkSingleMessage(pm, 'CONFIG_IS_ENABLED_CONFIG', 'error') + self.check_single_message(pm, 'CONFIG_IS_ENABLED_CONFIG', 'error') def check_struct(self, auto, suffix, warning): """Check one of the warnings for struct naming @@ -423,17 +423,17 @@ index 0000000..2234c87 pm.add_line('common/main.c', '.%s = sizeof(struct(fred)),' % auto) pm.add_line('common/main.c', '.%s = 
sizeof(struct(mary%s)),' % (auto, suffix)) - self.checkSingleMessage( + self.check_single_message( pm, warning, "struct 'fred' should have a %s suffix" % suffix) - def testDmDriverAuto(self): + def test_dm_driver_auto(self): """Check for the correct suffix on 'struct driver' auto members""" self.check_struct('priv_auto', '_priv', 'PRIV_AUTO') self.check_struct('plat_auto', '_plat', 'PLAT_AUTO') self.check_struct('per_child_auto', '_priv', 'CHILD_PRIV_AUTO') self.check_struct('per_child_plat_auto', '_plat', 'CHILD_PLAT_AUTO') - def testDmUclassAuto(self): + def test_dm_uclass_auto(self): """Check for the correct suffix on 'struct uclass' auto members""" # Some of these are omitted since they match those from struct driver self.check_struct('per_device_auto', '_priv', 'DEVICE_PRIV_AUTO') @@ -443,11 +443,11 @@ index 0000000..2234c87 """Check one of the checks for strn(cpy|cat)""" pm = PatchMaker() pm.add_line('common/main.c', "strn%s(foo, bar, sizeof(foo));" % func) - self.checkSingleMessage(pm, "STRL", + self.check_single_message(pm, "STRL", "strl%s is preferred over strn%s because it always produces a nul-terminated string\n" % (func, func)) - def testStrl(self): + def test_strl(self): """Check for uses of strn(cat|cpy)""" self.check_strl("cat"); self.check_strl("cpy"); From 5e2ab40172b42ae9ce6d58b95f238013184fa865 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:14 -0700 Subject: [PATCH 14/27] patman: Convert camel case in test_util.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/binman/main.py | 6 +++--- tools/buildman/main.py | 4 ++-- tools/dtoc/main.py | 6 +++--- tools/dtoc/test_fdt.py | 2 +- tools/patman/main.py | 4 ++-- tools/patman/test_util.py | 8 ++++---- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/tools/binman/main.py b/tools/binman/main.py index 03462e7bb8b..ab25b48b5fb 100755 --- a/tools/binman/main.py +++ b/tools/binman/main.py @@ -84,14 +84,14 @@ def RunTests(debug, verbosity, processes, test_preserve_dirs, args, toolpath): # Run the entry tests first ,since these need to be the first to import the # 'entry' module. - test_util.RunTestSuites( + test_util.run_test_suites( result, debug, verbosity, test_preserve_dirs, processes, test_name, toolpath, [bintool_test.TestBintool, entry_test.TestEntry, ftest.TestFunctional, fdt_test.TestFdt, elf_test.TestElf, image_test.TestImage, cbfs_util_test.TestCbfs, fip_util_test.TestFip]) - return test_util.ReportResult('binman', test_name, result) + return test_util.report_result('binman', test_name, result) def RunTestCoverage(toolpath): """Run the tests and check that we get 100% coverage""" @@ -102,7 +102,7 @@ def RunTestCoverage(toolpath): if toolpath: for path in toolpath: extra_args += ' --toolpath %s' % path - test_util.RunTestCoverage('tools/binman/binman', None, + test_util.run_test_coverage('tools/binman/binman', None, ['*test*', '*main.py', 'tools/patman/*', 'tools/dtoc/*'], args.build_dir, all_set, extra_args or None) diff --git a/tools/buildman/main.py b/tools/buildman/main.py index c6af311a69b..01271061e6c 100755 --- a/tools/buildman/main.py +++ b/tools/buildman/main.py @@ -41,12 +41,12 @@ def RunTests(skip_net_tests, verboose, args): # Run the entry tests first ,since these need to be the first to import the # 'entry' module. 
- test_util.RunTestSuites( + test_util.run_test_suites( result, False, verboose, False, None, test_name, [], [test.TestBuild, func_test.TestFunctional, 'buildman.toolchain', 'patman.gitutil']) - return test_util.ReportResult('buildman', test_name, result) + return test_util.report_result('buildman', test_name, result) options, args = cmdline.ParseArgs() diff --git a/tools/dtoc/main.py b/tools/dtoc/main.py index 276cfadf5a3..fac9db9c786 100755 --- a/tools/dtoc/main.py +++ b/tools/dtoc/main.py @@ -55,17 +55,17 @@ def run_tests(processes, args): test_dtoc.setup() - test_util.RunTestSuites( + test_util.run_test_suites( result, debug=True, verbosity=1, test_preserve_dirs=False, processes=processes, test_name=test_name, toolpath=[], class_and_module_list=[test_dtoc.TestDtoc,test_src_scan.TestSrcScan]) - return test_util.ReportResult('binman', test_name, result) + return test_util.report_result('binman', test_name, result) def RunTestCoverage(): """Run the tests and check that we get 100% coverage""" sys.argv = [sys.argv[0]] - test_util.RunTestCoverage('tools/dtoc/dtoc', '/main.py', + test_util.run_test_coverage('tools/dtoc/dtoc', '/main.py', ['tools/patman/*.py', '*/fdt*', '*test*'], args.build_dir) diff --git a/tools/dtoc/test_fdt.py b/tools/dtoc/test_fdt.py index 5a4e9e08145..c789822afae 100755 --- a/tools/dtoc/test_fdt.py +++ b/tools/dtoc/test_fdt.py @@ -715,7 +715,7 @@ class TestFdtUtil(unittest.TestCase): def RunTestCoverage(): """Run the tests and check that we get 100% coverage""" - test_util.RunTestCoverage('tools/dtoc/test_fdt.py', None, + test_util.run_test_coverage('tools/dtoc/test_fdt.py', None, ['tools/patman/*.py', '*test_fdt.py'], options.build_dir) diff --git a/tools/patman/main.py b/tools/patman/main.py index 85073217b51..7a6e910f4b3 100755 --- a/tools/patman/main.py +++ b/tools/patman/main.py @@ -135,12 +135,12 @@ if args.cmd == 'test': from patman import func_test result = unittest.TestResult() - test_util.RunTestSuites( + test_util.run_test_suites( result, False, False, False, None, None, None, [test_checkpatch.TestPatch, func_test.TestFunctional, 'gitutil', 'settings', 'terminal']) - sys.exit(test_util.ReportResult('patman', args.testname, result)) + sys.exit(test_util.report_result('patman', args.testname, result)) # Process commits, produce patches files, check them, email them elif args.cmd == 'send': diff --git a/tools/patman/test_util.py b/tools/patman/test_util.py index c3f15f8a4bc..c60eb3628e2 100644 --- a/tools/patman/test_util.py +++ b/tools/patman/test_util.py @@ -23,7 +23,7 @@ except: use_concurrent = False -def RunTestCoverage(prog, filter_fname, exclude_list, build_dir, required=None, +def run_test_coverage(prog, filter_fname, exclude_list, build_dir, required=None, extra_args=None): """Run tests and check that we get 100% coverage @@ -102,7 +102,7 @@ def capture_sys_output(): sys.stdout, sys.stderr = old_out, old_err -def ReportResult(toolname:str, test_name: str, result: unittest.TestResult): +def report_result(toolname:str, test_name: str, result: unittest.TestResult): """Report the results from a suite of tests Args: @@ -139,8 +139,8 @@ def ReportResult(toolname:str, test_name: str, result: unittest.TestResult): return 0 -def RunTestSuites(result, debug, verbosity, test_preserve_dirs, processes, - test_name, toolpath, class_and_module_list): +def run_test_suites(result, debug, verbosity, test_preserve_dirs, processes, + test_name, toolpath, class_and_module_list): """Run a series of test suites and collect the results Args: From 
f3385a5b1c2024e33e276aef829a4da43ceee0fe Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:15 -0700 Subject: [PATCH 15/27] patman: Convert camel case in tout.py Convert this file to snake case and update all files which use it. Signed-off-by: Simon Glass --- tools/binman/bintool.py | 10 ++--- tools/binman/control.py | 44 +++++++++--------- tools/binman/elf.py | 6 +-- tools/binman/elf_test.py | 4 +- tools/binman/entry.py | 10 ++--- tools/binman/etype/fmap.py | 2 +- tools/binman/etype/section.py | 10 ++--- tools/binman/etype/u_boot_spl_expanded.py | 2 +- tools/binman/etype/u_boot_tpl_expanded.py | 2 +- tools/binman/ftest.py | 2 +- tools/binman/image.py | 6 +-- tools/binman/state.py | 16 +++---- tools/patman/status.py | 6 +-- tools/patman/tools.py | 8 ++-- tools/patman/tout.py | 54 +++++++++++------------ 15 files changed, 91 insertions(+), 91 deletions(-) diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py index 7a0c8163924..4bc3cfebce5 100644 --- a/tools/binman/bintool.py +++ b/tools/binman/bintool.py @@ -267,7 +267,7 @@ class Bintool: name = os.path.expanduser(self.name) # Expand paths containing ~ all_args = (name,) + args env = tools.get_env_with_path() - tout.Detail(f"bintool: {' '.join(all_args)}") + tout.detail(f"bintool: {' '.join(all_args)}") result = command.run_pipe( [all_args], capture=True, capture_stderr=True, env=env, raise_on_error=False, binary=binary) @@ -278,17 +278,17 @@ class Bintool: # try to run it (as above) since RunPipe() allows faking the tool's # output if not any([result.stdout, result.stderr, tools.tool_find(name)]): - tout.Info(f"bintool '{name}' not found") + tout.info(f"bintool '{name}' not found") return None if raise_on_error: - tout.Info(f"bintool '{name}' failed") + tout.info(f"bintool '{name}' failed") raise ValueError("Error %d running '%s': %s" % (result.return_code, ' '.join(all_args), result.stderr or result.stdout)) if result.stdout: - tout.Debug(result.stdout) + tout.debug(result.stdout) if result.stderr: - tout.Debug(result.stderr) + tout.debug(result.stderr) return result def run_cmd(self, *args, binary=False): diff --git a/tools/binman/control.py b/tools/binman/control.py index 305f14bad31..a179f781298 100644 --- a/tools/binman/control.py +++ b/tools/binman/control.py @@ -99,9 +99,9 @@ def _ReadMissingBlobHelp(): return result def _ShowBlobHelp(path, text): - tout.Warning('\n%s:' % path) + tout.warning('\n%s:' % path) for line in text.splitlines(): - tout.Warning(' %s' % line) + tout.warning(' %s' % line) def _ShowHelpForMissingBlobs(missing_list): """Show help for each missing blob to help the user take action @@ -259,14 +259,14 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths, entry = image.FindEntryPath(entry_paths[0]) data = entry.ReadData(decomp, alt_format) tools.write_file(output_fname, data) - tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname)) + tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname)) return # Otherwise we will output to a path given by the entry path of each entry. # This means that entries will appear in subdirectories if they are part of # a sub-section. 
einfos = image.GetListEntries(entry_paths)[0] - tout.Notice('%d entries match and will be written' % len(einfos)) + tout.notice('%d entries match and will be written' % len(einfos)) for einfo in einfos: entry = einfo.entry data = entry.ReadData(decomp, alt_format) @@ -279,7 +279,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths, if fname and not os.path.exists(fname): os.makedirs(fname) fname = os.path.join(fname, 'root') - tout.Notice("Write entry '%s' size %x to '%s'" % + tout.notice("Write entry '%s' size %x to '%s'" % (entry.GetPath(), len(data), fname)) tools.write_file(fname, data) return einfos @@ -328,7 +328,7 @@ def AfterReplace(image, allow_resize, write_map): of the entries), False to raise an exception write_map: True to write a map file """ - tout.Info('Processing image') + tout.info('Processing image') ProcessImage(image, update_fdt=True, write_map=write_map, get_contents=False, allow_resize=allow_resize) @@ -336,7 +336,7 @@ def AfterReplace(image, allow_resize, write_map): def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True, write_map=False): BeforeReplace(image, allow_resize) - tout.Info('Writing data to %s' % entry.GetPath()) + tout.info('Writing data to %s' % entry.GetPath()) ReplaceOneEntry(image, entry, data, do_compress, allow_resize) AfterReplace(image, allow_resize=allow_resize, write_map=write_map) @@ -361,7 +361,7 @@ def WriteEntry(image_fname, entry_path, data, do_compress=True, Returns: Image object that was updated """ - tout.Info("Write entry '%s', file '%s'" % (entry_path, image_fname)) + tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname)) image = Image.FromFile(image_fname) entry = image.FindEntryPath(entry_path) WriteEntryToImage(image, entry, data, do_compress=do_compress, @@ -399,7 +399,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths, raise ValueError('Must specify exactly one entry path to write with -f') entry = image.FindEntryPath(entry_paths[0]) data = tools.read_file(input_fname) - tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname)) + tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname)) WriteEntryToImage(image, entry, data, do_compress=do_compress, allow_resize=allow_resize, write_map=write_map) return @@ -408,7 +408,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths, # This means that files must appear in subdirectories if they are part of # a sub-section. 
einfos = image.GetListEntries(entry_paths)[0] - tout.Notice("Replacing %d matching entries in image '%s'" % + tout.notice("Replacing %d matching entries in image '%s'" % (len(einfos), image_fname)) BeforeReplace(image, allow_resize) @@ -416,19 +416,19 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths, for einfo in einfos: entry = einfo.entry if entry.GetEntries(): - tout.Info("Skipping section entry '%s'" % entry.GetPath()) + tout.info("Skipping section entry '%s'" % entry.GetPath()) continue path = entry.GetPath()[1:] fname = os.path.join(indir, path) if os.path.exists(fname): - tout.Notice("Write entry '%s' from file '%s'" % + tout.notice("Write entry '%s' from file '%s'" % (entry.GetPath(), fname)) data = tools.read_file(fname) ReplaceOneEntry(image, entry, data, do_compress, allow_resize) else: - tout.Warning("Skipping entry '%s' from missing file '%s'" % + tout.warning("Skipping entry '%s' from missing file '%s'" % (entry.GetPath(), fname)) AfterReplace(image, allow_resize=allow_resize, write_map=write_map) @@ -488,7 +488,7 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded): else: skip.append(name) images = new_images - tout.Notice('Skipping images: %s' % ', '.join(skip)) + tout.notice('Skipping images: %s' % ', '.join(skip)) state.Prepare(images, dtb) @@ -574,7 +574,7 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True, if sizes_ok: break image.ResetForPack() - tout.Info('Pack completed after %d pass(es)' % (pack_pass + 1)) + tout.info('Pack completed after %d pass(es)' % (pack_pass + 1)) if not sizes_ok: image.Raise('Entries changed size after packing (tried %s passes)' % passes) @@ -585,20 +585,20 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True, missing_list = [] image.CheckMissing(missing_list) if missing_list: - tout.Warning("Image '%s' is missing external blobs and is non-functional: %s" % + tout.warning("Image '%s' is missing external blobs and is non-functional: %s" % (image.name, ' '.join([e.name for e in missing_list]))) _ShowHelpForMissingBlobs(missing_list) faked_list = [] image.CheckFakedBlobs(faked_list) if faked_list: - tout.Warning( + tout.warning( "Image '%s' has faked external blobs and is non-functional: %s" % (image.name, ' '.join([os.path.basename(e.GetDefaultFilename()) for e in faked_list]))) missing_bintool_list = [] image.check_missing_bintools(missing_bintool_list) if missing_bintool_list: - tout.Warning( + tout.warning( "Image '%s' has missing bintools and is non-functional: %s" % (image.name, ' '.join([os.path.basename(bintool.name) for bintool in missing_bintool_list]))) @@ -629,7 +629,7 @@ def Binman(args): if args.cmd in ['ls', 'extract', 'replace', 'tool']: try: - tout.Init(args.verbosity) + tout.init(args.verbosity) tools.prepare_output_dir(None) if args.cmd == 'ls': ListEntries(args.image, args.paths) @@ -682,7 +682,7 @@ def Binman(args): args.indir.append(board_pathname) try: - tout.Init(args.verbosity) + tout.init(args.verbosity) elf.debug = args.debug cbfs_util.VERBOSE = args.verbosity > 2 state.use_fake_dtb = args.fake_dtb @@ -724,13 +724,13 @@ def Binman(args): elf.UpdateFile(*elf_params, data) if invalid: - tout.Warning("\nSome images are invalid") + tout.warning("\nSome images are invalid") # Use this to debug the time take to pack the image #state.TimingShow() finally: tools.finalise_output_dir() finally: - tout.Uninit() + tout.uninit() return 0 diff --git a/tools/binman/elf.py b/tools/binman/elf.py index 47e0a3f51cc..bc4966e8a84 100644 --- a/tools/binman/elf.py 
+++ b/tools/binman/elf.py @@ -185,7 +185,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section): value = -1 pack_string = pack_string.lower() value_bytes = struct.pack(pack_string, value) - tout.Debug('%s:\n insert %s, offset %x, value %x, length %d' % + tout.debug('%s:\n insert %s, offset %x, value %x, length %d' % (msg, name, offset, value, len(value_bytes))) entry.data = (entry.data[:offset] + value_bytes + entry.data[offset + sym.size:]) @@ -350,7 +350,7 @@ def DecodeElf(data, location): mem_end - data_start) def UpdateFile(infile, outfile, start_sym, end_sym, insert): - tout.Notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" % + tout.notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" % (outfile, len(insert), len(insert), start_sym, end_sym)) syms = GetSymbolFileOffset(infile, [start_sym, end_sym]) if len(syms) != 2: @@ -368,4 +368,4 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert): newdata += insert + tools.get_bytes(0, size - len(insert)) newdata += data[syms[end_sym].offset:] tools.write_file(outfile, newdata) - tout.Info('Written to offset %#x' % syms[start_sym].offset) + tout.info('Written to offset %#x' % syms[start_sym].offset) diff --git a/tools/binman/elf_test.py b/tools/binman/elf_test.py index 0f749ee2541..47ebfbac4a6 100644 --- a/tools/binman/elf_test.py +++ b/tools/binman/elf_test.py @@ -172,7 +172,7 @@ class TestElf(unittest.TestCase): def testDebug(self): """Check that enabling debug in the elf module produced debug output""" try: - tout.Init(tout.DEBUG) + tout.init(tout.DEBUG) entry = FakeEntry(20) section = FakeSection() elf_fname = self.ElfTestFile('u_boot_binman_syms') @@ -180,7 +180,7 @@ class TestElf(unittest.TestCase): syms = elf.LookupAndWriteSymbols(elf_fname, entry, section) self.assertTrue(len(stdout.getvalue()) > 0) finally: - tout.Init(tout.WARNING) + tout.init(tout.WARNING) def testMakeElf(self): """Test for the MakeElf function""" diff --git a/tools/binman/entry.py b/tools/binman/entry.py index 07a27132880..dc26f8f167b 100644 --- a/tools/binman/entry.py +++ b/tools/binman/entry.py @@ -400,7 +400,7 @@ class Entry(object): data += tools.get_bytes(0, self.contents_size - new_size) if not size_ok: - tout.Debug("Entry '%s' size change from %s to %s" % ( + tout.debug("Entry '%s' size change from %s to %s" % ( self._node.path, to_hex(self.contents_size), to_hex(new_size))) self.SetContents(data) @@ -489,12 +489,12 @@ class Entry(object): def Info(self, msg): """Convenience function to log info referencing a node""" tag = "Info '%s'" % self._node.path - tout.Detail('%30s: %s' % (tag, msg)) + tout.detail('%30s: %s' % (tag, msg)) def Detail(self, msg): """Convenience function to log detail referencing a node""" tag = "Node '%s'" % self._node.path - tout.Detail('%30s: %s' % (tag, msg)) + tout.detail('%30s: %s' % (tag, msg)) def GetEntryArgsOrProps(self, props, required=False): """Return the values of a set of properties @@ -841,7 +841,7 @@ features to produce new behaviours. """ # Use True here so that we get an uncompressed section to work from, # although compressed sections are currently not supported - tout.Debug("ReadChildData section '%s', entry '%s'" % + tout.debug("ReadChildData section '%s', entry '%s'" % (self.section.GetPath(), self.GetPath())) data = self.section.ReadChildData(self, decomp, alt_format) return data @@ -1076,7 +1076,7 @@ features to produce new behaviours. 
Returns: True to use this entry type, False to use the original one """ - tout.Info("Node '%s': etype '%s': %s selected" % + tout.info("Node '%s': etype '%s': %s selected" % (node.path, etype, new_etype)) return True diff --git a/tools/binman/etype/fmap.py b/tools/binman/etype/fmap.py index 72b44a78693..0c576202a48 100644 --- a/tools/binman/etype/fmap.py +++ b/tools/binman/etype/fmap.py @@ -46,7 +46,7 @@ class Entry_fmap(Entry): """ def _AddEntries(areas, entry): entries = entry.GetEntries() - tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" % + tout.debug("fmap: Add entry '%s' type '%s' (%s subentries)" % (entry.GetPath(), entry.etype, to_hex_size(entries))) if entries and entry.etype != 'cbfs': # Create an area for the section, which encompasses all entries diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py index b3d73023949..706f6bd3e41 100644 --- a/tools/binman/etype/section.py +++ b/tools/binman/etype/section.py @@ -757,28 +757,28 @@ class Entry_section(Entry): return self._sort def ReadData(self, decomp=True, alt_format=None): - tout.Info("ReadData path='%s'" % self.GetPath()) + tout.info("ReadData path='%s'" % self.GetPath()) parent_data = self.section.ReadData(True, alt_format) offset = self.offset - self.section._skip_at_start data = parent_data[offset:offset + self.size] - tout.Info( + tout.info( '%s: Reading data from offset %#x-%#x (real %#x), size %#x, got %#x' % (self.GetPath(), self.offset, self.offset + self.size, offset, self.size, len(data))) return data def ReadChildData(self, child, decomp=True, alt_format=None): - tout.Debug(f"ReadChildData for child '{child.GetPath()}'") + tout.debug(f"ReadChildData for child '{child.GetPath()}'") parent_data = self.ReadData(True, alt_format) offset = child.offset - self._skip_at_start - tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" % + tout.debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" % (child.GetPath(), child.offset, self._skip_at_start, offset)) data = parent_data[offset:offset + child.size] if decomp: indata = data data = comp_util.decompress(indata, child.compress) if child.uncomp_size: - tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" % + tout.info("%s: Decompressing data size %#x with algo '%s' to data size %#x" % (child.GetPath(), len(indata), child.compress, len(data))) if alt_format: diff --git a/tools/binman/etype/u_boot_spl_expanded.py b/tools/binman/etype/u_boot_spl_expanded.py index 8e138e6a624..319f6708fe6 100644 --- a/tools/binman/etype/u_boot_spl_expanded.py +++ b/tools/binman/etype/u_boot_spl_expanded.py @@ -39,7 +39,7 @@ class Entry_u_boot_spl_expanded(Entry_blob_phase): @classmethod def UseExpanded(cls, node, etype, new_etype): val = state.GetEntryArgBool('spl-dtb') - tout.DoOutput(tout.INFO if val else tout.DETAIL, + tout.do_output(tout.INFO if val else tout.DETAIL, "Node '%s': etype '%s': %s %sselected" % (node.path, etype, new_etype, '' if val else 'not ')) return val diff --git a/tools/binman/etype/u_boot_tpl_expanded.py b/tools/binman/etype/u_boot_tpl_expanded.py index 15cdac46556..55fde3c8e66 100644 --- a/tools/binman/etype/u_boot_tpl_expanded.py +++ b/tools/binman/etype/u_boot_tpl_expanded.py @@ -39,7 +39,7 @@ class Entry_u_boot_tpl_expanded(Entry_blob_phase): @classmethod def UseExpanded(cls, node, etype, new_etype): val = state.GetEntryArgBool('tpl-dtb') - tout.DoOutput(tout.INFO if val else tout.DETAIL, + tout.do_output(tout.INFO if val else tout.DETAIL, "Node '%s': etype '%s': %s 
%sselected" % (node.path, etype, new_etype, '' if val else 'not ')) return val diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index f85581ccd42..123fdb15f78 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -240,7 +240,7 @@ class TestFunctional(unittest.TestCase): def setUp(self): # Enable this to turn on debugging output - # tout.Init(tout.DEBUG) + # tout.init(tout.DEBUG) command.test_result = None def tearDown(self): diff --git a/tools/binman/image.py b/tools/binman/image.py index 93a1d3e645b..afc4b4d6430 100644 --- a/tools/binman/image.py +++ b/tools/binman/image.py @@ -175,11 +175,11 @@ class Image(section.Entry_section): def BuildImage(self): """Write the image to a file""" fname = tools.get_output_filename(self._filename) - tout.Info("Writing image to '%s'" % fname) + tout.info("Writing image to '%s'" % fname) with open(fname, 'wb') as fd: data = self.GetPaddedData() fd.write(data) - tout.Info("Wrote %#x bytes" % len(data)) + tout.info("Wrote %#x bytes" % len(data)) def WriteMap(self): """Write a map of the image to a .map file @@ -230,7 +230,7 @@ class Image(section.Entry_section): return entry def ReadData(self, decomp=True, alt_format=None): - tout.Debug("Image '%s' ReadData(), size=%#x" % + tout.debug("Image '%s' ReadData(), size=%#x" % (self.GetPath(), len(self._data))) return self._data diff --git a/tools/binman/state.py b/tools/binman/state.py index b27c800126c..8cd8a483182 100644 --- a/tools/binman/state.py +++ b/tools/binman/state.py @@ -170,16 +170,16 @@ def SetEntryArgs(args): global entry_args entry_args = {} - tout.Debug('Processing entry args:') + tout.debug('Processing entry args:') if args: for arg in args: m = re.match('([^=]*)=(.*)', arg) if not m: raise ValueError("Invalid entry arguemnt '%s'" % arg) name, value = m.groups() - tout.Debug(' %20s = %s' % (name, value)) + tout.debug(' %20s = %s' % (name, value)) entry_args[name] = value - tout.Debug('Processing entry args done') + tout.debug('Processing entry args done') def GetEntryArg(name): """Get the value of an entry argument @@ -263,16 +263,16 @@ def PrepareFromLoadedData(image): """ global output_fdt_info, main_dtb, fdt_path_prefix - tout.Info('Preparing device trees') + tout.info('Preparing device trees') output_fdt_info.clear() fdt_path_prefix = '' output_fdt_info['fdtmap'] = [image.fdtmap_dtb, 'u-boot.dtb'] main_dtb = None - tout.Info(" Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name) + tout.info(" Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name) for etype, value in image.GetFdts().items(): entry, fname = value out_fname = tools.get_output_filename('%s.dtb' % entry.etype) - tout.Info(" Found device tree type '%s' at '%s' path '%s'" % + tout.info(" Found device tree type '%s' at '%s' path '%s'" % (etype, out_fname, entry.GetPath())) entry._filename = entry.GetDefaultFilename() data = entry.ReadData() @@ -285,7 +285,7 @@ def PrepareFromLoadedData(image): image_node = dtb.GetNode('/binman/%s' % image.image_node) fdt_path_prefix = image_node.path output_fdt_info[etype] = [dtb, None] - tout.Info(" FDT path prefix '%s'" % fdt_path_prefix) + tout.info(" FDT path prefix '%s'" % fdt_path_prefix) def GetAllFdts(): @@ -384,7 +384,7 @@ def SetInt(node, prop, value, for_repack=False): for_repack: True is this property is only needed for repacking """ for n in GetUpdateNodes(node, for_repack): - tout.Detail("File %s: Update node '%s' prop '%s' to %#x" % + tout.detail("File %s: Update node '%s' prop '%s' to %#x" % (n.GetFdt().name, n.path, prop, value)) n.SetInt(prop, 
value) diff --git a/tools/patman/status.py b/tools/patman/status.py index f3fbc661b2f..ece6b159d20 100644 --- a/tools/patman/status.py +++ b/tools/patman/status.py @@ -245,7 +245,7 @@ def collect_patches(series, series_id, url, rest_api=call_rest_api): count = len(patch_dict) num_commits = len(series.commits) if count != num_commits: - tout.Warning('Warning: Patchwork reports %d patches, series has %d' % + tout.warning('Warning: Patchwork reports %d patches, series has %d' % (count, num_commits)) patches = [] @@ -257,7 +257,7 @@ def collect_patches(series, series_id, url, rest_api=call_rest_api): patch.parse_subject(pw_patch['name']) patches.append(patch) if warn_count > 1: - tout.Warning(' (total of %d warnings)' % warn_count) + tout.warning(' (total of %d warnings)' % warn_count) # Sort patches by patch number patches = sorted(patches, key=lambda x: x.seq) @@ -437,7 +437,7 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, patch_for_commit, _, warnings = compare_with_series(series, patches) for warn in warnings: - tout.Warning(warn) + tout.warning(warn) patch_list = [patch_for_commit.get(c) for c in range(len(series.commits))] diff --git a/tools/patman/tools.py b/tools/patman/tools.py index 35fade0f72c..5e4d4ac05cf 100644 --- a/tools/patman/tools.py +++ b/tools/patman/tools.py @@ -64,16 +64,16 @@ def prepare_output_dir(dirname, preserve=False): except OSError as err: raise CmdError("Cannot make output directory '%s': '%s'" % (outdir, err.strerror)) - tout.Debug("Using output directory '%s'" % outdir) + tout.debug("Using output directory '%s'" % outdir) else: outdir = tempfile.mkdtemp(prefix='binman.') - tout.Debug("Using temporary directory '%s'" % outdir) + tout.debug("Using temporary directory '%s'" % outdir) def _remove_output_dir(): global outdir shutil.rmtree(outdir) - tout.Debug("Deleted temporary directory '%s'" % outdir) + tout.debug("Deleted temporary directory '%s'" % outdir) outdir = None def finalise_output_dir(): @@ -121,7 +121,7 @@ def set_input_dirs(dirname): global indir indir = dirname - tout.Debug("Using input directories %s" % indir) + tout.debug("Using input directories %s" % indir) def get_input_filename(fname, allow_missing=False): """Return a filename for use as input. diff --git a/tools/patman/tout.py b/tools/patman/tout.py index 33305263d8e..7eb555aaaea 100644 --- a/tools/patman/tout.py +++ b/tools/patman/tout.py @@ -30,10 +30,10 @@ def __enter__(): def __exit__(unused1, unused2, unused3): """Clean up and remove any progress message.""" - ClearProgress() + clear_progress() return False -def UserIsPresent(): +def user_is_present(): """This returns True if it is likely that a user is present. Sometimes we want to prompt the user, but if no one is there then this @@ -44,7 +44,7 @@ def UserIsPresent(): """ return stdout_is_tty and verbose > 0 -def ClearProgress(): +def clear_progress(): """Clear any active progress message on the terminal.""" global in_progress if verbose > 0 and stdout_is_tty and in_progress: @@ -52,14 +52,14 @@ def ClearProgress(): _stdout.flush() in_progress = False -def Progress(msg, warning=False, trailer='...'): +def progress(msg, warning=False, trailer='...'): """Display progress information. Args: msg: Message to display. 
warning: True if this is a warning.""" global in_progress - ClearProgress() + clear_progress() if verbose > 0: _progress = msg + trailer if stdout_is_tty: @@ -70,7 +70,7 @@ def Progress(msg, warning=False, trailer='...'): else: _stdout.write(_progress + '\n') -def _Output(level, msg, color=None): +def _output(level, msg, color=None): """Output a message to the terminal. Args: @@ -80,7 +80,7 @@ def _Output(level, msg, color=None): error: True if this is an error message, else False. """ if verbose >= level: - ClearProgress() + clear_progress() if color: msg = _color.Color(color, msg) if level < NOTICE: @@ -88,7 +88,7 @@ def _Output(level, msg, color=None): else: print(msg) -def DoOutput(level, msg): +def do_output(level, msg): """Output a message to the terminal. Args: @@ -96,66 +96,66 @@ def DoOutput(level, msg): this as high as the currently selected level. msg; Message to display. """ - _Output(level, msg) + _output(level, msg) -def Error(msg): +def error(msg): """Display an error message Args: msg; Message to display. """ - _Output(ERROR, msg, _color.RED) + _output(ERROR, msg, _color.RED) -def Warning(msg): +def warning(msg): """Display a warning message Args: msg; Message to display. """ - _Output(WARNING, msg, _color.YELLOW) + _output(WARNING, msg, _color.YELLOW) -def Notice(msg): +def notice(msg): """Display an important infomation message Args: msg; Message to display. """ - _Output(NOTICE, msg) + _output(NOTICE, msg) -def Info(msg): +def info(msg): """Display an infomation message Args: msg; Message to display. """ - _Output(INFO, msg) + _output(INFO, msg) -def Detail(msg): +def detail(msg): """Display a detailed message Args: msg; Message to display. """ - _Output(DETAIL, msg) + _output(DETAIL, msg) -def Debug(msg): +def debug(msg): """Display a debug message Args: msg; Message to display. """ - _Output(DEBUG, msg) + _output(DEBUG, msg) -def UserOutput(msg): +def user_output(msg): """Display a message regardless of the current output level. This is used when the output was specifically requested by the user. Args: msg; Message to display. """ - _Output(0, msg) + _output(0, msg) -def Init(_verbose=WARNING, stdout=sys.stdout): +def init(_verbose=WARNING, stdout=sys.stdout): """Initialize a new output object. Args: @@ -173,7 +173,7 @@ def Init(_verbose=WARNING, stdout=sys.stdout): stdout_is_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() stderr_is_tty = hasattr(sys.stderr, 'isatty') and sys.stderr.isatty() -def Uninit(): - ClearProgress() +def uninit(): + clear_progress() -Init() +init() From 82e0e732ee2cf6d0e125aeb7ed7de69711f35ec8 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:16 -0700 Subject: [PATCH 16/27] patman: Rename Print() to Tprint() Rename this function so that when we convert it to snake case it will not conflict with the built-in print() function. 
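To illustrate the clash this avoids: builder.py currently does "from patman.terminal import Print", so a direct snake-case rename would shadow Python's built-in print() at every such import site. A rough sketch of the difference (the call arguments here are only examples):

    # With a plain snake-case rename, this import would shadow the
    # built-in print() for the whole importing module:
    #     from patman.terminal import print
    # Renaming to Tprint() keeps the two names distinct, and the later
    # snake-case pass can safely produce tprint():
    from patman.terminal import Tprint

    Tprint('Starting build...', newline=False)   # terminal-aware output
    print('plain output')                         # built-in print still available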
Signed-off-by: Simon Glass --- tools/buildman/builder.py | 56 +++++++++++++++++++-------------------- tools/buildman/control.py | 4 +-- tools/patman/main.py | 2 +- tools/patman/status.py | 16 +++++------ tools/patman/terminal.py | 4 +-- 5 files changed, 41 insertions(+), 41 deletions(-) diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index 502ac7b274c..8c7fc725de2 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -22,7 +22,7 @@ from buildman import toolchain from patman import command from patman import gitutil from patman import terminal -from patman.terminal import Print +from patman.terminal import Tprint # This indicates an new int or hex Kconfig property with no default # It hangs the build since the 'conf' tool cannot proceed without valid input. @@ -536,7 +536,7 @@ class Builder: line += target terminal.PrintClear() - Print(line, newline=False, limit_to_line=True) + Tprint(line, newline=False, limit_to_line=True) def _GetOutputDir(self, commit_upto): """Get the name of the output directory for a commit number @@ -666,7 +666,7 @@ class Builder: if line.strip(): size, type, name = line[:-1].split() except: - Print("Invalid line in file '%s': '%s'" % (fname, line[:-1])) + Tprint("Invalid line in file '%s': '%s'" % (fname, line[:-1])) continue if type in 'tTdDbB': # function names begin with '.' on 64-bit powerpc @@ -1009,16 +1009,16 @@ class Builder: return args = [self.ColourNum(x) for x in args] indent = ' ' * 15 - Print('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' % + Tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' % tuple([indent, self.col.Color(self.col.YELLOW, fname)] + args)) - Print('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new', + Tprint('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new', 'delta')) for diff, name in delta: if diff: color = self.col.RED if diff > 0 else self.col.GREEN msg = '%s %-38s %7s %7s %+7d' % (indent, name, old.get(name, '-'), new.get(name,'-'), diff) - Print(msg, colour=color) + Tprint(msg, colour=color) def PrintSizeDetail(self, target_list, show_bloat): @@ -1043,12 +1043,12 @@ class Builder: color = self.col.RED if diff > 0 else self.col.GREEN msg = ' %s %+d' % (name, diff) if not printed_target: - Print('%10s %-15s:' % ('', result['_target']), + Tprint('%10s %-15s:' % ('', result['_target']), newline=False) printed_target = True - Print(msg, colour=color, newline=False) + Tprint(msg, colour=color, newline=False) if printed_target: - Print() + Tprint() if show_bloat: target = result['_target'] outcome = result['_outcome'] @@ -1153,13 +1153,13 @@ class Builder: color = self.col.RED if avg_diff > 0 else self.col.GREEN msg = ' %s %+1.1f' % (name, avg_diff) if not printed_arch: - Print('%10s: (for %d/%d boards)' % (arch, count, + Tprint('%10s: (for %d/%d boards)' % (arch, count, arch_count[arch]), newline=False) printed_arch = True - Print(msg, colour=color, newline=False) + Tprint(msg, colour=color, newline=False) if printed_arch: - Print() + Tprint() if show_detail: self.PrintSizeDetail(target_list, show_bloat) @@ -1304,7 +1304,7 @@ class Builder: col = self.col.RED elif line[0] == 'c': col = self.col.YELLOW - Print(' ' + line, newline=True, colour=col) + Tprint(' ' + line, newline=True, colour=col) def _OutputErrLines(err_lines, colour): """Output the line of error/warning lines, if not empty @@ -1331,7 +1331,7 @@ class Builder: else: out = self.col.Color(colour, line.char + line.errline) out_list.append(out) - Print('\n'.join(out_list)) + Tprint('\n'.join(out_list)) 
self._error_lines += 1 @@ -1385,7 +1385,7 @@ class Builder: self.AddOutcome(board_selected, arch_list, unknown_boards, '?', self.col.MAGENTA) for arch, target_list in arch_list.items(): - Print('%10s: %s' % (arch, target_list)) + Tprint('%10s: %s' % (arch, target_list)) self._error_lines += 1 _OutputErrLines(better_err, colour=self.col.GREEN) _OutputErrLines(worse_err, colour=self.col.RED) @@ -1515,13 +1515,13 @@ class Builder: _AddConfig(lines, 'all', all_plus, all_minus, all_change) #arch_summary[target] = '\n'.join(lines) if lines: - Print('%s:' % arch) + Tprint('%s:' % arch) _OutputConfigInfo(lines) for lines, targets in lines_by_target.items(): if not lines: continue - Print('%s :' % ' '.join(sorted(targets))) + Tprint('%s :' % ' '.join(sorted(targets))) _OutputConfigInfo(lines.split('\n')) @@ -1540,7 +1540,7 @@ class Builder: if not board in board_dict: not_built.append(board) if not_built: - Print("Boards not built (%d): %s" % (len(not_built), + Tprint("Boards not built (%d): %s" % (len(not_built), ', '.join(not_built))) def ProduceResultSummary(self, commit_upto, commits, board_selected): @@ -1553,7 +1553,7 @@ class Builder: if commits: msg = '%02d: %s' % (commit_upto + 1, commits[commit_upto].subject) - Print(msg, colour=self.col.BLUE) + Tprint(msg, colour=self.col.BLUE) self.PrintResultSummary(board_selected, board_dict, err_lines if self._show_errors else [], err_line_boards, warn_lines if self._show_errors else [], warn_line_boards, @@ -1578,7 +1578,7 @@ class Builder: for commit_upto in range(0, self.commit_count, self._step): self.ProduceResultSummary(commit_upto, commits, board_selected) if not self._error_lines: - Print('(no errors to report)', colour=self.col.GREEN) + Tprint('(no errors to report)', colour=self.col.GREEN) def SetupBuild(self, board_selected, commits): @@ -1629,7 +1629,7 @@ class Builder: if os.path.isdir(git_dir): # This is a clone of the src_dir repo, we can keep using # it but need to fetch from src_dir. - Print('\rFetching repo for thread %d' % thread_num, + Tprint('\rFetching repo for thread %d' % thread_num, newline=False) gitutil.fetch(git_dir, thread_dir) terminal.PrintClear() @@ -1643,12 +1643,12 @@ class Builder: raise ValueError('Git dir %s exists, but is not a file ' 'or a directory.' % git_dir) elif setup_git == 'worktree': - Print('\rChecking out worktree for thread %d' % thread_num, + Tprint('\rChecking out worktree for thread %d' % thread_num, newline=False) gitutil.add_worktree(src_dir, thread_dir) terminal.PrintClear() elif setup_git == 'clone' or setup_git == True: - Print('\rCloning repo for thread %d' % thread_num, + Tprint('\rCloning repo for thread %d' % thread_num, newline=False) gitutil.clone(src_dir, thread_dir) terminal.PrintClear() @@ -1717,7 +1717,7 @@ class Builder: """ to_remove = self._GetOutputSpaceRemovals() if to_remove: - Print('Removing %d old build directories...' % len(to_remove), + Tprint('Removing %d old build directories...' 
% len(to_remove), newline=False) for dirname in to_remove: shutil.rmtree(dirname) @@ -1747,7 +1747,7 @@ class Builder: self._PrepareWorkingSpace(min(self.num_threads, len(board_selected)), commits is not None) self._PrepareOutputSpace() - Print('\rStarting build...', newline=False) + Tprint('\rStarting build...', newline=False) self.SetupBuild(board_selected, commits) self.ProcessResult(None) self.thread_exceptions = [] @@ -1774,7 +1774,7 @@ class Builder: # Wait until we have processed all output self.out_queue.join() - Print() + Tprint() msg = 'Completed: %d total built' % self.count if self.already_done: @@ -1789,9 +1789,9 @@ class Builder: duration = duration - timedelta(microseconds=duration.microseconds) rate = float(self.count) / duration.total_seconds() msg += ', duration %s, rate %1.2f' % (duration, rate) - Print(msg) + Tprint(msg) if self.thread_exceptions: - Print('Failed: %d thread exceptions' % len(self.thread_exceptions), + Tprint('Failed: %d thread exceptions' % len(self.thread_exceptions), colour=self.col.RED) return (self.fail, self.warned, self.thread_exceptions) diff --git a/tools/buildman/control.py b/tools/buildman/control.py index 2c25fd70668..0e4b2e0a9dc 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -18,7 +18,7 @@ from patman import gitutil from patman import patchstream from patman import terminal from patman import tools -from patman.terminal import Print +from patman.terminal import Tprint def GetPlural(count): """Returns a plural 's' if count is not 1""" @@ -362,7 +362,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, else: commits = None - Print(GetActionSummary(options.summary, commits, board_selected, + Tprint(GetActionSummary(options.summary, commits, board_selected, options)) # We can't show function sizes without board details at present diff --git a/tools/patman/main.py b/tools/patman/main.py index 7a6e910f4b3..d32eae4bfca 100755 --- a/tools/patman/main.py +++ b/tools/patman/main.py @@ -177,7 +177,7 @@ elif args.cmd == 'status': args.dest_branch, args.force, args.show_comments, args.patchwork_url) except Exception as e: - terminal.Print('patman: %s: %s' % (type(e).__name__, e), + terminal.Tprint('patman: %s: %s' % (type(e).__name__, e), colour=terminal.Color.RED) if args.debug: print() diff --git a/tools/patman/status.py b/tools/patman/status.py index ece6b159d20..fbed055d347 100644 --- a/tools/patman/status.py +++ b/tools/patman/status.py @@ -338,9 +338,9 @@ def show_responses(rtags, indent, is_new): for tag in sorted(rtags.keys()): people = rtags[tag] for who in sorted(people): - terminal.Print(indent + '%s %s: ' % ('+' if is_new else ' ', tag), + terminal.Tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag), newline=False, colour=col.GREEN, bright=is_new) - terminal.Print(who, colour=col.WHITE, bright=is_new) + terminal.Tprint(who, colour=col.WHITE, bright=is_new) count += 1 return count @@ -455,7 +455,7 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, patch = patch_for_commit.get(seq) if not patch: continue - terminal.Print('%3d %s' % (patch.seq, patch.subject[:50]), + terminal.Tprint('%3d %s' % (patch.seq, patch.subject[:50]), colour=col.BLUE) cmt = series.commits[seq] base_rtags = cmt.rtags @@ -466,15 +466,15 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, num_to_add += show_responses(new_rtags, indent, True) if show_comments: for review in review_list[seq]: - terminal.Print('Review: %s' % review.meta, colour=col.RED) + 
terminal.Tprint('Review: %s' % review.meta, colour=col.RED) for snippet in review.snippets: for line in snippet: quoted = line.startswith('>') - terminal.Print(' %s' % line, + terminal.Tprint(' %s' % line, colour=col.MAGENTA if quoted else None) - terminal.Print() + terminal.Tprint() - terminal.Print("%d new response%s available in patchwork%s" % + terminal.Tprint("%d new response%s available in patchwork%s" % (num_to_add, 's' if num_to_add != 1 else '', '' if dest_branch else ' (use -d to write them to a new branch)')) @@ -482,6 +482,6 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, if dest_branch: num_added = create_branch(series, new_rtag_list, branch, dest_branch, force, test_repo) - terminal.Print( + terminal.Tprint( "%d response%s added from patchwork into new branch '%s'" % (num_added, 's' if num_added != 1 else '', dest_branch)) diff --git a/tools/patman/terminal.py b/tools/patman/terminal.py index 9be03b3a6fd..f76d2b17772 100644 --- a/tools/patman/terminal.py +++ b/tools/patman/terminal.py @@ -130,7 +130,7 @@ def TrimAsciiLen(text, size): return out -def Print(text='', newline=True, colour=None, limit_to_line=False, bright=True): +def Tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True): """Handle a line of output to the terminal. In test mode this is recorded in a list. Otherwise it is output to the @@ -175,7 +175,7 @@ def SetPrintTestMode(enable=True): GetPrintTestLines() def GetPrintTestLines(): - """Get a list of all lines output through Print() + """Get a list of all lines output through Tprint() Returns: A list of PrintLine objects From 252ac589969acbc4c17379a4e862a18e1518d12d Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:17 -0700 Subject: [PATCH 17/27] patman: Rename Color() method to build() This method has the same name as its class which is confusing. It is also annoying when searching the code. It builds a string with a colour, so rename it to build(). 
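To make the change concrete, a small before/after sketch of a typical call site (the message text is made up; the pattern matches the call sites updated below):

    col = terminal.Color()

    # Before: the method name repeats the class name, so grepping for
    # "Color(" finds constructor calls and formatting calls alike.
    print(col.Color(col.RED, 'something went wrong'))

    # After: the intent - building a coloured string - is explicit.
    print(col.build(col.RED, 'something went wrong'))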
Signed-off-by: Simon Glass --- tools/binman/bintool.py | 8 ++++---- tools/buildman/builder.py | 24 ++++++++++++------------ tools/buildman/control.py | 28 ++++++++++++++-------------- tools/buildman/test.py | 20 ++++++++++---------- tools/buildman/toolchain.py | 10 +++++----- tools/patman/checkpatch.py | 10 +++++----- tools/patman/control.py | 6 +++--- tools/patman/gitutil.py | 6 +++--- tools/patman/series.py | 12 ++++++------ tools/patman/terminal.py | 14 +++++++------- tools/patman/tout.py | 4 ++-- 11 files changed, 71 insertions(+), 71 deletions(-) diff --git a/tools/binman/bintool.py b/tools/binman/bintool.py index 4bc3cfebce5..8435b29749b 100644 --- a/tools/binman/bintool.py +++ b/tools/binman/bintool.py @@ -174,7 +174,7 @@ class Bintool: res = self.fetch(meth) except urllib.error.URLError as uerr: message = uerr.reason - print(col.Color(col.RED, f'- {message}')) + print(col.build(col.RED, f'- {message}')) except ValueError as exc: print(f'Exception: {exc}') @@ -182,7 +182,7 @@ class Bintool: if skip_present and self.is_present(): return PRESENT - print(col.Color(col.YELLOW, 'Fetch: %s' % self.name)) + print(col.build(col.YELLOW, 'Fetch: %s' % self.name)) if method == FETCH_ANY: for try_method in range(1, FETCH_COUNT): print(f'- trying method: {FETCH_NAMES[try_method]}') @@ -216,7 +216,7 @@ class Bintool: True on success, False on failure """ def show_status(color, prompt, names): - print(col.Color( + print(col.build( color, f'{prompt}:%s{len(names):2}: %s' % (' ' * (16 - len(prompt)), ' '.join(names)))) @@ -227,7 +227,7 @@ class Bintool: name_list = Bintool.get_tool_list() if names_to_fetch[0] == 'missing': skip_present = True - print(col.Color(col.YELLOW, + print(col.build(col.YELLOW, 'Fetching tools: %s' % ' '.join(name_list))) status = collections.defaultdict(list) for name in name_list: diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index 8c7fc725de2..364adb1cb5f 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -518,14 +518,14 @@ class Builder: # Display separate counts for ok, warned and fail ok = self.upto - self.warned - self.fail - line = '\r' + self.col.Color(self.col.GREEN, '%5d' % ok) - line += self.col.Color(self.col.YELLOW, '%5d' % self.warned) - line += self.col.Color(self.col.RED, '%5d' % self.fail) + line = '\r' + self.col.build(self.col.GREEN, '%5d' % ok) + line += self.col.build(self.col.YELLOW, '%5d' % self.warned) + line += self.col.build(self.col.RED, '%5d' % self.fail) line += ' /%-5d ' % self.count remaining = self.count - self.upto if remaining: - line += self.col.Color(self.col.MAGENTA, ' -%-5d ' % remaining) + line += self.col.build(self.col.MAGENTA, ' -%-5d ' % remaining) else: line += ' ' * 8 @@ -933,9 +933,9 @@ class Builder: arch = board_dict[target].arch else: arch = 'unknown' - str = self.col.Color(color, ' ' + target) + str = self.col.build(color, ' ' + target) if not arch in done_arch: - str = ' %s %s' % (self.col.Color(color, char), str) + str = ' %s %s' % (self.col.build(color, char), str) done_arch[arch] = True if not arch in arch_list: arch_list[arch] = str @@ -947,7 +947,7 @@ class Builder: color = self.col.RED if num > 0 else self.col.GREEN if num == 0: return '0' - return self.col.Color(color, str(num)) + return self.col.build(color, str(num)) def ResetResultSummary(self, board_selected): """Reset the results summary ready for use. 
@@ -1010,7 +1010,7 @@ class Builder: args = [self.ColourNum(x) for x in args] indent = ' ' * 15 Tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' % - tuple([indent, self.col.Color(self.col.YELLOW, fname)] + args)) + tuple([indent, self.col.build(self.col.YELLOW, fname)] + args)) Tprint('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new', 'delta')) for diff, name in delta: @@ -1324,12 +1324,12 @@ class Builder: names = [board.target for board in line.boards] board_str = ' '.join(names) if names else '' if board_str: - out = self.col.Color(colour, line.char + '(') - out += self.col.Color(self.col.MAGENTA, board_str, + out = self.col.build(colour, line.char + '(') + out += self.col.build(self.col.MAGENTA, board_str, bright=False) - out += self.col.Color(colour, ') %s' % line.errline) + out += self.col.build(colour, ') %s' % line.errline) else: - out = self.col.Color(colour, line.char + line.errline) + out = self.col.build(colour, line.char + line.errline) out_list.append(out) Tprint('\n'.join(out_list)) self._error_lines += 1 diff --git a/tools/buildman/control.py b/tools/buildman/control.py index 0e4b2e0a9dc..195d27a044c 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -73,7 +73,7 @@ def ShowActions(series, why_selected, boards_selected, builder, options, if commits: for upto in range(0, len(series.commits), options.step): commit = series.commits[upto] - print(' ', col.Color(col.YELLOW, commit.hash[:8], bright=False), end=' ') + print(' ', col.build(col.YELLOW, commit.hash[:8], bright=False), end=' ') print(commit.subject) print() for arg in why_selected: @@ -85,7 +85,7 @@ def ShowActions(series, why_selected, boards_selected, builder, options, len(why_selected['all']))) if board_warnings: for warning in board_warnings: - print(col.Color(col.YELLOW, warning)) + print(col.build(col.YELLOW, warning)) def ShowToolchainPrefix(boards, toolchains): """Show information about a the tool chain used by one or more boards @@ -152,14 +152,14 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, if options.fetch_arch: if options.fetch_arch == 'list': sorted_list = toolchains.ListArchs() - print(col.Color(col.BLUE, 'Available architectures: %s\n' % + print(col.build(col.BLUE, 'Available architectures: %s\n' % ' '.join(sorted_list))) return 0 else: fetch_arch = options.fetch_arch if fetch_arch == 'all': fetch_arch = ','.join(toolchains.ListArchs()) - print(col.Color(col.CYAN, '\nDownloading toolchains: %s' % + print(col.build(col.CYAN, '\nDownloading toolchains: %s' % fetch_arch)) for arch in fetch_arch.split(','): print() @@ -177,11 +177,11 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, return 0 if options.incremental: - print(col.Color(col.RED, + print(col.build(col.RED, 'Warning: -I has been removed. See documentation')) if not options.output_dir: if options.work_in_output: - sys.exit(col.Color(col.RED, '-w requires that you specify -o')) + sys.exit(col.build(col.RED, '-w requires that you specify -o')) options.output_dir = '..' 
# Work out what subset of the boards we are building @@ -218,12 +218,12 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, requested_boards) selected = boards.GetSelected() if not len(selected): - sys.exit(col.Color(col.RED, 'No matching boards found')) + sys.exit(col.build(col.RED, 'No matching boards found')) if options.print_prefix: err = ShowToolchainPrefix(boards, toolchains) if err: - sys.exit(col.Color(col.RED, err)) + sys.exit(col.build(col.RED, err)) return 0 # Work out how many commits to build. We want to build everything on the @@ -242,24 +242,24 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, count, msg = gitutil.count_commits_in_branch(options.git_dir, options.branch) if count is None: - sys.exit(col.Color(col.RED, msg)) + sys.exit(col.build(col.RED, msg)) elif count == 0: - sys.exit(col.Color(col.RED, "Range '%s' has no commits" % + sys.exit(col.build(col.RED, "Range '%s' has no commits" % options.branch)) if msg: - print(col.Color(col.YELLOW, msg)) + print(col.build(col.YELLOW, msg)) count += 1 # Build upstream commit also if not count: str = ("No commits found to process in branch '%s': " "set branch's upstream or use -c flag" % options.branch) - sys.exit(col.Color(col.RED, str)) + sys.exit(col.build(col.RED, str)) if options.work_in_output: if len(selected) != 1: - sys.exit(col.Color(col.RED, + sys.exit(col.build(col.RED, '-w can only be used with a single board')) if count != 1: - sys.exit(col.Color(col.RED, + sys.exit(col.build(col.RED, '-w can only be used with a single commit')) # Read the metadata from the commits. First look at the upstream commit, diff --git a/tools/buildman/test.py b/tools/buildman/test.py index 43b012171da..4b4a0349e81 100644 --- a/tools/buildman/test.py +++ b/tools/buildman/test.py @@ -182,10 +182,10 @@ class TestBuild(unittest.TestCase): col.YELLOW if outcome == OUTCOME_WARN else col.RED) expect = '%10s: ' % arch # TODO(sjg@chromium.org): If plus is '', we shouldn't need this - expect += ' ' + col.Color(expected_colour, plus) + expect += ' ' + col.build(expected_colour, plus) expect += ' ' for board in boards: - expect += col.Color(expected_colour, ' %s' % board) + expect += col.build(expected_colour, ' %s' % board) self.assertEqual(text, expect) def _SetupTest(self, echo_lines=False, threads=1, **kwdisplay_args): @@ -254,12 +254,12 @@ class TestBuild(unittest.TestCase): new_lines = [] for line in lines: if boards: - expect = self._col.Color(colour, prefix + '(') - expect += self._col.Color(self._col.MAGENTA, boards, + expect = self._col.build(colour, prefix + '(') + expect += self._col.build(self._col.MAGENTA, boards, bright=False) - expect += self._col.Color(colour, ') %s' % line) + expect += self._col.build(colour, ') %s' % line) else: - expect = self._col.Color(colour, prefix + line) + expect = self._col.build(colour, prefix + line) new_lines.append(expect) return '\n'.join(new_lines) @@ -317,12 +317,12 @@ class TestBuild(unittest.TestCase): self.assertEqual(next(lines).text, '04: %s' % commits[3][1]) if filter_migration_warnings: expect = '%10s: ' % 'powerpc' - expect += ' ' + col.Color(col.GREEN, '') + expect += ' ' + col.build(col.GREEN, '') expect += ' ' - expect += col.Color(col.GREEN, ' %s' % 'board2') - expect += ' ' + col.Color(col.YELLOW, 'w+') + expect += col.build(col.GREEN, ' %s' % 'board2') + expect += ' ' + col.build(col.YELLOW, 'w+') expect += ' ' - expect += col.Color(col.YELLOW, ' %s' % 'board3') + expect += col.build(col.YELLOW, ' %s' % 'board3') 
self.assertEqual(next(lines).text, expect) else: self.assertSummary(next(lines).text, 'powerpc', 'w+', diff --git a/tools/buildman/toolchain.py b/tools/buildman/toolchain.py index 3442d998abf..46a4e5ed409 100644 --- a/tools/buildman/toolchain.py +++ b/tools/buildman/toolchain.py @@ -381,7 +381,7 @@ class Toolchains: def List(self): """List out the selected toolchains for each architecture""" col = terminal.Color() - print(col.Color(col.BLUE, 'List of available toolchains (%d):' % + print(col.build(col.BLUE, 'List of available toolchains (%d):' % len(self.toolchains))) if len(self.toolchains): for key, value in sorted(self.toolchains.items()): @@ -559,7 +559,7 @@ class Toolchains: """ # Fist get the URL for this architecture col = terminal.Color() - print(col.Color(col.BLUE, "Downloading toolchain for arch '%s'" % arch)) + print(col.build(col.BLUE, "Downloading toolchain for arch '%s'" % arch)) url = self.LocateArchUrl(arch) if not url: print(("Cannot find toolchain for arch '%s' - use 'list' to list" % @@ -574,7 +574,7 @@ class Toolchains: tarfile, tmpdir = tools.download(url, '.buildman') if not tarfile: return 1 - print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ') + print(col.build(col.GREEN, 'Unpacking to: %s' % dest), end=' ') sys.stdout.flush() path = self.Unpack(tarfile, dest) os.remove(tarfile) @@ -582,14 +582,14 @@ class Toolchains: print() # Check that the toolchain works - print(col.Color(col.GREEN, 'Testing')) + print(col.build(col.GREEN, 'Testing')) dirpath = os.path.join(dest, path) compiler_fname_list = self.ScanPath(dirpath, True) if not compiler_fname_list: print('Could not locate C compiler - fetch failed.') return 1 if len(compiler_fname_list) != 1: - print(col.Color(col.RED, 'Warning, ambiguous toolchains: %s' % + print(col.build(col.RED, 'Warning, ambiguous toolchains: %s' % ', '.join(compiler_fname_list))) toolchain = Toolchain(compiler_fname_list[0], True, True) diff --git a/tools/patman/checkpatch.py b/tools/patman/checkpatch.py index 043419089a8..dd792efee0b 100644 --- a/tools/patman/checkpatch.py +++ b/tools/patman/checkpatch.py @@ -228,11 +228,11 @@ def get_warning_msg(col, msg_type, fname, line, msg): msg: Message to report ''' if msg_type == 'warning': - msg_type = col.Color(col.YELLOW, msg_type) + msg_type = col.build(col.YELLOW, msg_type) elif msg_type == 'error': - msg_type = col.Color(col.RED, msg_type) + msg_type = col.build(col.RED, msg_type) elif msg_type == 'check': - msg_type = col.Color(col.MAGENTA, msg_type) + msg_type = col.build(col.MAGENTA, msg_type) line_str = '' if line is None else '%d' % line return '%s:%s: %s: %s\n' % (fname, line_str, msg_type, msg) @@ -248,7 +248,7 @@ def check_patches(verbose, args): warning_count += result.warnings check_count += result.checks print('%d errors, %d warnings, %d checks for %s:' % (result.errors, - result.warnings, result.checks, col.Color(col.BLUE, fname))) + result.warnings, result.checks, col.build(col.BLUE, fname))) if (len(result.problems) != result.errors + result.warnings + result.checks): print("Internal error: some problems lost") @@ -266,6 +266,6 @@ def check_patches(verbose, args): color = col.YELLOW if error_count: color = col.RED - print(col.Color(color, str % (error_count, warning_count, check_count))) + print(col.build(color, str % (error_count, warning_count, check_count))) return False return True diff --git a/tools/patman/control.py b/tools/patman/control.py index cea4f3e770a..b40382388e0 100644 --- a/tools/patman/control.py +++ b/tools/patman/control.py @@ -50,7 +50,7 @@ def 
prepare_patches(col, branch, count, start, end, ignore_binary, signoff): if not count: str = 'No commits found to process - please use -c flag, or run:\n' \ ' git branch --set-upstream-to remote/branch' - sys.exit(col.Color(col.RED, str)) + sys.exit(col.build(col.RED, str)) # Read the metadata from the commits to_do = count - end @@ -143,13 +143,13 @@ def email_patches(col, series, cover_fname, patch_files, process_tags, its_a_go, cc_file, in_reply_to=in_reply_to, thread=thread, smtp_server=smtp_server) else: - print(col.Color(col.RED, "Not sending emails due to errors/warnings")) + print(col.build(col.RED, "Not sending emails due to errors/warnings")) # For a dry run, just show our actions as a sanity check if dry_run: series.ShowActions(patch_files, cmd, process_tags) if not its_a_go: - print(col.Color(col.RED, "Email would not be sent")) + print(col.build(col.RED, "Email would not be sent")) os.remove(cc_file) diff --git a/tools/patman/gitutil.py b/tools/patman/gitutil.py index 86972973249..ceaf2ce1504 100644 --- a/tools/patman/gitutil.py +++ b/tools/patman/gitutil.py @@ -404,7 +404,7 @@ def check_suppress_cc_config(): if suppresscc == 'all' or suppresscc == 'cccmd': col = terminal.Color() - print((col.Color(col.RED, "error") + + print((col.build(col.RED, "error") + ": git config sendemail.suppresscc set to %s\n" % (suppresscc)) + " patman needs --cc-cmd to be run to set the cc list.\n" + " Please run:\n" + @@ -577,14 +577,14 @@ def lookup_email(lookup_name, alias=None, warn_on_error=True, level=0): if warn_on_error: raise OSError(msg) else: - print(col.Color(col.RED, msg)) + print(col.build(col.RED, msg)) return out_list if lookup_name: if not lookup_name in alias: msg = "Alias '%s' not found" % lookup_name if warn_on_error: - print(col.Color(col.RED, msg)) + print(col.build(col.RED, msg)) return out_list for item in alias[lookup_name]: todo = lookup_email(item, alias, warn_on_error, level + 1) diff --git a/tools/patman/series.py b/tools/patman/series.py index 27dd3e1a7ed..891f2785342 100644 --- a/tools/patman/series.py +++ b/tools/patman/series.py @@ -118,11 +118,11 @@ class Series(dict): # TODO: Colour the patches according to whether they passed checks for upto in range(len(args)): commit = self.commits[upto] - print(col.Color(col.GREEN, ' %s' % args[upto])) + print(col.build(col.GREEN, ' %s' % args[upto])) cc_list = list(self._generated_cc[commit.patch]) for email in sorted(set(cc_list) - to_set - cc_set): if email == None: - email = col.Color(col.YELLOW, "" + email = col.build(col.YELLOW, "" % tag) if email: print(' Cc: ', email) @@ -227,13 +227,13 @@ class Series(dict): else: if version > 1: str = 'Change log missing for v%d' % version - print(col.Color(col.RED, str)) + print(col.build(col.RED, str)) for version in changes_copy: str = 'Change log for unknown version v%d' % version - print(col.Color(col.RED, str)) + print(col.build(col.RED, str)) elif self.changes: str = 'Change log exists, but no version is set' - print(col.Color(col.RED, str)) + print(col.build(col.RED, str)) def MakeCcFile(self, process_tags, cover_fname, warn_on_error, add_maintainers, limit): @@ -271,7 +271,7 @@ class Series(dict): dir_list = [os.path.join(gitutil.get_top_level(), 'scripts')] cc += get_maintainer.get_maintainer(dir_list, commit.patch) for x in set(cc) & set(settings.bounces): - print(col.Color(col.YELLOW, 'Skipping "%s"' % x)) + print(col.build(col.YELLOW, 'Skipping "%s"' % x)) cc = list(set(cc) - set(settings.bounces)) if limit is not None: cc = cc[:limit] diff --git 
a/tools/patman/terminal.py b/tools/patman/terminal.py index f76d2b17772..e72c55ba98e 100644 --- a/tools/patman/terminal.py +++ b/tools/patman/terminal.py @@ -64,7 +64,7 @@ def CalcAsciiLen(text): Length of text, after skipping ANSI sequences >>> col = Color(COLOR_ALWAYS) - >>> text = col.Color(Color.RED, 'abc') + >>> text = col.build(Color.RED, 'abc') >>> len(text) 14 >>> CalcAsciiLen(text) @@ -73,7 +73,7 @@ def CalcAsciiLen(text): >>> text += 'def' >>> CalcAsciiLen(text) 6 - >>> text += col.Color(Color.RED, 'abc') + >>> text += col.build(Color.RED, 'abc') >>> CalcAsciiLen(text) 9 """ @@ -87,7 +87,7 @@ def TrimAsciiLen(text, size): calculation. >>> col = Color(COLOR_ALWAYS) - >>> text = col.Color(Color.RED, 'abc') + >>> text = col.build(Color.RED, 'abc') >>> len(text) 14 >>> CalcAsciiLen(TrimAsciiLen(text, 4)) @@ -97,7 +97,7 @@ def TrimAsciiLen(text, size): >>> text += 'def' >>> CalcAsciiLen(TrimAsciiLen(text, 4)) 4 - >>> text += col.Color(Color.RED, 'ghi') + >>> text += col.build(Color.RED, 'ghi') >>> CalcAsciiLen(TrimAsciiLen(text, 7)) 7 """ @@ -148,7 +148,7 @@ def Tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True) else: if colour: col = Color() - text = col.Color(colour, text, bright=bright) + text = col.build(colour, text, bright=bright) if newline: print(text) last_print_len = None @@ -191,7 +191,7 @@ def EchoPrintTestLines(): for line in print_test_list: if line.colour: col = Color() - print(col.Color(line.colour, line.text), end='') + print(col.build(line.colour, line.text), end='') else: print(line.text, end='') if line.newline: @@ -247,7 +247,7 @@ class Color(object): return self.RESET return '' - def Color(self, color, text, bright=True): + def build(self, color, text, bright=True): """Returns text with conditionally added color escape sequences. Keyword arguments: diff --git a/tools/patman/tout.py b/tools/patman/tout.py index 7eb555aaaea..ff0fd92afcc 100644 --- a/tools/patman/tout.py +++ b/tools/patman/tout.py @@ -64,7 +64,7 @@ def progress(msg, warning=False, trailer='...'): _progress = msg + trailer if stdout_is_tty: col = _color.YELLOW if warning else _color.GREEN - _stdout.write('\r' + _color.Color(col, _progress)) + _stdout.write('\r' + _color.build(col, _progress)) _stdout.flush() in_progress = True else: @@ -82,7 +82,7 @@ def _output(level, msg, color=None): if verbose >= level: clear_progress() if color: - msg = _color.Color(color, msg) + msg = _color.build(color, msg) if level < NOTICE: print(msg, file=sys.stderr) else: From 098b10fb34140f4ac37cfab9c9afade9135710a8 Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:18 -0700 Subject: [PATCH 18/27] patman: Convert camel case in terminal.py Convert this file to snake case and update all files which use it. 
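For reference, typical test-mode usage with the new names looks roughly like this (a sketch based on the call sites updated below; the 'hello' text is only an example):

    from patman import terminal

    terminal.set_print_test_mode()        # record output instead of printing it
    terminal.tprint('hello', colour=terminal.Color.RED)

    lines = terminal.get_print_test_lines()   # list of PrintLine objects
    assert lines[0].text == 'hello'
    assert lines[0].colour == terminal.Color.RED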
Signed-off-by: Simon Glass --- tools/buildman/builder.py | 68 ++++++++++++++++++------------------- tools/buildman/control.py | 4 +-- tools/buildman/func_test.py | 10 +++--- tools/buildman/test.py | 8 ++--- tools/patman/func_test.py | 14 ++++---- tools/patman/main.py | 2 +- tools/patman/status.py | 16 ++++----- tools/patman/terminal.py | 42 +++++++++++------------ 8 files changed, 82 insertions(+), 82 deletions(-) diff --git a/tools/buildman/builder.py b/tools/buildman/builder.py index 364adb1cb5f..754642d4a68 100644 --- a/tools/buildman/builder.py +++ b/tools/buildman/builder.py @@ -22,7 +22,7 @@ from buildman import toolchain from patman import command from patman import gitutil from patman import terminal -from patman.terminal import Tprint +from patman.terminal import tprint # This indicates an new int or hex Kconfig property with no default # It hangs the build since the 'conf' tool cannot proceed without valid input. @@ -508,7 +508,7 @@ class Builder: if result.already_done: self.already_done += 1 if self._verbose: - terminal.PrintClear() + terminal.print_clear() boards_selected = {target : result.brd} self.ResetResultSummary(boards_selected) self.ProduceResultSummary(result.commit_upto, self.commits, @@ -535,8 +535,8 @@ class Builder: line += '%s : ' % self._complete_delay line += target - terminal.PrintClear() - Tprint(line, newline=False, limit_to_line=True) + terminal.print_clear() + tprint(line, newline=False, limit_to_line=True) def _GetOutputDir(self, commit_upto): """Get the name of the output directory for a commit number @@ -666,7 +666,7 @@ class Builder: if line.strip(): size, type, name = line[:-1].split() except: - Tprint("Invalid line in file '%s': '%s'" % (fname, line[:-1])) + tprint("Invalid line in file '%s': '%s'" % (fname, line[:-1])) continue if type in 'tTdDbB': # function names begin with '.' 
on 64-bit powerpc @@ -1009,16 +1009,16 @@ class Builder: return args = [self.ColourNum(x) for x in args] indent = ' ' * 15 - Tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' % + tprint('%s%s: add: %s/%s, grow: %s/%s bytes: %s/%s (%s)' % tuple([indent, self.col.build(self.col.YELLOW, fname)] + args)) - Tprint('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new', + tprint('%s %-38s %7s %7s %+7s' % (indent, 'function', 'old', 'new', 'delta')) for diff, name in delta: if diff: color = self.col.RED if diff > 0 else self.col.GREEN msg = '%s %-38s %7s %7s %+7d' % (indent, name, old.get(name, '-'), new.get(name,'-'), diff) - Tprint(msg, colour=color) + tprint(msg, colour=color) def PrintSizeDetail(self, target_list, show_bloat): @@ -1043,12 +1043,12 @@ class Builder: color = self.col.RED if diff > 0 else self.col.GREEN msg = ' %s %+d' % (name, diff) if not printed_target: - Tprint('%10s %-15s:' % ('', result['_target']), + tprint('%10s %-15s:' % ('', result['_target']), newline=False) printed_target = True - Tprint(msg, colour=color, newline=False) + tprint(msg, colour=color, newline=False) if printed_target: - Tprint() + tprint() if show_bloat: target = result['_target'] outcome = result['_outcome'] @@ -1153,13 +1153,13 @@ class Builder: color = self.col.RED if avg_diff > 0 else self.col.GREEN msg = ' %s %+1.1f' % (name, avg_diff) if not printed_arch: - Tprint('%10s: (for %d/%d boards)' % (arch, count, + tprint('%10s: (for %d/%d boards)' % (arch, count, arch_count[arch]), newline=False) printed_arch = True - Tprint(msg, colour=color, newline=False) + tprint(msg, colour=color, newline=False) if printed_arch: - Tprint() + tprint() if show_detail: self.PrintSizeDetail(target_list, show_bloat) @@ -1304,7 +1304,7 @@ class Builder: col = self.col.RED elif line[0] == 'c': col = self.col.YELLOW - Tprint(' ' + line, newline=True, colour=col) + tprint(' ' + line, newline=True, colour=col) def _OutputErrLines(err_lines, colour): """Output the line of error/warning lines, if not empty @@ -1331,7 +1331,7 @@ class Builder: else: out = self.col.build(colour, line.char + line.errline) out_list.append(out) - Tprint('\n'.join(out_list)) + tprint('\n'.join(out_list)) self._error_lines += 1 @@ -1385,7 +1385,7 @@ class Builder: self.AddOutcome(board_selected, arch_list, unknown_boards, '?', self.col.MAGENTA) for arch, target_list in arch_list.items(): - Tprint('%10s: %s' % (arch, target_list)) + tprint('%10s: %s' % (arch, target_list)) self._error_lines += 1 _OutputErrLines(better_err, colour=self.col.GREEN) _OutputErrLines(worse_err, colour=self.col.RED) @@ -1515,13 +1515,13 @@ class Builder: _AddConfig(lines, 'all', all_plus, all_minus, all_change) #arch_summary[target] = '\n'.join(lines) if lines: - Tprint('%s:' % arch) + tprint('%s:' % arch) _OutputConfigInfo(lines) for lines, targets in lines_by_target.items(): if not lines: continue - Tprint('%s :' % ' '.join(sorted(targets))) + tprint('%s :' % ' '.join(sorted(targets))) _OutputConfigInfo(lines.split('\n')) @@ -1540,7 +1540,7 @@ class Builder: if not board in board_dict: not_built.append(board) if not_built: - Tprint("Boards not built (%d): %s" % (len(not_built), + tprint("Boards not built (%d): %s" % (len(not_built), ', '.join(not_built))) def ProduceResultSummary(self, commit_upto, commits, board_selected): @@ -1553,7 +1553,7 @@ class Builder: if commits: msg = '%02d: %s' % (commit_upto + 1, commits[commit_upto].subject) - Tprint(msg, colour=self.col.BLUE) + tprint(msg, colour=self.col.BLUE) self.PrintResultSummary(board_selected, board_dict, 
err_lines if self._show_errors else [], err_line_boards, warn_lines if self._show_errors else [], warn_line_boards, @@ -1578,7 +1578,7 @@ class Builder: for commit_upto in range(0, self.commit_count, self._step): self.ProduceResultSummary(commit_upto, commits, board_selected) if not self._error_lines: - Tprint('(no errors to report)', colour=self.col.GREEN) + tprint('(no errors to report)', colour=self.col.GREEN) def SetupBuild(self, board_selected, commits): @@ -1629,10 +1629,10 @@ class Builder: if os.path.isdir(git_dir): # This is a clone of the src_dir repo, we can keep using # it but need to fetch from src_dir. - Tprint('\rFetching repo for thread %d' % thread_num, + tprint('\rFetching repo for thread %d' % thread_num, newline=False) gitutil.fetch(git_dir, thread_dir) - terminal.PrintClear() + terminal.print_clear() elif os.path.isfile(git_dir): # This is a worktree of the src_dir repo, we don't need to # create it again or update it in any way. @@ -1643,15 +1643,15 @@ class Builder: raise ValueError('Git dir %s exists, but is not a file ' 'or a directory.' % git_dir) elif setup_git == 'worktree': - Tprint('\rChecking out worktree for thread %d' % thread_num, + tprint('\rChecking out worktree for thread %d' % thread_num, newline=False) gitutil.add_worktree(src_dir, thread_dir) - terminal.PrintClear() + terminal.print_clear() elif setup_git == 'clone' or setup_git == True: - Tprint('\rCloning repo for thread %d' % thread_num, + tprint('\rCloning repo for thread %d' % thread_num, newline=False) gitutil.clone(src_dir, thread_dir) - terminal.PrintClear() + terminal.print_clear() else: raise ValueError("Can't setup git repo with %s." % setup_git) @@ -1717,11 +1717,11 @@ class Builder: """ to_remove = self._GetOutputSpaceRemovals() if to_remove: - Tprint('Removing %d old build directories...' % len(to_remove), + tprint('Removing %d old build directories...' 
% len(to_remove), newline=False) for dirname in to_remove: shutil.rmtree(dirname) - terminal.PrintClear() + terminal.print_clear() def BuildBoards(self, commits, board_selected, keep_outputs, verbose): """Build all commits for a list of boards @@ -1747,7 +1747,7 @@ class Builder: self._PrepareWorkingSpace(min(self.num_threads, len(board_selected)), commits is not None) self._PrepareOutputSpace() - Tprint('\rStarting build...', newline=False) + tprint('\rStarting build...', newline=False) self.SetupBuild(board_selected, commits) self.ProcessResult(None) self.thread_exceptions = [] @@ -1774,7 +1774,7 @@ class Builder: # Wait until we have processed all output self.out_queue.join() - Tprint() + tprint() msg = 'Completed: %d total built' % self.count if self.already_done: @@ -1789,9 +1789,9 @@ class Builder: duration = duration - timedelta(microseconds=duration.microseconds) rate = float(self.count) / duration.total_seconds() msg += ', duration %s, rate %1.2f' % (duration, rate) - Tprint(msg) + tprint(msg) if self.thread_exceptions: - Tprint('Failed: %d thread exceptions' % len(self.thread_exceptions), + tprint('Failed: %d thread exceptions' % len(self.thread_exceptions), colour=self.col.RED) return (self.fail, self.warned, self.thread_exceptions) diff --git a/tools/buildman/control.py b/tools/buildman/control.py index 195d27a044c..8f4810bc3ef 100644 --- a/tools/buildman/control.py +++ b/tools/buildman/control.py @@ -18,7 +18,7 @@ from patman import gitutil from patman import patchstream from patman import terminal from patman import tools -from patman.terminal import Tprint +from patman.terminal import tprint def GetPlural(count): """Returns a plural 's' if count is not 1""" @@ -362,7 +362,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None, else: commits = None - Tprint(GetActionSummary(options.summary, commits, board_selected, + tprint(GetActionSummary(options.summary, commits, board_selected, options)) # We can't show function sizes without board details at present diff --git a/tools/buildman/func_test.py b/tools/buildman/func_test.py index b48dd1671d9..6fcceb0ea56 100644 --- a/tools/buildman/func_test.py +++ b/tools/buildman/func_test.py @@ -205,8 +205,8 @@ class TestFunctional(unittest.TestCase): self._test_branch = TEST_BRANCH # Avoid sending any output and clear all terminal output - terminal.SetPrintTestMode() - terminal.GetPrintTestLines() + terminal.set_print_test_mode() + terminal.get_print_test_lines() def tearDown(self): shutil.rmtree(self._base_dir) @@ -438,7 +438,7 @@ class TestFunctional(unittest.TestCase): print(len(lines)) for line in lines: print(line) - #self.print_lines(terminal.GetPrintTestLines()) + #self.print_lines(terminal.get_print_test_lines()) def testNoBoards(self): """Test that buildman aborts when there are no boards""" @@ -450,7 +450,7 @@ class TestFunctional(unittest.TestCase): """Very simple test to invoke buildman on the current source""" self.setupToolchains(); self._RunControl('-o', self._output_dir) - lines = terminal.GetPrintTestLines() + lines = terminal.get_print_test_lines() self.assertIn('Building current source for %d boards' % len(boards), lines[0].text) @@ -463,7 +463,7 @@ class TestFunctional(unittest.TestCase): """Test that missing toolchains are detected""" self.setupToolchains(); ret_code = self._RunControl('-b', TEST_BRANCH, '-o', self._output_dir) - lines = terminal.GetPrintTestLines() + lines = terminal.get_print_test_lines() # Buildman always builds the upstream commit as well self.assertIn('Building %d commits 
for %d boards' % diff --git a/tools/buildman/test.py b/tools/buildman/test.py index 4b4a0349e81..714bb3e4f91 100644 --- a/tools/buildman/test.py +++ b/tools/buildman/test.py @@ -148,7 +148,7 @@ class TestBuild(unittest.TestCase): self.toolchains.Add('gcc', test=False) # Avoid sending any output - terminal.SetPrintTestMode() + terminal.set_print_test_mode() self._col = terminal.Color() self.base_dir = tempfile.mkdtemp() @@ -209,7 +209,7 @@ class TestBuild(unittest.TestCase): # associated with each. This calls our Make() to inject the fake output. build.BuildBoards(self.commits, board_selected, keep_outputs=False, verbose=False) - lines = terminal.GetPrintTestLines() + lines = terminal.get_print_test_lines() count = 0 for line in lines: if line.text.strip(): @@ -221,8 +221,8 @@ class TestBuild(unittest.TestCase): build.SetDisplayOptions(**kwdisplay_args); build.ShowSummary(self.commits, board_selected) if echo_lines: - terminal.EchoPrintTestLines() - return iter(terminal.GetPrintTestLines()) + terminal.echo_print_test_lines() + return iter(terminal.get_print_test_lines()) def _CheckOutput(self, lines, list_error_boards=False, filter_dtb_warnings=False, diff --git a/tools/patman/func_test.py b/tools/patman/func_test.py index 9e869c58ff6..59ee90c344f 100644 --- a/tools/patman/func_test.py +++ b/tools/patman/func_test.py @@ -45,7 +45,7 @@ class TestFunctional(unittest.TestCase): def tearDown(self): shutil.rmtree(self.tmpdir) - terminal.SetPrintTestMode(False) + terminal.set_print_test_mode(False) @staticmethod def _get_path(fname): @@ -907,10 +907,10 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c series = Series() series.commits = [commit1, commit2] - terminal.SetPrintTestMode() + terminal.set_print_test_mode() status.check_patchwork_status(series, '1234', None, None, False, False, None, self._fake_patchwork2) - lines = iter(terminal.GetPrintTestLines()) + lines = iter(terminal.get_print_test_lines()) col = terminal.Color() self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE), next(lines)) @@ -1021,11 +1021,11 @@ diff --git a/lib/efi_loader/efi_memory.c b/lib/efi_loader/efi_memory.c # 4 responses added from patchwork into new branch 'first2' # - terminal.SetPrintTestMode() + terminal.set_print_test_mode() status.check_patchwork_status(series, '1234', branch, dest_branch, False, False, None, self._fake_patchwork3, repo) - lines = terminal.GetPrintTestLines() + lines = terminal.get_print_test_lines() self.assertEqual(12, len(lines)) self.assertEqual( "4 responses added from patchwork into new branch 'first2'", @@ -1223,10 +1223,10 @@ Reviewed-by: %s series = Series() series.commits = [commit1, commit2] - terminal.SetPrintTestMode() + terminal.set_print_test_mode() status.check_patchwork_status(series, '1234', None, None, False, True, None, self._fake_patchwork2) - lines = iter(terminal.GetPrintTestLines()) + lines = iter(terminal.get_print_test_lines()) col = terminal.Color() self.assertEqual(terminal.PrintLine(' 1 Subject 1', col.BLUE), next(lines)) diff --git a/tools/patman/main.py b/tools/patman/main.py index d32eae4bfca..2a2ac457093 100755 --- a/tools/patman/main.py +++ b/tools/patman/main.py @@ -177,7 +177,7 @@ elif args.cmd == 'status': args.dest_branch, args.force, args.show_comments, args.patchwork_url) except Exception as e: - terminal.Tprint('patman: %s: %s' % (type(e).__name__, e), + terminal.tprint('patman: %s: %s' % (type(e).__name__, e), colour=terminal.Color.RED) if args.debug: print() diff --git a/tools/patman/status.py b/tools/patman/status.py 
index fbed055d347..47ed6d61d4d 100644 --- a/tools/patman/status.py +++ b/tools/patman/status.py @@ -338,9 +338,9 @@ def show_responses(rtags, indent, is_new): for tag in sorted(rtags.keys()): people = rtags[tag] for who in sorted(people): - terminal.Tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag), + terminal.tprint(indent + '%s %s: ' % ('+' if is_new else ' ', tag), newline=False, colour=col.GREEN, bright=is_new) - terminal.Tprint(who, colour=col.WHITE, bright=is_new) + terminal.tprint(who, colour=col.WHITE, bright=is_new) count += 1 return count @@ -455,7 +455,7 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, patch = patch_for_commit.get(seq) if not patch: continue - terminal.Tprint('%3d %s' % (patch.seq, patch.subject[:50]), + terminal.tprint('%3d %s' % (patch.seq, patch.subject[:50]), colour=col.BLUE) cmt = series.commits[seq] base_rtags = cmt.rtags @@ -466,15 +466,15 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, num_to_add += show_responses(new_rtags, indent, True) if show_comments: for review in review_list[seq]: - terminal.Tprint('Review: %s' % review.meta, colour=col.RED) + terminal.tprint('Review: %s' % review.meta, colour=col.RED) for snippet in review.snippets: for line in snippet: quoted = line.startswith('>') - terminal.Tprint(' %s' % line, + terminal.tprint(' %s' % line, colour=col.MAGENTA if quoted else None) - terminal.Tprint() + terminal.tprint() - terminal.Tprint("%d new response%s available in patchwork%s" % + terminal.tprint("%d new response%s available in patchwork%s" % (num_to_add, 's' if num_to_add != 1 else '', '' if dest_branch else ' (use -d to write them to a new branch)')) @@ -482,6 +482,6 @@ def check_patchwork_status(series, series_id, branch, dest_branch, force, if dest_branch: num_added = create_branch(series, new_rtag_list, branch, dest_branch, force, test_repo) - terminal.Tprint( + terminal.tprint( "%d response%s added from patchwork into new branch '%s'" % (num_added, 's' if num_added != 1 else '', dest_branch)) diff --git a/tools/patman/terminal.py b/tools/patman/terminal.py index e72c55ba98e..40d79f8ac07 100644 --- a/tools/patman/terminal.py +++ b/tools/patman/terminal.py @@ -51,7 +51,7 @@ class PrintLine: (self.newline, self.colour, self.bright, self.text)) -def CalcAsciiLen(text): +def calc_ascii_len(text): """Calculate the length of a string, ignoring any ANSI sequences When displayed on a terminal, ANSI sequences don't take any space, so we @@ -67,20 +67,20 @@ def CalcAsciiLen(text): >>> text = col.build(Color.RED, 'abc') >>> len(text) 14 - >>> CalcAsciiLen(text) + >>> calc_ascii_len(text) 3 >>> >>> text += 'def' - >>> CalcAsciiLen(text) + >>> calc_ascii_len(text) 6 >>> text += col.build(Color.RED, 'abc') - >>> CalcAsciiLen(text) + >>> calc_ascii_len(text) 9 """ result = ansi_escape.sub('', text) return len(result) -def TrimAsciiLen(text, size): +def trim_ascii_len(text, size): """Trim a string containing ANSI sequences to the given ASCII length The string is trimmed with ANSI sequences being ignored for the length @@ -90,18 +90,18 @@ def TrimAsciiLen(text, size): >>> text = col.build(Color.RED, 'abc') >>> len(text) 14 - >>> CalcAsciiLen(TrimAsciiLen(text, 4)) + >>> calc_ascii_len(trim_ascii_len(text, 4)) 3 - >>> CalcAsciiLen(TrimAsciiLen(text, 2)) + >>> calc_ascii_len(trim_ascii_len(text, 2)) 2 >>> text += 'def' - >>> CalcAsciiLen(TrimAsciiLen(text, 4)) + >>> calc_ascii_len(trim_ascii_len(text, 4)) 4 >>> text += col.build(Color.RED, 'ghi') - >>> CalcAsciiLen(TrimAsciiLen(text, 7)) 
+ >>> calc_ascii_len(trim_ascii_len(text, 7)) 7 """ - if CalcAsciiLen(text) < size: + if calc_ascii_len(text) < size: return text pos = 0 out = '' @@ -130,7 +130,7 @@ def TrimAsciiLen(text, size): return out -def Tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True): +def tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True): """Handle a line of output to the terminal. In test mode this is recorded in a list. Otherwise it is output to the @@ -155,11 +155,11 @@ def Tprint(text='', newline=True, colour=None, limit_to_line=False, bright=True) else: if limit_to_line: cols = shutil.get_terminal_size().columns - text = TrimAsciiLen(text, cols) + text = trim_ascii_len(text, cols) print(text, end='', flush=True) - last_print_len = CalcAsciiLen(text) + last_print_len = calc_ascii_len(text) -def PrintClear(): +def print_clear(): """Clear a previously line that was printed with no newline""" global last_print_len @@ -167,15 +167,15 @@ def PrintClear(): print('\r%s\r' % (' '* last_print_len), end='', flush=True) last_print_len = None -def SetPrintTestMode(enable=True): +def set_print_test_mode(enable=True): """Go into test mode, where all printing is recorded""" global print_test_mode print_test_mode = enable - GetPrintTestLines() + get_print_test_lines() -def GetPrintTestLines(): - """Get a list of all lines output through Tprint() +def get_print_test_lines(): + """Get a list of all lines output through tprint() Returns: A list of PrintLine objects @@ -186,7 +186,7 @@ def GetPrintTestLines(): print_test_list = [] return ret -def EchoPrintTestLines(): +def echo_print_test_lines(): """Print out the text lines collected""" for line in print_test_list: if line.colour: @@ -221,7 +221,7 @@ class Color(object): except: self._enabled = False - def Start(self, color, bright=True): + def start(self, color, bright=True): """Returns a start color code. Args: @@ -236,7 +236,7 @@ class Color(object): return base % (color + 30) return '' - def Stop(self): + def stop(self): """Returns a stop color code. Returns: From 6d7ac6a148158738980ee1f06eb09f61479119ef Mon Sep 17 00:00:00 2001 From: Simon Glass Date: Sat, 29 Jan 2022 14:14:19 -0700 Subject: [PATCH 19/27] patman: Update with new pylint scores Update the new baseline since various scores have improved. 
Signed-off-by: Simon Glass --- scripts/pylint.base | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/scripts/pylint.base b/scripts/pylint.base index 3da31bb1a32..765fd6727bc 100644 --- a/scripts/pylint.base +++ b/scripts/pylint.base @@ -204,25 +204,25 @@ tools_microcode-tool 7.25 tools_moveconfig 8.32 tools_patman___init__ 0.00 tools_patman_checkpatch 8.04 -tools_patman_command 4.74 -tools_patman_commit 3.25 +tools_patman_command 5.00 +tools_patman_commit 4.00 tools_patman_control 8.14 -tools_patman_cros_subprocess 7.56 -tools_patman_func_test 8.14 -tools_patman_get_maintainer 6.47 +tools_patman_cros_subprocess 7.61 +tools_patman_func_test 8.24 +tools_patman_get_maintainer 5.29 tools_patman_gitutil 5.62 -tools_patman_main 7.90 -tools_patman_patchstream 9.11 -tools_patman_project 6.67 -tools_patman_series 6.16 -tools_patman_settings 5.89 +tools_patman_main 8.23 +tools_patman_patchstream 9.04 +tools_patman_project 4.44 +tools_patman_series 5.95 +tools_patman_settings 5.63 tools_patman_setup 5.00 -tools_patman_status 8.62 -tools_patman_terminal 7.05 -tools_patman_test_checkpatch 6.81 -tools_patman_test_util 7.36 -tools_patman_tools 4.69 -tools_patman_tout 3.12 -tools_rkmux 6.90 +tools_patman_status 8.43 +tools_patman_terminal 7.24 +tools_patman_test_checkpatch 7.75 +tools_patman_test_util 6.79 +tools_patman_tools 4.98 +tools_patman_tout 5.16 +tools_rkmux 6.76 tools_rmboard 7.76 tools_zynqmp_pm_cfg_obj_convert 6.67 From 0129f2d8ee41d221519287de2a7417f7431584c7 Mon Sep 17 00:00:00 2001 From: Heiko Schocher Date: Wed, 2 Feb 2022 13:53:17 +0100 Subject: [PATCH 20/27] serial: remove nulldev_serial_input nulldev_serial_input is static and not used in this file, so remove it. Signed-off-by: Heiko Schocher Reviewed-by: Simon Glass --- drivers/serial/serial_nulldev.c | 5 ----- 1 file changed, 5 deletions(-) diff --git a/drivers/serial/serial_nulldev.c b/drivers/serial/serial_nulldev.c index efae14dd6ca..f3ca7f52559 100644 --- a/drivers/serial/serial_nulldev.c +++ b/drivers/serial/serial_nulldev.c @@ -22,11 +22,6 @@ static int nulldev_serial_pending(struct udevice *dev, bool input) return 0; } -static int nulldev_serial_input(struct udevice *dev) -{ - return 0; -} - static int nulldev_serial_putc(struct udevice *dev, const char ch) { return 0; From 3ed8c4c88379557cce495b3b703c3b393db2d648 Mon Sep 17 00:00:00 2001 From: Heiko Schocher Date: Wed, 2 Feb 2022 13:55:19 +0100 Subject: [PATCH 21/27] serial-uclass: fix build warning if CONFIG_DM_STDIO is defined but SERIAL_PRESENT not, gcc drops warnings for serial_stub_* functions that they are defined but not used. Fix it. 
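(The hunk below only moves a single #endif, which is hard to read on its own. The following is a rough standalone sketch of the guard structure the patch restores; HAVE_STDIO and HAVE_SERIAL are made-up stand-ins for the real CONFIG_DM_STDIO / SERIAL_PRESENT tests, and stub_putc()/stub_tstc() are placeholders for the serial_stub_* functions. The point is simply that the stubs must be compiled under the same condition as the code that uses them, otherwise gcc warns about static functions that are defined but never used.)

#define HAVE_STDIO		/* stands in for the CONFIG_DM_STDIO=y case */
/* HAVE_SERIAL deliberately left undefined: the SERIAL_PRESENT=n case */

#ifdef HAVE_STDIO
#ifdef HAVE_SERIAL
/*
 * Both stubs now sit inside the same pair of guards, so when
 * HAVE_SERIAL is not set neither of them is compiled and gcc has
 * nothing to warn about.
 */
static void stub_putc(char ch)
{
	(void)ch;	/* a real driver call would go here */
}

static int stub_tstc(void)
{
	return 0;	/* a real driver call would go here */
}
#endif /* HAVE_SERIAL */
#endif /* HAVE_STDIO */
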
Signed-off-by: Heiko Schocher Reviewed-by: Simon Glass --- drivers/serial/serial-uclass.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/drivers/serial/serial-uclass.c b/drivers/serial/serial-uclass.c index 96a1cb65ba2..362cedd9552 100644 --- a/drivers/serial/serial-uclass.c +++ b/drivers/serial/serial-uclass.c @@ -357,7 +357,6 @@ static void serial_stub_putc(struct stdio_dev *sdev, const char ch) { _serial_putc(sdev->priv, ch); } -#endif static void serial_stub_puts(struct stdio_dev *sdev, const char *str) { @@ -374,6 +373,7 @@ static int serial_stub_tstc(struct stdio_dev *sdev) return _serial_tstc(sdev->priv); } #endif +#endif /** * on_baudrate() - Update the actual baudrate when the env var changes From ab2ffe735979b5e6ef417adbd6019fa603de1715 Mon Sep 17 00:00:00 2001 From: Moritz Fischer Date: Sat, 5 Feb 2022 12:17:44 -0800 Subject: [PATCH 22/27] arch: x86: lib: acpi_table: Fix MCFG entries Commit d953137526cc ("x86: Move SSDT table to a writer function") introduced a bug where the actual MCFG entries are no longer generated. Cc: Simon Glass Fixes: d953137526cc ("x86: Move SSDT table to a writer function") Signed-off-by: Moritz Fischer Reviewed-by: Simon Glass Tested-by: Simon Glass --- arch/x86/lib/acpi_table.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/arch/x86/lib/acpi_table.c b/arch/x86/lib/acpi_table.c index c0534343f1f..753bf396199 100644 --- a/arch/x86/lib/acpi_table.c +++ b/arch/x86/lib/acpi_table.c @@ -499,6 +499,8 @@ int acpi_write_mcfg(struct acpi_ctx *ctx, const struct acpi_writer *entry) header->length = sizeof(struct acpi_mcfg); header->revision = 1; + current = acpi_fill_mcfg(current); + /* (Re)calculate length and checksum */ header->length = current - (u32)mcfg; header->checksum = table_compute_checksum(mcfg, header->length); From 058fb9f5ffc422f987c33adb01f8fa6e4434eff8 Mon Sep 17 00:00:00 2001 From: Moritz Fischer Date: Sat, 5 Feb 2022 12:17:45 -0800 Subject: [PATCH 23/27] acpi: Move MCFG implementation to common lib MCFG tables are used on multiple arches. Move to common ACPI lib. 
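(After the move, the weak acpi_fill_mcfg() in lib/acpi/mcfg.c returns -ENOENT and each platform supplies its own implementation against the context-based API shown in the diff below. The following is a minimal sketch of such an override, modelled on the intel_common conversion in this patch; the 0xe0000000 base address and the 0..255 bus range are invented values, not taken from any real board, and the header choice is indicative only.)

#include <acpi/acpi_table.h>	/* indicative; exact headers per platform */
#include <dm/acpi.h>

/* Hypothetical platform hook: describe one ECAM region for PCI segment 0 */
int acpi_fill_mcfg(struct acpi_ctx *ctx)
{
	size_t size;

	size = acpi_create_mcfg_mmconfig((void *)ctx->current,
					 0xe0000000, 0, 0, 255);
	acpi_inc(ctx, size);

	return 0;
}

(Because acpi_write_mcfg() now propagates a non-zero return via log_msg_ret(), a platform that keeps the weak -ENOENT default simply gets no MCFG table rather than an empty one.)
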
Cc: Simon Glass Signed-off-by: Moritz Fischer Reviewed-by: Simon Glass Use sizeof(*mcfg) instead of sizeof(*header) Signed-off-by: Simon Glass --- arch/x86/cpu/intel_common/acpi.c | 15 +++++--- arch/x86/cpu/tangier/acpi.c | 15 +++++--- arch/x86/include/asm/acpi_table.h | 1 - arch/x86/lib/acpi_table.c | 54 -------------------------- lib/acpi/Makefile | 1 + lib/acpi/mcfg.c | 64 +++++++++++++++++++++++++++++++ 6 files changed, 83 insertions(+), 67 deletions(-) create mode 100644 lib/acpi/mcfg.c diff --git a/arch/x86/cpu/intel_common/acpi.c b/arch/x86/cpu/intel_common/acpi.c index 15f19da2067..d94ec208f65 100644 --- a/arch/x86/cpu/intel_common/acpi.c +++ b/arch/x86/cpu/intel_common/acpi.c @@ -31,14 +31,17 @@ #include #include -u32 acpi_fill_mcfg(u32 current) +int acpi_fill_mcfg(struct acpi_ctx *ctx) { + size_t size; + /* PCI Segment Group 0, Start Bus Number 0, End Bus Number is 255 */ - current += acpi_create_mcfg_mmconfig((void *)current, - CONFIG_MMCONF_BASE_ADDRESS, 0, 0, - (CONFIG_SA_PCIEX_LENGTH >> 20) - - 1); - return current; + size = acpi_create_mcfg_mmconfig((void *)ctx->current, + CONFIG_MMCONF_BASE_ADDRESS, 0, 0, + (CONFIG_SA_PCIEX_LENGTH >> 20) - 1); + acpi_inc(ctx, size); + + return 0; } static int acpi_sci_irq(void) diff --git a/arch/x86/cpu/tangier/acpi.c b/arch/x86/cpu/tangier/acpi.c index 12f92896124..e3a2fcea76d 100644 --- a/arch/x86/cpu/tangier/acpi.c +++ b/arch/x86/cpu/tangier/acpi.c @@ -68,14 +68,17 @@ u32 acpi_fill_madt(u32 current) return current; } -u32 acpi_fill_mcfg(u32 current) +int acpi_fill_mcfg(struct acpi_ctx *ctx) { - /* TODO: Derive parameters from SFI MCFG table */ - current += acpi_create_mcfg_mmconfig - ((struct acpi_mcfg_mmconfig *)current, - MCFG_BASE_ADDRESS, 0x0, 0x0, 0x0); + size_t size; - return current; + /* TODO: Derive parameters from SFI MCFG table */ + size = acpi_create_mcfg_mmconfig + ((struct acpi_mcfg_mmconfig *)ctx->current, + MCFG_BASE_ADDRESS, 0x0, 0x0, 0x0); + acpi_inc(ctx, size); + + return 0; } static u32 acpi_fill_csrt_dma(struct acpi_csrt_group *grp) diff --git a/arch/x86/include/asm/acpi_table.h b/arch/x86/include/asm/acpi_table.h index 0d07f7cad87..39547de0d4d 100644 --- a/arch/x86/include/asm/acpi_table.h +++ b/arch/x86/include/asm/acpi_table.h @@ -34,7 +34,6 @@ int acpi_create_madt_lapic_nmi(struct acpi_madt_lapic_nmi *lapic_nmi, u32 acpi_fill_madt(u32 current); int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base, u16 seg_nr, u8 start, u8 end); -u32 acpi_fill_mcfg(u32 current); /** * acpi_write_hpet() - Write out a HPET table diff --git a/arch/x86/lib/acpi_table.c b/arch/x86/lib/acpi_table.c index 753bf396199..c5b33dc65de 100644 --- a/arch/x86/lib/acpi_table.c +++ b/arch/x86/lib/acpi_table.c @@ -161,28 +161,6 @@ int acpi_write_madt(struct acpi_ctx *ctx, const struct acpi_writer *entry) } ACPI_WRITER(5x86, NULL, acpi_write_madt, 0); -int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base, - u16 seg_nr, u8 start, u8 end) -{ - memset(mmconfig, 0, sizeof(*mmconfig)); - mmconfig->base_address_l = base; - mmconfig->base_address_h = 0; - mmconfig->pci_segment_group_number = seg_nr; - mmconfig->start_bus_number = start; - mmconfig->end_bus_number = end; - - return sizeof(struct acpi_mcfg_mmconfig); -} - -__weak u32 acpi_fill_mcfg(u32 current) -{ - current += acpi_create_mcfg_mmconfig - ((struct acpi_mcfg_mmconfig *)current, - CONFIG_PCIE_ECAM_BASE, 0x0, 0x0, 255); - - return current; -} - /** * acpi_create_tcpa() - Create a TCPA table * @@ -480,38 +458,6 @@ int acpi_write_gnvs(struct acpi_ctx 
*ctx, const struct acpi_writer *entry) } ACPI_WRITER(4gnvs, "GNVS", acpi_write_gnvs, 0); -/* MCFG is defined in the PCI Firmware Specification 3.0 */ -int acpi_write_mcfg(struct acpi_ctx *ctx, const struct acpi_writer *entry) -{ - struct acpi_table_header *header; - struct acpi_mcfg *mcfg; - u32 current; - - mcfg = ctx->current; - header = &mcfg->header; - - current = (u32)mcfg + sizeof(struct acpi_mcfg); - - memset(mcfg, '\0', sizeof(struct acpi_mcfg)); - - /* Fill out header fields */ - acpi_fill_header(header, "MCFG"); - header->length = sizeof(struct acpi_mcfg); - header->revision = 1; - - current = acpi_fill_mcfg(current); - - /* (Re)calculate length and checksum */ - header->length = current - (u32)mcfg; - header->checksum = table_compute_checksum(mcfg, header->length); - - acpi_inc(ctx, mcfg->header.length); - acpi_add_table(ctx, mcfg); - - return 0; -} -ACPI_WRITER(5mcfg, "MCFG", acpi_write_mcfg, 0); - /** * acpi_write_hpet() - Write out a HPET table * diff --git a/lib/acpi/Makefile b/lib/acpi/Makefile index f9b504988f2..956b5a0d726 100644 --- a/lib/acpi/Makefile +++ b/lib/acpi/Makefile @@ -11,6 +11,7 @@ obj-y += acpi_writer.o ifndef CONFIG_QEMU obj-y += base.o obj-y += csrt.o +obj-y += mcfg.o # Sandbox does not build a .asl file ifndef CONFIG_SANDBOX diff --git a/lib/acpi/mcfg.c b/lib/acpi/mcfg.c new file mode 100644 index 00000000000..7404ae586ab --- /dev/null +++ b/lib/acpi/mcfg.c @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: GPL-2.0+ +/* + * Write an ACPI MCFG table + * + * Copyright 2022 Google LLC + */ + +#define LOG_CATEGORY LOGC_ACPI + +#include +#include +#include +#include +#include + +int acpi_create_mcfg_mmconfig(struct acpi_mcfg_mmconfig *mmconfig, u32 base, + u16 seg_nr, u8 start, u8 end) +{ + memset(mmconfig, 0, sizeof(*mmconfig)); + mmconfig->base_address_l = base; + mmconfig->base_address_h = 0; + mmconfig->pci_segment_group_number = seg_nr; + mmconfig->start_bus_number = start; + mmconfig->end_bus_number = end; + + return sizeof(struct acpi_mcfg_mmconfig); +} + +__weak int acpi_fill_mcfg(struct acpi_ctx *ctx) +{ + return -ENOENT; +} + +/* MCFG is defined in the PCI Firmware Specification 3.0 */ +int acpi_write_mcfg(struct acpi_ctx *ctx, const struct acpi_writer *entry) +{ + struct acpi_table_header *header; + struct acpi_mcfg *mcfg; + int ret; + + mcfg = ctx->current; + header = &mcfg->header; + + memset(mcfg, '\0', sizeof(struct acpi_mcfg)); + + /* Fill out header fields */ + acpi_fill_header(header, "MCFG"); + header->length = sizeof(struct acpi_mcfg); + header->revision = 1; + acpi_inc(ctx, sizeof(*mcfg)); + + ret = acpi_fill_mcfg(ctx); + if (ret) + return log_msg_ret("fill", ret); + + /* (Re)calculate length and checksum */ + header->length = (ulong)ctx->current - (ulong)mcfg; + header->checksum = table_compute_checksum(mcfg, header->length); + + acpi_add_table(ctx, mcfg); + + return 0; +} +ACPI_WRITER(5mcfg, "MCFG", acpi_write_mcfg, 0); From 21353311ffe9bf28e168ab7d69ec670f0093cf96 Mon Sep 17 00:00:00 2001 From: Alper Nebi Yasak Date: Tue, 8 Feb 2022 01:08:04 +0300 Subject: [PATCH 24/27] binman: Fix subentry expansion for FIT entry type Binman tries to expand some entries into parts that make it up, e.g. 'u-boot' into a 'u-boot-expanded' section that contains 'u-boot-nodtb' and 'u-boot-dtb'. Entries with child entries must call ExpandEntries() on them to build a correct image, as it's possible that unexpanded child entries have no data of their own. 
The FIT entry type doesn't currently do this, which means putting a "u-boot" entry inside it doesn't work as expected. Implement ExpandEntries() for FIT and add a copy of a simple FIT image test that checks subentry expansion in FIT entries. Signed-off-by: Alper Nebi Yasak Reviewed-by: Simon Glass --- tools/binman/etype/fit.py | 5 +++++ tools/binman/ftest.py | 33 ++++++++++++++++++++++++--------- 2 files changed, 29 insertions(+), 9 deletions(-) diff --git a/tools/binman/etype/fit.py b/tools/binman/etype/fit.py index 954cbc3d855..bb2a4e2d1e8 100644 --- a/tools/binman/etype/fit.py +++ b/tools/binman/etype/fit.py @@ -237,6 +237,11 @@ class Entry_fit(Entry): self._fdt = Fdt.FromData(fdt.as_bytearray()) self._fdt.Scan() + def ExpandEntries(self): + super().ExpandEntries() + for section in self._fit_sections.values(): + section.ExpandEntries() + def ObtainContents(self): """Obtain the contents of the FIT diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index 123fdb15f78..16956e00e94 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -61,6 +61,9 @@ PPC_MPC85XX_BR_DATA = b'ppcmpc85xxbr' U_BOOT_NODTB_DATA = b'nodtb with microcode pointer somewhere in here' U_BOOT_SPL_NODTB_DATA = b'splnodtb with microcode pointer somewhere in here' U_BOOT_TPL_NODTB_DATA = b'tplnodtb with microcode pointer somewhere in here' +U_BOOT_EXP_DATA = U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA +U_BOOT_SPL_EXP_DATA = U_BOOT_SPL_NODTB_DATA + U_BOOT_SPL_DTB_DATA +U_BOOT_TPL_EXP_DATA = U_BOOT_TPL_NODTB_DATA + U_BOOT_TPL_DTB_DATA FSP_DATA = b'fsp' CMC_DATA = b'cmc' VBT_DATA = b'vbt' @@ -3713,13 +3716,7 @@ class TestFunctional(unittest.TestCase): """Test that zero-size overlapping regions are ignored""" self._DoTestFile('160_pack_overlap_zero.dts') - def testSimpleFit(self): - """Test an image with a FIT inside""" - data = self._DoReadFile('161_fit.dts') - self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)]) - self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):]) - fit_data = data[len(U_BOOT_DATA):-len(U_BOOT_NODTB_DATA)] - + def _CheckSimpleFitData(self, fit_data, kernel_data, fdt1_data): # The data should be inside the FIT dtb = fdt.Fdt.FromData(fit_data) dtb.Scan() @@ -3752,8 +3749,26 @@ class TestFunctional(unittest.TestCase): self.assertIsNotNone(data_sizes) self.assertEqual(2, len(data_sizes)) # Format is "4 Bytes = 0.00 KiB = 0.00 MiB" so take the first word - self.assertEqual(len(U_BOOT_DATA), int(data_sizes[0].split()[0])) - self.assertEqual(len(U_BOOT_SPL_DTB_DATA), int(data_sizes[1].split()[0])) + self.assertEqual(len(kernel_data), int(data_sizes[0].split()[0])) + self.assertEqual(len(fdt1_data), int(data_sizes[1].split()[0])) + + def testSimpleFit(self): + """Test an image with a FIT inside""" + data = self._DoReadFile('161_fit.dts') + self.assertEqual(U_BOOT_DATA, data[:len(U_BOOT_DATA)]) + self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):]) + fit_data = data[len(U_BOOT_DATA):-len(U_BOOT_NODTB_DATA)] + + self._CheckSimpleFitData(fit_data, U_BOOT_DATA, U_BOOT_SPL_DTB_DATA) + + def testSimpleFitExpandsSubentries(self): + """Test that FIT images expand their subentries""" + data = self._DoReadFileDtb('161_fit.dts', use_expanded=True)[0] + self.assertEqual(U_BOOT_EXP_DATA, data[:len(U_BOOT_EXP_DATA)]) + self.assertEqual(U_BOOT_NODTB_DATA, data[-len(U_BOOT_NODTB_DATA):]) + fit_data = data[len(U_BOOT_EXP_DATA):-len(U_BOOT_NODTB_DATA)] + + self._CheckSimpleFitData(fit_data, U_BOOT_EXP_DATA, U_BOOT_SPL_DTB_DATA) def testFitExternal(self): """Test an image with an FIT with 
external images""" From ed293c322188d9d41bf6f26e01ad6343974150d1 Mon Sep 17 00:00:00 2001 From: Alper Nebi Yasak Date: Tue, 8 Feb 2022 01:08:05 +0300 Subject: [PATCH 25/27] binman: Register and check bintools from FIT subentries Binman keeps track of binary tools each entry wants to use. The implementation of this for the FIT entry only adds "mkimage", but not the tools that would be used by its subentries. Register the binary tools that FIT subentries will use in addition to the one FIT itself uses, and check their existence by copying the appropriate method from Section entry type. Also add tests that check if these subentries can use and warn about binary tools. Signed-off-by: Alper Nebi Yasak Reviewed-by: Simon Glass --- tools/binman/etype/fit.py | 14 +++++++ tools/binman/ftest.py | 25 ++++++++++++ .../binman/test/220_fit_subentry_bintool.dts | 39 +++++++++++++++++++ 3 files changed, 78 insertions(+) create mode 100644 tools/binman/test/220_fit_subentry_bintool.dts diff --git a/tools/binman/etype/fit.py b/tools/binman/etype/fit.py index bb2a4e2d1e8..9445997fdac 100644 --- a/tools/binman/etype/fit.py +++ b/tools/binman/etype/fit.py @@ -311,4 +311,18 @@ class Entry_fit(Entry): section.SetAllowMissing(allow_missing) def AddBintools(self, tools): + for section in self._fit_sections.values(): + section.AddBintools(tools) self.mkimage = self.AddBintool(tools, 'mkimage') + + def check_missing_bintools(self, missing_list): + """Check if any entries in this section have missing bintools + + If there are missing bintools, these are added to the list + + Args: + missing_list: List of Bintool objects to be added to + """ + super().check_missing_bintools(missing_list) + for entry in self._fit_sections.values(): + entry.check_missing_bintools(missing_list) diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index 16956e00e94..dc14a74904a 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -5133,6 +5133,31 @@ fdt fdtmap Extract the devicetree blob from the fdtmap finally: shutil.rmtree(tmpdir) + def testFitSubentryUsesBintool(self): + """Test that binman FIT subentries can use bintools""" + command.test_result = self._HandleGbbCommand + entry_args = { + 'keydir': 'devkeys', + 'bmpblk': 'bmpblk.bin', + } + data, _, _, _ = self._DoReadFileDtb('220_fit_subentry_bintool.dts', + entry_args=entry_args) + + expected = (GBB_DATA + GBB_DATA + tools.GetBytes(0, 8) + + tools.GetBytes(0, 0x2180 - 16)) + self.assertIn(expected, data) + + def testFitSubentryMissingBintool(self): + """Test that binman reports missing bintools for FIT subentries""" + entry_args = { + 'keydir': 'devkeys', + } + with test_util.capture_sys_output() as (_, stderr): + self._DoTestFile('220_fit_subentry_bintool.dts', + force_missing_bintools='futility', entry_args=entry_args) + err = stderr.getvalue() + self.assertRegex(err, + "Image 'main-section'.*missing bintools.*: futility") if __name__ == "__main__": unittest.main() diff --git a/tools/binman/test/220_fit_subentry_bintool.dts b/tools/binman/test/220_fit_subentry_bintool.dts new file mode 100644 index 00000000000..6e29d41eeb3 --- /dev/null +++ b/tools/binman/test/220_fit_subentry_bintool.dts @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: GPL-2.0+ + +/dts-v1/; + +/ { + #address-cells = <1>; + #size-cells = <1>; + + binman { + fit { + description = "test-desc"; + #address-cells = <1>; + + images { + test { + description = "Something using a bintool"; + type = "kernel"; + arch = "arm"; + os = "linux"; + compression = "gzip"; + load = <00000000>; + entry = <00000000>; 
+ + gbb { + size = <0x2180>; + }; + }; + }; + + configurations { + default = "conf-1"; + conf-1 { + description = "Boot bintool output"; + kernel = "kernel"; + }; + }; + }; + }; +}; From 4897d331f37e1fd1c3a4ca599ea8093c4c977a09 Mon Sep 17 00:00:00 2001 From: Alper Nebi Yasak Date: Tue, 8 Feb 2022 01:08:06 +0300 Subject: [PATCH 26/27] binman: Check missing bintools of Section subclasses Binman can check for missing binary tools and prints warnings if anything required for an image is missing. The implementation of this for the Section entry only checks the subentries, presumably because Section does not use any binary tools itself. However, this means the check is also skipped for subclasses of Section which might need binary tools. Make sure missing binary tools are checked for subclasses of the Section entry type as well, by calling the parent class' implementation in the relevant Section method. Signed-off-by: Alper Nebi Yasak Reviewed-by: Simon Glass --- tools/binman/etype/section.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py index 706f6bd3e41..25159074ba6 100644 --- a/tools/binman/etype/section.py +++ b/tools/binman/etype/section.py @@ -840,6 +840,7 @@ class Entry_section(Entry): Args: missing_list: List of Bintool objects to be added to """ + super().check_missing_bintools(missing_list) for entry in self._entries.values(): entry.check_missing_bintools(missing_list) From f3078d4ea707931c2307a623ecf6e4d215b413d5 Mon Sep 17 00:00:00 2001 From: Alper Nebi Yasak Date: Tue, 8 Feb 2022 01:08:07 +0300 Subject: [PATCH 27/27] binman: Convert FIT entry type to a subclass of Section entry type The binman FIT entry type shares some code with the Section entry type. This shared code is bound to grow, since FIT entries are conceptually a variation of Section entries. Make FIT entry type a subclass of Section entry type, simplifying it a bit and providing us the features that Section implements. Also fix the subentry alignment test which now attempts to write symbols to a nonexistent SPL ELF test file by creating it first. Signed-off-by: Alper Nebi Yasak Reviewed-by: Simon Glass Avoid AddMissingProperties() and SetCalculatedProperties() with FIT: Signed-off-by: Simon Glass --- tools/binman/etype/fit.py | 76 ++++++++++++++------------------------- tools/binman/ftest.py | 5 +-- 2 files changed, 30 insertions(+), 51 deletions(-) diff --git a/tools/binman/etype/fit.py b/tools/binman/etype/fit.py index 9445997fdac..a56b0564f9a 100644 --- a/tools/binman/etype/fit.py +++ b/tools/binman/etype/fit.py @@ -9,11 +9,12 @@ from collections import defaultdict, OrderedDict import libfdt from binman.entry import Entry, EntryArg +from binman.etype.section import Entry_section from dtoc import fdt_util from dtoc.fdt import Fdt from patman import tools -class Entry_fit(Entry): +class Entry_fit(Entry_section): """Flat Image Tree (FIT) This calls mkimage to create a FIT (U-Boot Flat Image Tree) based on the @@ -112,15 +113,15 @@ class Entry_fit(Entry): """ Members: _fit: FIT file being built - _fit_sections: dict: + _entries: dict from Entry_section: key: relative path to entry Node (from the base of the FIT) value: Entry_section object comprising the contents of this node """ super().__init__(section, etype, node) self._fit = None - self._fit_sections = {} self._fit_props = {} + for pname, prop in self._node.props.items(): if pname.startswith('fit,'): self._fit_props[pname] = prop @@ -185,7 +186,7 @@ class Entry_fit(Entry): # 'data' property later. 
entry = Entry.Create(self.section, node, etype='section') entry.ReadNode() - self._fit_sections[rel_path] = entry + self._entries[rel_path] = entry for subnode in node.subnodes: if has_images and not (subnode.name.startswith('hash') or @@ -237,18 +238,19 @@ class Entry_fit(Entry): self._fdt = Fdt.FromData(fdt.as_bytearray()) self._fdt.Scan() - def ExpandEntries(self): - super().ExpandEntries() - for section in self._fit_sections.values(): - section.ExpandEntries() - - def ObtainContents(self): - """Obtain the contents of the FIT + def BuildSectionData(self, required): + """Build FIT entry contents This adds the 'data' properties to the input ITB (Image-tree Binary) then runs mkimage to process it. + + Args: + required: True if the data must be present, False if it is OK to + return None + + Returns: + Contents of the section (bytes) """ - # self._BuildInput() either returns bytes or raises an exception. data = self._BuildInput(self._fdt) uniq = self.GetUniqueName() input_fname = tools.get_output_filename('%s.itb' % uniq) @@ -264,14 +266,12 @@ class Entry_fit(Entry): 'pad': fdt_util.fdt32_to_cpu(ext_offset.value) } if self.mkimage.run(reset_timestamp=True, output_fname=output_fname, - **args) is not None: - self.SetContents(tools.read_file(output_fname)) - else: + **args) is None: # Bintool is missing; just use empty data as the output self.record_missing_bintool(self.mkimage) - self.SetContents(tools.get_bytes(0, 1024)) + return tools.get_bytes(0, 1024) - return True + return tools.read_file(output_fname) def _BuildInput(self, fdt): """Finish the FIT by adding the 'data' properties to it @@ -282,12 +282,8 @@ class Entry_fit(Entry): Returns: New fdt contents (bytes) """ - for path, section in self._fit_sections.items(): + for path, section in self._entries.items(): node = fdt.GetNode(path) - # Entry_section.ObtainContents() either returns True or - # raises an exception. - section.ObtainContents() - section.Pack(0) data = section.GetData() node.AddData('data', data) @@ -295,34 +291,16 @@ class Entry_fit(Entry): data = fdt.GetContents() return data - def CheckMissing(self, missing_list): - """Check if any entries in this FIT have missing external blobs - - If there are missing blobs, the entries are added to the list - - Args: - missing_list: List of Entry objects to be added to - """ - for path, section in self._fit_sections.items(): - section.CheckMissing(missing_list) - - def SetAllowMissing(self, allow_missing): - for section in self._fit_sections.values(): - section.SetAllowMissing(allow_missing) - def AddBintools(self, tools): - for section in self._fit_sections.values(): - section.AddBintools(tools) + super().AddBintools(tools) self.mkimage = self.AddBintool(tools, 'mkimage') - def check_missing_bintools(self, missing_list): - """Check if any entries in this section have missing bintools + def AddMissingProperties(self, have_image_pos): + # We don't want to interfere with any hash properties in the FIT, so + # disable this for now. + pass - If there are missing bintools, these are added to the list - - Args: - missing_list: List of Bintool objects to be added to - """ - super().check_missing_bintools(missing_list) - for entry in self._fit_sections.values(): - entry.check_missing_bintools(missing_list) + def SetCalculatedProperties(self): + # We don't want to interfere with any hash properties in the FIT, so + # disable this for now. 
+ pass diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py index dc14a74904a..59b6d52fbe4 100644 --- a/tools/binman/ftest.py +++ b/tools/binman/ftest.py @@ -3850,6 +3850,7 @@ class TestFunctional(unittest.TestCase): def testFitImageSubentryAlignment(self): """Test relative alignability of FIT image subentries""" + self._SetupSplElf() entry_args = { 'test-id': TEXT_DATA, } @@ -5143,8 +5144,8 @@ fdt fdtmap Extract the devicetree blob from the fdtmap data, _, _, _ = self._DoReadFileDtb('220_fit_subentry_bintool.dts', entry_args=entry_args) - expected = (GBB_DATA + GBB_DATA + tools.GetBytes(0, 8) + - tools.GetBytes(0, 0x2180 - 16)) + expected = (GBB_DATA + GBB_DATA + tools.get_bytes(0, 8) + + tools.get_bytes(0, 0x2180 - 16)) self.assertIn(expected, data) def testFitSubentryMissingBintool(self):