binman: Tidy up comments and pylint warnings in fit

Update this entry type to resolve some pylint warnings and make sure
that functions and members are fully commented.

Signed-off-by: Simon Glass <sjg@chromium.org>
Author: Simon Glass
Date:   2024-08-26 13:11:32 -06:00
parent be45bb941a
commit fb428a63c1
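
Note on the code changes below: apart from the docstring work, the recurring
pattern is replacing single-element tuple unpacking of GetEntryArgsOrProps()
with indexing of the returned list. A minimal sketch of that pattern, where
get_args() is a hypothetical stand-in for GetEntryArgsOrProps():

    # get_args() is a stand-in: like GetEntryArgsOrProps(), it returns one
    # value per requested argument name (always None in this sketch).
    def get_args(names):
        return [None for _ in names]

    # Old form: single-element tuple unpacking, which pylint may flag
    # (unbalanced-tuple-unpacking) because it cannot prove the result has
    # exactly one element.
    props, = get_args(['of-spl-remove-props'])

    # New form used by this commit: index element 0 of the returned list.
    props = get_args(['of-spl-remove-props'])[0]

    remove_props = props.split() if props else []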

tools/binman/etype/fit.py

@@ -6,9 +6,10 @@
 """Entry-type module for producing a FIT"""

 import glob
-import libfdt
 import os

+import libfdt
+
 from binman.entry import Entry, EntryArg
 from binman.etype.section import Entry_section
 from binman import elf
@@ -23,6 +24,7 @@ OPERATIONS = {
     'split-elf': OP_SPLIT_ELF,
 }

+# pylint: disable=invalid-name
 class Entry_fit(Entry_section):
     """Flat Image Tree (FIT)

@@ -381,31 +383,46 @@ class Entry_fit(Entry_section):
     def __init__(self, section, etype, node):
         """
         Members:
-            _fit: FIT file being built
-            _entries: dict from Entry_section:
+            _fit (str): FIT file being built
+            _fit_props (list of str): 'fit,...' properties found in the
+                top-level node
+            _fdts (list of str): Filenames of .dtb files to process
+            _fdt_dir (str): Directory to scan to find .dtb files, or None
+            _fit_list_prop (str): Name of the EntryArg containing a list of .dtb
+                files
+            _fit_default_dt (str): Name of the EntryArg containing the default
+                .dtb file
+            _entries (dict of entries): from Entry_section:
                 key: relative path to entry Node (from the base of the FIT)
                 value: Entry_section object comprising the contents of this
                     node
-            _priv_entries: Internal copy of _entries which includes 'generator'
-                entries which are used to create the FIT, but should not be
-                processed as real entries. This is set up once we have the
-                entries
-            _loadables: List of generated split-elf nodes, each a node name
+            _priv_entries (dict of entries): Internal copy of _entries which
+                includes 'generator' entries which are used to create the FIT,
+                but should not be processed as real entries. This is set up once
+                we have the entries
+            _loadables (list of str): List of generated split-elf nodes, each
+                a node name
+            _remove_props (list of str): Value of of-spl-remove-props EntryArg,
+                the list of properties to remove with fdtgrep
+            mkimage (Bintool): mkimage tool
+            fdtgrep (Bintool): fdtgrep tool
         """
         super().__init__(section, etype, node)
         self._fit = None
         self._fit_props = {}
         self._fdts = None
         self._fdt_dir = None
-        self.mkimage = None
-        self.fdtgrep = None
+        self._fit_list_prop = None
+        self._fit_default_dt = None
         self._priv_entries = {}
         self._loadables = []
         self._remove_props = []
-        props, = self.GetEntryArgsOrProps(
-            [EntryArg('of-spl-remove-props', str)], required=False)
+        props = self.GetEntryArgsOrProps(
+            [EntryArg('of-spl-remove-props', str)], required=False)[0]
         if props:
             self._remove_props = props.split()
+        self.mkimage = None
+        self.fdtgrep = None

     def ReadNode(self):
         super().ReadNode()
@@ -414,8 +431,8 @@ class Entry_fit(Entry_section):
                 self._fit_props[pname] = prop
         self._fit_list_prop = self._fit_props.get('fit,fdt-list')
         if self._fit_list_prop:
-            fdts, = self.GetEntryArgsOrProps(
-                [EntryArg(self._fit_list_prop.value, str)])
+            fdts = self.GetEntryArgsOrProps(
+                [EntryArg(self._fit_list_prop.value, str)])[0]
             if fdts is not None:
                 self._fdts = fdts.split()
             else:
@@ -431,7 +448,7 @@ class Entry_fit(Entry_section):
         self._fit_default_dt = self.GetEntryArgsOrProps([EntryArg('default-dt',
                                                                    str)])[0]

-    def _get_operation(self, base_node, node):
+    def _get_operation(self, node):
         """Get the operation referenced by a subnode

         Args:
@@ -560,9 +577,6 @@ class Entry_fit(Entry_section):
     def _build_input(self):
         """Finish the FIT by adding the 'data' properties to it

-        Arguments:
-            fdt: FIT to update
-
         Returns:
             bytes: New fdt contents
         """
@@ -637,7 +651,7 @@ class Entry_fit(Entry_section):
                    result.append(name)
            return firmware, result

-        def _gen_fdt_nodes(base_node, node, depth, in_images):
+        def _gen_fdt_nodes(node, depth, in_images):
            """Generate FDT nodes

            This creates one node for each member of self._fdts using the
@@ -710,11 +724,10 @@ class Entry_fit(Entry_section):
            else:
                self.Raise("Generator node requires 'fit,fdt-list' property")

-        def _gen_split_elf(base_node, node, depth, segments, entry_addr):
+        def _gen_split_elf(node, depth, segments, entry_addr):
            """Add nodes for the ELF file, one per group of contiguous segments

            Args:
-                base_node (Node): Template node from the binman definition
                node (Node): Node to replace (in the FIT being built)
                depth: Current node depth (0 is the base 'fit' node)
                segments (list): list of segments, each:
@@ -745,7 +758,7 @@ class Entry_fit(Entry_section):
                    with fsw.add_node(subnode.name):
                        _add_node(node, depth + 1, subnode)

-        def _gen_node(base_node, node, depth, in_images, entry):
+        def _gen_node(node, depth, in_images, entry):
            """Generate nodes from a template

            This creates one or more nodes depending on the fit,operation being
@@ -761,8 +774,6 @@ class Entry_fit(Entry_section):
            If the file is missing, nothing is generated.

            Args:
-                base_node (Node): Base Node of the FIT (with 'description'
-                    property)
                node (Node): Generator node to process
                depth (int): Current node depth (0 is the base 'fit' node)
                in_images (bool): True if this is inside the 'images' node, so
@@ -770,13 +781,12 @@ class Entry_fit(Entry_section):
                entry (entry_Section): Entry for the section containing the
                    contents of this node
            """
-            oper = self._get_operation(base_node, node)
+            oper = self._get_operation(node)
            if oper == OP_GEN_FDT_NODES:
-                _gen_fdt_nodes(base_node, node, depth, in_images)
+                _gen_fdt_nodes(node, depth, in_images)
            elif oper == OP_SPLIT_ELF:
                # Entry_section.ObtainContents() either returns True or
                # raises an exception.
-                data = None
                missing_opt_list = []
                entry.ObtainContents()
                entry.Pack(0)
@@ -798,7 +808,7 @@ class Entry_fit(Entry_section):
                    self._raise_subnode(
                        node, f'Failed to read ELF file: {str(exc)}')

-                _gen_split_elf(base_node, node, depth, segments, entry_addr)
+                _gen_split_elf(node, depth, segments, entry_addr)

        def _add_node(base_node, depth, node):
            """Add nodes to the output FIT
@@ -829,7 +839,6 @@ class Entry_fit(Entry_section):
                fsw.property('data', bytes(data))

            for subnode in node.subnodes:
-                subnode_path = f'{rel_path}/{subnode.name}'
                if has_images and not self.IsSpecialSubnode(subnode):
                    # This subnode is a content node not meant to appear in
                    # the FIT (e.g. "/images/kernel/u-boot"), so don't call
@@ -837,7 +846,7 @@ class Entry_fit(Entry_section):
                    pass
                elif self.GetImage().generate and subnode.name.startswith('@'):
                    entry = self._priv_entries.get(subnode.name)
-                    _gen_node(base_node, subnode, depth, in_images, entry)
+                    _gen_node(subnode, depth, in_images, entry)
                    # This is a generator (template) entry, so remove it from
                    # the list of entries used by PackEntries(), etc. Otherwise
                    # it will appear in the binman output
@@ -917,6 +926,8 @@ class Entry_fit(Entry_section):
            # This should never happen
            else: # pragma: no cover
+                offset = None
+                size = None
                self.Raise(f'{path}: missing data properties')

            section.SetOffsetSize(offset, size)

@@ -950,7 +961,7 @@ class Entry_fit(Entry_section):
        if input_fname:
            fname = input_fname
        else:
-            fname = tools.get_output_filename('%s.fit' % uniq)
+            fname = tools.get_output_filename(f'{uniq}.fit')
            tools.write_file(fname, self.GetData())

        args.append(fname)
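
A note on the '# pylint: disable=invalid-name' line added near the top of the
file: binman looks up entry-type classes by the Entry_<etype> naming pattern,
so the class cannot be renamed to the CapWords form pylint expects; the check
is disabled instead. A small sketch of the same idea, using an invented class
name:

    # pylint would normally flag this class name (invalid-name), but the
    # Entry_<etype> pattern is required by binman's entry lookup, so the
    # check is disabled rather than renaming the class.
    # pylint: disable=invalid-name
    class Entry_example:
        """Hypothetical entry type following binman's naming convention"""

        def __init__(self):
            self.contents = None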