author		Tom Rini <trini@konsulko.com>		2021-07-22 11:15:52 -0400
committer	Tom Rini <trini@konsulko.com>		2021-07-22 11:15:52 -0400
commit		a15fa1ba67d7b3c8061b515e7713f733fa328018 (patch)
tree		f7746e2e7a3410043e9ea3f3f7c0a97e2c5e6dbb /tools
parent		806734f41b25931798fdf667b5a2ae830229c13f (diff)
parent		1b098b3e655451572054ce933a87231ee16f7133 (diff)
Merge tag 'dm-pull-21jul21' of https://gitlab.denx.de/u-boot/custodians/u-boot-dm
dtoc improvements to show better warnings
minor test build fixes
sandbox fixes for SDL2 and running TPL
bloblist resize feature
binman multithreading
Diffstat (limited to 'tools')
-rw-r--r--	tools/binman/binman.rst				18
-rw-r--r--	tools/binman/cmdline.py				4
-rw-r--r--	tools/binman/control.py				7
-rw-r--r--	tools/binman/etype/blob.py			5
-rw-r--r--	tools/binman/etype/collection.py		2
-rw-r--r--	tools/binman/etype/files.py			3
-rw-r--r--	tools/binman/etype/section.py			40
-rw-r--r--	tools/binman/ftest.py				41
-rw-r--r--	tools/binman/image.py				3
-rw-r--r--	tools/binman/state.py				95
-rw-r--r--	tools/binman/test/202_section_timeout.dts	21
-rwxr-xr-x	tools/dtoc/main.py				51
-rw-r--r--	tools/dtoc/src_scan.py				45
-rw-r--r--	tools/dtoc/test_src_scan.py			133
-rw-r--r--	tools/patman/cros_subprocess.py			6
-rw-r--r--	tools/patman/tools.py				9
16 files changed, 434 insertions, 49 deletions
diff --git a/tools/binman/binman.rst b/tools/binman/binman.rst
index bc635aa00a..09e7b57198 100644
--- a/tools/binman/binman.rst
+++ b/tools/binman/binman.rst
@@ -1142,6 +1142,22 @@ adds a -v<level> option to the call to binman::
 
    make BINMAN_VERBOSE=5
 
+Building sections in parallel
+-----------------------------
+
+By default binman uses multiprocessing to speed up compilation of large images.
+This works at a section level, with one thread for each entry in the section.
+This can speed things up if the entries are large and use compression.
+
+This feature can be disabled with the '-T' flag, which defaults to a suitable
+value for your machine. This depends on the Python version, e.g on v3.8 it uses
+12 threads on an 8-core machine. See ConcurrentFutures_ for more details.
+
+The special value -T0 selects single-threaded mode, useful for debugging during
+development, since dealing with exceptions and problems in threads is more
+difficult. This avoids any use of ThreadPoolExecutor.
+
+
 History / Credits
 -----------------
@@ -1190,3 +1206,5 @@ Some ideas:
 
 -- Simon Glass <sjg@chromium.org> 7/7/2016
+
+.. _ConcurrentFutures: https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor
diff --git a/tools/binman/cmdline.py b/tools/binman/cmdline.py
index 95f9ba27fb..d6156df408 100644
--- a/tools/binman/cmdline.py
+++ b/tools/binman/cmdline.py
@@ -32,6 +32,10 @@ controlled by a description in the board device tree.'''
         default=False, help='Display the README file')
     parser.add_argument('--toolpath', type=str, action='append',
         help='Add a path to the directories containing tools')
+    parser.add_argument('-T', '--threads', type=int,
+        default=None, help='Number of threads to use (0=single-thread)')
+    parser.add_argument('--test-section-timeout', action='store_true',
+        help='Use a zero timeout for section multi-threading (for testing)')
     parser.add_argument('-v', '--verbosity', default=1, type=int,
         help='Control verbosity: 0=silent, 1=warnings, 2=notices, '
         '3=info, 4=detail, 5=debug')
diff --git a/tools/binman/control.py b/tools/binman/control.py
index f57e34daaa..dcba02ff7f 100644
--- a/tools/binman/control.py
+++ b/tools/binman/control.py
@@ -628,9 +628,13 @@ def Binman(args):
             tools.PrepareOutputDir(args.outdir, args.preserve)
             tools.SetToolPaths(args.toolpath)
             state.SetEntryArgs(args.entry_arg)
+            state.SetThreads(args.threads)
 
             images = PrepareImagesAndDtbs(dtb_fname, args.image,
                                           args.update_fdt, use_expanded)
+            if args.test_section_timeout:
+                # Set the first image to timeout, used in testThreadTimeout()
+                images[list(images.keys())[0]].test_section_timeout = True
             missing = False
             for image in images.values():
                 missing |= ProcessImage(image, args.update_fdt, args.map,
@@ -642,6 +646,9 @@ def Binman(args):
 
             if missing:
                 tout.Warning("\nSome images are invalid")
+
+            # Use this to debug the time take to pack the image
+            #state.TimingShow()
         finally:
             tools.FinaliseOutputDir()
     finally:
diff --git a/tools/binman/etype/blob.py b/tools/binman/etype/blob.py
index 018f8c9a31..fae86ca3ec 100644
--- a/tools/binman/etype/blob.py
+++ b/tools/binman/etype/blob.py
@@ -6,6 +6,7 @@
 #
 
 from binman.entry import Entry
+from binman import state
 from dtoc import fdt_util
 from patman import tools
 from patman import tout
@@ -59,8 +60,12 @@ class Entry_blob(Entry):
         the data in chunks and avoid reading it all at once. For now
         this seems like an unnecessary complication.
         """
+        state.TimingStart('read')
         indata = tools.ReadFile(self._pathname)
+        state.TimingAccum('read')
+        state.TimingStart('compress')
         data = self.CompressData(indata)
+        state.TimingAccum('compress')
         self.SetContents(data)
         return True
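[Editor's note] The claim in the binman.rst hunk above that Python v3.8 uses 12 threads on an 8-core machine follows directly from ThreadPoolExecutor's documented default when max_workers is None. A minimal illustration (not binman code; the helper name is invented):

    import os

    def default_workers(cpus=None):
        """Mirror the Python 3.8+ ThreadPoolExecutor default worker count."""
        cpus = cpus or os.cpu_count() or 1
        return min(32, cpus + 4)

    assert default_workers(cpus=8) == 12   # the figure quoted in the docs
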
""" + state.TimingStart('read') indata = tools.ReadFile(self._pathname) + state.TimingAccum('read') + state.TimingStart('compress') data = self.CompressData(indata) + state.TimingAccum('compress') self.SetContents(data) return True diff --git a/tools/binman/etype/collection.py b/tools/binman/etype/collection.py index 1625575fe9..442b40b48b 100644 --- a/tools/binman/etype/collection.py +++ b/tools/binman/etype/collection.py @@ -40,7 +40,7 @@ class Entry_collection(Entry): """ # Join up all the data self.Info('Getting contents, required=%s' % required) - data = b'' + data = bytearray() for entry_phandle in self.content: entry_data = self.section.GetContentsByPhandle(entry_phandle, self, required) diff --git a/tools/binman/etype/files.py b/tools/binman/etype/files.py index 5db36abef0..9b04a496a8 100644 --- a/tools/binman/etype/files.py +++ b/tools/binman/etype/files.py @@ -34,6 +34,9 @@ class Entry_files(Entry_section): from binman import state super().__init__(section, etype, node) + + def ReadNode(self): + super().ReadNode() self._pattern = fdt_util.GetString(self._node, 'pattern') if not self._pattern: self.Raise("Missing 'pattern' property") diff --git a/tools/binman/etype/section.py b/tools/binman/etype/section.py index c3bac026c1..e2949fc916 100644 --- a/tools/binman/etype/section.py +++ b/tools/binman/etype/section.py @@ -9,10 +9,12 @@ images to be created. """ from collections import OrderedDict +import concurrent.futures import re import sys from binman.entry import Entry +from binman import state from dtoc import fdt_util from patman import tools from patman import tout @@ -164,7 +166,7 @@ class Entry_section(Entry): pad_byte = (entry._pad_byte if isinstance(entry, Entry_section) else self._pad_byte) - data = b'' + data = bytearray() # Handle padding before the entry if entry.pad_before: data += tools.GetBytes(self._pad_byte, entry.pad_before) @@ -198,7 +200,7 @@ class Entry_section(Entry): Returns: Contents of the section (bytes) """ - section_data = b'' + section_data = bytearray() for entry in self._entries.values(): entry_data = entry.GetData(required) @@ -525,15 +527,43 @@ class Entry_section(Entry): def GetEntryContents(self): """Call ObtainContents() for each entry in the section """ + def _CheckDone(entry): + if not entry.ObtainContents(): + next_todo.append(entry) + return entry + todo = self._entries.values() for passnum in range(3): + threads = state.GetThreads() next_todo = [] - for entry in todo: - if not entry.ObtainContents(): - next_todo.append(entry) + + if threads == 0: + for entry in todo: + _CheckDone(entry) + else: + with concurrent.futures.ThreadPoolExecutor( + max_workers=threads) as executor: + future_to_data = { + entry: executor.submit(_CheckDone, entry) + for entry in todo} + timeout = 60 + if self.GetImage().test_section_timeout: + timeout = 0 + done, not_done = concurrent.futures.wait( + future_to_data.values(), timeout=timeout) + # Make sure we check the result, so any exceptions are + # generated. Check the results in entry order, since tests + # may expect earlier entries to fail first. 
diff --git a/tools/binman/ftest.py b/tools/binman/ftest.py
index 5383eec489..cea3ebf2b9 100644
--- a/tools/binman/ftest.py
+++ b/tools/binman/ftest.py
@@ -308,7 +308,8 @@ class TestFunctional(unittest.TestCase):
     def _DoTestFile(self, fname, debug=False, map=False, update_dtb=False,
                     entry_args=None, images=None, use_real_dtb=False,
                     use_expanded=False, verbosity=None, allow_missing=False,
-                    extra_indirs=None):
+                    extra_indirs=None, threads=None,
+                    test_section_timeout=False):
         """Run binman with a given test file
 
         Args:
@@ -331,6 +332,8 @@ class TestFunctional(unittest.TestCase):
             allow_missing: Set the '--allow-missing' flag so that missing
                 external binaries just produce a warning instead of an error
             extra_indirs: Extra input directories to add using -I
+            threads: Number of threads to use (None for default, 0 for
+                single-threaded)
         """
         args = []
         if debug:
@@ -342,6 +345,10 @@ class TestFunctional(unittest.TestCase):
         if self.toolpath:
             for path in self.toolpath:
                 args += ['--toolpath', path]
+        if threads is not None:
+            args.append('-T%d' % threads)
+        if test_section_timeout:
+            args.append('--test-section-timeout')
         args += ['build', '-p', '-I', self._indir, '-d', self.TestFile(fname)]
         if map:
             args.append('-m')
@@ -412,7 +419,7 @@ class TestFunctional(unittest.TestCase):
 
     def _DoReadFileDtb(self, fname, use_real_dtb=False, use_expanded=False,
                        map=False, update_dtb=False, entry_args=None,
-                       reset_dtbs=True, extra_indirs=None):
+                       reset_dtbs=True, extra_indirs=None, threads=None):
         """Run binman and return the resulting image
 
         This runs binman with a given test file and then reads the resulting
@@ -439,6 +446,8 @@ class TestFunctional(unittest.TestCase):
                 function. If reset_dtbs is True, then the original test dtb
                 is written back before this function finishes
             extra_indirs: Extra input directories to add using -I
+            threads: Number of threads to use (None for default, 0 for
+                single-threaded)
 
         Returns:
             Tuple:
@@ -463,7 +472,8 @@ class TestFunctional(unittest.TestCase):
         try:
             retcode = self._DoTestFile(fname, map=map, update_dtb=update_dtb,
                                        entry_args=entry_args, use_real_dtb=use_real_dtb,
-                                       use_expanded=use_expanded, extra_indirs=extra_indirs)
+                                       use_expanded=use_expanded, extra_indirs=extra_indirs,
+                                       threads=threads)
             self.assertEqual(0, retcode)
 
             out_dtb_fname = tools.GetOutputFilename('u-boot.dtb.out')
@@ -4542,5 +4552,30 @@ class TestFunctional(unittest.TestCase):
         data = self._DoReadFile('201_opensbi.dts')
         self.assertEqual(OPENSBI_DATA, data[:len(OPENSBI_DATA)])
 
+    def testSectionsSingleThread(self):
+        """Test sections without multithreading"""
+        data = self._DoReadFileDtb('055_sections.dts', threads=0)[0]
+        expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) +
+                    U_BOOT_DATA + tools.GetBytes(ord('a'), 12) +
+                    U_BOOT_DATA + tools.GetBytes(ord('&'), 4))
+        self.assertEqual(expected, data)
+
+    def testThreadTimeout(self):
+        """Test handling a thread that takes too long"""
+        with self.assertRaises(ValueError) as e:
+            self._DoTestFile('202_section_timeout.dts',
+                             test_section_timeout=True)
+        self.assertIn("Node '/binman/section@0': Timed out obtaining contents",
+                      str(e.exception))
+
+    def testTiming(self):
+        """Test output of timing information"""
+        data = self._DoReadFile('055_sections.dts')
+        with test_util.capture_sys_output() as (stdout, stderr):
+            state.TimingShow()
+        self.assertIn('read:', stdout.getvalue())
+        self.assertIn('compress:', stdout.getvalue())
+
+
 if __name__ == "__main__":
     unittest.main()
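[Editor's note] testTiming() above relies on test_util.capture_sys_output() to grab binman's stdout. For readers unfamiliar with that patman helper, a minimal standard-library equivalent of the same capture pattern (the function name here is invented):

    import contextlib
    import io

    def capture_stdout(func, *args, **kwargs):
        """Run func and return whatever it printed to stdout."""
        buf = io.StringIO()
        with contextlib.redirect_stdout(buf):
            func(*args, **kwargs)
        return buf.getvalue()

    out = capture_stdout(print, '      read:       10.0ms')
    assert 'read:' in out
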
diff --git a/tools/binman/image.py b/tools/binman/image.py
index 10778f47fe..cdc58b39a4 100644
--- a/tools/binman/image.py
+++ b/tools/binman/image.py
@@ -36,6 +36,8 @@ class Image(section.Entry_section):
         fdtmap_data: Contents of the fdtmap when loading from a file
         allow_repack: True to add properties to allow the image to be safely
             repacked later
+        test_section_timeout: Use a zero timeout for section multi-threading
+            (for testing)
 
     Args:
         copy_to_orig: Copy offset/size to orig_offset/orig_size after reading
@@ -74,6 +76,7 @@ class Image(section.Entry_section):
         self.allow_repack = False
         self._ignore_missing = ignore_missing
         self.use_expanded = use_expanded
+        self.test_section_timeout = False
         if not test:
             self.ReadNode()
diff --git a/tools/binman/state.py b/tools/binman/state.py
index dfb1760455..9e5b8a3931 100644
--- a/tools/binman/state.py
+++ b/tools/binman/state.py
@@ -5,8 +5,11 @@
 # Holds and modifies the state information held by binman
 #
 
+from collections import defaultdict
 import hashlib
 import re
+import time
+import threading
 
 from dtoc import fdt
 import os
@@ -55,6 +58,30 @@ allow_entry_expansion = True
 # to the new ones, the compressed size increases, etc.
 allow_entry_contraction = False
 
+# Number of threads to use for binman (None means machine-dependent)
+num_threads = None
+
+
+class Timing:
+    """Holds information about an operation that is being timed
+
+    Properties:
+        name: Operation name (only one of each name is stored)
+        start: Start time of operation in seconds (None if not start)
+        accum:: Amount of time spent on this operation so far, in seconds
+    """
+    def __init__(self, name):
+        self.name = name
+        self.start = None # cause an error if TimingStart() is not called
+        self.accum = 0.0
+
+
+# Holds timing info for each name:
+#    key: name of Timing info (Timing.name)
+#    value: Timing object
+timing_info = {}
+
+
 def GetFdtForEtype(etype):
     """Get the Fdt object for a particular device-tree entry
@@ -420,3 +447,71 @@ def AllowEntryContraction():
         raised
     """
     return allow_entry_contraction
+
+def SetThreads(threads):
+    """Set the number of threads to use when building sections
+
+    Args:
+        threads: Number of threads to use (None for default, 0 for
+            single-threaded)
+    """
+    global num_threads
+
+    num_threads = threads
+
+def GetThreads():
+    """Get the number of threads to use when building sections
+
+    Returns:
+        Number of threads to use (None for default, 0 for single-threaded)
+    """
+    return num_threads
+
+def GetTiming(name):
+    """Get the timing info for a particular operation
+
+    The object is created if it does not already exist.
+
+    Args:
+        name: Operation name to get
+
+    Returns:
+        Timing object for the current thread
+    """
+    threaded_name = '%s:%d' % (name, threading.get_ident())
+    timing = timing_info.get(threaded_name)
+    if not timing:
+        timing = Timing(threaded_name)
+        timing_info[threaded_name] = timing
+    return timing
+
+def TimingStart(name):
+    """Start the timer for an operation
+
+    Args:
+        name: Operation name to start
+    """
+    timing = GetTiming(name)
+    timing.start = time.monotonic()
+
+def TimingAccum(name):
+    """Stop and accumlate the time for an operation
+
+    This measures the time since the last TimingStart() and adds that to the
+    accumulated time.
+
+    Args:
+        name: Operation name to start
+    """
+    timing = GetTiming(name)
+    timing.accum += time.monotonic() - timing.start
+
+def TimingShow():
+    """Show all timing information"""
+    duration = defaultdict(float)
+    for threaded_name, timing in timing_info.items():
+        name = threaded_name.split(':')[0]
+        duration[name] += timing.accum
+
+    for name, seconds in duration.items():
+        print('%10s: %10.1fms' % (name, seconds * 1000))
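[Editor's note] Hypothetical usage of the timing helpers added above (TimingStart, TimingAccum and TimingShow are the real names; compress_blob and the sleep are invented stand-ins). Because GetTiming() keys records by '<name>:<thread id>', each thread accumulates into its own Timing object and no locking is needed; TimingShow() then sums the per-thread values by name:

    import time

    from binman import state   # assumes u-boot/tools is on sys.path

    def compress_blob(data):
        state.TimingStart('compress')
        time.sleep(0.01)        # stand-in for the real compression work
        state.TimingAccum('compress')
        return data

    compress_blob(b'example')
    state.TimingShow()          # prints e.g. '  compress:       10.2ms'
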
diff --git a/tools/binman/test/202_section_timeout.dts b/tools/binman/test/202_section_timeout.dts
new file mode 100644
index 0000000000..1481450367
--- /dev/null
+++ b/tools/binman/test/202_section_timeout.dts
@@ -0,0 +1,21 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+	#address-cells = <1>;
+	#size-cells = <1>;
+
+	binman {
+		pad-byte = <0x26>;
+		size = <0x28>;
+		section@0 {
+			read-only;
+			size = <0x10>;
+			pad-byte = <0x21>;
+
+			u-boot {
+			};
+		};
+	};
+};
diff --git a/tools/dtoc/main.py b/tools/dtoc/main.py
index 93706de89b..6f9b526bd7 100755
--- a/tools/dtoc/main.py
+++ b/tools/dtoc/main.py
@@ -21,7 +21,7 @@ options.
 For more information about the use of this options and tool please see
 doc/driver-model/of-plat.rst
 """
 
-from optparse import OptionParser
+from argparse import ArgumentParser
 import os
 import sys
 import unittest
@@ -51,7 +51,7 @@ def run_tests(processes, args):
     result = unittest.TestResult()
     sys.argv = [sys.argv[0]]
 
-    test_name = args and args[0] or None
+    test_name = args.files and args.files[0] or None
 
     test_dtoc.setup()
@@ -66,47 +66,50 @@ def RunTestCoverage():
     """Run the tests and check that we get 100% coverage"""
     sys.argv = [sys.argv[0]]
     test_util.RunTestCoverage('tools/dtoc/dtoc', '/main.py',
-            ['tools/patman/*.py', '*/fdt*', '*test*'], options.build_dir)
+            ['tools/patman/*.py', '*/fdt*', '*test*'], args.build_dir)
 
 
 if __name__ != '__main__':
     sys.exit(1)
 
-parser = OptionParser()
-parser.add_option('-B', '--build-dir', type='string', default='b',
+epilog = '''Generate C code from devicetree files. See of-plat.rst for details'''
+
+parser = ArgumentParser(epilog=epilog)
+parser.add_argument('-B', '--build-dir', type=str, default='b',
         help='Directory containing the build output')
-parser.add_option('-c', '--c-output-dir', action='store',
+parser.add_argument('-c', '--c-output-dir', action='store',
         help='Select output directory for C files')
-parser.add_option('-C', '--h-output-dir', action='store',
+parser.add_argument('-C', '--h-output-dir', action='store',
         help='Select output directory for H files (defaults to --c-output-di)')
-parser.add_option('-d', '--dtb-file', action='store',
+parser.add_argument('-d', '--dtb-file', action='store',
        help='Specify the .dtb input file')
-parser.add_option('-i', '--instantiate', action='store_true', default=False,
+parser.add_argument('-i', '--instantiate', action='store_true', default=False,
        help='Instantiate devices to avoid needing device_bind()')
-parser.add_option('--include-disabled', action='store_true',
+parser.add_argument('--include-disabled', action='store_true',
        help='Include disabled nodes')
-parser.add_option('-o', '--output', action='store',
+parser.add_argument('-o', '--output', action='store',
        help='Select output filename')
-parser.add_option('-p', '--phase', type=str,
+parser.add_argument('-p', '--phase', type=str,
        help='set phase of U-Boot this invocation is for (spl/tpl)')
-parser.add_option('-P', '--processes', type=int,
+parser.add_argument('-P', '--processes', type=int,
        help='set number of processes to use for running tests')
-parser.add_option('-t', '--test', action='store_true', dest='test',
+parser.add_argument('-t', '--test', action='store_true', dest='test',
        default=False, help='run tests')
-parser.add_option('-T', '--test-coverage', action='store_true',
-        default=False, help='run tests and check for 100% coverage')
-(options, args) = parser.parse_args()
+parser.add_argument('-T', '--test-coverage', action='store_true',
+        default=False, help='run tests and check for 100%% coverage')
+parser.add_argument('files', nargs='*')
+args = parser.parse_args()
 
 # Run our meagre tests
-if options.test:
-    ret_code = run_tests(options.processes, args)
+if args.test:
+    ret_code = run_tests(args.processes, args)
     sys.exit(ret_code)
-elif options.test_coverage:
+elif args.test_coverage:
     RunTestCoverage()
 else:
-    dtb_platdata.run_steps(args, options.dtb_file, options.include_disabled,
-                           options.output,
-                           [options.c_output_dir, options.h_output_dir],
-                           options.phase, instantiate=options.instantiate)
+    dtb_platdata.run_steps(args.files, args.dtb_file, args.include_disabled,
+                           args.output,
+                           [args.c_output_dir, args.h_output_dir],
+                           args.phase, instantiate=args.instantiate)
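[Editor's note] A condensed illustration (not dtoc itself) of the optparse-to-argparse conversion above: type='string' becomes type=str, the leftover positional arguments that optparse returned separately become an explicit 'files' entry with nargs='*', and a literal '%' in help text must be doubled because argparse %-formats help strings:

    from argparse import ArgumentParser

    parser = ArgumentParser(epilog='illustration only')
    parser.add_argument('-B', '--build-dir', type=str, default='b',
                        help='Directory containing the build output')
    parser.add_argument('-T', '--test-coverage', action='store_true',
                        help='run tests and check for 100%% coverage')
    parser.add_argument('files', nargs='*')   # replaces optparse's extra args
    args = parser.parse_args(['-B', 'out', 'a.dtb', 'b.dtb'])
    assert args.build_dir == 'out' and args.files == ['a.dtb', 'b.dtb']
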
diff --git a/tools/dtoc/src_scan.py b/tools/dtoc/src_scan.py
index 2db96884c8..3bef59d616 100644
--- a/tools/dtoc/src_scan.py
+++ b/tools/dtoc/src_scan.py
@@ -13,6 +13,7 @@ U_BOOT_DRIVER(), UCLASS_DRIVER and all struct declarations in header files.
 See doc/driver-model/of-plat.rst for more informaiton
 """
 
+import collections
 import os
 import re
 import sys
@@ -190,6 +191,9 @@ class Scanner:
             value: Driver name declared with U_BOOT_DRIVER(driver_name)
         _drivers_additional (list or str): List of additional drivers to use
             during scanning
+        _warnings: Dict of warnings found:
+            key: Driver name
+            value: Set of warnings
         _of_match: Dict holding information about compatible strings
             key: Name of struct udevice_id variable
             value: Dict of compatible info in that variable:
@@ -217,6 +221,7 @@ class Scanner:
         self._driver_aliases = {}
         self._drivers_additional = drivers_additional or []
         self._missing_drivers = set()
+        self._warnings = collections.defaultdict(set)
         self._of_match = {}
         self._compat_to_driver = {}
         self._uclass = {}
@@ -267,7 +272,10 @@ class Scanner:
                     aliases_c.remove(compat_c)
             return compat_c, aliases_c
 
-        self._missing_drivers.add(compat_list_c[0])
+        name = compat_list_c[0]
+        self._missing_drivers.add(name)
+        self._warnings[name].add(
+            'WARNING: the driver %s was not found in the driver list' % name)
         return compat_list_c[0], compat_list_c[1:]
@@ -444,8 +452,8 @@ class Scanner:
 
         # Collect the compatible string, e.g. 'rockchip,rk3288-grf'
         compat = None
-        re_compat = re.compile(r'{\s*.compatible\s*=\s*"(.*)"\s*'
-                               r'(,\s*.data\s*=\s*(\S*))?\s*},')
+        re_compat = re.compile(r'{\s*\.compatible\s*=\s*"(.*)"\s*'
+                               r'(,\s*\.data\s*=\s*(\S*))?\s*},')
 
         # This is a dict of compatible strings that were found:
         #    key: Compatible string, e.g. 'rockchip,rk3288-grf'
@@ -460,7 +468,7 @@ class Scanner:
 
         # Matches the references to the udevice_id list
         re_of_match = re.compile(
-            r'\.of_match\s*=\s*(of_match_ptr\()?([a-z0-9_]+)(\))?,')
+            r'\.of_match\s*=\s*(of_match_ptr\()?([a-z0-9_]+)([^,]*),')
 
         re_phase = re.compile('^\s*DM_PHASE\((.*)\).*$')
         re_hdr = re.compile('^\s*DM_HEADER\((.*)\).*$')
@@ -506,6 +514,11 @@ class Scanner:
                     driver.uclass_id = m_id.group(1)
                 elif m_of_match:
                     compat = m_of_match.group(2)
+                    suffix = m_of_match.group(3)
+                    if suffix and suffix != ')':
+                        self._warnings[driver.name].add(
+                            "%s: Warning: unexpected suffix '%s' on .of_match line for compat '%s'" %
+                            (fname, suffix, compat))
                 elif m_phase:
                     driver.phase = m_phase.group(1)
                 elif m_hdr:
@@ -533,7 +546,12 @@ class Scanner:
                         # The driver does not have a uclass or compat string.
                         # The first is required but the second is not, so just
                         # ignore this.
-                        pass
+                        if not driver.uclass_id:
+                            warn = 'Missing .uclass'
+                        else:
+                            warn = 'Missing .compatible'
+                        self._warnings[driver.name].add('%s in %s' %
+                                                        (warn, fname))
                     driver = None
                     ids_name = None
                     compat = None
@@ -555,7 +573,7 @@ class Scanner:
                 if ids_m:
                     ids_name = ids_m.group(1)
             elif m_alias:
-                self._driver_aliases[m_alias[2]] = m_alias[1]
+                self._driver_aliases[m_alias.group(2)] = m_alias.group(1)
 
         # Make the updates based on what we found
         for driver in drivers.values():
@@ -577,9 +595,18 @@ class Scanner:
 
     def show_warnings(self):
         """Show any warnings that have been collected"""
-        for name in sorted(list(self._missing_drivers)):
-            print('WARNING: the driver %s was not found in the driver list'
-                  % name)
+        used_drivers = [drv.name for drv in self._drivers.values() if drv.used]
+        missing = self._missing_drivers.copy()
+        for name in sorted(self._warnings.keys()):
+            if name in missing or name in used_drivers:
+                warns = sorted(list(self._warnings[name]))
+                print('%s: %s' % (name, warns[0]))
+                indent = ' ' * len(name)
+                for warn in warns[1:]:
+                    print('%-s: %s' % (indent, warn))
+                if name in missing:
+                    missing.remove(name)
+                print()
 
     def scan_driver(self, fname):
         """Scan a driver file to build a list of driver names and aliases
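[Editor's note] The re_compat change above escapes the dots because, in a regular expression, an unescaped '.' matches any character, so the old pattern would also accept lines that do not actually contain '.compatible'. A small self-contained demonstration of the difference (simplified patterns, not the full ones from src_scan.py):

    import re

    old = re.compile(r'{\s*.compatible\s*=\s*"(.*)"\s*}')
    new = re.compile(r'{\s*\.compatible\s*=\s*"(.*)"\s*}')

    good = '{ .compatible = "rockchip,rk3288-grf" }'
    bad = '{ Xcompatible = "bogus,match" }'

    assert old.search(good) and old.search(bad)      # old: false positive
    assert new.search(good) and not new.search(bad)  # new: exact match only

Similarly, widening the third group of re_of_match from '(\))?' to '([^,]*)' is what lets the scanner capture and warn about unexpected suffixes such as ' + 1' on .of_match lines.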
diff --git a/tools/dtoc/test_src_scan.py b/tools/dtoc/test_src_scan.py
index d6da03849f..f03cf8ed7c 100644
--- a/tools/dtoc/test_src_scan.py
+++ b/tools/dtoc/test_src_scan.py
@@ -20,6 +20,9 @@ from patman import tools
 
 OUR_PATH = os.path.dirname(os.path.realpath(__file__))
 
+EXPECT_WARN = {'rockchip_rk3288_grf':
+    {'WARNING: the driver rockchip_rk3288_grf was not found in the driver list'}}
+
 class FakeNode:
     """Fake Node object for testing"""
     def __init__(self):
@@ -152,6 +155,7 @@ class TestSrcScan(unittest.TestCase):
                           'nvidia,tegra20-i2c-dvc': 'TYPE_DVC'}, drv.compat)
         self.assertEqual('i2c_bus', drv.priv)
         self.assertEqual(1, len(scan._drivers))
+        self.assertEqual({}, scan._warnings)
 
     def test_normalized_name(self):
         """Test operation of get_normalized_compat_name()"""
@@ -171,8 +175,8 @@ class TestSrcScan(unittest.TestCase):
         self.assertEqual([], aliases)
         self.assertEqual(1, len(scan._missing_drivers))
         self.assertEqual({'rockchip_rk3288_grf'}, scan._missing_drivers)
-            #'WARNING: the driver rockchip_rk3288_grf was not found in the driver list',
-            #stdout.getvalue().strip())
+        self.assertEqual('', stdout.getvalue().strip())
+        self.assertEqual(EXPECT_WARN, scan._warnings)
 
         i2c = 'I2C_UCLASS'
         compat = {'rockchip,rk3288-grf': 'ROCKCHIP_SYSCON_GRF',
@@ -189,6 +193,7 @@ class TestSrcScan(unittest.TestCase):
         self.assertEqual('', stdout.getvalue().strip())
         self.assertEqual('rockchip_rk3288_grf', name)
         self.assertEqual([], aliases)
+        self.assertEqual(EXPECT_WARN, scan._warnings)
 
         prop.value = 'rockchip,rk3288-srf'
         with test_util.capture_sys_output() as (stdout, _):
@@ -196,6 +201,7 @@ class TestSrcScan(unittest.TestCase):
         self.assertEqual('', stdout.getvalue().strip())
         self.assertEqual('rockchip_rk3288_grf', name)
         self.assertEqual(['rockchip_rk3288_srf'], aliases)
+        self.assertEqual(EXPECT_WARN, scan._warnings)
 
     def test_scan_errors(self):
         """Test detection of scanning errors"""
@@ -490,3 +496,126 @@ U_BOOT_DRIVER(%s) = {
         self.assertEqual(3, seq)
         self.assertEqual({'mypath': 3}, uc.alias_path_to_num)
         self.assertEqual({2: node, 3: node}, uc.alias_num_to_node)
+
+    def test_scan_warnings(self):
+        """Test detection of scanning warnings"""
+        buff = '''
+static const struct udevice_id tegra_i2c_ids[] = {
+	{ .compatible = "nvidia,tegra114-i2c", .data = TYPE_114 },
+	{ }
+};
+
+U_BOOT_DRIVER(i2c_tegra) = {
+	.name	= "i2c_tegra",
+	.id	= UCLASS_I2C,
+	.of_match = tegra_i2c_ids + 1,
+};
+'''
+        # The '+ 1' above should generate a warning
+
+        prop = FakeProp()
+        prop.name = 'compatible'
+        prop.value = 'rockchip,rk3288-grf'
+        node = FakeNode()
+        node.props = {'compatible': prop}
+
+        # get_normalized_compat_name() uses this to check for root node
+        node.parent = FakeNode()
+
+        scan = src_scan.Scanner(None, None)
+        scan._parse_driver('file.c', buff)
+        self.assertEqual(
+            {'i2c_tegra':
+                 {"file.c: Warning: unexpected suffix ' + 1' on .of_match line for compat 'tegra_i2c_ids'"}},
+            scan._warnings)
+
+        tprop = FakeProp()
+        tprop.name = 'compatible'
+        tprop.value = 'nvidia,tegra114-i2c'
+        tnode = FakeNode()
+        tnode.props = {'compatible': tprop}
+
+        # get_normalized_compat_name() uses this to check for root node
+        tnode.parent = FakeNode()
+
+        with test_util.capture_sys_output() as (stdout, _):
+            scan.get_normalized_compat_name(node)
+            scan.get_normalized_compat_name(tnode)
+        self.assertEqual('', stdout.getvalue().strip())
+
+        self.assertEqual(2, len(scan._missing_drivers))
+        self.assertEqual({'rockchip_rk3288_grf', 'nvidia_tegra114_i2c'},
+                         scan._missing_drivers)
+        with test_util.capture_sys_output() as (stdout, _):
+            scan.show_warnings()
+        self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
+
+        # This should show just the rockchip warning, since the tegra driver
+        # is not in self._missing_drivers
+        scan._missing_drivers.remove('nvidia_tegra114_i2c')
+        with test_util.capture_sys_output() as (stdout, _):
+            scan.show_warnings()
+        self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
+        self.assertNotIn('tegra_i2c_ids', stdout.getvalue())
+
+        # Do a similar thing with used drivers. By marking the tegra driver as
+        # used, the warning related to that driver will be shown
+        drv = scan._drivers['i2c_tegra']
+        drv.used = True
+        with test_util.capture_sys_output() as (stdout, _):
+            scan.show_warnings()
+        self.assertIn('rockchip_rk3288_grf', stdout.getvalue())
+        self.assertIn('tegra_i2c_ids', stdout.getvalue())
+
+        # Add a warning to make sure multiple warnings are shown
+        scan._warnings['i2c_tegra'].update(
+            scan._warnings['nvidia_tegra114_i2c'])
+        del scan._warnings['nvidia_tegra114_i2c']
+        with test_util.capture_sys_output() as (stdout, _):
+            scan.show_warnings()
+        self.assertEqual('''i2c_tegra: WARNING: the driver nvidia_tegra114_i2c was not found in the driver list
+         : file.c: Warning: unexpected suffix ' + 1' on .of_match line for compat 'tegra_i2c_ids'
+
+rockchip_rk3288_grf: WARNING: the driver rockchip_rk3288_grf was not found in the driver list
+
+''',
+            stdout.getvalue())
+        self.assertIn('tegra_i2c_ids', stdout.getvalue())
+
+    def scan_uclass_warning(self):
+        """Test a missing .uclass in the driver"""
+        buff = '''
+static const struct udevice_id tegra_i2c_ids[] = {
+	{ .compatible = "nvidia,tegra114-i2c", .data = TYPE_114 },
+	{ }
+};
+
+U_BOOT_DRIVER(i2c_tegra) = {
+	.name	= "i2c_tegra",
+	.of_match = tegra_i2c_ids,
+};
+'''
+        scan = src_scan.Scanner(None, None)
+        scan._parse_driver('file.c', buff)
+        self.assertEqual(
+            {'i2c_tegra': {'Missing .uclass in file.c'}},
+            scan._warnings)
+
+    def scan_compat_warning(self):
+        """Test a missing .compatible in the driver"""
+        buff = '''
+static const struct udevice_id tegra_i2c_ids[] = {
+	{ .compatible = "nvidia,tegra114-i2c", .data = TYPE_114 },
+	{ }
+};
+
+U_BOOT_DRIVER(i2c_tegra) = {
+	.name	= "i2c_tegra",
+	.id	= UCLASS_I2C,
+};
+'''
+        scan = src_scan.Scanner(None, None)
+        scan._parse_driver('file.c', buff)
+        self.assertEqual(
+            {'i2c_tegra': {'Missing .compatible in file.c'}},
+            scan._warnings)
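[Editor's note] Several hunks in this commit (collection.py and section.py above, cros_subprocess.py below) replace b'' accumulators with bytearray(). A sketch of the reasoning: bytes objects are immutable, so each += generally copies the whole accumulated buffer (quadratic cost overall), whereas bytearray supports amortized O(1) in-place extension:

    chunks = [b'x' * 1024] * 1000

    def concat_bytes():
        data = b''
        for chunk in chunks:    # copies the accumulated data each time
            data += chunk
        return data

    def concat_bytearray():
        data = bytearray()
        for chunk in chunks:    # extends in place
            data += chunk
        return bytes(data)

    assert concat_bytes() == concat_bytearray()
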
diff --git a/tools/patman/cros_subprocess.py b/tools/patman/cros_subprocess.py
index efd0a5aaf7..fdd5138685 100644
--- a/tools/patman/cros_subprocess.py
+++ b/tools/patman/cros_subprocess.py
@@ -169,11 +169,11 @@ class Popen(subprocess.Popen):
             self.stdin.close()
         if self.stdout:
             read_set.append(self.stdout)
-            stdout = b''
+            stdout = bytearray()
         if self.stderr and self.stderr != self.stdout:
             read_set.append(self.stderr)
-            stderr = b''
-        combined = b''
+            stderr = bytearray()
+        combined = bytearray()
 
         input_offset = 0
         while read_set or write_set:
diff --git a/tools/patman/tools.py b/tools/patman/tools.py
index ec95a543bd..877e37cd8d 100644
--- a/tools/patman/tools.py
+++ b/tools/patman/tools.py
@@ -466,6 +466,9 @@ def Compress(indata, algo, with_header=True):
     This requires 'lz4' and 'lzma_alone' tools. It also requires an output
     directory to be previously set up, by calling PrepareOutputDir().
 
+    Care is taken to use unique temporary files so that this function can be
+    called from multiple threads.
+
     Args:
         indata: Input data to compress
         algo: Algorithm to use ('none', 'gzip', 'lz4' or 'lzma')
@@ -475,14 +478,16 @@ def Compress(indata, algo, with_header=True):
     """
     if algo == 'none':
         return indata
-    fname = GetOutputFilename('%s.comp.tmp' % algo)
+    fname = tempfile.NamedTemporaryFile(prefix='%s.comp.tmp' % algo,
+                                        dir=outdir).name
     WriteFile(fname, indata)
     if algo == 'lz4':
         data = Run('lz4', '--no-frame-crc', '-B4', '-5', '-c', fname,
                    binary=True)
     # cbfstool uses a very old version of lzma
     elif algo == 'lzma':
-        outfname = GetOutputFilename('%s.comp.otmp' % algo)
+        outfname = tempfile.NamedTemporaryFile(prefix='%s.comp.otmp' % algo,
+                                               dir=outdir).name
        Run('lzma_alone', 'e', fname, outfname, '-lc1', '-lp0', '-pb0', '-d8')
        data = ReadFile(outfname)
    elif algo == 'gzip':
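[Editor's note] A sketch of the thread-safety idea behind the Compress() change above: a fixed output filename races when several threads compress at once, whereas tempfile.NamedTemporaryFile hands each caller a freshly generated path. As in the patch, only .name is kept, so the file is not held open and the caller writes to that path itself (the helper name below is invented):

    import tempfile

    def unique_tmpname(algo, outdir):
        """Return a per-call temporary filename inside outdir."""
        return tempfile.NamedTemporaryFile(prefix='%s.comp.tmp' % algo,
                                           dir=outdir).name

    first = unique_tmpname('lz4', '/tmp')
    second = unique_tmpname('lz4', '/tmp')
    assert first != second   # distinct names for concurrent callers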