Mirror of https://github.com/eledio-devices/thirdparty-littlefs.git (synced 2025-10-31 00:32:38 +01:00)

	Merge pull request #658 from littlefs-project/no-recursion
Restructure littlefs to not use recursion, measure stack usage

scripts/code.py (158 lines changed)
							| @@ -15,7 +15,7 @@ import csv | ||||
| import collections as co | ||||
|  | ||||
|  | ||||
| OBJ_PATHS = ['*.o', 'bd/*.o'] | ||||
| OBJ_PATHS = ['*.o'] | ||||
|  | ||||
| def collect(paths, **args): | ||||
|     results = co.defaultdict(lambda: 0) | ||||
| @@ -31,7 +31,8 @@ def collect(paths, **args): | ||||
|         proc = sp.Popen(cmd, | ||||
|             stdout=sp.PIPE, | ||||
|             stderr=sp.PIPE if not args.get('verbose') else None, | ||||
|             universal_newlines=True) | ||||
|             universal_newlines=True, | ||||
|             errors='replace') | ||||
|         for line in proc.stdout: | ||||
|             m = pattern.match(line) | ||||
|             if m: | ||||
| @@ -48,16 +49,30 @@ def collect(paths, **args): | ||||
|         # map to source files | ||||
|         if args.get('build_dir'): | ||||
|             file = re.sub('%s/*' % re.escape(args['build_dir']), '', file) | ||||
|         # replace .o with .c, different scripts report .o/.c, we need to | ||||
|         # choose one if we want to deduplicate csv files | ||||
|         file = re.sub('\.o$', '.c', file) | ||||
|         # discard internal functions | ||||
|         if func.startswith('__'): | ||||
|             continue | ||||
|         if not args.get('everything'): | ||||
|             if func.startswith('__'): | ||||
|                 continue | ||||
|         # discard .8449 suffixes created by optimizer | ||||
|         func = re.sub('\.[0-9]+', '', func) | ||||
|  | ||||
|         flat_results.append((file, func, size)) | ||||
|  | ||||
|     return flat_results | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find sizes | ||||
|     if not args.get('use', None): | ||||
|         # find .o files | ||||
| @@ -75,13 +90,14 @@ def main(**args): | ||||
|  | ||||
|         results = collect(paths, **args) | ||||
|     else: | ||||
|         with open(args['use']) as f: | ||||
|         with openio(args['use']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             results = [ | ||||
|                 (   result['file'], | ||||
|                     result['function'], | ||||
|                     int(result['size'])) | ||||
|                 for result in r] | ||||
|                     result['name'], | ||||
|                     int(result['code_size'])) | ||||
|                 for result in r | ||||
|                 if result.get('code_size') not in {None, ''}] | ||||
|  | ||||
|     total = 0 | ||||
|     for _, _, size in results: | ||||
| @@ -89,13 +105,17 @@ def main(**args): | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         with open(args['diff']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             prev_results = [ | ||||
|                 (   result['file'], | ||||
|                     result['function'], | ||||
|                     int(result['size'])) | ||||
|                 for result in r] | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 prev_results = [ | ||||
|                     (   result['file'], | ||||
|                         result['name'], | ||||
|                         int(result['code_size'])) | ||||
|                     for result in r | ||||
|                     if result.get('code_size') not in {None, ''}] | ||||
|         except FileNotFoundError: | ||||
|             prev_results = [] | ||||
|  | ||||
|         prev_total = 0 | ||||
|         for _, _, size in prev_results: | ||||
| @@ -103,14 +123,34 @@ def main(**args): | ||||
|  | ||||
|     # write results to CSV | ||||
|     if args.get('output'): | ||||
|         with open(args['output'], 'w') as f: | ||||
|             w = csv.writer(f) | ||||
|             w.writerow(['file', 'function', 'size']) | ||||
|             for file, func, size in sorted(results): | ||||
|                 w.writerow((file, func, size)) | ||||
|         merged_results = co.defaultdict(lambda: {}) | ||||
|         other_fields = [] | ||||
|  | ||||
|         # merge? | ||||
|         if args.get('merge'): | ||||
|             try: | ||||
|                 with openio(args['merge']) as f: | ||||
|                     r = csv.DictReader(f) | ||||
|                     for result in r: | ||||
|                         file = result.pop('file', '') | ||||
|                         func = result.pop('name', '') | ||||
|                         result.pop('code_size', None) | ||||
|                         merged_results[(file, func)] = result | ||||
|                         other_fields = result.keys() | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         for file, func, size in results: | ||||
|             merged_results[(file, func)]['code_size'] = size | ||||
|  | ||||
|         with openio(args['output'], 'w') as f: | ||||
|             w = csv.DictWriter(f, ['file', 'name', *other_fields, 'code_size']) | ||||
|             w.writeheader() | ||||
|             for (file, func), result in sorted(merged_results.items()): | ||||
|                 w.writerow({'file': file, 'name': func, **result}) | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='function'): | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: 0) | ||||
|         for file, func, size in results: | ||||
|             entry = (file if by == 'file' else func) | ||||
| @@ -126,45 +166,67 @@ def main(**args): | ||||
|             diff[name] = (old, new, new-old, (new-old)/old if old else 1.0) | ||||
|         return diff | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1], x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def sorted_diff_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1][1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1][1], x)) | ||||
|         else: | ||||
|             return sorted(entries, key=lambda x: (-x[1][3], x)) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s %7s' % (by, 'size')) | ||||
|         else: | ||||
|             print('%-36s %7s %7s %7s' % (by, 'old', 'new', 'diff')) | ||||
|  | ||||
|     def print_entries(by='function'): | ||||
|     def print_entry(name, size): | ||||
|         print("%-36s %7d" % (name, size)) | ||||
|  | ||||
|     def print_diff_entry(name, old, new, diff, ratio): | ||||
|         print("%-36s %7s %7s %+7d%s" % (name, | ||||
|             old or "-", | ||||
|             new or "-", | ||||
|             diff, | ||||
|             ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             for name, size in sorted(entries.items()): | ||||
|                 print("%-36s %7d" % (name, size)) | ||||
|             for name, size in sorted_entries(entries.items()): | ||||
|                 print_entry(name, size) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             diff = diff_entries(prev_entries, entries) | ||||
|             print_header(by='%s (%d added, %d removed)' % (by, | ||||
|                 sum(1 for old, _, _, _ in diff.values() if not old), | ||||
|                 sum(1 for _, new, _, _ in diff.values() if not new))) | ||||
|             for name, (old, new, diff, ratio) in sorted(diff.items(), | ||||
|                     key=lambda x: (-x[1][3], x)): | ||||
|             for name, (old, new, diff, ratio) in sorted_diff_entries( | ||||
|                     diff.items()): | ||||
|                 if ratio or args.get('all'): | ||||
|                     print("%-36s %7s %7s %+7d%s" % (name, | ||||
|                         old or "-", | ||||
|                         new or "-", | ||||
|                         diff, | ||||
|                         ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|                     print_diff_entry(name, old, new, diff, ratio) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print("%-36s %7d" % ('TOTAL', total)) | ||||
|             print_entry('TOTAL', total) | ||||
|         else: | ||||
|             ratio = (total-prev_total)/prev_total if prev_total else 1.0 | ||||
|             print("%-36s %7s %7s %+7d%s" % ( | ||||
|                 'TOTAL', | ||||
|                 prev_total if prev_total else '-', | ||||
|                 total if total else '-', | ||||
|             ratio = (0.0 if not prev_total and not total | ||||
|                 else 1.0 if not prev_total | ||||
|                 else (total-prev_total)/prev_total) | ||||
|             print_diff_entry('TOTAL', | ||||
|                 prev_total, total, | ||||
|                 total-prev_total, | ||||
|                 ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|                 ratio) | ||||
|  | ||||
|     if args.get('quiet'): | ||||
|         pass | ||||
| @@ -175,7 +237,7 @@ def main(**args): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='function') | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
| if __name__ == "__main__": | ||||
| @@ -188,22 +250,30 @@ if __name__ == "__main__": | ||||
|             or a list of paths. Defaults to %r." % OBJ_PATHS) | ||||
|     parser.add_argument('-v', '--verbose', action='store_true', | ||||
|         help="Output commands that run behind the scenes.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('-o', '--output', | ||||
|         help="Specify CSV file to store results.") | ||||
|     parser.add_argument('-u', '--use', | ||||
|         help="Don't compile and find code sizes, instead use this CSV file.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff code size against.") | ||||
|     parser.add_argument('-m', '--merge', | ||||
|         help="Merge with an existing CSV file when writing to output.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all functions, not just the ones that changed.") | ||||
|     parser.add_argument('--files', action='store_true', | ||||
|     parser.add_argument('-A', '--everything', action='store_true', | ||||
|         help="Include builtin and libc specific symbols.") | ||||
|     parser.add_argument('-s', '--size-sort', action='store_true', | ||||
|         help="Sort by size.") | ||||
|     parser.add_argument('-S', '--reverse-size-sort', action='store_true', | ||||
|         help="Sort by size, but backwards.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level code sizes. Note this does not include padding! " | ||||
|             "So sizes may differ from other tools.") | ||||
|     parser.add_argument('-s', '--summary', action='store_true', | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the total code size.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('--type', default='tTrRdDbB', | ||||
|     parser.add_argument('--type', default='tTrRdD', | ||||
|         help="Type of symbols to report, this uses the same single-character " | ||||
|             "type-names emitted by nm. Defaults to %(default)r.") | ||||
|     parser.add_argument('--nm-tool', default=['nm'], type=lambda x: x.split(), | ||||
|   | ||||
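The headline change to code.py above is the new -m/--merge flag: before writing the output CSV, the script reads an existing CSV, carries through every column it does not own, and only overwrites its own code_size field. That is what lets code.py, coverage.py, data.py, and stack.py accumulate into one shared results file. A minimal sketch of that merge step, using hypothetical rows and the column names from the diff:

    import collections as co

    # hypothetical rows another script (say stack.py) already wrote to the CSV
    existing = [
        {'file': 'lfs.c', 'name': 'lfs_mount', 'stack_frame': '80'},
    ]

    # new measurements from this script
    new_sizes = [('lfs.c', 'lfs_mount', 512), ('lfs.c', 'lfs_format', 384)]

    merged = co.defaultdict(dict)
    other_fields = []
    for row in existing:
        file, func = row.pop('file', ''), row.pop('name', '')
        row.pop('code_size', None)     # drop any stale value this script owns
        merged[(file, func)] = row
        other_fields = row.keys()      # extra columns to keep in the header

    for file, func, size in new_sizes:
        merged[(file, func)]['code_size'] = size

    for (file, func), row in sorted(merged.items()):
        print({'file': file, 'name': func, **row})
    # {'file': 'lfs.c', 'name': 'lfs_format', 'code_size': 384}
    # {'file': 'lfs.c', 'name': 'lfs_mount', 'stack_frame': '80', 'code_size': 512}

The next hunks apply the same restructuring (the openio helper, the renamed name/coverage_* columns, -m/--merge, and the new sort flags) to scripts/coverage.py: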
| @@ -55,8 +55,9 @@ def collect(paths, **args): | ||||
|     for (file, func), (hits, count) in reduced_funcs.items(): | ||||
|         # discard internal/testing functions (test_* injected with | ||||
|         # internal testing) | ||||
|         if func.startswith('__') or func.startswith('test_'): | ||||
|             continue | ||||
|         if not args.get('everything'): | ||||
|             if func.startswith('__') or func.startswith('test_'): | ||||
|                 continue | ||||
|         # discard .8449 suffixes created by optimizer | ||||
|         func = re.sub('\.[0-9]+', '', func) | ||||
|         results.append((file, func, hits, count)) | ||||
| @@ -65,6 +66,15 @@ def collect(paths, **args): | ||||
|  | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find coverage | ||||
|     if not args.get('use'): | ||||
|         # find *.info files | ||||
| @@ -82,14 +92,16 @@ def main(**args): | ||||
|  | ||||
|         results = collect(paths, **args) | ||||
|     else: | ||||
|         with open(args['use']) as f: | ||||
|         with openio(args['use']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             results = [ | ||||
|                 (   result['file'], | ||||
|                     result['function'], | ||||
|                     int(result['hits']), | ||||
|                     int(result['count'])) | ||||
|                 for result in r] | ||||
|                     result['name'], | ||||
|                     int(result['coverage_hits']), | ||||
|                     int(result['coverage_count'])) | ||||
|                 for result in r | ||||
|                 if result.get('coverage_hits') not in {None, ''} | ||||
|                 if result.get('coverage_count') not in {None, ''}] | ||||
|  | ||||
|     total_hits, total_count = 0, 0 | ||||
|     for _, _, hits, count in results: | ||||
| @@ -98,14 +110,19 @@ def main(**args): | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         with open(args['diff']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             prev_results = [ | ||||
|                 (   result['file'], | ||||
|                     result['function'], | ||||
|                     int(result['hits']), | ||||
|                     int(result['count'])) | ||||
|                 for result in r] | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 prev_results = [ | ||||
|                     (   result['file'], | ||||
|                         result['name'], | ||||
|                         int(result['coverage_hits']), | ||||
|                         int(result['coverage_count'])) | ||||
|                     for result in r | ||||
|                     if result.get('coverage_hits') not in {None, ''} | ||||
|                     if result.get('coverage_count') not in {None, ''}] | ||||
|         except FileNotFoundError: | ||||
|             prev_results = [] | ||||
|  | ||||
|         prev_total_hits, prev_total_count = 0, 0 | ||||
|         for _, _, hits, count in prev_results: | ||||
| @@ -114,14 +131,36 @@ def main(**args): | ||||
|  | ||||
|     # write results to CSV | ||||
|     if args.get('output'): | ||||
|         with open(args['output'], 'w') as f: | ||||
|             w = csv.writer(f) | ||||
|             w.writerow(['file', 'function', 'hits', 'count']) | ||||
|             for file, func, hits, count in sorted(results): | ||||
|                 w.writerow((file, func, hits, count)) | ||||
|         merged_results = co.defaultdict(lambda: {}) | ||||
|         other_fields = [] | ||||
|  | ||||
|         # merge? | ||||
|         if args.get('merge'): | ||||
|             try: | ||||
|                 with openio(args['merge']) as f: | ||||
|                     r = csv.DictReader(f) | ||||
|                     for result in r: | ||||
|                         file = result.pop('file', '') | ||||
|                         func = result.pop('name', '') | ||||
|                         result.pop('coverage_hits', None) | ||||
|                         result.pop('coverage_count', None) | ||||
|                         merged_results[(file, func)] = result | ||||
|                         other_fields = result.keys() | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         for file, func, hits, count in results: | ||||
|             merged_results[(file, func)]['coverage_hits'] = hits | ||||
|             merged_results[(file, func)]['coverage_count'] = count | ||||
|  | ||||
|         with openio(args['output'], 'w') as f: | ||||
|             w = csv.DictWriter(f, ['file', 'name', *other_fields, 'coverage_hits', 'coverage_count']) | ||||
|             w.writeheader() | ||||
|             for (file, func), result in sorted(merged_results.items()): | ||||
|                 w.writerow({'file': file, 'name': func, **result}) | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='function'): | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: (0, 0)) | ||||
|         for file, func, hits, count in results: | ||||
|             entry = (file if by == 'file' else func) | ||||
| @@ -147,23 +186,59 @@ def main(**args): | ||||
|                     - (old_hits/old_count if old_count else 1.0))) | ||||
|         return diff | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('coverage_sort'): | ||||
|             return sorted(entries, key=lambda x: (-(x[1][0]/x[1][1] if x[1][1] else -1), x)) | ||||
|         elif args.get('reverse_coverage_sort'): | ||||
|             return sorted(entries, key=lambda x: (+(x[1][0]/x[1][1] if x[1][1] else -1), x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def sorted_diff_entries(entries): | ||||
|         if args.get('coverage_sort'): | ||||
|             return sorted(entries, key=lambda x: (-(x[1][2]/x[1][3] if x[1][3] else -1), x)) | ||||
|         elif args.get('reverse_coverage_sort'): | ||||
|             return sorted(entries, key=lambda x: (+(x[1][2]/x[1][3] if x[1][3] else -1), x)) | ||||
|         else: | ||||
|             return sorted(entries, key=lambda x: (-x[1][6], x)) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s %19s' % (by, 'hits/line')) | ||||
|         else: | ||||
|             print('%-36s %19s %19s %11s' % (by, 'old', 'new', 'diff')) | ||||
|  | ||||
|     def print_entries(by='function'): | ||||
|     def print_entry(name, hits, count): | ||||
|         print("%-36s %11s %7s" % (name, | ||||
|             '%d/%d' % (hits, count) | ||||
|                 if count else '-', | ||||
|             '%.1f%%' % (100*hits/count) | ||||
|                 if count else '-')) | ||||
|  | ||||
|     def print_diff_entry(name, | ||||
|             old_hits, old_count, | ||||
|             new_hits, new_count, | ||||
|             diff_hits, diff_count, | ||||
|             ratio): | ||||
|         print("%-36s %11s %7s %11s %7s %11s%s" % (name, | ||||
|             '%d/%d' % (old_hits, old_count) | ||||
|                 if old_count else '-', | ||||
|             '%.1f%%' % (100*old_hits/old_count) | ||||
|                 if old_count else '-', | ||||
|             '%d/%d' % (new_hits, new_count) | ||||
|                 if new_count else '-', | ||||
|             '%.1f%%' % (100*new_hits/new_count) | ||||
|                 if new_count else '-', | ||||
|             '%+d/%+d' % (diff_hits, diff_count), | ||||
|             ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             for name, (hits, count) in sorted(entries.items()): | ||||
|                 print("%-36s %11s %7s" % (name, | ||||
|                     '%d/%d' % (hits, count) | ||||
|                         if count else '-', | ||||
|                     '%.1f%%' % (100*hits/count) | ||||
|                         if count else '-')) | ||||
|             for name, (hits, count) in sorted_entries(entries.items()): | ||||
|                 print_entry(name, hits, count) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             diff = diff_entries(prev_entries, entries) | ||||
| @@ -173,45 +248,28 @@ def main(**args): | ||||
|             for name, ( | ||||
|                     old_hits, old_count, | ||||
|                     new_hits, new_count, | ||||
|                     diff_hits, diff_count, ratio) in sorted(diff.items(), | ||||
|                         key=lambda x: (-x[1][6], x)): | ||||
|                     diff_hits, diff_count, ratio) in sorted_diff_entries( | ||||
|                         diff.items()): | ||||
|                 if ratio or args.get('all'): | ||||
|                     print("%-36s %11s %7s %11s %7s %11s%s" % (name, | ||||
|                         '%d/%d' % (old_hits, old_count) | ||||
|                             if old_count else '-', | ||||
|                         '%.1f%%' % (100*old_hits/old_count) | ||||
|                             if old_count else '-', | ||||
|                         '%d/%d' % (new_hits, new_count) | ||||
|                             if new_count else '-', | ||||
|                         '%.1f%%' % (100*new_hits/new_count) | ||||
|                             if new_count else '-', | ||||
|                         '%+d/%+d' % (diff_hits, diff_count), | ||||
|                         ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|                     print_diff_entry(name, | ||||
|                         old_hits, old_count, | ||||
|                         new_hits, new_count, | ||||
|                         diff_hits, diff_count, | ||||
|                         ratio) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print("%-36s %11s %7s" % ('TOTAL', | ||||
|                 '%d/%d' % (total_hits, total_count) | ||||
|                     if total_count else '-', | ||||
|                 '%.1f%%' % (100*total_hits/total_count) | ||||
|                     if total_count else '-')) | ||||
|             print_entry('TOTAL', total_hits, total_count) | ||||
|         else: | ||||
|             ratio = ((total_hits/total_count | ||||
|                     if total_count else 1.0) | ||||
|                 - (prev_total_hits/prev_total_count | ||||
|                     if prev_total_count else 1.0)) | ||||
|             print("%-36s %11s %7s %11s %7s %11s%s" % ('TOTAL', | ||||
|                 '%d/%d' % (prev_total_hits, prev_total_count) | ||||
|                     if prev_total_count else '-', | ||||
|                 '%.1f%%' % (100*prev_total_hits/prev_total_count) | ||||
|                     if prev_total_count else '-', | ||||
|                 '%d/%d' % (total_hits, total_count) | ||||
|                     if total_count else '-', | ||||
|                 '%.1f%%' % (100*total_hits/total_count) | ||||
|                     if total_count else '-', | ||||
|                 '%+d/%+d' % (total_hits-prev_total_hits, | ||||
|                     total_count-prev_total_count), | ||||
|                 ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|             print_diff_entry('TOTAL', | ||||
|                 prev_total_hits, prev_total_count, | ||||
|                 total_hits, total_count, | ||||
|                 total_hits-prev_total_hits, total_count-prev_total_count, | ||||
|                 ratio) | ||||
|  | ||||
|     if args.get('quiet'): | ||||
|         pass | ||||
| @@ -222,7 +280,7 @@ def main(**args): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='function') | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
| if __name__ == "__main__": | ||||
| @@ -243,12 +301,23 @@ if __name__ == "__main__": | ||||
|         help="Don't do any work, instead use this CSV file.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff code size against.") | ||||
|     parser.add_argument('-m', '--merge', | ||||
|         help="Merge with an existing CSV file when writing to output.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all functions, not just the ones that changed.") | ||||
|     parser.add_argument('--files', action='store_true', | ||||
|     parser.add_argument('-A', '--everything', action='store_true', | ||||
|         help="Include builtin and libc specific symbols.") | ||||
|     parser.add_argument('-s', '--coverage-sort', action='store_true', | ||||
|         help="Sort by coverage.") | ||||
|     parser.add_argument('-S', '--reverse-coverage-sort', action='store_true', | ||||
|         help="Sort by coverage, but backwards.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level coverage.") | ||||
|     parser.add_argument('-s', '--summary', action='store_true', | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the total coverage.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('--build-dir', | ||||
|         help="Specify the relative build directory. Used to map object files \ | ||||
|             to the correct source files.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
|   | ||||
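Both of these scripts (and the two new ones below) gain the same small openio helper, so the CSV arguments can be given as '-' to read from stdin or write to stdout. The file descriptor is dup'd before being wrapped, which lets the with-block close its handle without closing the process's real stdin or stdout. A standalone sketch of that behavior, outside the diff:

    import os, sys

    def openio(path, mode='r'):
        # '-' means stdin/stdout; dup the fd so closing the wrapper
        # doesn't close the real stream
        if path == '-':
            if 'r' in mode:
                return os.fdopen(os.dup(sys.stdin.fileno()), 'r')
            else:
                return os.fdopen(os.dup(sys.stdout.fileno()), 'w')
        else:
            return open(path, mode)

    with openio('-', 'w') as f:         # writes to stdout
        f.write('file,name,code_size\n')
    print('stdout still usable here')   # only the dup'd descriptor was closed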
							
								
								
									
scripts/data.py (new executable file, 283 lines)
							| @@ -0,0 +1,283 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # | ||||
| # Script to find data size at the function level. Basically just a bit wrapper | ||||
| # around nm with some extra conveniences for comparing builds. Heavily inspired | ||||
| # by Linux's Bloat-O-Meter. | ||||
| # | ||||
|  | ||||
| import os | ||||
| import glob | ||||
| import itertools as it | ||||
| import subprocess as sp | ||||
| import shlex | ||||
| import re | ||||
| import csv | ||||
| import collections as co | ||||
|  | ||||
|  | ||||
| OBJ_PATHS = ['*.o'] | ||||
|  | ||||
| def collect(paths, **args): | ||||
|     results = co.defaultdict(lambda: 0) | ||||
|     pattern = re.compile( | ||||
|         '^(?P<size>[0-9a-fA-F]+)' + | ||||
|         ' (?P<type>[%s])' % re.escape(args['type']) + | ||||
|         ' (?P<func>.+?)$') | ||||
|     for path in paths: | ||||
|         # note nm-tool may contain extra args | ||||
|         cmd = args['nm_tool'] + ['--size-sort', path] | ||||
|         if args.get('verbose'): | ||||
|             print(' '.join(shlex.quote(c) for c in cmd)) | ||||
|         proc = sp.Popen(cmd, | ||||
|             stdout=sp.PIPE, | ||||
|             stderr=sp.PIPE if not args.get('verbose') else None, | ||||
|             universal_newlines=True, | ||||
|             errors='replace') | ||||
|         for line in proc.stdout: | ||||
|             m = pattern.match(line) | ||||
|             if m: | ||||
|                 results[(path, m.group('func'))] += int(m.group('size'), 16) | ||||
|         proc.wait() | ||||
|         if proc.returncode != 0: | ||||
|             if not args.get('verbose'): | ||||
|                 for line in proc.stderr: | ||||
|                     sys.stdout.write(line) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|     flat_results = [] | ||||
|     for (file, func), size in results.items(): | ||||
|         # map to source files | ||||
|         if args.get('build_dir'): | ||||
|             file = re.sub('%s/*' % re.escape(args['build_dir']), '', file) | ||||
|         # replace .o with .c, different scripts report .o/.c, we need to | ||||
|         # choose one if we want to deduplicate csv files | ||||
|         file = re.sub('\.o$', '.c', file) | ||||
|         # discard internal functions | ||||
|         if not args.get('everything'): | ||||
|             if func.startswith('__'): | ||||
|                 continue | ||||
|         # discard .8449 suffixes created by optimizer | ||||
|         func = re.sub('\.[0-9]+', '', func) | ||||
|         flat_results.append((file, func, size)) | ||||
|  | ||||
|     return flat_results | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find sizes | ||||
|     if not args.get('use', None): | ||||
|         # find .o files | ||||
|         paths = [] | ||||
|         for path in args['obj_paths']: | ||||
|             if os.path.isdir(path): | ||||
|                 path = path + '/*.o' | ||||
|  | ||||
|             for path in glob.glob(path): | ||||
|                 paths.append(path) | ||||
|  | ||||
|         if not paths: | ||||
|             print('no .obj files found in %r?' % args['obj_paths']) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|         results = collect(paths, **args) | ||||
|     else: | ||||
|         with openio(args['use']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             results = [ | ||||
|                 (   result['file'], | ||||
|                     result['name'], | ||||
|                     int(result['data_size'])) | ||||
|                 for result in r | ||||
|                 if result.get('data_size') not in {None, ''}] | ||||
|  | ||||
|     total = 0 | ||||
|     for _, _, size in results: | ||||
|         total += size | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 prev_results = [ | ||||
|                     (   result['file'], | ||||
|                         result['name'], | ||||
|                         int(result['data_size'])) | ||||
|                     for result in r | ||||
|                     if result.get('data_size') not in {None, ''}] | ||||
|         except FileNotFoundError: | ||||
|             prev_results = [] | ||||
|  | ||||
|         prev_total = 0 | ||||
|         for _, _, size in prev_results: | ||||
|             prev_total += size | ||||
|  | ||||
|     # write results to CSV | ||||
|     if args.get('output'): | ||||
|         merged_results = co.defaultdict(lambda: {}) | ||||
|         other_fields = [] | ||||
|  | ||||
|         # merge? | ||||
|         if args.get('merge'): | ||||
|             try: | ||||
|                 with openio(args['merge']) as f: | ||||
|                     r = csv.DictReader(f) | ||||
|                     for result in r: | ||||
|                         file = result.pop('file', '') | ||||
|                         func = result.pop('name', '') | ||||
|                         result.pop('data_size', None) | ||||
|                         merged_results[(file, func)] = result | ||||
|                         other_fields = result.keys() | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         for file, func, size in results: | ||||
|             merged_results[(file, func)]['data_size'] = size | ||||
|  | ||||
|         with openio(args['output'], 'w') as f: | ||||
|             w = csv.DictWriter(f, ['file', 'name', *other_fields, 'data_size']) | ||||
|             w.writeheader() | ||||
|             for (file, func), result in sorted(merged_results.items()): | ||||
|                 w.writerow({'file': file, 'name': func, **result}) | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: 0) | ||||
|         for file, func, size in results: | ||||
|             entry = (file if by == 'file' else func) | ||||
|             entries[entry] += size | ||||
|         return entries | ||||
|  | ||||
|     def diff_entries(olds, news): | ||||
|         diff = co.defaultdict(lambda: (0, 0, 0, 0)) | ||||
|         for name, new in news.items(): | ||||
|             diff[name] = (0, new, new, 1.0) | ||||
|         for name, old in olds.items(): | ||||
|             _, new, _, _ = diff[name] | ||||
|             diff[name] = (old, new, new-old, (new-old)/old if old else 1.0) | ||||
|         return diff | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1], x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def sorted_diff_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1][1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1][1], x)) | ||||
|         else: | ||||
|             return sorted(entries, key=lambda x: (-x[1][3], x)) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s %7s' % (by, 'size')) | ||||
|         else: | ||||
|             print('%-36s %7s %7s %7s' % (by, 'old', 'new', 'diff')) | ||||
|  | ||||
|     def print_entry(name, size): | ||||
|         print("%-36s %7d" % (name, size)) | ||||
|  | ||||
|     def print_diff_entry(name, old, new, diff, ratio): | ||||
|         print("%-36s %7s %7s %+7d%s" % (name, | ||||
|             old or "-", | ||||
|             new or "-", | ||||
|             diff, | ||||
|             ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             for name, size in sorted_entries(entries.items()): | ||||
|                 print_entry(name, size) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             diff = diff_entries(prev_entries, entries) | ||||
|             print_header(by='%s (%d added, %d removed)' % (by, | ||||
|                 sum(1 for old, _, _, _ in diff.values() if not old), | ||||
|                 sum(1 for _, new, _, _ in diff.values() if not new))) | ||||
|             for name, (old, new, diff, ratio) in sorted_diff_entries( | ||||
|                     diff.items()): | ||||
|                 if ratio or args.get('all'): | ||||
|                     print_diff_entry(name, old, new, diff, ratio) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print_entry('TOTAL', total) | ||||
|         else: | ||||
|             ratio = (0.0 if not prev_total and not total | ||||
|                 else 1.0 if not prev_total | ||||
|                 else (total-prev_total)/prev_total) | ||||
|             print_diff_entry('TOTAL', | ||||
|                 prev_total, total, | ||||
|                 total-prev_total, | ||||
|                 ratio) | ||||
|  | ||||
|     if args.get('quiet'): | ||||
|         pass | ||||
|     elif args.get('summary'): | ||||
|         print_header() | ||||
|         print_totals() | ||||
|     elif args.get('files'): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Find data size at the function level.") | ||||
|     parser.add_argument('obj_paths', nargs='*', default=OBJ_PATHS, | ||||
|         help="Description of where to find *.o files. May be a directory \ | ||||
|             or a list of paths. Defaults to %r." % OBJ_PATHS) | ||||
|     parser.add_argument('-v', '--verbose', action='store_true', | ||||
|         help="Output commands that run behind the scenes.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('-o', '--output', | ||||
|         help="Specify CSV file to store results.") | ||||
|     parser.add_argument('-u', '--use', | ||||
|         help="Don't compile and find data sizes, instead use this CSV file.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff data size against.") | ||||
|     parser.add_argument('-m', '--merge', | ||||
|         help="Merge with an existing CSV file when writing to output.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all functions, not just the ones that changed.") | ||||
|     parser.add_argument('-A', '--everything', action='store_true', | ||||
|         help="Include builtin and libc specific symbols.") | ||||
|     parser.add_argument('-s', '--size-sort', action='store_true', | ||||
|         help="Sort by size.") | ||||
|     parser.add_argument('-S', '--reverse-size-sort', action='store_true', | ||||
|         help="Sort by size, but backwards.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level data sizes. Note this does not include padding! " | ||||
|             "So sizes may differ from other tools.") | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the total data size.") | ||||
|     parser.add_argument('--type', default='dDbB', | ||||
|         help="Type of symbols to report, this uses the same single-character " | ||||
|             "type-names emitted by nm. Defaults to %(default)r.") | ||||
|     parser.add_argument('--nm-tool', default=['nm'], type=lambda x: x.split(), | ||||
|         help="Path to the nm tool to use.") | ||||
|     parser.add_argument('--build-dir', | ||||
|         help="Specify the relative build directory. Used to map object files \ | ||||
|             to the correct source files.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
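The new data.py above is essentially code.py pointed at a different set of nm symbol types: its --type defaults to 'dDbB' (data and bss), while code.py's default shrinks from 'tTrRdDbB' to 'tTrRdD', and it writes a data_size column instead of code_size. A small sketch of how the regex in collect() picks sizes out of nm --size-sort output, using hypothetical symbol lines:

    import re

    type_chars = 'dDbB'    # data.py's default --type
    pattern = re.compile(
        '^(?P<size>[0-9a-fA-F]+)' +
        ' (?P<type>[' + re.escape(type_chars) + '])' +
        ' (?P<func>.+?)$')

    nm_lines = [
        '00000004 b lfs_crc.table',        # hypothetical bss symbol
        '00000040 d lfs_attr_defaults',    # hypothetical data symbol
        '00000120 t lfs_dir_fetch',        # text symbol: not matched here
    ]

    sizes = {}
    for line in nm_lines:
        m = pattern.match(line)
        if m:
            sizes[m.group('func')] = int(m.group('size'), 16)

    print(sizes)    # {'lfs_crc.table': 4, 'lfs_attr_defaults': 64}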
							
								
								
									
scripts/stack.py (new executable file, 430 lines)
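The core of this script is find_limit: it walks the callgraph recovered from the *.ci files, adds the largest callee chain onto each function's own stack frame, and returns infinity as soon as it revisits a function, which is how recursion shows up as an unbounded stack limit. A standalone sketch of the idea on a hypothetical three-function callgraph:

    # hypothetical callgraph: name -> (stack frame in bytes, callees)
    graph = {
        'lfs_format': (64, {'lfs_dir_commit'}),
        'lfs_dir_commit': (112, {'lfs_dir_commit'}),   # calls itself
        'lfs_bd_read': (32, set()),
    }

    def find_limit(name, seen=None):
        seen = seen or set()
        if name not in graph:
            return 0
        frame, targets = graph[name]
        limit = 0
        for target in targets:
            if target in seen:
                return float('inf')     # cycle -> unbounded stack
            limit = max(limit, find_limit(target, seen | {target}))
        return frame + limit

    print(find_limit('lfs_bd_read'))    # 32
    print(find_limit('lfs_format'))     # inf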
									
								
							| @@ -0,0 +1,430 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # | ||||
| # Script to find stack usage at the function level. Will detect recursion and | ||||
| # report as infinite stack usage. | ||||
| # | ||||
|  | ||||
| import os | ||||
| import glob | ||||
| import itertools as it | ||||
| import re | ||||
| import csv | ||||
| import collections as co | ||||
| import math as m | ||||
|  | ||||
|  | ||||
| CI_PATHS = ['*.ci'] | ||||
|  | ||||
| def collect(paths, **args): | ||||
|     # parse the vcg format | ||||
|     k_pattern = re.compile('([a-z]+)\s*:', re.DOTALL) | ||||
|     v_pattern = re.compile('(?:"(.*?)"|([a-z]+))', re.DOTALL) | ||||
|     def parse_vcg(rest): | ||||
|         def parse_vcg(rest): | ||||
|             node = [] | ||||
|             while True: | ||||
|                 rest = rest.lstrip() | ||||
|                 m = k_pattern.match(rest) | ||||
|                 if not m: | ||||
|                     return (node, rest) | ||||
|                 k, rest = m.group(1), rest[m.end(0):] | ||||
|  | ||||
|                 rest = rest.lstrip() | ||||
|                 if rest.startswith('{'): | ||||
|                     v, rest = parse_vcg(rest[1:]) | ||||
|                     assert rest[0] == '}', "unexpected %r" % rest[0:1] | ||||
|                     rest = rest[1:] | ||||
|                     node.append((k, v)) | ||||
|                 else: | ||||
|                     m = v_pattern.match(rest) | ||||
|                     assert m, "unexpected %r" % rest[0:1] | ||||
|                     v, rest = m.group(1) or m.group(2), rest[m.end(0):] | ||||
|                     node.append((k, v)) | ||||
|  | ||||
|         node, rest = parse_vcg(rest) | ||||
|         assert rest == '', "unexpected %r" % rest[0:1] | ||||
|         return node | ||||
|  | ||||
|     # collect into functions | ||||
|     results = co.defaultdict(lambda: (None, None, 0, set())) | ||||
|     f_pattern = re.compile( | ||||
|         r'([^\\]*)\\n([^:]*)[^\\]*\\n([0-9]+) bytes \((.*)\)') | ||||
|     for path in paths: | ||||
|         with open(path) as f: | ||||
|             vcg = parse_vcg(f.read()) | ||||
|         for k, graph in vcg: | ||||
|             if k != 'graph': | ||||
|                 continue | ||||
|             for k, info in graph: | ||||
|                 if k == 'node': | ||||
|                     info = dict(info) | ||||
|                     m = f_pattern.match(info['label']) | ||||
|                     if m: | ||||
|                         function, file, size, type = m.groups() | ||||
|                         if not args.get('quiet') and type != 'static': | ||||
|                             print('warning: found non-static stack for %s (%s)' | ||||
|                                 % (function, type)) | ||||
|                         _, _, _, targets = results[info['title']] | ||||
|                         results[info['title']] = ( | ||||
|                             file, function, int(size), targets) | ||||
|                 elif k == 'edge': | ||||
|                     info = dict(info) | ||||
|                     _, _, _, targets = results[info['sourcename']] | ||||
|                     targets.add(info['targetname']) | ||||
|                 else: | ||||
|                     continue | ||||
|  | ||||
|     if not args.get('everything'): | ||||
|         for source, (s_file, s_function, _, _) in list(results.items()): | ||||
|             # discard internal functions | ||||
|             if s_file.startswith('<') or s_file.startswith('/usr/include'): | ||||
|                 del results[source] | ||||
|  | ||||
|     # find maximum stack size recursively, this requires also detecting cycles | ||||
|     # (in case of recursion) | ||||
|     def find_limit(source, seen=None): | ||||
|         seen = seen or set() | ||||
|         if source not in results: | ||||
|             return 0 | ||||
|         _, _, frame, targets = results[source] | ||||
|  | ||||
|         limit = 0 | ||||
|         for target in targets: | ||||
|             if target in seen: | ||||
|                 # found a cycle | ||||
|                 return float('inf') | ||||
|             limit_ = find_limit(target, seen | {target}) | ||||
|             limit = max(limit, limit_) | ||||
|  | ||||
|         return frame + limit | ||||
|  | ||||
|     def find_deps(targets): | ||||
|         deps = set() | ||||
|         for target in targets: | ||||
|             if target in results: | ||||
|                 t_file, t_function, _, _ = results[target] | ||||
|                 deps.add((t_file, t_function)) | ||||
|         return deps | ||||
|  | ||||
|     # flatten into a list | ||||
|     flat_results = [] | ||||
|     for source, (s_file, s_function, frame, targets) in results.items(): | ||||
|         limit = find_limit(source) | ||||
|         deps = find_deps(targets) | ||||
|         flat_results.append((s_file, s_function, frame, limit, deps)) | ||||
|  | ||||
|     return flat_results | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find sizes | ||||
|     if not args.get('use', None): | ||||
|         # find .ci files | ||||
|         paths = [] | ||||
|         for path in args['ci_paths']: | ||||
|             if os.path.isdir(path): | ||||
|                 path = path + '/*.ci' | ||||
|  | ||||
|             for path in glob.glob(path): | ||||
|                 paths.append(path) | ||||
|  | ||||
|         if not paths: | ||||
|             print('no .ci files found in %r?' % args['ci_paths']) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|         results = collect(paths, **args) | ||||
|     else: | ||||
|         with openio(args['use']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             results = [ | ||||
|                 (   result['file'], | ||||
|                     result['name'], | ||||
|                     int(result['stack_frame']), | ||||
|                     float(result['stack_limit']), # note limit can be inf | ||||
|                     set()) | ||||
|                 for result in r | ||||
|                 if result.get('stack_frame') not in {None, ''} | ||||
|                 if result.get('stack_limit') not in {None, ''}] | ||||
|  | ||||
|     total_frame = 0 | ||||
|     total_limit = 0 | ||||
|     for _, _, frame, limit, _ in results: | ||||
|         total_frame += frame | ||||
|         total_limit = max(total_limit, limit) | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 prev_results = [ | ||||
|                     (   result['file'], | ||||
|                         result['name'], | ||||
|                         int(result['stack_frame']), | ||||
|                         float(result['stack_limit']), | ||||
|                         set()) | ||||
|                     for result in r | ||||
|                     if result.get('stack_frame') not in {None, ''} | ||||
|                     if result.get('stack_limit') not in {None, ''}] | ||||
|         except FileNotFoundError: | ||||
|             prev_results = [] | ||||
|  | ||||
|         prev_total_frame = 0 | ||||
|         prev_total_limit = 0 | ||||
|         for _, _, frame, limit, _ in prev_results: | ||||
|             prev_total_frame += frame | ||||
|             prev_total_limit = max(prev_total_limit, limit) | ||||
|  | ||||
|     # write results to CSV | ||||
|     if args.get('output'): | ||||
|         merged_results = co.defaultdict(lambda: {}) | ||||
|         other_fields = [] | ||||
|  | ||||
|         # merge? | ||||
|         if args.get('merge'): | ||||
|             try: | ||||
|                 with openio(args['merge']) as f: | ||||
|                     r = csv.DictReader(f) | ||||
|                     for result in r: | ||||
|                         file = result.pop('file', '') | ||||
|                         func = result.pop('name', '') | ||||
|                         result.pop('stack_frame', None) | ||||
|                         result.pop('stack_limit', None) | ||||
|                         merged_results[(file, func)] = result | ||||
|                         other_fields = result.keys() | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         for file, func, frame, limit, _ in results: | ||||
|             merged_results[(file, func)]['stack_frame'] = frame | ||||
|             merged_results[(file, func)]['stack_limit'] = limit | ||||
|  | ||||
|         with openio(args['output'], 'w') as f: | ||||
|             w = csv.DictWriter(f, ['file', 'name', *other_fields, 'stack_frame', 'stack_limit']) | ||||
|             w.writeheader() | ||||
|             for (file, func), result in sorted(merged_results.items()): | ||||
|                 w.writerow({'file': file, 'name': func, **result}) | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: (0, 0, set())) | ||||
|         for file, func, frame, limit, deps in results: | ||||
|             entry = (file if by == 'file' else func) | ||||
|             entry_frame, entry_limit, entry_deps = entries[entry] | ||||
|             entries[entry] = ( | ||||
|                 entry_frame + frame, | ||||
|                 max(entry_limit, limit), | ||||
|                 entry_deps | {file if by == 'file' else func | ||||
|                     for file, func in deps}) | ||||
|         return entries | ||||
|  | ||||
|     def diff_entries(olds, news): | ||||
|         diff = co.defaultdict(lambda: (None, None, None, None, 0, 0, 0, set())) | ||||
|         for name, (new_frame, new_limit, deps) in news.items(): | ||||
|             diff[name] = ( | ||||
|                 None, None, | ||||
|                 new_frame, new_limit, | ||||
|                 new_frame, new_limit, | ||||
|                 1.0, | ||||
|                 deps) | ||||
|         for name, (old_frame, old_limit, _) in olds.items(): | ||||
|             _, _, new_frame, new_limit, _, _, _, deps = diff[name] | ||||
|             diff[name] = ( | ||||
|                 old_frame, old_limit, | ||||
|                 new_frame, new_limit, | ||||
|                 (new_frame or 0) - (old_frame or 0), | ||||
|                 0 if m.isinf(new_limit or 0) and m.isinf(old_limit or 0) | ||||
|                     else (new_limit or 0) - (old_limit or 0), | ||||
|                 0.0 if m.isinf(new_limit or 0) and m.isinf(old_limit or 0) | ||||
|                     else +float('inf') if m.isinf(new_limit or 0) | ||||
|                     else -float('inf') if m.isinf(old_limit or 0) | ||||
|                     else +0.0 if not old_limit and not new_limit | ||||
|                     else +1.0 if not old_limit | ||||
|                     else ((new_limit or 0) - (old_limit or 0))/(old_limit or 0), | ||||
|                 deps) | ||||
|         return diff | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('limit_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1][1], x)) | ||||
|         elif args.get('reverse_limit_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1][1], x)) | ||||
|         elif args.get('frame_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1][0], x)) | ||||
|         elif args.get('reverse_frame_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1][0], x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def sorted_diff_entries(entries): | ||||
|         if args.get('limit_sort'): | ||||
|             return sorted(entries, key=lambda x: (-(x[1][3] or 0), x)) | ||||
|         elif args.get('reverse_limit_sort'): | ||||
|             return sorted(entries, key=lambda x: (+(x[1][3] or 0), x)) | ||||
|         elif args.get('frame_sort'): | ||||
|             return sorted(entries, key=lambda x: (-(x[1][2] or 0), x)) | ||||
|         elif args.get('reverse_frame_sort'): | ||||
|             return sorted(entries, key=lambda x: (+(x[1][2] or 0), x)) | ||||
|         else: | ||||
|             return sorted(entries, key=lambda x: (-x[1][6], x)) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s %7s %7s' % (by, 'frame', 'limit')) | ||||
|         else: | ||||
|             print('%-36s %15s %15s %15s' % (by, 'old', 'new', 'diff')) | ||||
|  | ||||
|     def print_entry(name, frame, limit): | ||||
|         print("%-36s %7d %7s" % (name, | ||||
|             frame, '∞' if m.isinf(limit) else int(limit))) | ||||
|  | ||||
|     def print_diff_entry(name, | ||||
|             old_frame, old_limit, | ||||
|             new_frame, new_limit, | ||||
|             diff_frame, diff_limit, | ||||
|             ratio): | ||||
|         print('%-36s %7s %7s %7s %7s %+7d %7s%s' % (name, | ||||
|             old_frame if old_frame is not None else "-", | ||||
|             ('∞' if m.isinf(old_limit) else int(old_limit)) | ||||
|                 if old_limit is not None else "-", | ||||
|             new_frame if new_frame is not None else "-", | ||||
|             ('∞' if m.isinf(new_limit) else int(new_limit)) | ||||
|                 if new_limit is not None else "-", | ||||
|             diff_frame, | ||||
|             ('+∞' if diff_limit > 0 and m.isinf(diff_limit) | ||||
|                 else '-∞' if diff_limit < 0 and m.isinf(diff_limit) | ||||
|                 else '%+d' % diff_limit), | ||||
|             '' if not ratio | ||||
|                 else ' (+∞%)' if ratio > 0 and m.isinf(ratio) | ||||
|                 else ' (-∞%)' if ratio < 0 and m.isinf(ratio) | ||||
|                 else ' (%+.1f%%)' % (100*ratio))) | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         # build optional tree of dependencies | ||||
|         def print_deps(entries, depth, print, | ||||
|                 filter=lambda _: True, | ||||
|                 prefixes=('', '', '', '')): | ||||
|             entries = entries if isinstance(entries, list) else list(entries) | ||||
|             filtered_entries = [(name, entry) | ||||
|                 for name, entry in entries | ||||
|                 if filter(name)] | ||||
|             for i, (name, entry) in enumerate(filtered_entries): | ||||
|                 last = (i == len(filtered_entries)-1) | ||||
|                 print(prefixes[0+last] + name, entry) | ||||
|  | ||||
|                 if depth > 0: | ||||
|                     deps = entry[-1] | ||||
|                     print_deps(entries, depth-1, print, | ||||
|                         lambda name: name in deps, | ||||
|                         (   prefixes[2+last] + "|-> ", | ||||
|                             prefixes[2+last] + "'-> ", | ||||
|                             prefixes[2+last] + "|   ", | ||||
|                             prefixes[2+last] + "    ")) | ||||
|  | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             print_deps( | ||||
|                 sorted_entries(entries.items()), | ||||
|                 args.get('depth') or 0, | ||||
|                 lambda name, entry: print_entry(name, *entry[:-1])) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             diff = diff_entries(prev_entries, entries) | ||||
|  | ||||
|             print_header(by='%s (%d added, %d removed)' % (by, | ||||
|                 sum(1 for _, old, _, _, _, _, _, _ in diff.values() if old is None), | ||||
|                 sum(1 for _, _, _, new, _, _, _, _ in diff.values() if new is None))) | ||||
|             print_deps( | ||||
|                 filter( | ||||
|                     lambda x: x[1][6] or args.get('all'), | ||||
|                     sorted_diff_entries(diff.items())), | ||||
|                 args.get('depth') or 0, | ||||
|                 lambda name, entry: print_diff_entry(name, *entry[:-1])) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print_entry('TOTAL', total_frame, total_limit) | ||||
|         else: | ||||
|             diff_frame = total_frame - prev_total_frame | ||||
|             diff_limit = ( | ||||
|                 0 if m.isinf(total_limit or 0) and m.isinf(prev_total_limit or 0) | ||||
|                     else (total_limit or 0) - (prev_total_limit or 0)) | ||||
|             ratio = ( | ||||
|                 0.0 if m.isinf(total_limit or 0) and m.isinf(prev_total_limit or 0) | ||||
|                     else +float('inf') if m.isinf(total_limit or 0) | ||||
|                     else -float('inf') if m.isinf(prev_total_limit or 0) | ||||
|                     else 0.0 if not prev_total_limit and not total_limit | ||||
|                     else 1.0 if not prev_total_limit | ||||
|                     else ((total_limit or 0) - (prev_total_limit or 0))/(prev_total_limit or 0)) | ||||
|             print_diff_entry('TOTAL', | ||||
|                 prev_total_frame, prev_total_limit, | ||||
|                 total_frame, total_limit, | ||||
|                 diff_frame, diff_limit, | ||||
|                 ratio) | ||||
|  | ||||
|     if args.get('quiet'): | ||||
|         pass | ||||
|     elif args.get('summary'): | ||||
|         print_header() | ||||
|         print_totals() | ||||
|     elif args.get('files'): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Find stack usage at the function level.") | ||||
|     parser.add_argument('ci_paths', nargs='*', default=CI_PATHS, | ||||
|         help="Description of where to find *.ci files. May be a directory \ | ||||
|             or a list of paths. Defaults to %r." % CI_PATHS) | ||||
|     parser.add_argument('-v', '--verbose', action='store_true', | ||||
|         help="Output commands that run behind the scenes.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('-o', '--output', | ||||
|         help="Specify CSV file to store results.") | ||||
|     parser.add_argument('-u', '--use', | ||||
|         help="Don't parse callgraph files, instead use this CSV file.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff against.") | ||||
|     parser.add_argument('-m', '--merge', | ||||
|         help="Merge with an existing CSV file when writing to output.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all functions, not just the ones that changed.") | ||||
|     parser.add_argument('-A', '--everything', action='store_true', | ||||
|         help="Include builtin and libc specific symbols.") | ||||
|     parser.add_argument('-s', '--limit-sort', action='store_true', | ||||
|         help="Sort by stack limit.") | ||||
|     parser.add_argument('-S', '--reverse-limit-sort', action='store_true', | ||||
|         help="Sort by stack limit, but backwards.") | ||||
|     parser.add_argument('--frame-sort', action='store_true', | ||||
|         help="Sort by stack frame size.") | ||||
|     parser.add_argument('--reverse-frame-sort', action='store_true', | ||||
|         help="Sort by stack frame size, but backwards.") | ||||
|     parser.add_argument('-L', '--depth', default=0, type=lambda x: int(x, 0), | ||||
|         nargs='?', const=float('inf'), | ||||
|         help="Depth of dependencies to show.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level calls.") | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the total stack size.") | ||||
|     parser.add_argument('--build-dir', | ||||
|         help="Specify the relative build directory. Used to map object files \ | ||||
|             to the correct source files.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
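As a reviewer aside (not part of the diff), here is a minimal sketch of how the infinite stack limits that the script assigns to recursive call cycles flow through the totals diff in print_totals above; the previous and new totals are made-up values and the None guards from the script are dropped for brevity:

    import math as m

    prev_total_limit = 1024        # hypothetical previous stack limit
    total_limit = float('inf')     # hypothetical new limit (recursion detected)

    diff_limit = (
        0 if m.isinf(total_limit) and m.isinf(prev_total_limit)
        else total_limit - prev_total_limit)
    ratio = (
        0.0 if m.isinf(total_limit) and m.isinf(prev_total_limit)
        else float('inf') if m.isinf(total_limit)
        else -float('inf') if m.isinf(prev_total_limit)
        else (total_limit - prev_total_limit) / prev_total_limit)

    print(diff_limit, ratio)       # inf inf, rendered by the script as '+∞' and '(+∞%)'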
							
								
								
									
scripts/structs.py (new executable file, 331 lines added)
							| @@ -0,0 +1,331 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # | ||||
| # Script to find struct sizes. | ||||
| # | ||||
|  | ||||
| import os | ||||
| import glob | ||||
| import itertools as it | ||||
| import subprocess as sp | ||||
| import shlex | ||||
| import re | ||||
| import csv | ||||
| import collections as co | ||||
|  | ||||
|  | ||||
| OBJ_PATHS = ['*.o'] | ||||
|  | ||||
| def collect(paths, **args): | ||||
|     decl_pattern = re.compile( | ||||
|         '^\s+(?P<no>[0-9]+)' | ||||
|             '\s+(?P<dir>[0-9]+)' | ||||
|             '\s+.*' | ||||
|             '\s+(?P<file>[^\s]+)$') | ||||
|     struct_pattern = re.compile( | ||||
|         '^(?:.*DW_TAG_(?P<tag>[a-z_]+).*' | ||||
|             '|^.*DW_AT_name.*:\s*(?P<name>[^:\s]+)\s*' | ||||
|             '|^.*DW_AT_decl_file.*:\s*(?P<decl>[0-9]+)\s*' | ||||
|             '|^.*DW_AT_byte_size.*:\s*(?P<size>[0-9]+)\s*)$') | ||||
|  | ||||
|     results = co.defaultdict(lambda: 0) | ||||
|     for path in paths: | ||||
|         # find decl, we want to filter by structs in .h files | ||||
|         decls = {} | ||||
|         # note objdump-tool may contain extra args | ||||
|         cmd = args['objdump_tool'] + ['--dwarf=rawline', path] | ||||
|         if args.get('verbose'): | ||||
|             print(' '.join(shlex.quote(c) for c in cmd)) | ||||
|         proc = sp.Popen(cmd, | ||||
|             stdout=sp.PIPE, | ||||
|             stderr=sp.PIPE if not args.get('verbose') else None, | ||||
|             universal_newlines=True, | ||||
|             errors='replace') | ||||
|         for line in proc.stdout: | ||||
|             # find file numbers | ||||
|             m = decl_pattern.match(line) | ||||
|             if m: | ||||
|                 decls[int(m.group('no'))] = m.group('file') | ||||
|         proc.wait() | ||||
|         if proc.returncode != 0: | ||||
|             if not args.get('verbose'): | ||||
|                 for line in proc.stderr: | ||||
|                     sys.stdout.write(line) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|         # collect structs as we parse dwarf info | ||||
|         found = False | ||||
|         name = None | ||||
|         decl = None | ||||
|         size = None | ||||
|  | ||||
|         # note objdump-tool may contain extra args | ||||
|         cmd = args['objdump_tool'] + ['--dwarf=info', path] | ||||
|         if args.get('verbose'): | ||||
|             print(' '.join(shlex.quote(c) for c in cmd)) | ||||
|         proc = sp.Popen(cmd, | ||||
|             stdout=sp.PIPE, | ||||
|             stderr=sp.PIPE if not args.get('verbose') else None, | ||||
|             universal_newlines=True, | ||||
|             errors='replace') | ||||
|         for line in proc.stdout: | ||||
|             # state machine here to find structs | ||||
|             m = struct_pattern.match(line) | ||||
|             if m: | ||||
|                 if m.group('tag'): | ||||
|                     if (name is not None | ||||
|                             and decl is not None | ||||
|                             and size is not None): | ||||
|                         decl = decls.get(decl, '?') | ||||
|                         results[(decl, name)] = size | ||||
|                     found = (m.group('tag') == 'structure_type') | ||||
|                     name = None | ||||
|                     decl = None | ||||
|                     size = None | ||||
|                 elif found and m.group('name'): | ||||
|                     name = m.group('name') | ||||
|                 elif found and name and m.group('decl'): | ||||
|                     decl = int(m.group('decl')) | ||||
|                 elif found and name and m.group('size'): | ||||
|                     size = int(m.group('size')) | ||||
|         proc.wait() | ||||
|         if proc.returncode != 0: | ||||
|             if not args.get('verbose'): | ||||
|                 for line in proc.stderr: | ||||
|                     sys.stdout.write(line) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|     flat_results = [] | ||||
|     for (file, struct), size in results.items(): | ||||
|         # map to source files | ||||
|         if args.get('build_dir'): | ||||
|             file = re.sub('%s/*' % re.escape(args['build_dir']), '', file) | ||||
|         # only include structs declared in header files in the current | ||||
|         # directory, ignore internal-only structs (these are represented | ||||
|         # in other measurements) | ||||
|         if not args.get('everything'): | ||||
|             if not file.endswith('.h'): | ||||
|                 continue | ||||
|         # replace .o with .c, different scripts report .o/.c, we need to | ||||
|         # choose one if we want to deduplicate csv files | ||||
|         file = re.sub('\.o$', '.c', file) | ||||
|  | ||||
|         flat_results.append((file, struct, size)) | ||||
|  | ||||
|     return flat_results | ||||
|  | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find sizes | ||||
|     if not args.get('use', None): | ||||
|         # find .o files | ||||
|         paths = [] | ||||
|         for path in args['obj_paths']: | ||||
|             if os.path.isdir(path): | ||||
|                 path = path + '/*.o' | ||||
|  | ||||
|             for path in glob.glob(path): | ||||
|                 paths.append(path) | ||||
|  | ||||
|         if not paths: | ||||
|             print('no .o files found in %r?' % args['obj_paths']) | ||||
|             sys.exit(-1) | ||||
|  | ||||
|         results = collect(paths, **args) | ||||
|     else: | ||||
|         with openio(args['use']) as f: | ||||
|             r = csv.DictReader(f) | ||||
|             results = [ | ||||
|                 (   result['file'], | ||||
|                     result['name'], | ||||
|                     int(result['struct_size'])) | ||||
|                 for result in r | ||||
|                 if result.get('struct_size') not in {None, ''}] | ||||
|  | ||||
|     total = 0 | ||||
|     for _, _, size in results: | ||||
|         total += size | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 prev_results = [ | ||||
|                     (   result['file'], | ||||
|                         result['name'], | ||||
|                         int(result['struct_size'])) | ||||
|                     for result in r | ||||
|                     if result.get('struct_size') not in {None, ''}] | ||||
|         except FileNotFoundError: | ||||
|             prev_results = [] | ||||
|  | ||||
|         prev_total = 0 | ||||
|         for _, _, size in prev_results: | ||||
|             prev_total += size | ||||
|  | ||||
|     # write results to CSV | ||||
|     if args.get('output'): | ||||
|         merged_results = co.defaultdict(lambda: {}) | ||||
|         other_fields = [] | ||||
|  | ||||
|         # merge? | ||||
|         if args.get('merge'): | ||||
|             try: | ||||
|                 with openio(args['merge']) as f: | ||||
|                     r = csv.DictReader(f) | ||||
|                     for result in r: | ||||
|                         file = result.pop('file', '') | ||||
|                         struct = result.pop('name', '') | ||||
|                         result.pop('struct_size', None) | ||||
|                         merged_results[(file, struct)] = result | ||||
|                         other_fields = result.keys() | ||||
|             except FileNotFoundError: | ||||
|                 pass | ||||
|  | ||||
|         for file, struct, size in results: | ||||
|             merged_results[(file, struct)]['struct_size'] = size | ||||
|  | ||||
|         with openio(args['output'], 'w') as f: | ||||
|             w = csv.DictWriter(f, ['file', 'name', *other_fields, 'struct_size']) | ||||
|             w.writeheader() | ||||
|             for (file, struct), result in sorted(merged_results.items()): | ||||
|                 w.writerow({'file': file, 'name': struct, **result}) | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: 0) | ||||
|         for file, struct, size in results: | ||||
|             entry = (file if by == 'file' else struct) | ||||
|             entries[entry] += size | ||||
|         return entries | ||||
|  | ||||
|     def diff_entries(olds, news): | ||||
|         diff = co.defaultdict(lambda: (0, 0, 0, 0)) | ||||
|         for name, new in news.items(): | ||||
|             diff[name] = (0, new, new, 1.0) | ||||
|         for name, old in olds.items(): | ||||
|             _, new, _, _ = diff[name] | ||||
|             diff[name] = (old, new, new-old, (new-old)/old if old else 1.0) | ||||
|         return diff | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1], x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def sorted_diff_entries(entries): | ||||
|         if args.get('size_sort'): | ||||
|             return sorted(entries, key=lambda x: (-x[1][1], x)) | ||||
|         elif args.get('reverse_size_sort'): | ||||
|             return sorted(entries, key=lambda x: (+x[1][1], x)) | ||||
|         else: | ||||
|             return sorted(entries, key=lambda x: (-x[1][3], x)) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s %7s' % (by, 'size')) | ||||
|         else: | ||||
|             print('%-36s %7s %7s %7s' % (by, 'old', 'new', 'diff')) | ||||
|  | ||||
|     def print_entry(name, size): | ||||
|         print("%-36s %7d" % (name, size)) | ||||
|  | ||||
|     def print_diff_entry(name, old, new, diff, ratio): | ||||
|         print("%-36s %7s %7s %+7d%s" % (name, | ||||
|             old or "-", | ||||
|             new or "-", | ||||
|             diff, | ||||
|             ' (%+.1f%%)' % (100*ratio) if ratio else '')) | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             for name, size in sorted_entries(entries.items()): | ||||
|                 print_entry(name, size) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             diff = diff_entries(prev_entries, entries) | ||||
|             print_header(by='%s (%d added, %d removed)' % (by, | ||||
|                 sum(1 for old, _, _, _ in diff.values() if not old), | ||||
|                 sum(1 for _, new, _, _ in diff.values() if not new))) | ||||
|             for name, (old, new, diff, ratio) in sorted_diff_entries( | ||||
|                     diff.items()): | ||||
|                 if ratio or args.get('all'): | ||||
|                     print_diff_entry(name, old, new, diff, ratio) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print_entry('TOTAL', total) | ||||
|         else: | ||||
|             ratio = (0.0 if not prev_total and not total | ||||
|                 else 1.0 if not prev_total | ||||
|                 else (total-prev_total)/prev_total) | ||||
|             print_diff_entry('TOTAL', | ||||
|                 prev_total, total, | ||||
|                 total-prev_total, | ||||
|                 ratio) | ||||
|  | ||||
|     if args.get('quiet'): | ||||
|         pass | ||||
|     elif args.get('summary'): | ||||
|         print_header() | ||||
|         print_totals() | ||||
|     elif args.get('files'): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Find struct sizes.") | ||||
|     parser.add_argument('obj_paths', nargs='*', default=OBJ_PATHS, | ||||
|         help="Description of where to find *.o files. May be a directory \ | ||||
|             or a list of paths. Defaults to %r." % OBJ_PATHS) | ||||
|     parser.add_argument('-v', '--verbose', action='store_true', | ||||
|         help="Output commands that run behind the scenes.") | ||||
|     parser.add_argument('-q', '--quiet', action='store_true', | ||||
|         help="Don't show anything, useful with -o.") | ||||
|     parser.add_argument('-o', '--output', | ||||
|         help="Specify CSV file to store results.") | ||||
|     parser.add_argument('-u', '--use', | ||||
|         help="Don't compile and find struct sizes, instead use this CSV file.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff struct size against.") | ||||
|     parser.add_argument('-m', '--merge', | ||||
|         help="Merge with an existing CSV file when writing to output.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all functions, not just the ones that changed.") | ||||
|     parser.add_argument('-A', '--everything', action='store_true', | ||||
|         help="Include builtin and libc specific symbols.") | ||||
|     parser.add_argument('-s', '--size-sort', action='store_true', | ||||
|         help="Sort by size.") | ||||
|     parser.add_argument('-S', '--reverse-size-sort', action='store_true', | ||||
|         help="Sort by size, but backwards.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level struct sizes.") | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the total struct size.") | ||||
|     parser.add_argument('--objdump-tool', default=['objdump'], type=lambda x: x.split(), | ||||
|         help="Path to the objdump tool to use.") | ||||
|     parser.add_argument('--build-dir', | ||||
|         help="Specify the relative build directory. Used to map object files \ | ||||
|             to the correct source files.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
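As an aside, a rough standalone sketch of the DWARF state machine in collect() above; the objdump lines are invented for illustration (real --dwarf=info output differs by toolchain), and the pattern is the same shape as struct_pattern, just written with raw strings:

    import re

    struct_pattern = re.compile(
        r'^(?:.*DW_TAG_(?P<tag>[a-z_]+).*'
            r'|^.*DW_AT_name.*:\s*(?P<name>[^:\s]+)\s*'
            r'|^.*DW_AT_decl_file.*:\s*(?P<decl>[0-9]+)\s*'
            r'|^.*DW_AT_byte_size.*:\s*(?P<size>[0-9]+)\s*)$')

    sample = [
        ' <1><2d>: Abbrev Number: 5 (DW_TAG_structure_type)',
        '    <2e>   DW_AT_name : lfs_config',
        '    <2f>   DW_AT_decl_file : 3',
        '    <30>   DW_AT_byte_size : 92',
        ' <1><40>: Abbrev Number: 2 (DW_TAG_base_type)',
    ]

    for line in sample:
        m = struct_pattern.match(line)
        if m:
            print({k: v for k, v in m.groupdict().items() if v is not None})

    # with these made-up lines the state machine sees tag=structure_type,
    # then name=lfs_config, decl=3, size=92, and records the struct once
    # the next DW_TAG_* line arrives; decl=3 is later mapped to a file
    # name using the --dwarf=rawline pass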
							
								
								
									
scripts/summary.py (new executable file, 279 lines added)
							| @@ -0,0 +1,279 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # | ||||
| # Script to summarize the outputs of other scripts. Operates on CSV files. | ||||
| # | ||||
|  | ||||
| import functools as ft | ||||
| import collections as co | ||||
| import os | ||||
| import csv | ||||
| import re | ||||
| import math as m | ||||
|  | ||||
| # displayable fields | ||||
| Field = co.namedtuple('Field', 'name,parse,acc,key,fmt,repr,null,ratio') | ||||
| FIELDS = [ | ||||
|     # name, parse, accumulate, key, fmt, repr, null, ratio | ||||
|     Field('code', | ||||
|         lambda r: int(r['code_size']), | ||||
|         sum, | ||||
|         lambda r: r, | ||||
|         '%7s', | ||||
|         lambda r: r, | ||||
|         '-', | ||||
|         lambda old, new: (new-old)/old), | ||||
|     Field('data', | ||||
|         lambda r: int(r['data_size']), | ||||
|         sum, | ||||
|         lambda r: r, | ||||
|         '%7s', | ||||
|         lambda r: r, | ||||
|         '-', | ||||
|         lambda old, new: (new-old)/old), | ||||
|     Field('stack', | ||||
|         lambda r: float(r['stack_limit']), | ||||
|         max, | ||||
|         lambda r: r, | ||||
|         '%7s', | ||||
|         lambda r: '∞' if m.isinf(r) else int(r), | ||||
|         '-', | ||||
|         lambda old, new: (new-old)/old), | ||||
|     Field('structs', | ||||
|         lambda r: int(r['struct_size']), | ||||
|         sum, | ||||
|         lambda r: r, | ||||
|         '%8s', | ||||
|         lambda r: r, | ||||
|         '-', | ||||
|         lambda old, new: (new-old)/old), | ||||
|     Field('coverage', | ||||
|         lambda r: (int(r['coverage_hits']), int(r['coverage_count'])), | ||||
|         lambda rs: ft.reduce(lambda a, b: (a[0]+b[0], a[1]+b[1]), rs), | ||||
|         lambda r: r[0]/r[1], | ||||
|         '%19s', | ||||
|         lambda r: '%11s %7s' % ('%d/%d' % (r[0], r[1]), '%.1f%%' % (100*r[0]/r[1])), | ||||
|         '%11s %7s' % ('-', '-'), | ||||
|         lambda old, new: ((new[0]/new[1]) - (old[0]/old[1]))) | ||||
| ] | ||||
|  | ||||
|  | ||||
| def main(**args): | ||||
|     def openio(path, mode='r'): | ||||
|         if path == '-': | ||||
|             if 'r' in mode: | ||||
|                 return os.fdopen(os.dup(sys.stdin.fileno()), 'r') | ||||
|             else: | ||||
|                 return os.fdopen(os.dup(sys.stdout.fileno()), 'w') | ||||
|         else: | ||||
|             return open(path, mode) | ||||
|  | ||||
|     # find results | ||||
|     results = co.defaultdict(lambda: {}) | ||||
|     for path in args.get('csv_paths', '-'): | ||||
|         try: | ||||
|             with openio(path) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 for result in r: | ||||
|                     file = result.pop('file', '') | ||||
|                     name = result.pop('name', '') | ||||
|                     prev = results[(file, name)] | ||||
|                     for field in FIELDS: | ||||
|                         try: | ||||
|                             r = field.parse(result) | ||||
|                             if field.name in prev: | ||||
|                                 results[(file, name)][field.name] = field.acc( | ||||
|                                     [prev[field.name], r]) | ||||
|                             else: | ||||
|                                 results[(file, name)][field.name] = r | ||||
|                         except (KeyError, ValueError): | ||||
|                             pass | ||||
|         except FileNotFoundError: | ||||
|             pass | ||||
|  | ||||
|     # find fields | ||||
|     if args.get('all_fields'): | ||||
|         fields = FIELDS | ||||
|     elif args.get('fields') is not None: | ||||
|         fields_dict = {field.name: field for field in FIELDS} | ||||
|         fields = [fields_dict[f] for f in args['fields']] | ||||
|     else: | ||||
|         fields = [] | ||||
|         for field in FIELDS: | ||||
|             if any(field.name in result for result in results.values()): | ||||
|                 fields.append(field) | ||||
|  | ||||
|     # find total for every field | ||||
|     total = {} | ||||
|     for result in results.values(): | ||||
|         for field in fields: | ||||
|             if field.name in result and field.name in total: | ||||
|                 total[field.name] = field.acc( | ||||
|                     [total[field.name], result[field.name]]) | ||||
|             elif field.name in result: | ||||
|                 total[field.name] = result[field.name] | ||||
|  | ||||
|     # find previous results? | ||||
|     if args.get('diff'): | ||||
|         prev_results = co.defaultdict(lambda: {}) | ||||
|         try: | ||||
|             with openio(args['diff']) as f: | ||||
|                 r = csv.DictReader(f) | ||||
|                 for result in r: | ||||
|                     file = result.pop('file', '') | ||||
|                     name = result.pop('name', '') | ||||
|                     prev = prev_results[(file, name)] | ||||
|                     for field in FIELDS: | ||||
|                         try: | ||||
|                             r = field.parse(result) | ||||
|                             if field.name in prev: | ||||
|                                 prev_results[(file, name)][field.name] = field.acc( | ||||
|                                     [prev[field.name], r]) | ||||
|                             else: | ||||
|                                 prev_results[(file, name)][field.name] = r | ||||
|                         except (KeyError, ValueError): | ||||
|                             pass | ||||
|         except FileNotFoundError: | ||||
|             pass | ||||
|  | ||||
|         prev_total = {} | ||||
|         for result in prev_results.values(): | ||||
|             for field in fields: | ||||
|                 if field.name in result and field.name in prev_total: | ||||
|                     prev_total[field.name] = field.acc( | ||||
|                         [prev_total[field.name], result[field.name]]) | ||||
|                 elif field.name in result: | ||||
|                     prev_total[field.name] = result[field.name] | ||||
|  | ||||
|     # print results | ||||
|     def dedup_entries(results, by='name'): | ||||
|         entries = co.defaultdict(lambda: {}) | ||||
|         for (file, func), result in results.items(): | ||||
|             entry = (file if by == 'file' else func) | ||||
|             prev = entries[entry] | ||||
|             for field in fields: | ||||
|                 if field.name in result and field.name in prev: | ||||
|                     entries[entry][field.name] = field.acc( | ||||
|                         [prev[field.name], result[field.name]]) | ||||
|                 elif field.name in result: | ||||
|                     entries[entry][field.name] = result[field.name] | ||||
|         return entries | ||||
|  | ||||
|     def sorted_entries(entries): | ||||
|         if args.get('sort') is not None: | ||||
|             field = {field.name: field for field in FIELDS}[args['sort']] | ||||
|             return sorted(entries, key=lambda x: ( | ||||
|                 -(field.key(x[1][field.name])) if field.name in x[1] else -1, x)) | ||||
|         elif args.get('reverse_sort') is not None: | ||||
|             field = {field.name: field for field in FIELDS}[args['reverse_sort']] | ||||
|             return sorted(entries, key=lambda x: ( | ||||
|                 +(field.key(x[1][field.name])) if field.name in x[1] else -1, x)) | ||||
|         else: | ||||
|             return sorted(entries) | ||||
|  | ||||
|     def print_header(by=''): | ||||
|         if not args.get('diff'): | ||||
|             print('%-36s' % by, end='') | ||||
|             for field in fields: | ||||
|                 print((' '+field.fmt) % field.name, end='') | ||||
|             print() | ||||
|         else: | ||||
|             print('%-36s' % by, end='') | ||||
|             for field in fields: | ||||
|                 print((' '+field.fmt) % field.name, end='') | ||||
|                 print(' %-9s' % '', end='') | ||||
|             print() | ||||
|  | ||||
|     def print_entry(name, result): | ||||
|         print('%-36s' % name, end='') | ||||
|         for field in fields: | ||||
|             r = result.get(field.name) | ||||
|             if r is not None: | ||||
|                 print((' '+field.fmt) % field.repr(r), end='') | ||||
|             else: | ||||
|                 print((' '+field.fmt) % '-', end='') | ||||
|         print() | ||||
|  | ||||
|     def print_diff_entry(name, old, new): | ||||
|         print('%-36s' % name, end='') | ||||
|         for field in fields: | ||||
|             n = new.get(field.name) | ||||
|             if n is not None: | ||||
|                 print((' '+field.fmt) % field.repr(n), end='') | ||||
|             else: | ||||
|                 print((' '+field.fmt) % '-', end='') | ||||
|             o = old.get(field.name) | ||||
|             ratio = ( | ||||
|                 0.0 if m.isinf(o or 0) and m.isinf(n or 0) | ||||
|                     else +float('inf') if m.isinf(n or 0) | ||||
|                     else -float('inf') if m.isinf(o or 0) | ||||
|                     else 0.0 if not o and not n | ||||
|                     else +1.0 if not o | ||||
|                     else -1.0 if not n | ||||
|                     else field.ratio(o, n)) | ||||
|             print(' %-9s' % ( | ||||
|                 '' if not ratio | ||||
|                     else '(+∞%)' if ratio > 0 and m.isinf(ratio) | ||||
|                     else '(-∞%)' if ratio < 0 and m.isinf(ratio) | ||||
|                     else '(%+.1f%%)' % (100*ratio)), end='') | ||||
|         print() | ||||
|  | ||||
|     def print_entries(by='name'): | ||||
|         entries = dedup_entries(results, by=by) | ||||
|  | ||||
|         if not args.get('diff'): | ||||
|             print_header(by=by) | ||||
|             for name, result in sorted_entries(entries.items()): | ||||
|                 print_entry(name, result) | ||||
|         else: | ||||
|             prev_entries = dedup_entries(prev_results, by=by) | ||||
|             print_header(by='%s (%d added, %d removed)' % (by, | ||||
|                 sum(1 for name in entries if name not in prev_entries), | ||||
|                 sum(1 for name in prev_entries if name not in entries))) | ||||
|             for name, result in sorted_entries(entries.items()): | ||||
|                 if args.get('all') or result != prev_entries.get(name, {}): | ||||
|                     print_diff_entry(name, prev_entries.get(name, {}), result) | ||||
|  | ||||
|     def print_totals(): | ||||
|         if not args.get('diff'): | ||||
|             print_entry('TOTAL', total) | ||||
|         else: | ||||
|             print_diff_entry('TOTAL', prev_total, total) | ||||
|  | ||||
|     if args.get('summary'): | ||||
|         print_header() | ||||
|         print_totals() | ||||
|     elif args.get('files'): | ||||
|         print_entries(by='file') | ||||
|         print_totals() | ||||
|     else: | ||||
|         print_entries(by='name') | ||||
|         print_totals() | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Summarize measurements") | ||||
|     parser.add_argument('csv_paths', nargs='*', default='-', | ||||
|         help="Description of where to find *.csv files. May be a directory \ | ||||
|             or list of paths. *.csv files will be merged to show the \ | ||||
|             totals across all found fields.") | ||||
|     parser.add_argument('-d', '--diff', | ||||
|         help="Specify CSV file to diff against.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all objects, not just the ones that changed.") | ||||
|     parser.add_argument('-e', '--all-fields', action='store_true', | ||||
|         help="Show all fields, even those with no results.") | ||||
|     parser.add_argument('-f', '--fields', type=lambda x: re.split('\s*,\s*', x), | ||||
|         help="Comma separated list of fields to print, by default all fields \ | ||||
|             that are found in the CSV files are printed.") | ||||
|     parser.add_argument('-s', '--sort', | ||||
|         help="Sort by this field.") | ||||
|     parser.add_argument('-S', '--reverse-sort', | ||||
|         help="Sort by this field, but backwards.") | ||||
|     parser.add_argument('-F', '--files', action='store_true', | ||||
|         help="Show file-level calls.") | ||||
|     parser.add_argument('-Y', '--summary', action='store_true', | ||||
|         help="Only show the totals.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
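For illustration (made-up numbers, not from any CSV), the 'coverage' Field above accumulates (hits, count) pairs per name and only converts to a percentage when printing:

    import functools as ft

    # hypothetical per-function (coverage_hits, coverage_count) pairs
    rows = [(3, 10), (7, 10), (5, 5)]
    acc = ft.reduce(lambda a, b: (a[0]+b[0], a[1]+b[1]), rows)
    print(acc)                                                    # (15, 25)
    print('%d/%d %.1f%%' % (acc[0], acc[1], 100*acc[0]/acc[1]))   # 15/25 60.0%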
| @@ -784,10 +784,13 @@ def main(**args): | ||||
|             stdout=sp.PIPE if not args.get('verbose') else None, | ||||
|             stderr=sp.STDOUT if not args.get('verbose') else None, | ||||
|             universal_newlines=True) | ||||
|         stdout = [] | ||||
|         for line in proc.stdout: | ||||
|             stdout.append(line) | ||||
|         proc.wait() | ||||
|         if proc.returncode != 0: | ||||
|             if not args.get('verbose'): | ||||
|                 for line in proc.stdout: | ||||
|                 for line in stdout: | ||||
|                     sys.stdout.write(line) | ||||
|             sys.exit(-1) | ||||
|  | ||||
| @@ -803,9 +806,9 @@ def main(**args): | ||||
|             failure.case.test(failure=failure, **args) | ||||
|             sys.exit(0) | ||||
|  | ||||
|     print('tests passed %d/%d (%.2f%%)' % (passed, total, | ||||
|     print('tests passed %d/%d (%.1f%%)' % (passed, total, | ||||
|         100*(passed/total if total else 1.0))) | ||||
|     print('tests failed %d/%d (%.2f%%)' % (failed, total, | ||||
|     print('tests failed %d/%d (%.1f%%)' % (failed, total, | ||||
|         100*(failed/total if total else 1.0))) | ||||
|     return 1 if failed > 0 else 0 | ||||
|  | ||||
|   | ||||
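The first hunk above buffers the child's stdout so it can still be shown when the process fails; a minimal standalone sketch of the same pattern (the command here is arbitrary):

    import subprocess as sp
    import sys

    proc = sp.Popen(['echo', 'hello'],
        stdout=sp.PIPE,
        universal_newlines=True)
    stdout = []
    for line in proc.stdout:    # keep a copy while streaming
        stdout.append(line)
    proc.wait()
    if proc.returncode != 0:
        # the pipe is already drained, so replay the buffered copy
        for line in stdout:
            sys.stdout.write(line)
        sys.exit(-1)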