Added ./scripts/summary.py

A full summary of static measurements (code size, stack usage, etc) can now
be found with:

    make summary

This is done through the combination of a new ./scripts/summary.py
script and the ability of existing scripts to merge their results into
existing csv files, allowing multiple results to be merged either in a
pipeline, or in parallel with a single ./scripts/summary.py call.
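
As the structs.py changes below show, the merge is keyed on file + name
and only the script's own measurement column is overwritten, so any
columns other scripts may have written (code size, stack usage, etc)
are left alone. A rough standalone sketch of that behavior, not the
scripts' exact code; the csv name and example row are made up:

    import csv
    import collections as co

    # load any existing results, keyed by (file, name), keeping any
    # columns other scripts may have written
    merged = co.defaultdict(dict)
    other_fields = []
    try:
        with open('results.csv') as f:
            for row in csv.DictReader(f):
                key = (row.pop('file', ''), row.pop('name', ''))
                row.pop('struct_size', None)  # column we are about to rewrite
                merged[key] = row
                other_fields = list(row.keys())
    except FileNotFoundError:
        pass

    # overlay the new measurements
    new_results = [('lfs.c', 'lfs_superblock', 52)]  # hypothetical measurement
    for file, name, size in new_results:
        merged[(file, name)]['struct_size'] = size

    # write everything back out
    with open('results.csv', 'w') as f:
        w = csv.DictWriter(f, ['file', 'name', *other_fields, 'struct_size'])
        w.writeheader()
        for (file, name), row in sorted(merged.items()):
            w.writerow({'file': file, 'name': name, **row})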

The ./scripts/summary.py script can also be used to quickly compare
different builds or configurations. This is a proper implementation
of a similar but hacky shell script that has already been very useful
for making optimization decisions:

    $ ./scripts/structs.py new.csv -d old.csv --summary
    name (2 added, 0 removed)               code             stack            structs
    TOTAL                                  28648 (-2.7%)      2448               1012
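
Here old.csv and new.csv are just struct results captured with -o from
two different builds, e.g. something like the following before and
after a change (lfs.o is a placeholder object file):

    $ ./scripts/structs.py lfs.o -o old.csv
    $ ./scripts/structs.py lfs.o -o new.csv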

Also some other small tweaks to scripts:

- Removed state saving diff rules. This isn't the most useful way to
  handle comparing changes.

- Added short flags for --summary (-Y) and --files (-F), since these
  are quite often used.

commit 55b3c538d5 (parent eb8be9f351)
Author: Christopher Haster
Date:   2022-03-07 00:24:30 -06:00

7 changed files with 580 additions and 110 deletions

@@ -62,11 +62,24 @@ def collect(paths, **args):
         # map to source files
         if args.get('build_dir'):
             file = re.sub('%s/*' % re.escape(args['build_dir']), '', file)
+        # replace .o with .c, different scripts report .o/.c, we need to
+        # choose one if we want to deduplicate csv files
+        file = re.sub('\.o$', '.c', file)
+
         flat_results.append((file, struct, size))
 
     return flat_results
 
 def main(**args):
+    def openio(path, mode='r'):
+        if path == '-':
+            if 'r' in mode:
+                return os.fdopen(os.dup(sys.stdin.fileno()), 'r')
+            else:
+                return os.fdopen(os.dup(sys.stdout.fileno()), 'w')
+        else:
+            return open(path, mode)
+
     # find sizes
     if not args.get('use', None):
         # find .o files
@@ -84,13 +97,14 @@ def main(**args):
         results = collect(paths, **args)
     else:
-        with open(args['use']) as f:
+        with openio(args['use']) as f:
             r = csv.DictReader(f)
             results = [
                 (   result['file'],
-                    result['struct'],
+                    result['name'],
                     int(result['struct_size']))
-                for result in r]
+                for result in r
+                if result.get('struct_size') not in {None, ''}]
 
     total = 0
     for _, _, size in results:
@@ -99,13 +113,14 @@ def main(**args):
     # find previous results?
     if args.get('diff'):
         try:
-            with open(args['diff']) as f:
+            with openio(args['diff']) as f:
                 r = csv.DictReader(f)
                 prev_results = [
                     (   result['file'],
-                        result['struct'],
+                        result['name'],
                         int(result['struct_size']))
-                    for result in r]
+                    for result in r
+                    if result.get('struct_size') not in {None, ''}]
         except FileNotFoundError:
             prev_results = []
@@ -115,14 +130,34 @@ def main(**args):
     # write results to CSV
     if args.get('output'):
-        with open(args['output'], 'w') as f:
-            w = csv.writer(f)
-            w.writerow(['file', 'struct', 'struct_size'])
-            for file, struct, size in sorted(results):
-                w.writerow((file, struct, size))
+        merged_results = co.defaultdict(lambda: {})
+        other_fields = []
+
+        # merge?
+        if args.get('merge'):
+            try:
+                with openio(args['merge']) as f:
+                    r = csv.DictReader(f)
+                    for result in r:
+                        file = result.pop('file', '')
+                        struct = result.pop('name', '')
+                        result.pop('struct_size', None)
+                        merged_results[(file, struct)] = result
+                        other_fields = result.keys()
+            except FileNotFoundError:
+                pass
+
+        for file, struct, size in results:
+            merged_results[(file, struct)]['struct_size'] = size
+
+        with openio(args['output'], 'w') as f:
+            w = csv.DictWriter(f, ['file', 'name', *other_fields, 'struct_size'])
+            w.writeheader()
+            for (file, struct), result in sorted(merged_results.items()):
+                w.writerow({'file': file, 'name': struct, **result})
 
     # print results
-    def dedup_entries(results, by='struct'):
+    def dedup_entries(results, by='name'):
         entries = co.defaultdict(lambda: 0)
         for file, struct, size in results:
             entry = (file if by == 'file' else struct)
@@ -170,7 +205,7 @@ def main(**args):
                 diff,
                 ' (%+.1f%%)' % (100*ratio) if ratio else ''))
 
-    def print_entries(by='struct'):
+    def print_entries(by='name'):
         entries = dedup_entries(results, by=by)
 
         if not args.get('diff'):
@@ -209,25 +244,29 @@ def main(**args):
         print_entries(by='file')
         print_totals()
     else:
-        print_entries(by='struct')
+        print_entries(by='name')
         print_totals()
 
 if __name__ == "__main__":
     import argparse
     import sys
     parser = argparse.ArgumentParser(
-        description="Find code size at the function level.")
+        description="Find struct sizes.")
     parser.add_argument('obj_paths', nargs='*', default=OBJ_PATHS,
         help="Description of where to find *.o files. May be a directory \
             or a list of paths. Defaults to %r." % OBJ_PATHS)
     parser.add_argument('-v', '--verbose', action='store_true',
         help="Output commands that run behind the scenes.")
+    parser.add_argument('-q', '--quiet', action='store_true',
+        help="Don't show anything, useful with -o.")
     parser.add_argument('-o', '--output',
         help="Specify CSV file to store results.")
     parser.add_argument('-u', '--use',
         help="Don't compile and find struct sizes, instead use this CSV file.")
     parser.add_argument('-d', '--diff',
         help="Specify CSV file to diff struct size against.")
+    parser.add_argument('-m', '--merge',
+        help="Merge with an existing CSV file when writing to output.")
     parser.add_argument('-a', '--all', action='store_true',
         help="Show all functions, not just the ones that changed.")
     parser.add_argument('-A', '--everything', action='store_true',
@@ -236,12 +275,10 @@ if __name__ == "__main__":
         help="Sort by size.")
     parser.add_argument('-S', '--reverse-size-sort', action='store_true',
         help="Sort by size, but backwards.")
-    parser.add_argument('--files', action='store_true',
+    parser.add_argument('-F', '--files', action='store_true',
         help="Show file-level struct sizes.")
-    parser.add_argument('--summary', action='store_true',
+    parser.add_argument('-Y', '--summary', action='store_true',
         help="Only show the total struct size.")
-    parser.add_argument('-q', '--quiet', action='store_true',
-        help="Don't show anything, useful with -o.")
    parser.add_argument('--objdump-tool', default=['objdump'], type=lambda x: x.split(),
        help="Path to the objdump tool to use.")
    parser.add_argument('--build-dir',