Mirror of https://github.com/eledio-devices/thirdparty-littlefs.git
(synced 2025-10-31 08:42:40 +01:00)

Commit: Generated v2 prefixes
		| @@ -1,44 +0,0 @@ | ||||
| #!/usr/bin/env python2 | ||||
|  | ||||
| import struct | ||||
| import sys | ||||
| import os | ||||
| import argparse | ||||
|  | ||||
| def corrupt(block): | ||||
|     with open(block, 'r+b') as file: | ||||
|         # skip rev | ||||
|         file.read(4) | ||||
|  | ||||
|         # go to last commit | ||||
|         tag = 0xffffffff | ||||
|         while True: | ||||
|             try: | ||||
|                 ntag, = struct.unpack('>I', file.read(4)) | ||||
|             except struct.error: | ||||
|                 break | ||||
|  | ||||
|             tag ^= ntag | ||||
|             size = (tag & 0x3ff) if (tag & 0x3ff) != 0x3ff else 0 | ||||
|             file.seek(size, os.SEEK_CUR) | ||||
|  | ||||
|         # lob off last 3 bytes | ||||
|         file.seek(-(size + 3), os.SEEK_CUR) | ||||
|         file.truncate() | ||||
|  | ||||
| def main(args): | ||||
|     if args.n or not args.blocks: | ||||
|         with open('blocks/.history', 'rb') as file: | ||||
|             for i in range(int(args.n or 1)): | ||||
|                 last, = struct.unpack('<I', file.read(4)) | ||||
|                 args.blocks.append('blocks/%x' % last) | ||||
|  | ||||
|     for block in args.blocks: | ||||
|         print 'corrupting %s' % block | ||||
|         corrupt(block) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument('-n') | ||||
|     parser.add_argument('blocks', nargs='*') | ||||
|     main(parser.parse_args()) | ||||
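
The parsing loop above works because littlefs stores each metadata tag xor'd
with the previous tag, so a reader keeps a running tag and xors in every
4-byte word it reads. A minimal standalone sketch of that decoding, using the
same bit layout as the shifts in scripts/debug.py and scripts/readmdir.py
below:

    import struct

    def decode_tags(block):
        # Yield (type, id, size) for each tag in a raw metadata block.
        tag = 0xffffffff                      # parsing starts from all ones
        off = 4                               # skip the 32-bit revision count
        while off + 4 <= len(block):
            ntag, = struct.unpack('>I', block[off:off+4])
            tag ^= ntag                       # undo the xor chaining
            type = (tag & 0x7ff00000) >> 20   # 11-bit type
            id   = (tag & 0x000ffc00) >> 10   # 10-bit id, 0x3ff means "no id"
            size = (tag & 0x000003ff) >> 0    # 10-bit size, 0x3ff means deleted
            yield type, id, size
            off += 4 + (size if size != 0x3ff else 0)
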
							
								
								
									
scripts/debug.py (112 lines deleted)
							| @@ -1,112 +0,0 @@ | ||||
| #!/usr/bin/env python2 | ||||
|  | ||||
| import struct | ||||
| import binascii | ||||
|  | ||||
| TYPES = { | ||||
|     (0x700, 0x400): 'splice', | ||||
|     (0x7ff, 0x401): 'create', | ||||
|     (0x7ff, 0x4ff): 'delete', | ||||
|     (0x700, 0x000): 'name', | ||||
|     (0x7ff, 0x001): 'name reg', | ||||
|     (0x7ff, 0x002): 'name dir', | ||||
|     (0x7ff, 0x0ff): 'name superblock', | ||||
|     (0x700, 0x200): 'struct', | ||||
|     (0x7ff, 0x200): 'struct dir', | ||||
|     (0x7ff, 0x202): 'struct ctz', | ||||
|     (0x7ff, 0x201): 'struct inline', | ||||
|     (0x700, 0x300): 'userattr', | ||||
|     (0x700, 0x600): 'tail', | ||||
|     (0x7ff, 0x600): 'tail soft', | ||||
|     (0x7ff, 0x601): 'tail hard', | ||||
|     (0x700, 0x700): 'gstate', | ||||
|     (0x7ff, 0x7ff): 'gstate move', | ||||
|     (0x700, 0x500): 'crc', | ||||
| } | ||||
|  | ||||
| def typeof(type): | ||||
|     for prefix in range(12): | ||||
|         mask = 0x7ff & ~((1 << prefix)-1) | ||||
|         if (mask, type & mask) in TYPES: | ||||
|             return TYPES[mask, type & mask] + ( | ||||
|                 ' %0*x' % (prefix/4, type & ((1 << prefix)-1)) | ||||
|                 if prefix else '') | ||||
|     else: | ||||
|         return '%02x' % type | ||||
|  | ||||
| def main(*blocks): | ||||
|     # find most recent block | ||||
|     file = None | ||||
|     rev = None | ||||
|     crc = None | ||||
|     versions = [] | ||||
|  | ||||
|     for block in blocks: | ||||
|         try: | ||||
|             nfile = open(block, 'rb') | ||||
|             ndata = nfile.read(4) | ||||
|             ncrc = binascii.crc32(ndata) | ||||
|             nrev, = struct.unpack('<I', ndata) | ||||
|  | ||||
|             assert rev != nrev | ||||
|             if not file or ((rev - nrev) & 0x80000000): | ||||
|                 file = nfile | ||||
|                 rev = nrev | ||||
|                 crc = ncrc | ||||
|  | ||||
|             versions.append((nrev, '%s (rev %d)' % (block, nrev))) | ||||
|         except (IOError, struct.error): | ||||
|             pass | ||||
|  | ||||
|     if not file: | ||||
|         print 'Bad metadata pair {%s}' % ', '.join(blocks) | ||||
|         return 1 | ||||
|  | ||||
|     print "--- %s ---" % ', '.join(v for _,v in sorted(versions, reverse=True)) | ||||
|  | ||||
|     # go through each tag, print useful information | ||||
|     print "%-4s  %-8s  %-14s  %3s %4s  %s" % ( | ||||
|         'off', 'tag', 'type', 'id', 'len', 'dump') | ||||
|  | ||||
|     tag = 0xffffffff | ||||
|     off = 4 | ||||
|     while True: | ||||
|         try: | ||||
|             data = file.read(4) | ||||
|             crc = binascii.crc32(data, crc) | ||||
|             ntag, = struct.unpack('>I', data) | ||||
|         except struct.error: | ||||
|             break | ||||
|  | ||||
|         tag ^= ntag | ||||
|         off += 4 | ||||
|  | ||||
|         type = (tag & 0x7ff00000) >> 20 | ||||
|         id   = (tag & 0x000ffc00) >> 10 | ||||
|         size = (tag & 0x000003ff) >> 0 | ||||
|         iscrc = (type & 0x700) == 0x500 | ||||
|  | ||||
|         data = file.read(size if size != 0x3ff else 0) | ||||
|         if iscrc: | ||||
|             crc = binascii.crc32(data[:4], crc) | ||||
|         else: | ||||
|             crc = binascii.crc32(data, crc) | ||||
|  | ||||
|         print '%04x: %08x  %-15s %3s %4s  %-23s  %-8s' % ( | ||||
|             off, tag, | ||||
|             typeof(type) + (' bad!' if iscrc and ~crc else ''), | ||||
|             hex(id)[2:] if id != 0x3ff else '.', | ||||
|             size if size != 0x3ff else 'x', | ||||
|             ' '.join('%02x' % ord(c) for c in data[:8]), | ||||
|             ''.join(c if c >= ' ' and c <= '~' else '.' for c in data[:8])) | ||||
|  | ||||
|         off += size if size != 0x3ff else 0 | ||||
|         if iscrc: | ||||
|             crc = 0 | ||||
|             tag ^= (type & 1) << 31 | ||||
|  | ||||
|     return 0 | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import sys | ||||
|     sys.exit(main(*sys.argv[1:])) | ||||
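
debug.py picks the newer of the two blocks in a metadata pair by comparing
revision counts with sequence arithmetic, (rev - nrev) & 0x80000000, rather
than a plain less-than, so the choice stays correct after the 32-bit revision
counter wraps (MetadataPair.__lt__ in scripts/readmdir.py below uses the same
trick). A small illustration of what that test means:

    def is_newer(nrev, rev):
        # True if nrev is "ahead of" rev modulo 2**32
        return bool((rev - nrev) & 0x80000000)

    assert is_newer(11, 10)                    # ordinary case
    assert is_newer(0x00000002, 0xfffffffe)    # still newer across wraparound
    assert not is_newer(10, 11)
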
							
								
								
									
scripts/explode_asserts.py (383 lines added, new executable file)
							| @@ -0,0 +1,383 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import re | ||||
| import sys | ||||
|  | ||||
| PATTERN = ['LFS2_ASSERT', 'assert'] | ||||
| PREFIX = 'LFS2' | ||||
| MAXWIDTH = 16 | ||||
|  | ||||
| ASSERT = "__{PREFIX}_ASSERT_{TYPE}_{COMP}" | ||||
| FAIL = """ | ||||
| __attribute__((unused)) | ||||
| static void __{prefix}_assert_fail_{type}( | ||||
|         const char *file, int line, const char *comp, | ||||
|         {ctype} lh, size_t lsize, | ||||
|         {ctype} rh, size_t rsize) {{ | ||||
|     printf("%s:%d:assert: assert failed with ", file, line); | ||||
|     __{prefix}_assert_print_{type}(lh, lsize); | ||||
|     printf(", expected %s ", comp); | ||||
|     __{prefix}_assert_print_{type}(rh, rsize); | ||||
|     printf("\\n"); | ||||
|     fflush(NULL); | ||||
|     raise(SIGABRT); | ||||
| }} | ||||
| """ | ||||
|  | ||||
| COMP = { | ||||
|     '==': 'eq', | ||||
|     '!=': 'ne', | ||||
|     '<=': 'le', | ||||
|     '>=': 'ge', | ||||
|     '<':  'lt', | ||||
|     '>':  'gt', | ||||
| } | ||||
|  | ||||
| TYPE = { | ||||
|     'int': { | ||||
|         'ctype': 'intmax_t', | ||||
|         'fail': FAIL, | ||||
|         'print': """ | ||||
|         __attribute__((unused)) | ||||
|         static void __{prefix}_assert_print_{type}({ctype} v, size_t size) {{ | ||||
|             (void)size; | ||||
|             printf("%"PRIiMAX, v); | ||||
|         }} | ||||
|         """, | ||||
|         'assert': """ | ||||
|         #define __{PREFIX}_ASSERT_{TYPE}_{COMP}(file, line, lh, rh) | ||||
|         do {{ | ||||
|             __typeof__(lh) _lh = lh; | ||||
|             __typeof__(lh) _rh = (__typeof__(lh))rh; | ||||
|             if (!(_lh {op} _rh)) {{ | ||||
|                 __{prefix}_assert_fail_{type}(file, line, "{comp}", | ||||
|                         (intmax_t)_lh, 0, (intmax_t)_rh, 0); | ||||
|             }} | ||||
|         }} while (0) | ||||
|         """ | ||||
|     }, | ||||
|     'bool': { | ||||
|         'ctype': 'bool', | ||||
|         'fail': FAIL, | ||||
|         'print': """ | ||||
|         __attribute__((unused)) | ||||
|         static void __{prefix}_assert_print_{type}({ctype} v, size_t size) {{ | ||||
|             (void)size; | ||||
|             printf("%s", v ? "true" : "false"); | ||||
|         }} | ||||
|         """, | ||||
|         'assert': """ | ||||
|         #define __{PREFIX}_ASSERT_{TYPE}_{COMP}(file, line, lh, rh) | ||||
|         do {{ | ||||
|             bool _lh = !!(lh); | ||||
|             bool _rh = !!(rh); | ||||
|             if (!(_lh {op} _rh)) {{ | ||||
|                 __{prefix}_assert_fail_{type}(file, line, "{comp}", | ||||
|                         _lh, 0, _rh, 0); | ||||
|             }} | ||||
|         }} while (0) | ||||
|         """ | ||||
|     }, | ||||
|     'mem': { | ||||
|         'ctype': 'const void *', | ||||
|         'fail': FAIL, | ||||
|         'print': """ | ||||
|         __attribute__((unused)) | ||||
|         static void __{prefix}_assert_print_{type}({ctype} v, size_t size) {{ | ||||
|             const uint8_t *s = v; | ||||
|             printf("\\\""); | ||||
|             for (size_t i = 0; i < size && i < {maxwidth}; i++) {{ | ||||
|                 if (s[i] >= ' ' && s[i] <= '~') {{ | ||||
|                     printf("%c", s[i]); | ||||
|                 }} else {{ | ||||
|                     printf("\\\\x%02x", s[i]); | ||||
|                 }} | ||||
|             }} | ||||
|             if (size > {maxwidth}) {{ | ||||
|                 printf("..."); | ||||
|             }} | ||||
|             printf("\\\""); | ||||
|         }} | ||||
|         """, | ||||
|         'assert': """ | ||||
|         #define __{PREFIX}_ASSERT_{TYPE}_{COMP}(file, line, lh, rh, size) | ||||
|         do {{ | ||||
|             const void *_lh = lh; | ||||
|             const void *_rh = rh; | ||||
|             if (!(memcmp(_lh, _rh, size) {op} 0)) {{ | ||||
|                 __{prefix}_assert_fail_{type}(file, line, "{comp}", | ||||
|                         _lh, size, _rh, size); | ||||
|             }} | ||||
|         }} while (0) | ||||
|         """ | ||||
|     }, | ||||
|     'str': { | ||||
|         'ctype': 'const char *', | ||||
|         'fail': FAIL, | ||||
|         'print': """ | ||||
|         __attribute__((unused)) | ||||
|         static void __{prefix}_assert_print_{type}({ctype} v, size_t size) {{ | ||||
|             __{prefix}_assert_print_mem(v, size); | ||||
|         }} | ||||
|         """, | ||||
|         'assert': """ | ||||
|         #define __{PREFIX}_ASSERT_{TYPE}_{COMP}(file, line, lh, rh) | ||||
|         do {{ | ||||
|             const char *_lh = lh; | ||||
|             const char *_rh = rh; | ||||
|             if (!(strcmp(_lh, _rh) {op} 0)) {{ | ||||
|                 __{prefix}_assert_fail_{type}(file, line, "{comp}", | ||||
|                         _lh, strlen(_lh), _rh, strlen(_rh)); | ||||
|             }} | ||||
|         }} while (0) | ||||
|         """ | ||||
|     } | ||||
| } | ||||
|  | ||||
| def mkdecls(outf, maxwidth=16): | ||||
|     outf.write("#include <stdio.h>\n") | ||||
|     outf.write("#include <stdbool.h>\n") | ||||
|     outf.write("#include <stdint.h>\n") | ||||
|     outf.write("#include <inttypes.h>\n") | ||||
|     outf.write("#include <signal.h>\n") | ||||
|  | ||||
|     for type, desc in sorted(TYPE.items()): | ||||
|         format = { | ||||
|             'type': type.lower(), 'TYPE': type.upper(), | ||||
|             'ctype': desc['ctype'], | ||||
|             'prefix': PREFIX.lower(), 'PREFIX': PREFIX.upper(), | ||||
|             'maxwidth': maxwidth, | ||||
|         } | ||||
|         outf.write(re.sub('\s+', ' ', | ||||
|             desc['print'].strip().format(**format))+'\n') | ||||
|         outf.write(re.sub('\s+', ' ', | ||||
|             desc['fail'].strip().format(**format))+'\n') | ||||
|  | ||||
|         for op, comp in sorted(COMP.items()): | ||||
|             format.update({ | ||||
|                 'comp': comp.lower(), 'COMP': comp.upper(), | ||||
|                 'op': op, | ||||
|             }) | ||||
|             outf.write(re.sub('\s+', ' ', | ||||
|                 desc['assert'].strip().format(**format))+'\n') | ||||
|  | ||||
| def mkassert(type, comp, lh, rh, size=None): | ||||
|     format = { | ||||
|         'type': type.lower(), 'TYPE': type.upper(), | ||||
|         'comp': comp.lower(), 'COMP': comp.upper(), | ||||
|         'prefix': PREFIX.lower(), 'PREFIX': PREFIX.upper(), | ||||
|         'lh': lh.strip(' '), | ||||
|         'rh': rh.strip(' '), | ||||
|         'size': size, | ||||
|     } | ||||
|     if size: | ||||
|         return ((ASSERT + '(__FILE__, __LINE__, {lh}, {rh}, {size})') | ||||
|             .format(**format)) | ||||
|     else: | ||||
|         return ((ASSERT + '(__FILE__, __LINE__, {lh}, {rh})') | ||||
|             .format(**format)) | ||||
|  | ||||
|  | ||||
| # simple recursive descent parser | ||||
| LEX = { | ||||
|     'ws':       [r'(?:\s|\n|#.*?\n|//.*?\n|/\*.*?\*/)+'], | ||||
|     'assert':   PATTERN, | ||||
|     'string':   [r'"(?:\\.|[^"])*"', r"'(?:\\.|[^'])\'"], | ||||
|     'arrow':    ['=>'], | ||||
|     'paren':    ['\(', '\)'], | ||||
|     'op':       ['strcmp', 'memcmp', '->'], | ||||
|     'comp':     ['==', '!=', '<=', '>=', '<', '>'], | ||||
|     'logic':    ['\&\&', '\|\|'], | ||||
|     'sep':      [':', ';', '\{', '\}', ','], | ||||
| } | ||||
|  | ||||
| class ParseFailure(Exception): | ||||
|     def __init__(self, expected, found): | ||||
|         self.expected = expected | ||||
|         self.found = found | ||||
|  | ||||
|     def __str__(self): | ||||
|         return "expected %r, found %s..." % ( | ||||
|             self.expected, repr(self.found)[:70]) | ||||
|  | ||||
| class Parse: | ||||
|     def __init__(self, inf, lexemes): | ||||
|         p = '|'.join('(?P<%s>%s)' % (n, '|'.join(l)) | ||||
|             for n, l in lexemes.items()) | ||||
|         p = re.compile(p, re.DOTALL) | ||||
|         data = inf.read() | ||||
|         tokens = [] | ||||
|         while True: | ||||
|             m = p.search(data) | ||||
|             if m: | ||||
|                 if m.start() > 0: | ||||
|                     tokens.append((None, data[:m.start()])) | ||||
|                 tokens.append((m.lastgroup, m.group())) | ||||
|                 data = data[m.end():] | ||||
|             else: | ||||
|                 tokens.append((None, data)) | ||||
|                 break | ||||
|         self.tokens = tokens | ||||
|         self.off = 0 | ||||
|  | ||||
|     def lookahead(self, *pattern): | ||||
|         if self.off < len(self.tokens): | ||||
|             token = self.tokens[self.off] | ||||
|             if token[0] in pattern or token[1] in pattern: | ||||
|                 self.m = token[1] | ||||
|                 return self.m | ||||
|         self.m = None | ||||
|         return self.m | ||||
|  | ||||
|     def accept(self, *patterns): | ||||
|         m = self.lookahead(*patterns) | ||||
|         if m is not None: | ||||
|             self.off += 1 | ||||
|         return m | ||||
|  | ||||
|     def expect(self, *patterns): | ||||
|         m = self.accept(*patterns) | ||||
|         if not m: | ||||
|             raise ParseFailure(patterns, self.tokens[self.off:]) | ||||
|         return m | ||||
|  | ||||
|     def push(self): | ||||
|         return self.off | ||||
|  | ||||
|     def pop(self, state): | ||||
|         self.off = state | ||||
|  | ||||
| def passert(p): | ||||
|     def pastr(p): | ||||
|         p.expect('assert') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         p.expect('strcmp') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         lh = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(',') ; p.accept('ws') | ||||
|         rh = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(')') ; p.accept('ws') | ||||
|         comp = p.expect('comp') ; p.accept('ws') | ||||
|         p.expect('0') ; p.accept('ws') | ||||
|         p.expect(')') | ||||
|         return mkassert('str', COMP[comp], lh, rh) | ||||
|  | ||||
|     def pamem(p): | ||||
|         p.expect('assert') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         p.expect('memcmp') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         lh = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(',') ; p.accept('ws') | ||||
|         rh = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(',') ; p.accept('ws') | ||||
|         size = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(')') ; p.accept('ws') | ||||
|         comp = p.expect('comp') ; p.accept('ws') | ||||
|         p.expect('0') ; p.accept('ws') | ||||
|         p.expect(')') | ||||
|         return mkassert('mem', COMP[comp], lh, rh, size) | ||||
|  | ||||
|     def paint(p): | ||||
|         p.expect('assert') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         lh = pexpr(p) ; p.accept('ws') | ||||
|         comp = p.expect('comp') ; p.accept('ws') | ||||
|         rh = pexpr(p) ; p.accept('ws') | ||||
|         p.expect(')') | ||||
|         return mkassert('int', COMP[comp], lh, rh) | ||||
|  | ||||
|     def pabool(p): | ||||
|         p.expect('assert') ; p.accept('ws') ; p.expect('(') ; p.accept('ws') | ||||
|         lh = pexprs(p) ; p.accept('ws') | ||||
|         p.expect(')') | ||||
|         return mkassert('bool', 'eq', lh, 'true') | ||||
|  | ||||
|     def pa(p): | ||||
|         return p.expect('assert') | ||||
|  | ||||
|     state = p.push() | ||||
|     lastf = None | ||||
|     for pa in [pastr, pamem, paint, pabool, pa]: | ||||
|         try: | ||||
|             return pa(p) | ||||
|         except ParseFailure as f: | ||||
|             p.pop(state) | ||||
|             lastf = f | ||||
|     else: | ||||
|         raise lastf | ||||
|  | ||||
| def pexpr(p): | ||||
|     res = [] | ||||
|     while True: | ||||
|         if p.accept('('): | ||||
|             res.append(p.m) | ||||
|             while True: | ||||
|                 res.append(pexprs(p)) | ||||
|                 if p.accept('sep'): | ||||
|                     res.append(p.m) | ||||
|                 else: | ||||
|                     break | ||||
|             res.append(p.expect(')')) | ||||
|         elif p.lookahead('assert'): | ||||
|             res.append(passert(p)) | ||||
|         elif p.accept('assert', 'ws', 'string', 'op', None): | ||||
|             res.append(p.m) | ||||
|         else: | ||||
|             return ''.join(res) | ||||
|  | ||||
| def pexprs(p): | ||||
|     res = [] | ||||
|     while True: | ||||
|         res.append(pexpr(p)) | ||||
|         if p.accept('comp', 'logic', ','): | ||||
|             res.append(p.m) | ||||
|         else: | ||||
|             return ''.join(res) | ||||
|  | ||||
| def pstmt(p): | ||||
|     ws = p.accept('ws') or '' | ||||
|     lh = pexprs(p) | ||||
|     if p.accept('=>'): | ||||
|         rh = pexprs(p) | ||||
|         return ws + mkassert('int', 'eq', lh, rh) | ||||
|     else: | ||||
|         return ws + lh | ||||
|  | ||||
|  | ||||
| def main(args): | ||||
|     inf = open(args.input, 'r') if args.input else sys.stdin | ||||
|     outf = open(args.output, 'w') if args.output else sys.stdout | ||||
|  | ||||
|     lexemes = LEX.copy() | ||||
|     if args.pattern: | ||||
|         lexemes['assert'] = args.pattern | ||||
|     p = Parse(inf, lexemes) | ||||
|  | ||||
|     # write extra verbose asserts | ||||
|     mkdecls(outf, maxwidth=args.maxwidth) | ||||
|     if args.input: | ||||
|         outf.write("#line %d \"%s\"\n" % (1, args.input)) | ||||
|  | ||||
|     # parse and write out stmt at a time | ||||
|     try: | ||||
|         while True: | ||||
|             outf.write(pstmt(p)) | ||||
|             if p.accept('sep'): | ||||
|                 outf.write(p.m) | ||||
|             else: | ||||
|                 break | ||||
|     except ParseFailure as f: | ||||
|         pass | ||||
|  | ||||
|     for i in range(p.off, len(p.tokens)): | ||||
|         outf.write(p.tokens[i][1]) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Cpp step that increases assert verbosity") | ||||
|     parser.add_argument('input', nargs='?', | ||||
|         help="Input C file after cpp.") | ||||
|     parser.add_argument('-o', '--output', required=True, | ||||
|         help="Output C file.") | ||||
|     parser.add_argument('-p', '--pattern', action='append', | ||||
|         help="Patterns to search for starting an assert statement.") | ||||
|     parser.add_argument('--maxwidth', default=MAXWIDTH, type=int, | ||||
|         help="Maximum number of characters to display for strcmp and memcmp.") | ||||
|     main(parser.parse_args()) | ||||
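
A rough illustration of the rewrite this script performs, assuming the
default LFS2 prefix (the exact spacing of the generated call may differ):

    import io
    p = Parse(io.StringIO('assert(strcmp(path, "hi") == 0);'), LEX)
    print(pstmt(p))
    # prints roughly: __LFS2_ASSERT_STR_EQ(__FILE__, __LINE__, path, "hi")

Plain comparisons go through the 'int' variant, bare conditions through
'bool', and the preamble written by mkdecls() defines the matching
__LFS2_ASSERT_* macros together with their _print/_fail helpers.
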
							
								
								
									
scripts/readblock.py (26 lines added, new executable file)
							| @@ -0,0 +1,26 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import subprocess as sp | ||||
|  | ||||
| def main(args): | ||||
|     with open(args.disk, 'rb') as f: | ||||
|         f.seek(args.block * args.block_size) | ||||
|         block = (f.read(args.block_size) | ||||
|             .ljust(args.block_size, b'\xff')) | ||||
|  | ||||
|     # what did you expect? | ||||
|     print("%-8s  %-s" % ('off', 'data')) | ||||
|     return sp.run(['xxd', '-g1', '-'], input=block).returncode | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Hex dump a specific block in a disk.") | ||||
|     parser.add_argument('disk', | ||||
|         help="File representing the block device.") | ||||
|     parser.add_argument('block_size', type=lambda x: int(x, 0), | ||||
|         help="Size of a block in bytes.") | ||||
|     parser.add_argument('block', type=lambda x: int(x, 0), | ||||
|         help="Address of block to dump.") | ||||
|     sys.exit(main(parser.parse_args())) | ||||
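
xxd does the actual formatting above; if it is not available, an equivalent
dump can be produced in pure Python (a sketch only; the function name is
illustrative, and the output shape mirrors the tag dumps in readmdir.py
below):

    def hexdump(block):
        for off in range(0, len(block), 16):
            chunk = block[off:off+16]
            print('%08x: %-47s  %-16s' % (
                off,
                ' '.join('%02x' % b for b in chunk),
                ''.join(chr(b) if 0x20 <= b <= 0x7e else '.' for b in chunk)))
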
							
								
								
									
scripts/readmdir.py (367 lines added, new executable file)
							| @@ -0,0 +1,367 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import struct | ||||
| import binascii | ||||
| import sys | ||||
| import itertools as it | ||||
|  | ||||
| TAG_TYPES = { | ||||
|     'splice':       (0x700, 0x400), | ||||
|     'create':       (0x7ff, 0x401), | ||||
|     'delete':       (0x7ff, 0x4ff), | ||||
|     'name':         (0x700, 0x000), | ||||
|     'reg':          (0x7ff, 0x001), | ||||
|     'dir':          (0x7ff, 0x002), | ||||
|     'superblock':   (0x7ff, 0x0ff), | ||||
|     'struct':       (0x700, 0x200), | ||||
|     'dirstruct':    (0x7ff, 0x200), | ||||
|     'ctzstruct':    (0x7ff, 0x202), | ||||
|     'inlinestruct': (0x7ff, 0x201), | ||||
|     'userattr':     (0x700, 0x300), | ||||
|     'tail':         (0x700, 0x600), | ||||
|     'softtail':     (0x7ff, 0x600), | ||||
|     'hardtail':     (0x7ff, 0x601), | ||||
|     'gstate':       (0x700, 0x700), | ||||
|     'movestate':    (0x7ff, 0x7ff), | ||||
|     'crc':          (0x700, 0x500), | ||||
| } | ||||
|  | ||||
| class Tag: | ||||
|     def __init__(self, *args): | ||||
|         if len(args) == 1: | ||||
|             self.tag = args[0] | ||||
|         elif len(args) == 3: | ||||
|             if isinstance(args[0], str): | ||||
|                 type = TAG_TYPES[args[0]][1] | ||||
|             else: | ||||
|                 type = args[0] | ||||
|  | ||||
|             if isinstance(args[1], str): | ||||
|                 id = int(args[1], 0) if args[1] not in 'x.' else 0x3ff | ||||
|             else: | ||||
|                 id = args[1] | ||||
|  | ||||
|             if isinstance(args[2], str): | ||||
|                 size = int(args[2], 0) if args[2] not in 'x.' else 0x3ff | ||||
|             else: | ||||
|                 size = args[2] | ||||
|  | ||||
|             self.tag = (type << 20) | (id << 10) | size | ||||
|         else: | ||||
|             assert False | ||||
|  | ||||
|     @property | ||||
|     def isvalid(self): | ||||
|         return not bool(self.tag & 0x80000000) | ||||
|  | ||||
|     @property | ||||
|     def isattr(self): | ||||
|         return not bool(self.tag & 0x40000000) | ||||
|  | ||||
|     @property | ||||
|     def iscompactable(self): | ||||
|         return bool(self.tag & 0x20000000) | ||||
|  | ||||
|     @property | ||||
|     def isunique(self): | ||||
|         return not bool(self.tag & 0x10000000) | ||||
|  | ||||
|     @property | ||||
|     def type(self): | ||||
|         return (self.tag & 0x7ff00000) >> 20 | ||||
|  | ||||
|     @property | ||||
|     def type1(self): | ||||
|         return (self.tag & 0x70000000) >> 20 | ||||
|  | ||||
|     @property | ||||
|     def type3(self): | ||||
|         return (self.tag & 0x7ff00000) >> 20 | ||||
|  | ||||
|     @property | ||||
|     def id(self): | ||||
|         return (self.tag & 0x000ffc00) >> 10 | ||||
|  | ||||
|     @property | ||||
|     def size(self): | ||||
|         return (self.tag & 0x000003ff) >> 0 | ||||
|  | ||||
|     @property | ||||
|     def dsize(self): | ||||
|         return 4 + (self.size if self.size != 0x3ff else 0) | ||||
|  | ||||
|     @property | ||||
|     def chunk(self): | ||||
|         return self.type & 0xff | ||||
|  | ||||
|     @property | ||||
|     def schunk(self): | ||||
|         return struct.unpack('b', struct.pack('B', self.chunk))[0] | ||||
|  | ||||
|     def is_(self, type): | ||||
|         return (self.type & TAG_TYPES[type][0]) == TAG_TYPES[type][1] | ||||
|  | ||||
|     def mkmask(self): | ||||
|         return Tag( | ||||
|             0x700 if self.isunique else 0x7ff, | ||||
|             0x3ff if self.isattr else 0, | ||||
|             0) | ||||
|  | ||||
|     def chid(self, nid): | ||||
|         ntag = Tag(self.type, nid, self.size) | ||||
|         if hasattr(self, 'off'):  ntag.off  = self.off | ||||
|         if hasattr(self, 'data'): ntag.data = self.data | ||||
|         if hasattr(self, 'crc'):  ntag.crc  = self.crc | ||||
|         return ntag | ||||
|  | ||||
|     def typerepr(self): | ||||
|         if self.is_('crc') and getattr(self, 'crc', 0xffffffff) != 0xffffffff: | ||||
|             return 'crc (bad)' | ||||
|  | ||||
|         reverse_types = {v: k for k, v in TAG_TYPES.items()} | ||||
|         for prefix in range(12): | ||||
|             mask = 0x7ff & ~((1 << prefix)-1) | ||||
|             if (mask, self.type & mask) in reverse_types: | ||||
|                 type = reverse_types[mask, self.type & mask] | ||||
|                 if prefix > 0: | ||||
|                     return '%s %#0*x' % ( | ||||
|                         type, prefix//4, self.type & ((1 << prefix)-1)) | ||||
|                 else: | ||||
|                     return type | ||||
|         else: | ||||
|             return '%02x' % self.type | ||||
|  | ||||
|     def idrepr(self): | ||||
|         return repr(self.id) if self.id != 0x3ff else '.' | ||||
|  | ||||
|     def sizerepr(self): | ||||
|         return repr(self.size) if self.size != 0x3ff else 'x' | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return 'Tag(%r, %d, %d)' % (self.typerepr(), self.id, self.size) | ||||
|  | ||||
|     def __lt__(self, other): | ||||
|         return (self.id, self.type) < (other.id, other.type) | ||||
|  | ||||
|     def __bool__(self): | ||||
|         return self.isvalid | ||||
|  | ||||
|     def __int__(self): | ||||
|         return self.tag | ||||
|  | ||||
|     def __index__(self): | ||||
|         return self.tag | ||||
|  | ||||
| class MetadataPair: | ||||
|     def __init__(self, blocks): | ||||
|         if len(blocks) > 1: | ||||
|             self.pair = [MetadataPair([block]) for block in blocks] | ||||
|             self.pair = sorted(self.pair, reverse=True) | ||||
|  | ||||
|             self.data = self.pair[0].data | ||||
|             self.rev  = self.pair[0].rev | ||||
|             self.tags = self.pair[0].tags | ||||
|             self.ids  = self.pair[0].ids | ||||
|             self.log  = self.pair[0].log | ||||
|             self.all_ = self.pair[0].all_ | ||||
|             return | ||||
|  | ||||
|         self.pair = [self] | ||||
|         self.data = blocks[0] | ||||
|         block = self.data | ||||
|  | ||||
|         self.rev, = struct.unpack('<I', block[0:4]) | ||||
|         crc = binascii.crc32(block[0:4]) | ||||
|  | ||||
|         # parse tags | ||||
|         corrupt = False | ||||
|         tag = Tag(0xffffffff) | ||||
|         off = 4 | ||||
|         self.log = [] | ||||
|         self.all_ = [] | ||||
|         while len(block) - off >= 4: | ||||
|             ntag, = struct.unpack('>I', block[off:off+4]) | ||||
|  | ||||
|             tag = Tag(int(tag) ^ ntag) | ||||
|             tag.off = off + 4 | ||||
|             tag.data = block[off+4:off+tag.dsize] | ||||
|             if tag.is_('crc'): | ||||
|                 crc = binascii.crc32(block[off:off+4+4], crc) | ||||
|             else: | ||||
|                 crc = binascii.crc32(block[off:off+tag.dsize], crc) | ||||
|             tag.crc = crc | ||||
|             off += tag.dsize | ||||
|  | ||||
|             self.all_.append(tag) | ||||
|  | ||||
|             if tag.is_('crc'): | ||||
|                 # is valid commit? | ||||
|                 if crc != 0xffffffff: | ||||
|                     corrupt = True | ||||
|                 if not corrupt: | ||||
|                     self.log = self.all_.copy() | ||||
|  | ||||
|                 # reset tag parsing | ||||
|                 crc = 0 | ||||
|                 tag = Tag(int(tag) ^ ((tag.type & 1) << 31)) | ||||
|  | ||||
|         # find active ids | ||||
|         self.ids = list(it.takewhile( | ||||
|             lambda id: Tag('name', id, 0) in self, | ||||
|             it.count())) | ||||
|  | ||||
|         # find most recent tags | ||||
|         self.tags = [] | ||||
|         for tag in self.log: | ||||
|             if tag.is_('crc') or tag.is_('splice'): | ||||
|                 continue | ||||
|             elif tag.id == 0x3ff: | ||||
|                 if tag in self and self[tag] is tag: | ||||
|                     self.tags.append(tag) | ||||
|             else: | ||||
|                 # id could have changed, I know this is messy and slow | ||||
|                 # but it works | ||||
|                 for id in self.ids: | ||||
|                     ntag = tag.chid(id) | ||||
|                     if ntag in self and self[ntag] is tag: | ||||
|                         self.tags.append(ntag) | ||||
|  | ||||
|         self.tags = sorted(self.tags) | ||||
|  | ||||
|     def __bool__(self): | ||||
|         return bool(self.log) | ||||
|  | ||||
|     def __lt__(self, other): | ||||
|         # corrupt blocks don't count | ||||
|         if not self or not other: | ||||
|             return bool(other) | ||||
|  | ||||
|         # use sequence arithmetic to avoid overflow | ||||
|         return not ((other.rev - self.rev) & 0x80000000) | ||||
|  | ||||
|     def __contains__(self, args): | ||||
|         try: | ||||
|             self[args] | ||||
|             return True | ||||
|         except KeyError: | ||||
|             return False | ||||
|  | ||||
|     def __getitem__(self, args): | ||||
|         if isinstance(args, tuple): | ||||
|             gmask, gtag = args | ||||
|         else: | ||||
|             gmask, gtag = args.mkmask(), args | ||||
|  | ||||
|         gdiff = 0 | ||||
|         for tag in reversed(self.log): | ||||
|             if (gmask.id != 0 and tag.is_('splice') and | ||||
|                     tag.id <= gtag.id - gdiff): | ||||
|                 if tag.is_('create') and tag.id == gtag.id - gdiff: | ||||
|                     # creation point | ||||
|                     break | ||||
|  | ||||
|                 gdiff += tag.schunk | ||||
|  | ||||
|             if ((int(gmask) & int(tag)) == | ||||
|                     (int(gmask) & int(gtag.chid(gtag.id - gdiff)))): | ||||
|                 if tag.size == 0x3ff: | ||||
|                     # deleted | ||||
|                     break | ||||
|  | ||||
|                 return tag | ||||
|  | ||||
|         raise KeyError(gmask, gtag) | ||||
|  | ||||
|     def _dump_tags(self, tags, f=sys.stdout, truncate=True): | ||||
|         f.write("%-8s  %-8s  %-13s %4s %4s" % ( | ||||
|             'off', 'tag', 'type', 'id', 'len')) | ||||
|         if truncate: | ||||
|             f.write('  data (truncated)') | ||||
|         f.write('\n') | ||||
|  | ||||
|         for tag in tags: | ||||
|             f.write("%08x: %08x  %-13s %4s %4s" % ( | ||||
|                 tag.off, tag, | ||||
|                 tag.typerepr(), tag.idrepr(), tag.sizerepr())) | ||||
|             if truncate: | ||||
|                 f.write("  %-23s  %-8s\n" % ( | ||||
|                     ' '.join('%02x' % c for c in tag.data[:8]), | ||||
|                     ''.join(c if c >= ' ' and c <= '~' else '.' | ||||
|                         for c in map(chr, tag.data[:8])))) | ||||
|             else: | ||||
|                 f.write("\n") | ||||
|                 for i in range(0, len(tag.data), 16): | ||||
|                     f.write("  %08x: %-47s  %-16s\n" % ( | ||||
|                         tag.off+i, | ||||
|                         ' '.join('%02x' % c for c in tag.data[i:i+16]), | ||||
|                         ''.join(c if c >= ' ' and c <= '~' else '.' | ||||
|                             for c in map(chr, tag.data[i:i+16])))) | ||||
|  | ||||
|     def dump_tags(self, f=sys.stdout, truncate=True): | ||||
|         self._dump_tags(self.tags, f=f, truncate=truncate) | ||||
|  | ||||
|     def dump_log(self, f=sys.stdout, truncate=True): | ||||
|         self._dump_tags(self.log, f=f, truncate=truncate) | ||||
|  | ||||
|     def dump_all(self, f=sys.stdout, truncate=True): | ||||
|         self._dump_tags(self.all_, f=f, truncate=truncate) | ||||
|  | ||||
| def main(args): | ||||
|     blocks = [] | ||||
|     with open(args.disk, 'rb') as f: | ||||
|         for block in [args.block1, args.block2]: | ||||
|             if block is None: | ||||
|                 continue | ||||
|             f.seek(block * args.block_size) | ||||
|             blocks.append(f.read(args.block_size) | ||||
|                 .ljust(args.block_size, b'\xff')) | ||||
|  | ||||
|     # find most recent pair | ||||
|     mdir = MetadataPair(blocks) | ||||
|  | ||||
|     try: | ||||
|         mdir.tail = mdir[Tag('tail', 0, 0)] | ||||
|         if mdir.tail.size != 8 or mdir.tail.data == 8*b'\xff': | ||||
|             mdir.tail = None | ||||
|     except KeyError: | ||||
|         mdir.tail = None | ||||
|  | ||||
|     print("mdir {%s} rev %d%s%s%s" % ( | ||||
|         ', '.join('%#x' % b | ||||
|             for b in [args.block1, args.block2] | ||||
|             if b is not None), | ||||
|         mdir.rev, | ||||
|         ' (was %s)' % ', '.join('%d' % m.rev for m in mdir.pair[1:]) | ||||
|         if len(mdir.pair) > 1 else '', | ||||
|         ' (corrupted!)' if not mdir else '', | ||||
|         ' -> {%#x, %#x}' % struct.unpack('<II', mdir.tail.data) | ||||
|         if mdir.tail else '')) | ||||
|     if args.all: | ||||
|         mdir.dump_all(truncate=not args.no_truncate) | ||||
|     elif args.log: | ||||
|         mdir.dump_log(truncate=not args.no_truncate) | ||||
|     else: | ||||
|         mdir.dump_tags(truncate=not args.no_truncate) | ||||
|  | ||||
|     return 0 if mdir else 1 | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Dump useful info about metadata pairs in littlefs.") | ||||
|     parser.add_argument('disk', | ||||
|         help="File representing the block device.") | ||||
|     parser.add_argument('block_size', type=lambda x: int(x, 0), | ||||
|         help="Size of a block in bytes.") | ||||
|     parser.add_argument('block1', type=lambda x: int(x, 0), | ||||
|         help="First block address for finding the metadata pair.") | ||||
|     parser.add_argument('block2', nargs='?', type=lambda x: int(x, 0), | ||||
|         help="Second block address for finding the metadata pair.") | ||||
|     parser.add_argument('-l', '--log', action='store_true', | ||||
|         help="Show tags in log.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all tags in log, including tags in corrupted commits.") | ||||
|     parser.add_argument('-T', '--no-truncate', action='store_true', | ||||
|         help="Don't truncate large amounts of data.") | ||||
|     sys.exit(main(parser.parse_args())) | ||||
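
Tag and MetadataPair can also be used programmatically. A hypothetical sketch
(the image name and block geometry are made up for illustration; blocks 0 and
1 hold the root metadata pair, as assumed by readtree.py below):

    import struct
    from readmdir import Tag, MetadataPair

    with open('disk.img', 'rb') as f:            # hypothetical disk image
        blocks = []
        for addr in (0, 1):                      # root metadata pair
            f.seek(addr * 4096)                  # assuming 4096-byte blocks
            blocks.append(f.read(4096).ljust(4096, b'\xff'))

    mdir = MetadataPair(blocks)                  # picks the newer valid block
    print(mdir.rev, mdir.ids)                    # revision count, active ids
    try:
        tail = mdir[Tag('tail', 0, 0)]           # most recent tail tag, if any
        print(struct.unpack('<II', tail.data))
    except KeyError:
        pass
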
							
								
								
									
scripts/readtree.py (183 lines added, new executable file)
							| @@ -0,0 +1,183 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import struct | ||||
| import sys | ||||
| import json | ||||
| import io | ||||
| import itertools as it | ||||
| from readmdir import Tag, MetadataPair | ||||
|  | ||||
| def main(args): | ||||
|     superblock = None | ||||
|     gstate = b'\0\0\0\0\0\0\0\0\0\0\0\0' | ||||
|     dirs = [] | ||||
|     mdirs = [] | ||||
|     corrupted = [] | ||||
|     cycle = False | ||||
|     with open(args.disk, 'rb') as f: | ||||
|         tail = (args.block1, args.block2) | ||||
|         hard = False | ||||
|         while True: | ||||
|             for m in it.chain((m for d in dirs for m in d), mdirs): | ||||
|                 if set(m.blocks) == set(tail): | ||||
|                     # cycle detected | ||||
|                     cycle = m.blocks | ||||
|             if cycle: | ||||
|                 break | ||||
|  | ||||
|             # load mdir | ||||
|             data = [] | ||||
|             blocks = {} | ||||
|             for block in tail: | ||||
|                 f.seek(block * args.block_size) | ||||
|                 data.append(f.read(args.block_size) | ||||
|                     .ljust(args.block_size, b'\xff')) | ||||
|                 blocks[id(data[-1])] = block | ||||
|  | ||||
|             mdir = MetadataPair(data) | ||||
|             mdir.blocks = tuple(blocks[id(p.data)] for p in mdir.pair) | ||||
|  | ||||
|             # fetch some key metadata as we scan | ||||
|             try: | ||||
|                 mdir.tail = mdir[Tag('tail', 0, 0)] | ||||
|                 if mdir.tail.size != 8 or mdir.tail.data == 8*b'\xff': | ||||
|                     mdir.tail = None | ||||
|             except KeyError: | ||||
|                 mdir.tail = None | ||||
|  | ||||
|             # have superblock? | ||||
|             try: | ||||
|                 nsuperblock = mdir[ | ||||
|                     Tag(0x7ff, 0x3ff, 0), Tag('superblock', 0, 0)] | ||||
|                 superblock = nsuperblock, mdir[Tag('inlinestruct', 0, 0)] | ||||
|             except KeyError: | ||||
|                 pass | ||||
|  | ||||
|             # have gstate? | ||||
|             try: | ||||
|                 ngstate = mdir[Tag('movestate', 0, 0)] | ||||
|                 gstate = bytes((a or 0) ^ (b or 0) | ||||
|                     for a,b in it.zip_longest(gstate, ngstate.data)) | ||||
|             except KeyError: | ||||
|                 pass | ||||
|  | ||||
|             # corrupted? | ||||
|             if not mdir: | ||||
|                 corrupted.append(mdir) | ||||
|  | ||||
|             # add to directories | ||||
|             mdirs.append(mdir) | ||||
|             if mdir.tail is None or not mdir.tail.is_('hardtail'): | ||||
|                 dirs.append(mdirs) | ||||
|                 mdirs = [] | ||||
|  | ||||
|             if mdir.tail is None: | ||||
|                 break | ||||
|  | ||||
|             tail = struct.unpack('<II', mdir.tail.data) | ||||
|             hard = mdir.tail.is_('hardtail') | ||||
|  | ||||
|     # find paths | ||||
|     dirtable = {} | ||||
|     for dir in dirs: | ||||
|         dirtable[frozenset(dir[0].blocks)] = dir | ||||
|  | ||||
|     pending = [("/", dirs[0])] | ||||
|     while pending: | ||||
|         path, dir = pending.pop(0) | ||||
|         for mdir in dir: | ||||
|             for tag in mdir.tags: | ||||
|                 if tag.is_('dir'): | ||||
|                     try: | ||||
|                         npath = tag.data.decode('utf8') | ||||
|                         dirstruct = mdir[Tag('dirstruct', tag.id, 0)] | ||||
|                         nblocks = struct.unpack('<II', dirstruct.data) | ||||
|                         nmdir = dirtable[frozenset(nblocks)] | ||||
|                         pending.append(((path + '/' + npath), nmdir)) | ||||
|                     except KeyError: | ||||
|                         pass | ||||
|  | ||||
|         dir[0].path = path.replace('//', '/') | ||||
|  | ||||
|     # print littlefs + version info | ||||
|     version = ('?', '?') | ||||
|     if superblock: | ||||
|         version = tuple(reversed( | ||||
|             struct.unpack('<HH', superblock[1].data[0:4].ljust(4, b'\xff')))) | ||||
|     print("%-47s%s" % ("littlefs v%s.%s" % version, | ||||
|         "data (truncated, if it fits)" | ||||
|         if not any([args.no_truncate, args.tags, args.log, args.all]) else "")) | ||||
|  | ||||
|     # print gstate | ||||
|     print("gstate 0x%s" % ''.join('%02x' % c for c in gstate)) | ||||
|     tag = Tag(struct.unpack('<I', gstate[0:4].ljust(4, b'\xff'))[0]) | ||||
|     blocks = struct.unpack('<II', gstate[4:4+8].ljust(8, b'\xff')) | ||||
|     if tag.size or not tag.isvalid: | ||||
|         print("  orphans >=%d" % max(tag.size, 1)) | ||||
|     if tag.type: | ||||
|         print("  move dir {%#x, %#x} id %d" % ( | ||||
|             blocks[0], blocks[1], tag.id)) | ||||
|  | ||||
|     # print mdir info | ||||
|     for i, dir in enumerate(dirs): | ||||
|         print("dir %s" % (json.dumps(dir[0].path) | ||||
|             if hasattr(dir[0], 'path') else '(orphan)')) | ||||
|  | ||||
|         for j, mdir in enumerate(dir): | ||||
|             print("mdir {%#x, %#x} rev %d (was %d)%s%s" % ( | ||||
|                 mdir.blocks[0], mdir.blocks[1], mdir.rev, mdir.pair[1].rev, | ||||
|                 ' (corrupted!)' if not mdir else '', | ||||
|                 ' -> {%#x, %#x}' % struct.unpack('<II', mdir.tail.data) | ||||
|                 if mdir.tail else '')) | ||||
|  | ||||
|             f = io.StringIO() | ||||
|             if args.log: | ||||
|                 mdir.dump_log(f, truncate=not args.no_truncate) | ||||
|             elif args.all: | ||||
|                 mdir.dump_all(f, truncate=not args.no_truncate) | ||||
|             else: | ||||
|                 mdir.dump_tags(f, truncate=not args.no_truncate) | ||||
|  | ||||
|             lines = list(filter(None, f.getvalue().split('\n'))) | ||||
|             for k, line in enumerate(lines): | ||||
|                 print("%s %s" % ( | ||||
|                     ' ' if j == len(dir)-1 else | ||||
|                     'v' if k == len(lines)-1 else | ||||
|                     '|', | ||||
|                     line)) | ||||
|  | ||||
|     errcode = 0 | ||||
|     for mdir in corrupted: | ||||
|         errcode = errcode or 1 | ||||
|         print("*** corrupted mdir {%#x, %#x}! ***" % ( | ||||
|             mdir.blocks[0], mdir.blocks[1])) | ||||
|  | ||||
|     if cycle: | ||||
|         errcode = errcode or 2 | ||||
|         print("*** cycle detected {%#x, %#x}! ***" % ( | ||||
|             cycle[0], cycle[1])) | ||||
|  | ||||
|     return errcode | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     import sys | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Dump semantic info about the metadata tree in littlefs") | ||||
|     parser.add_argument('disk', | ||||
|         help="File representing the block device.") | ||||
|     parser.add_argument('block_size', type=lambda x: int(x, 0), | ||||
|         help="Size of a block in bytes.") | ||||
|     parser.add_argument('block1', nargs='?', default=0, | ||||
|         type=lambda x: int(x, 0), | ||||
|         help="Optional first block address for finding the superblock.") | ||||
|     parser.add_argument('block2', nargs='?', default=1, | ||||
|         type=lambda x: int(x, 0), | ||||
|         help="Optional second block address for finding the superblock.") | ||||
|     parser.add_argument('-l', '--log', action='store_true', | ||||
|         help="Show tags in log.") | ||||
|     parser.add_argument('-a', '--all', action='store_true', | ||||
|         help="Show all tags in log, including tags in corrupted commits.") | ||||
|     parser.add_argument('-T', '--no-truncate', action='store_true', | ||||
|         help="Show the full contents of files/attrs/tags.") | ||||
|     sys.exit(main(parser.parse_args())) | ||||
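
The gstate printed above is simply the xor of every movestate tag found while
walking the tree; a non-zero result records pending orphans and/or an
in-flight move (a tag plus the metadata pair it points at). A minimal sketch
of that decoding, matching the unpacking done in main():

    import struct
    from readmdir import Tag

    def decode_gstate(gstate):
        # gstate is the 12-byte xor accumulated while scanning mdirs
        tag = Tag(struct.unpack('<I', gstate[0:4])[0])
        pair = struct.unpack('<II', gstate[4:12])
        return tag, pair    # tag.size/tag.id and pair describe the move state
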
| @@ -1,28 +0,0 @@ | ||||
| #!/usr/bin/env python2 | ||||
|  | ||||
| import struct | ||||
| import sys | ||||
| import time | ||||
| import os | ||||
| import re | ||||
|  | ||||
| def main(): | ||||
|     with open('blocks/.config') as file: | ||||
|         read_size, prog_size, block_size, block_count = ( | ||||
|             struct.unpack('<LLLL', file.read())) | ||||
|  | ||||
|     real_size = sum( | ||||
|         os.path.getsize(os.path.join('blocks', f)) | ||||
|         for f in os.listdir('blocks') if re.match('\d+', f)) | ||||
|  | ||||
|     with open('blocks/.stats') as file: | ||||
|         read_count, prog_count, erase_count = ( | ||||
|             struct.unpack('<QQQ', file.read())) | ||||
|  | ||||
|     runtime = time.time() - os.stat('blocks').st_ctime | ||||
|  | ||||
|     print 'results: %dB %dB %dB %.3fs' % ( | ||||
|         read_count, prog_count, erase_count, runtime) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main(*sys.argv[1:]) | ||||
| @@ -1,96 +0,0 @@ | ||||
| /// AUTOGENERATED TEST /// | ||||
| #include "lfs2.h" | ||||
| #include "emubd/lfs2_emubd.h" | ||||
| #include <stdio.h> | ||||
| #include <string.h> | ||||
| #include <stdlib.h> | ||||
|  | ||||
|  | ||||
| // test stuff | ||||
| static void test_assert(const char *file, unsigned line, | ||||
|         const char *s, uintmax_t v, uintmax_t e) {{ | ||||
|     if (v != e) {{ | ||||
|         fprintf(stderr, "\033[97m%s:%u: \033[91m" | ||||
|                 "assert failed with %jd, expected %jd\033[0m\n" | ||||
|                 "    %s\n\n", file, line, v, e, s); | ||||
|         exit(-2); | ||||
|     }} | ||||
| }} | ||||
|  | ||||
| #define test_assert(v, e) \ | ||||
|         test_assert(__FILE__, __LINE__, #v " => " #e, v, e) | ||||
|  | ||||
| // implicit variable for asserts | ||||
| uintmax_t test; | ||||
|  | ||||
| // utility functions for traversals | ||||
| static int __attribute__((used)) test_count(void *p, lfs2_block_t b) {{ | ||||
|     (void)b; | ||||
|     unsigned *u = (unsigned*)p; | ||||
|     *u += 1; | ||||
|     return 0; | ||||
| }} | ||||
|  | ||||
| // lfs2 declarations | ||||
| lfs2_t lfs2; | ||||
| lfs2_emubd_t bd; | ||||
| // other declarations for convenience | ||||
| lfs2_file_t file; | ||||
| lfs2_dir_t dir; | ||||
| struct lfs2_info info; | ||||
| uint8_t buffer[1024]; | ||||
| char path[1024]; | ||||
|  | ||||
| // test configuration options | ||||
| #ifndef LFS2_READ_SIZE | ||||
| #define LFS2_READ_SIZE 16 | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_PROG_SIZE | ||||
| #define LFS2_PROG_SIZE LFS2_READ_SIZE | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_BLOCK_SIZE | ||||
| #define LFS2_BLOCK_SIZE 512 | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_BLOCK_COUNT | ||||
| #define LFS2_BLOCK_COUNT 1024 | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_BLOCK_CYCLES | ||||
| #define LFS2_BLOCK_CYCLES 1024 | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_CACHE_SIZE | ||||
| #define LFS2_CACHE_SIZE (64 % LFS2_PROG_SIZE == 0 ? 64 : LFS2_PROG_SIZE) | ||||
| #endif | ||||
|  | ||||
| #ifndef LFS2_LOOKAHEAD_SIZE | ||||
| #define LFS2_LOOKAHEAD_SIZE 16 | ||||
| #endif | ||||
|  | ||||
| const struct lfs2_config cfg = {{ | ||||
|     .context = &bd, | ||||
|     .read  = &lfs2_emubd_read, | ||||
|     .prog  = &lfs2_emubd_prog, | ||||
|     .erase = &lfs2_emubd_erase, | ||||
|     .sync  = &lfs2_emubd_sync, | ||||
|  | ||||
|     .read_size      = LFS2_READ_SIZE, | ||||
|     .prog_size      = LFS2_PROG_SIZE, | ||||
|     .block_size     = LFS2_BLOCK_SIZE, | ||||
|     .block_count    = LFS2_BLOCK_COUNT, | ||||
|     .block_cycles   = LFS2_BLOCK_CYCLES, | ||||
|     .cache_size     = LFS2_CACHE_SIZE, | ||||
|     .lookahead_size = LFS2_LOOKAHEAD_SIZE, | ||||
| }}; | ||||
|  | ||||
|  | ||||
| // Entry point | ||||
| int main(void) {{ | ||||
|     lfs2_emubd_create(&cfg, "blocks"); | ||||
|  | ||||
| {tests} | ||||
|     lfs2_emubd_destroy(&cfg); | ||||
| }} | ||||
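
For context on the template removed here: the old scripts/test.py (its
removed lines are visible in the next diff) expanded each "test => expected;"
line into a test_assert() call before pasting the result into the {tests}
slot above. A rough reconstruction of that rewrite, reusing the same regular
expression:

    import re
    line = '    lfs2_mount(&lfs2, &cfg) => 0;'
    m = re.match('((?: *\n)*)( *)(.*)=>(.*);', line, re.DOTALL | re.MULTILINE)
    preface, tab, test, expect = m.groups()
    print(tab + 'test_assert({test}, {expect});'.format(
        test=test.strip(), expect=expect.strip()))
    # prints:     test_assert(lfs2_mount(&lfs2, &cfg), 0);
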
							
								
								
									
scripts/test.py (821 lines changed)
							| @@ -1,81 +1,778 @@ | ||||
| #!/usr/bin/env python2 | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # This script manages littlefs tests, which are configured with | ||||
| # .toml files stored in the tests directory. | ||||
| # | ||||
|  | ||||
| import toml | ||||
| import glob | ||||
| import re | ||||
| import sys | ||||
| import subprocess | ||||
| import os | ||||
| import io | ||||
| import itertools as it | ||||
| import collections.abc as abc | ||||
| import subprocess as sp | ||||
| import base64 | ||||
| import sys | ||||
| import copy | ||||
| import shlex | ||||
| import pty | ||||
| import errno | ||||
| import signal | ||||
|  | ||||
| TESTDIR = 'tests' | ||||
| RULES = """ | ||||
| define FLATTEN | ||||
| tests/%$(subst /,.,$(target)): $(target) | ||||
|     ./scripts/explode_asserts.py $$< -o $$@ | ||||
| endef | ||||
| $(foreach target,$(SRC),$(eval $(FLATTEN))) | ||||
|  | ||||
| def generate(test): | ||||
|     with open("scripts/template.fmt") as file: | ||||
|         template = file.read() | ||||
| -include tests/*.d | ||||
|  | ||||
|     haslines = 'TEST_LINE' in os.environ and 'TEST_FILE' in os.environ | ||||
| .SECONDARY: | ||||
| %.test: %.test.o $(foreach f,$(subst /,.,$(SRC:.c=.o)),%.$f) | ||||
|     $(CC) $(CFLAGS) $^ $(LFLAGS) -o $@ | ||||
| """ | ||||
| GLOBALS = """ | ||||
| //////////////// AUTOGENERATED TEST //////////////// | ||||
| #include "lfs2.h" | ||||
| #include "bd/lfs2_testbd.h" | ||||
| #include <stdio.h> | ||||
| extern const char *lfs2_testbd_path; | ||||
| extern uint32_t lfs2_testbd_cycles; | ||||
| """ | ||||
| DEFINES = { | ||||
|     'LFS2_READ_SIZE': 16, | ||||
|     'LFS2_PROG_SIZE': 'LFS2_READ_SIZE', | ||||
|     'LFS2_BLOCK_SIZE': 512, | ||||
|     'LFS2_BLOCK_COUNT': 1024, | ||||
|     'LFS2_BLOCK_CYCLES': -1, | ||||
|     'LFS2_CACHE_SIZE': '(64 % LFS2_PROG_SIZE == 0 ? 64 : LFS2_PROG_SIZE)', | ||||
|     'LFS2_LOOKAHEAD_SIZE': 16, | ||||
|     'LFS2_ERASE_VALUE': 0xff, | ||||
|     'LFS2_ERASE_CYCLES': 0, | ||||
|     'LFS2_BADBLOCK_BEHAVIOR': 'LFS2_TESTBD_BADBLOCK_PROGERROR', | ||||
| } | ||||
| PROLOGUE = """ | ||||
|     // prologue | ||||
|     __attribute__((unused)) lfs2_t lfs2; | ||||
|     __attribute__((unused)) lfs2_testbd_t bd; | ||||
|     __attribute__((unused)) lfs2_file_t file; | ||||
|     __attribute__((unused)) lfs2_dir_t dir; | ||||
|     __attribute__((unused)) struct lfs2_info info; | ||||
|     __attribute__((unused)) char path[1024]; | ||||
|     __attribute__((unused)) uint8_t buffer[1024]; | ||||
|     __attribute__((unused)) lfs2_size_t size; | ||||
|     __attribute__((unused)) int err; | ||||
|      | ||||
|     __attribute__((unused)) const struct lfs2_config cfg = { | ||||
|         .context        = &bd, | ||||
|         .read           = lfs2_testbd_read, | ||||
|         .prog           = lfs2_testbd_prog, | ||||
|         .erase          = lfs2_testbd_erase, | ||||
|         .sync           = lfs2_testbd_sync, | ||||
|         .read_size      = LFS2_READ_SIZE, | ||||
|         .prog_size      = LFS2_PROG_SIZE, | ||||
|         .block_size     = LFS2_BLOCK_SIZE, | ||||
|         .block_count    = LFS2_BLOCK_COUNT, | ||||
|         .block_cycles   = LFS2_BLOCK_CYCLES, | ||||
|         .cache_size     = LFS2_CACHE_SIZE, | ||||
|         .lookahead_size = LFS2_LOOKAHEAD_SIZE, | ||||
|     }; | ||||
|  | ||||
|     lines = [] | ||||
|     for offset, line in enumerate( | ||||
|             re.split('(?<=(?:.;| [{}]))\n', test.read())): | ||||
|         match = re.match('((?: *\n)*)( *)(.*)=>(.*);', | ||||
|                 line, re.DOTALL | re.MULTILINE) | ||||
|         if match: | ||||
|             preface, tab, test, expect = match.groups() | ||||
|             lines.extend(['']*preface.count('\n')) | ||||
|             lines.append(tab+'test_assert({test}, {expect});'.format( | ||||
|                 test=test.strip(), expect=expect.strip())) | ||||
|     __attribute__((unused)) const struct lfs2_testbd_config bdcfg = { | ||||
|         .erase_value        = LFS2_ERASE_VALUE, | ||||
|         .erase_cycles       = LFS2_ERASE_CYCLES, | ||||
|         .badblock_behavior  = LFS2_BADBLOCK_BEHAVIOR, | ||||
|         .power_cycles       = lfs2_testbd_cycles, | ||||
|     }; | ||||
|  | ||||
|     lfs2_testbd_createcfg(&cfg, lfs2_testbd_path, &bdcfg) => 0; | ||||
| """ | ||||
| EPILOGUE = """ | ||||
|     // epilogue | ||||
|     lfs2_testbd_destroy(&cfg) => 0; | ||||
| """ | ||||
| PASS = '\033[32m✓\033[0m' | ||||
| FAIL = '\033[31m✗\033[0m' | ||||
|  | ||||
| class TestFailure(Exception): | ||||
|     def __init__(self, case, returncode=None, stdout=None, assert_=None): | ||||
|         self.case = case | ||||
|         self.returncode = returncode | ||||
|         self.stdout = stdout | ||||
|         self.assert_ = assert_ | ||||
|  | ||||
| class TestCase: | ||||
|     def __init__(self, config, filter=filter, | ||||
|             suite=None, caseno=None, lineno=None, **_): | ||||
|         self.config = config | ||||
|         self.filter = filter | ||||
|         self.suite = suite | ||||
|         self.caseno = caseno | ||||
|         self.lineno = lineno | ||||
|  | ||||
|         self.code = config['code'] | ||||
|         self.code_lineno = config['code_lineno'] | ||||
|         self.defines = config.get('define', {}) | ||||
|         self.if_ = config.get('if', None) | ||||
|         self.in_ = config.get('in', None) | ||||
|  | ||||
|     def __str__(self): | ||||
|         if hasattr(self, 'permno'): | ||||
|             if any(k not in self.case.defines for k in self.defines): | ||||
|                 return '%s#%d#%d (%s)' % ( | ||||
|                     self.suite.name, self.caseno, self.permno, ', '.join( | ||||
|                         '%s=%s' % (k, v) for k, v in self.defines.items() | ||||
|                         if k not in self.case.defines)) | ||||
|             else: | ||||
|                 return '%s#%d#%d' % ( | ||||
|                     self.suite.name, self.caseno, self.permno) | ||||
|         else: | ||||
|             lines.append(line) | ||||
|             return '%s#%d' % ( | ||||
|                 self.suite.name, self.caseno) | ||||
|  | ||||
|     def permute(self, class_=None, defines={}, permno=None, **_): | ||||
|         ncase = (class_ or type(self))(self.config) | ||||
|         for k, v in self.__dict__.items(): | ||||
|             setattr(ncase, k, v) | ||||
|         ncase.case = self | ||||
|         ncase.perms = [ncase] | ||||
|         ncase.permno = permno | ||||
|         ncase.defines = defines | ||||
|         return ncase | ||||
|  | ||||
|     def build(self, f, **_): | ||||
|         # prologue | ||||
|         for k, v in sorted(self.defines.items()): | ||||
|             if k not in self.suite.defines: | ||||
|                 f.write('#define %s %s\n' % (k, v)) | ||||
|  | ||||
|         f.write('void test_case%d(%s) {' % (self.caseno, ','.join( | ||||
|             '\n'+8*' '+'__attribute__((unused)) intmax_t %s' % k | ||||
|             for k in sorted(self.perms[0].defines) | ||||
|             if k not in self.defines))) | ||||
|  | ||||
|         f.write(PROLOGUE) | ||||
|         f.write('\n') | ||||
|         f.write(4*' '+'// test case %d\n' % self.caseno) | ||||
|         f.write(4*' '+'#line %d "%s"\n' % (self.code_lineno, self.suite.path)) | ||||
|  | ||||
|         # test case goes here | ||||
|         f.write(self.code) | ||||
|  | ||||
|         # epilogue | ||||
|         f.write(EPILOGUE) | ||||
|         f.write('}\n') | ||||
|  | ||||
|         for k, v in sorted(self.defines.items()): | ||||
|             if k not in self.suite.defines: | ||||
|                 f.write('#undef %s\n' % k) | ||||
|  | ||||
|     def shouldtest(self, **args): | ||||
|         if (self.filter is not None and | ||||
|                 len(self.filter) >= 1 and | ||||
|                 self.filter[0] != self.caseno): | ||||
|             return False | ||||
|         elif (self.filter is not None and | ||||
|                 len(self.filter) >= 2 and | ||||
|                 self.filter[1] != self.permno): | ||||
|             return False | ||||
|         elif args.get('no_internal', False) and self.in_ is not None: | ||||
|             return False | ||||
|         elif self.if_ is not None: | ||||
|             if_ = self.if_ | ||||
|             while True: | ||||
|                 for k, v in sorted(self.defines.items(), | ||||
|                         key=lambda x: len(x[0]), reverse=True): | ||||
|                     if k in if_: | ||||
|                         if_ = if_.replace(k, '(%s)' % v) | ||||
|                         break | ||||
|                 else: | ||||
|                     break | ||||
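|             # rewrite the C-style operators as Python so the condition can | ||||
|             # be eval'd, e.g. "N > 2 && N < 8" -> "(4) > 2 and (4) < 8" | ||||
|             # (values illustrative) | ||||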
|             if_ = ( | ||||
|                 re.sub('(\&\&|\?)', ' and ', | ||||
|                 re.sub('(\|\||:)', ' or ', | ||||
|                 re.sub('!(?!=)', ' not ', if_)))) | ||||
|             return eval(if_) | ||||
|         else: | ||||
|             return True | ||||
|  | ||||
|     def test(self, exec=[], persist=False, cycles=None, | ||||
|             gdb=False, failure=None, disk=None, **args): | ||||
|         # build command | ||||
|         cmd = exec + ['./%s.test' % self.suite.path, | ||||
|             repr(self.caseno), repr(self.permno)] | ||||
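|         # e.g. ./tests/test_dirs.toml.test 3 7, with a disk path and a | ||||
|         # power-cycle count appended below when enabled (path illustrative) | ||||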
|  | ||||
|         # persist disk or keep in RAM for speed? | ||||
|         if persist: | ||||
|             if not disk: | ||||
|                 disk = self.suite.path + '.disk' | ||||
|             if persist != 'noerase': | ||||
|                 try: | ||||
|                     with open(disk, 'w') as f: | ||||
|                         f.truncate(0) | ||||
|                     if args.get('verbose', False): | ||||
|                         print('truncate --size=0', disk) | ||||
|                 except FileNotFoundError: | ||||
|                     pass | ||||
|  | ||||
|             cmd.append(disk) | ||||
|  | ||||
|         # simulate power-loss after n cycles? | ||||
|         if cycles: | ||||
|             cmd.append(str(cycles)) | ||||
|  | ||||
|         # failed? drop into debugger? | ||||
|         if gdb and failure: | ||||
|             ncmd = ['gdb'] | ||||
|             if gdb == 'assert': | ||||
|                 ncmd.extend(['-ex', 'r']) | ||||
|                 if failure.assert_: | ||||
|                     ncmd.extend(['-ex', 'up 2']) | ||||
|             elif gdb == 'main': | ||||
|                 ncmd.extend([ | ||||
|                     '-ex', 'b %s:%d' % (self.suite.path, self.code_lineno), | ||||
|                     '-ex', 'r']) | ||||
|             ncmd.extend(['--args'] + cmd) | ||||
|  | ||||
|             if args.get('verbose', False): | ||||
|                 print(' '.join(shlex.quote(c) for c in ncmd)) | ||||
|             signal.signal(signal.SIGINT, signal.SIG_IGN) | ||||
|             sys.exit(sp.call(ncmd)) | ||||
|  | ||||
|         # run test case! | ||||
|         mpty, spty = pty.openpty() | ||||
|         if args.get('verbose', False): | ||||
|             print(' '.join(shlex.quote(c) for c in cmd)) | ||||
|         proc = sp.Popen(cmd, stdout=spty, stderr=spty) | ||||
|         os.close(spty) | ||||
|         mpty = os.fdopen(mpty, 'r', 1) | ||||
|         stdout = [] | ||||
|         assert_ = None | ||||
|         try: | ||||
|             while True: | ||||
|                 try: | ||||
|                     line = mpty.readline() | ||||
|                 except OSError as e: | ||||
|                     if e.errno == errno.EIO: | ||||
|                         break | ||||
|                     raise | ||||
|                 stdout.append(line) | ||||
|                 if args.get('verbose', False): | ||||
|                     sys.stdout.write(line) | ||||
|                 # intercept asserts | ||||
|                 m = re.match( | ||||
|                     '^{0}([^:]+):(\d+):(?:\d+:)?{0}{1}:{0}(.*)$' | ||||
|                     .format('(?:\033\[[\d;]*.| )*', 'assert'), | ||||
|                     line) | ||||
|                 if m and assert_ is None: | ||||
|                     try: | ||||
|                         with open(m.group(1)) as f: | ||||
|                             lineno = int(m.group(2)) | ||||
|                             line = (next(it.islice(f, lineno-1, None)) | ||||
|                                 .strip('\n')) | ||||
|                         assert_ = { | ||||
|                             'path': m.group(1), | ||||
|                             'line': line, | ||||
|                             'lineno': lineno, | ||||
|                             'message': m.group(3)} | ||||
|                     except: | ||||
|                         pass | ||||
|         except KeyboardInterrupt: | ||||
|             raise TestFailure(self, 1, stdout, None) | ||||
|         proc.wait() | ||||
|  | ||||
|         # did we pass? | ||||
|         if proc.returncode != 0: | ||||
|             raise TestFailure(self, proc.returncode, stdout, assert_) | ||||
|         else: | ||||
|             return PASS | ||||
|  | ||||
| class ValgrindTestCase(TestCase): | ||||
|     def __init__(self, config, **args): | ||||
|         self.leaky = config.get('leaky', False) | ||||
|         super().__init__(config, **args) | ||||
|  | ||||
|     def shouldtest(self, **args): | ||||
|         return not self.leaky and super().shouldtest(**args) | ||||
|  | ||||
|     def test(self, exec=[], **args): | ||||
|         verbose = args.get('verbose', False) | ||||
|         uninit = (self.defines.get('LFS2_ERASE_VALUE', None) == -1) | ||||
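|         # an erase value of -1 leaves erased blocks uninitialized in the | ||||
|         # simulated block device, so skip valgrind's undef-value checks (and | ||||
|         # the origin tracking that only matters when those checks are on) | ||||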
|         exec = [ | ||||
|             'valgrind', | ||||
|             '--leak-check=full', | ||||
|             ] + (['--undef-value-errors=no'] if uninit else []) + [ | ||||
|             ] + (['--track-origins=yes'] if not uninit else []) + [ | ||||
|             '--error-exitcode=4', | ||||
|             '--error-limit=no', | ||||
|             ] + (['--num-callers=1'] if not verbose else []) + [ | ||||
|             '-q'] + exec | ||||
|         return super().test(exec=exec, **args) | ||||
|  | ||||
| class ReentrantTestCase(TestCase): | ||||
|     def __init__(self, config, **args): | ||||
|         self.reentrant = config.get('reentrant', False) | ||||
|         super().__init__(config, **args) | ||||
|  | ||||
|     def shouldtest(self, **args): | ||||
|         return self.reentrant and super().shouldtest(**args) | ||||
|  | ||||
|     def test(self, persist=False, gdb=False, failure=None, **args): | ||||
|         for cycles in it.count(1): | ||||
|             # clear disk first? | ||||
|             if cycles == 1 and persist != 'noerase': | ||||
|                 persist = 'erase' | ||||
|             else: | ||||
|                 persist = 'noerase' | ||||
|  | ||||
|             # exact cycle we should drop into debugger? | ||||
|             if gdb and failure and failure.cycleno == cycles: | ||||
|                 return super().test(gdb=gdb, persist=persist, cycles=cycles, | ||||
|                     failure=failure, **args) | ||||
|  | ||||
|             # run the test, but kill the program after prog/erase has been | ||||
|             # hit n times; the run exits with a special return code if it | ||||
|             # hasn't finished yet, since that isn't a test failure | ||||
|             try: | ||||
|                 return super().test(persist=persist, cycles=cycles, **args) | ||||
|             except TestFailure as nfailure: | ||||
|                 if nfailure.returncode == 33: | ||||
|                     continue | ||||
|                 else: | ||||
|                     nfailure.cycleno = cycles | ||||
|                     raise | ||||
|  | ||||
| class TestSuite: | ||||
|     def __init__(self, path, classes=[TestCase], defines={}, | ||||
|             filter=None, **args): | ||||
|         self.name = os.path.basename(path) | ||||
|         if self.name.endswith('.toml'): | ||||
|             self.name = self.name[:-len('.toml')] | ||||
|         self.path = path | ||||
|         self.classes = classes | ||||
|         self.defines = defines.copy() | ||||
|         self.filter = filter | ||||
|  | ||||
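|         # a suite is a .toml file of roughly this shape (illustrative): | ||||
|         # | ||||
|         #   [[case]] | ||||
|         #   define.N = [1, 2, 4] | ||||
|         #   if = 'N < 4' | ||||
|         #   code = ''' | ||||
|         #       lfs2_format(&lfs2, &cfg) => 0; | ||||
|         #   ''' | ||||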
|         with open(path) as f: | ||||
|             # load tests | ||||
|             config = toml.load(f) | ||||
|  | ||||
|             # find line numbers | ||||
|             f.seek(0) | ||||
|             linenos = [] | ||||
|             code_linenos = [] | ||||
|             for i, line in enumerate(f): | ||||
|                 if re.match(r'\[\[\s*case\s*\]\]', line): | ||||
|                     linenos.append(i+1) | ||||
|                 if re.match(r'code\s*=\s*(\'\'\'|""")', line): | ||||
|                     code_linenos.append(i+2) | ||||
|  | ||||
|             code_linenos.reverse() | ||||
|  | ||||
|         # grab global config | ||||
|         for k, v in config.get('define', {}).items(): | ||||
|             if k not in self.defines: | ||||
|                 self.defines[k] = v | ||||
|         self.code = config.get('code', None) | ||||
|         if self.code is not None: | ||||
|             self.code_lineno = code_linenos.pop() | ||||
|  | ||||
|         # create initial test cases | ||||
|         self.cases = [] | ||||
|         for i, (case, lineno) in enumerate(zip(config['case'], linenos)): | ||||
|             # code lineno? | ||||
|             if 'code' in case: | ||||
|                 case['code_lineno'] = code_linenos.pop() | ||||
|             # merge conditions if necessary | ||||
|             if 'if' in config and 'if' in case: | ||||
|                 case['if'] = '(%s) && (%s)' % (config['if'], case['if']) | ||||
|             elif 'if' in config: | ||||
|                 case['if'] = config['if'] | ||||
|             # initialize test case | ||||
|             self.cases.append(TestCase(case, filter=filter, | ||||
|                 suite=self, caseno=i+1, lineno=lineno, **args)) | ||||
|  | ||||
|     def __str__(self): | ||||
|         return self.name | ||||
|  | ||||
|     def __lt__(self, other): | ||||
|         return self.name < other.name | ||||
|  | ||||
|     def permute(self, **args): | ||||
|         for case in self.cases: | ||||
|             # let's find all parameterized definitions, in one of [args.D, | ||||
|             # suite.defines, case.defines, DEFINES]. Note that each of these | ||||
|             # can be either a dict of defines, or a list of dicts, expressing | ||||
|             # an initial set of permutations. | ||||
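|             # e.g. define.N = [1, 2, 4] expands into three permutations, one | ||||
|             # per value of N, multiplied by each TestCase class selected on | ||||
|             # the command line (values illustrative) | ||||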
|             pending = [{}] | ||||
|             for inits in [self.defines, case.defines, DEFINES]: | ||||
|                 if not isinstance(inits, list): | ||||
|                     inits = [inits] | ||||
|  | ||||
|                 npending = [] | ||||
|                 for init, pinit in it.product(inits, pending): | ||||
|                     ninit = pinit.copy() | ||||
|                     for k, v in init.items(): | ||||
|                         if k not in ninit: | ||||
|                             try: | ||||
|                                 ninit[k] = eval(v) | ||||
|                             except: | ||||
|                                 ninit[k] = v | ||||
|                     npending.append(ninit) | ||||
|  | ||||
|                 pending = npending | ||||
|  | ||||
|             # expand permutations | ||||
|             pending = list(reversed(pending)) | ||||
|             expanded = [] | ||||
|             while pending: | ||||
|                 perm = pending.pop() | ||||
|                 for k, v in sorted(perm.items()): | ||||
|                     if not isinstance(v, str) and isinstance(v, abc.Iterable): | ||||
|                         for nv in reversed(v): | ||||
|                             nperm = perm.copy() | ||||
|                             nperm[k] = nv | ||||
|                             pending.append(nperm) | ||||
|                         break | ||||
|                 else: | ||||
|                     expanded.append(perm) | ||||
|  | ||||
|             # generate permutations | ||||
|             case.perms = [] | ||||
|             for i, (class_, defines) in enumerate( | ||||
|                     it.product(self.classes, expanded)): | ||||
|                 case.perms.append(case.permute( | ||||
|                     class_, defines, permno=i+1, **args)) | ||||
|  | ||||
|             # also track non-unique defines | ||||
|             case.defines = {} | ||||
|             for k, v in case.perms[0].defines.items(): | ||||
|                 if all(perm.defines[k] == v for perm in case.perms): | ||||
|                     case.defines[k] = v | ||||
|  | ||||
|         # track all perms and non-unique defines | ||||
|         self.perms = [] | ||||
|         for case in self.cases: | ||||
|             self.perms.extend(case.perms) | ||||
|  | ||||
|         self.defines = {} | ||||
|         for k, v in self.perms[0].defines.items(): | ||||
|             if all(perm.defines.get(k, None) == v for perm in self.perms): | ||||
|                 self.defines[k] = v | ||||
|  | ||||
|         return self.perms | ||||
|  | ||||
|     def build(self, **args): | ||||
|         # build test files | ||||
|         tf = open(self.path + '.test.c.t', 'w') | ||||
|         tf.write(GLOBALS) | ||||
|         if self.code is not None: | ||||
|             tf.write('#line %d "%s"\n' % (self.code_lineno, self.path)) | ||||
|             tf.write(self.code) | ||||
|  | ||||
|         tfs = {None: tf} | ||||
|         for case in self.cases: | ||||
|             if case.in_ not in tfs: | ||||
|                 tfs[case.in_] = open(self.path+'.'+ | ||||
|                     case.in_.replace('/', '.')+'.t', 'w') | ||||
|                 tfs[case.in_].write('#line 1 "%s"\n' % case.in_) | ||||
|                 with open(case.in_) as f: | ||||
|                     for line in f: | ||||
|                         tfs[case.in_].write(line) | ||||
|                 tfs[case.in_].write('\n') | ||||
|                 tfs[case.in_].write(GLOBALS) | ||||
|  | ||||
|             tfs[case.in_].write('\n') | ||||
|             case.build(tfs[case.in_], **args) | ||||
|  | ||||
|         tf.write('\n') | ||||
|         tf.write('const char *lfs2_testbd_path;\n') | ||||
|         tf.write('uint32_t lfs2_testbd_cycles;\n') | ||||
|         tf.write('int main(int argc, char **argv) {\n') | ||||
|         tf.write(4*' '+'int case_         = (argc > 1) ? atoi(argv[1]) : 0;\n') | ||||
|         tf.write(4*' '+'int perm          = (argc > 2) ? atoi(argv[2]) : 0;\n') | ||||
|         tf.write(4*' '+'lfs2_testbd_path   = (argc > 3) ? argv[3] : NULL;\n') | ||||
|         tf.write(4*' '+'lfs2_testbd_cycles = (argc > 4) ? atoi(argv[4]) : 0;\n') | ||||
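|         # the generated main() takes "[case [perm [disk [cycles]]]]" and | ||||
|         # runs each test_case* that matches, or all of them if none is given | ||||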
|         for perm in self.perms: | ||||
|             # test declaration | ||||
|             tf.write(4*' '+'extern void test_case%d(%s);\n' % ( | ||||
|                 perm.caseno, ', '.join( | ||||
|                     'intmax_t %s' % k for k in sorted(perm.defines) | ||||
|                     if k not in perm.case.defines))) | ||||
|             # test call | ||||
|             tf.write(4*' '+ | ||||
|                 'if (argc < 3 || (case_ == %d && perm == %d)) {' | ||||
|                 ' test_case%d(%s); ' | ||||
|                 '}\n' % (perm.caseno, perm.permno, perm.caseno, ', '.join( | ||||
|                     str(v) for k, v in sorted(perm.defines.items()) | ||||
|                     if k not in perm.case.defines))) | ||||
|         tf.write('}\n') | ||||
|  | ||||
|         for tf in tfs.values(): | ||||
|             tf.close() | ||||
|  | ||||
|         # write makefiles | ||||
|         with open(self.path + '.mk', 'w') as mk: | ||||
|             mk.write(RULES.replace(4*' ', '\t')) | ||||
|             mk.write('\n') | ||||
|  | ||||
|             # add truly global defines globally | ||||
|             for k, v in sorted(self.defines.items()): | ||||
|                 mk.write('%s: override CFLAGS += -D%s=%r\n' % ( | ||||
|                     self.path+'.test', k, v)) | ||||
|  | ||||
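|             # one rule per generated source, piped through | ||||
|             # ./scripts/explode_asserts.py to expand the "=>" assertions | ||||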
|             for path in tfs: | ||||
|                 if path is None: | ||||
|                     mk.write('%s: %s | %s\n' % ( | ||||
|                         self.path+'.test.c', | ||||
|                         self.path, | ||||
|                         self.path+'.test.c.t')) | ||||
|                 else: | ||||
|                     mk.write('%s: %s %s | %s\n' % ( | ||||
|                         self.path+'.'+path.replace('/', '.'), | ||||
|                         self.path, path, | ||||
|                         self.path+'.'+path.replace('/', '.')+'.t')) | ||||
|                 mk.write('\t./scripts/explode_asserts.py $| -o $@\n') | ||||
|  | ||||
|         self.makefile = self.path + '.mk' | ||||
|         self.target = self.path + '.test' | ||||
|         return self.makefile, self.target | ||||
|  | ||||
|     def test(self, **args): | ||||
|         # run test suite! | ||||
|         if not args.get('verbose', True): | ||||
|             sys.stdout.write(self.name + ' ') | ||||
|             sys.stdout.flush() | ||||
|         for perm in self.perms: | ||||
|             if not perm.shouldtest(**args): | ||||
|                 continue | ||||
|  | ||||
|             try: | ||||
|                 result = perm.test(**args) | ||||
|             except TestFailure as failure: | ||||
|                 perm.result = failure | ||||
|                 if not args.get('verbose', True): | ||||
|                     sys.stdout.write(FAIL) | ||||
|                     sys.stdout.flush() | ||||
|                 if not args.get('keep_going', False): | ||||
|                     if not args.get('verbose', True): | ||||
|                         sys.stdout.write('\n') | ||||
|                     raise | ||||
|             else: | ||||
|                 perm.result = PASS | ||||
|                 if not args.get('verbose', True): | ||||
|                     sys.stdout.write(PASS) | ||||
|                     sys.stdout.flush() | ||||
|  | ||||
|         if not args.get('verbose', True): | ||||
|             sys.stdout.write('\n') | ||||
|  | ||||
| def main(**args): | ||||
|     # figure out explicit defines | ||||
|     defines = {} | ||||
|     for define in args['D']: | ||||
|         k, v, *_ = define.split('=', 2) + [''] | ||||
|         defines[k] = v | ||||
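|     # e.g. -D LFS2_BLOCK_CYCLES=100 ends up as | ||||
|     # defines['LFS2_BLOCK_CYCLES'] = '100' (name illustrative) | ||||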
|  | ||||
|     # and what class of TestCase to run | ||||
|     classes = [] | ||||
|     if args.get('normal', False): | ||||
|         classes.append(TestCase) | ||||
|     if args.get('reentrant', False): | ||||
|         classes.append(ReentrantTestCase) | ||||
|     if args.get('valgrind', False): | ||||
|         classes.append(ValgrindTestCase) | ||||
|     if not classes: | ||||
|         classes = [TestCase] | ||||
|  | ||||
|     suites = [] | ||||
|     for testpath in args['testpaths']: | ||||
|         # optionally specified test case/perm | ||||
|         testpath, *filter = testpath.split('#') | ||||
|         filter = [int(f) for f in filter] | ||||
|  | ||||
|         # figure out the suite's toml file | ||||
|         if os.path.isdir(testpath): | ||||
|             testpath = testpath + '/test_*.toml' | ||||
|         elif os.path.isfile(testpath): | ||||
|             testpath = testpath | ||||
|         elif testpath.endswith('.toml'): | ||||
|             testpath = TESTDIR + '/' + testpath | ||||
|         else: | ||||
|             testpath = TESTDIR + '/' + testpath + '.toml' | ||||
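|         # e.g. "test_dirs#3" resolves to TESTDIR/test_dirs.toml with the | ||||
|         # filter [3] (name illustrative) | ||||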
|  | ||||
|         # find tests | ||||
|         for path in glob.glob(testpath): | ||||
|             suites.append(TestSuite(path, classes, defines, filter, **args)) | ||||
|  | ||||
|     # sort for reproducibility | ||||
|     suites = sorted(suites) | ||||
|  | ||||
|     # generate permutations | ||||
|     for suite in suites: | ||||
|         suite.permute(**args) | ||||
|  | ||||
|     # build tests in parallel | ||||
|     print('====== building ======') | ||||
|     makefiles = [] | ||||
|     targets = [] | ||||
|     for suite in suites: | ||||
|         makefile, target = suite.build(**args) | ||||
|         makefiles.append(makefile) | ||||
|         targets.append(target) | ||||
|  | ||||
|     cmd = (['make', '-f', 'Makefile'] + | ||||
|         list(it.chain.from_iterable(['-f', m] for m in makefiles)) + | ||||
|         [target for target in targets]) | ||||
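|     # e.g. make -f Makefile -f tests/test_dirs.toml.mk tests/test_dirs.toml.test | ||||
|     # (paths illustrative) | ||||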
|     mpty, spty = pty.openpty() | ||||
|     if args.get('verbose', False): | ||||
|         print(' '.join(shlex.quote(c) for c in cmd)) | ||||
|     proc = sp.Popen(cmd, stdout=spty, stderr=spty) | ||||
|     os.close(spty) | ||||
|     mpty = os.fdopen(mpty, 'r', 1) | ||||
|     stdout = [] | ||||
|     while True: | ||||
|         try: | ||||
|             line = mpty.readline() | ||||
|         except OSError as e: | ||||
|             if e.errno == errno.EIO: | ||||
|                 break | ||||
|             raise | ||||
|         stdout.append(line) | ||||
|         if args.get('verbose', False): | ||||
|             sys.stdout.write(line) | ||||
|         # intercept warnings | ||||
|         m = re.match( | ||||
|             '^{0}([^:]+):(\d+):(?:\d+:)?{0}{1}:{0}(.*)$' | ||||
|             .format('(?:\033\[[\d;]*.| )*', 'warning'), | ||||
|             line) | ||||
|         if m and not args.get('verbose', False): | ||||
|             try: | ||||
|                 with open(m.group(1)) as f: | ||||
|                     lineno = int(m.group(2)) | ||||
|                     line = next(it.islice(f, lineno-1, None)).strip('\n') | ||||
|                 sys.stdout.write( | ||||
|                     "\033[01m{path}:{lineno}:\033[01;35mwarning:\033[m " | ||||
|                     "{message}\n{line}\n\n".format( | ||||
|                         path=m.group(1), line=line, lineno=lineno, | ||||
|                         message=m.group(3))) | ||||
|             except: | ||||
|                 pass | ||||
|     proc.wait() | ||||
|  | ||||
|     if proc.returncode != 0: | ||||
|         if not args.get('verbose', False): | ||||
|             for line in stdout: | ||||
|                 sys.stdout.write(line) | ||||
|         sys.exit(-3) | ||||
|  | ||||
|     print('built %d test suites, %d test cases, %d permutations' % ( | ||||
|         len(suites), | ||||
|         sum(len(suite.cases) for suite in suites), | ||||
|         sum(len(suite.perms) for suite in suites))) | ||||
|  | ||||
|     filtered = 0 | ||||
|     for suite in suites: | ||||
|         for perm in suite.perms: | ||||
|             filtered += perm.shouldtest(**args) | ||||
|     if filtered != sum(len(suite.perms) for suite in suites): | ||||
|         print('filtered down to %d permutations' % filtered) | ||||
|  | ||||
|     # only requested to build? | ||||
|     if args.get('build', False): | ||||
|         return 0 | ||||
|  | ||||
|     print('====== testing ======') | ||||
|     try: | ||||
|         for suite in suites: | ||||
|             suite.test(**args) | ||||
|     except TestFailure: | ||||
|         pass | ||||
|  | ||||
|     print('====== results ======') | ||||
|     passed = 0 | ||||
|     failed = 0 | ||||
|     for suite in suites: | ||||
|         for perm in suite.perms: | ||||
|             if not hasattr(perm, 'result'): | ||||
|                 continue | ||||
|  | ||||
|             if perm.result == PASS: | ||||
|                 passed += 1 | ||||
|             else: | ||||
|                 sys.stdout.write( | ||||
|                     "\033[01m{path}:{lineno}:\033[01;31mfailure:\033[m " | ||||
|                     "{perm} failed with {returncode}\n".format( | ||||
|                         perm=perm, path=perm.suite.path, lineno=perm.lineno, | ||||
|                         returncode=perm.result.returncode or 0)) | ||||
|                 if perm.result.stdout: | ||||
|                     if perm.result.assert_: | ||||
|                         stdout = perm.result.stdout[:-1] | ||||
|                     else: | ||||
|                         stdout = perm.result.stdout | ||||
|                     for line in stdout[-5:]: | ||||
|                         sys.stdout.write(line) | ||||
|                 if perm.result.assert_: | ||||
|                     sys.stdout.write( | ||||
|                         "\033[01m{path}:{lineno}:\033[01;31massert:\033[m " | ||||
|                         "{message}\n{line}\n".format( | ||||
|                             **perm.result.assert_)) | ||||
|                 sys.stdout.write('\n') | ||||
|                 failed += 1 | ||||
|  | ||||
|     if args.get('gdb', False): | ||||
|         failure = None | ||||
|         for suite in suites: | ||||
|             for perm in suite.perms: | ||||
|                 if getattr(perm, 'result', PASS) != PASS: | ||||
|                     failure = perm.result | ||||
|         if failure is not None: | ||||
|             print('======= gdb ======') | ||||
|             # drop into gdb | ||||
|             failure.case.test(failure=failure, **args) | ||||
|             sys.exit(0) | ||||
|  | ||||
|     print('tests passed: %d' % passed) | ||||
|     print('tests failed: %d' % failed) | ||||
|     return 1 if failed > 0 else 0 | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     import argparse | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Run parameterized tests in various configurations.") | ||||
|     parser.add_argument('testpaths', nargs='*', default=[TESTDIR], | ||||
|         help="Description of test(s) to run. By default, this is all tests \ | ||||
|             found in the \"{0}\" directory. Here, you can specify a different \ | ||||
|             directory of tests, a specific file, a suite by name, and even a \ | ||||
|             specific test case or permutation by appending '#'. For example \ | ||||
|             \"test_dirs#1\" or \"{0}/test_dirs.toml#1#3\".".format(TESTDIR)) | ||||
|     parser.add_argument('-D', action='append', default=[], | ||||
|         help="Overriding parameter definitions.") | ||||
|     parser.add_argument('-v', '--verbose', action='store_true', | ||||
|         help="Output everything that is happening.") | ||||
|     parser.add_argument('-k', '--keep-going', action='store_true', | ||||
|         help="Run all tests instead of stopping on first error. Useful for CI.") | ||||
|     parser.add_argument('-p', '--persist', choices=['erase', 'noerase'], | ||||
|         nargs='?', const='erase', | ||||
|         help="Store disk image in a file.") | ||||
|     parser.add_argument('-b', '--build', action='store_true', | ||||
|         help="Only build the tests, do not execute.") | ||||
|     parser.add_argument('-g', '--gdb', choices=['init', 'main', 'assert'], | ||||
|         nargs='?', const='assert', | ||||
|         help="Drop into gdb on test failure.") | ||||
|     parser.add_argument('--no-internal', action='store_true', | ||||
|         help="Don't run tests that require internal knowledge.") | ||||
|     parser.add_argument('-n', '--normal', action='store_true', | ||||
|         help="Run tests normally.") | ||||
|     parser.add_argument('-r', '--reentrant', action='store_true', | ||||
|         help="Run reentrant tests with simulated power-loss.") | ||||
|     parser.add_argument('-V', '--valgrind', action='store_true', | ||||
|         help="Run non-leaky tests under valgrind to check for memory leaks.") | ||||
|     parser.add_argument('-e', '--exec', default=[], type=lambda e: e.split(' '), | ||||
|         help="Run tests with another executable prefixed on the command line.") | ||||
|     parser.add_argument('-d', '--disk', | ||||
|         help="Specify a file to use for persistent/reentrant tests.") | ||||
|     sys.exit(main(**vars(parser.parse_args()))) | ||||
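| # example invocations (illustrative): | ||||
| #   ./scripts/test.py -k                      # run all suites, keep going | ||||
| #   ./scripts/test.py tests/test_dirs.toml#1  # run a single case from a suite | ||||
| #   ./scripts/test.py -r -g test_dirs         # reentrant tests, gdb on failure | ||||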
|   | ||||