mirror of https://github.com/NationalSecurityAgency/ghidra.git, synced 2024-11-23 12:42:30 +00:00
pcodetest: tpp: refactor test parsing to a separate file, rename file
This commit is contained in: parent b1c5b62a47, commit 1fd9e2ad73
@@ -97,7 +97,7 @@ class PCodeTestBuild(BuildUtil):
             return
 
         # save path to tpp
-        tpp_py = os.getcwd() + '/tpp.py'
+        tpp_py = os.getcwd() + '/test_preprocessor.py'
 
         # Get a list of strings to filter input files
         available_files = sorted(glob.glob(self.config.format('%(pcodetest_src)s/*')))
Ghidra/Extensions/SleighDevTools/pcodetest/test_parser.py (new file, 210 lines)
@@ -0,0 +1,210 @@
#!/usr/bin/env python

# IP: GHIDRA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
from pathlib import Path


class _TestfileReader:
    def __init__(self, path: Path):
        '''
        This class reads a file while keeping count of the current line number.
        '''
        self._path = path
        self._file = None
        self._curr_line = None

    def open(self):
        if self._file is None:
            self._file = self._path.open('r')
            self._curr_line = 0

    def close(self):
        if self._file is not None:
            self._file.close()
            self._file = None
            self._curr_line = None

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def peekline(self):
        pos = self._file.tell()
        line = self._file.readline()
        self._file.seek(pos)
        return line

    def readline(self) -> str:
        line = self._file.readline()
        self._curr_line += 1
        return line

    def current_line(self) -> int:
        return self._curr_line

    def file_path(self) -> str:
        return str(self._path)


class _TokenMatcher:
    # Token enumeration
    TOKEN_UNKNOWN = 0
    TOKEN_INCLUDE = 1
    TOKEN_IF_START = 2
    TOKEN_IF_END = 3
    TOKEN_TEST_FCN = 4
    TOKEN_MAIN_FCN = 5
    TOKEN_BODY_START = 6
    TOKEN_BODY_END = 7
    TOKEN_EMPTY = 8
    TOKEN_TEXT = 9
    TOKEN_ASSERT = 10

    # Regex matchers
    _MATCHERS = [
        (TOKEN_INCLUDE, re.compile(r'(?:#include)\s+.*')),
        (TOKEN_IF_START, re.compile(r'(?:#if|#ifdef)\s+.*')),
        (TOKEN_IF_END, re.compile(r'#endif.*')),
        (TOKEN_TEST_FCN, re.compile(r'TEST\s+(\w*).*')),
        (TOKEN_MAIN_FCN, re.compile(r'MAIN\s+(\w*).*')),
        (TOKEN_BODY_START, re.compile(r'{\s*(.*)')),
        (TOKEN_BODY_END, re.compile(r'}.*')),
        (TOKEN_EMPTY, re.compile(r'^\s*$')),
        (TOKEN_ASSERT, re.compile(r'^\s+ASSERT')),
        (TOKEN_TEXT, re.compile(r'.*')),
    ]

    def match(self, string: str):
        '''
        Returns a tuple with a matched token and possible token parameters.
        '''
        for token, matcher in self._MATCHERS:
            res = matcher.match(string)
            if res:
                return token, list(res.groups())
        return self.TOKEN_UNKNOWN, []

    def peek_token(self, file):
        while line := file.peekline():
            token, params = self.match(line)
            return (token, params, line)
        return (None, None, None)

    def get_token(self, file):
        while line := file.readline():
            token, params = self.match(line)
            if token == self.TOKEN_EMPTY:
                continue
            return (token, params, line)
        return (None, None, None)


class TestParser:
    def __init__(self, path: Path):
        '''
        Parse `.test` files.
        '''
        self._tree = []
        self._parse(path)

    def _raise_syntax_error(self, file, msg, line):
        raise SyntaxError(
            msg, (file.file_path(), file.current_line(), 0, line))

    def _parse(self, path: Path):
        with _TestfileReader(path) as file:
            tokenizer = _TokenMatcher()
            self._parse_root(file, tokenizer)

    def _parse_root(self, file, tokenizer):
        while True:
            element = self._parse_element(file, tokenizer)
            if element is None:
                return
            self._tree.append(element)

    def _parse_element(self, file, tokenizer):
        token, params, line = tokenizer.get_token(file)
        if token is None:
            return None
        elif token == tokenizer.TOKEN_INCLUDE or \
                token == tokenizer.TOKEN_TEXT:
            return {'type': 'text', 'body': line}
        elif token == tokenizer.TOKEN_IF_START:
            children = self._parse_if_children(file, tokenizer)
            return {'type': 'if', 'body': line, 'children': children}
        elif token == tokenizer.TOKEN_TEST_FCN:
            body, assert_num = self._parse_body(file, tokenizer)
            return {'type': 'test', 'name': params[0], 'body': body, 'assert_num': assert_num}
        elif token == tokenizer.TOKEN_MAIN_FCN:
            return {'type': 'main', 'name': params[0]}
        else:
            self._raise_syntax_error(
                file, "Unexpected token at root", line)

    def _parse_if_children(self, file, tokenizer):
        children = []
        while True:
            token, params, line = tokenizer.peek_token(file)
            if token is None:
                self._raise_syntax_error(
                    file, "Missing if closing statement!", line)
            elif token != tokenizer.TOKEN_IF_END:
                children.append(self._parse_element(file, tokenizer))
            else:
                tokenizer.get_token(file)
                break
        return children

    def _parse_body(self, file, tokenizer):
        token, _, line = tokenizer.get_token(file)
        if token != tokenizer.TOKEN_BODY_START:
            self._raise_syntax_error(
                file, "Unexpected token at test function body start", line)
        assert_num = 0
        body = ""
        while True:
            token, _, line = tokenizer.get_token(file)
            if token == tokenizer.TOKEN_BODY_END:
                break
            elif token == tokenizer.TOKEN_ASSERT:
                assert_num += 1
                body += line
            elif token == tokenizer.TOKEN_TEXT:
                body += line
            else:
                self._raise_syntax_error(
                    file, "Unexpected token at test function body", line)
        return body, assert_num

    def get_main_function(self):
        for n in self._tree:
            if n['type'] == 'main':
                return n['name']

    def get_tree(self):
        return self._tree


if __name__ == "__main__":
    # Test the parser by parsing all the files in the c_src directory...
    for file in Path('c_src/').glob('*.test'):
        test = TestParser(file)
        print(test._tree)
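For illustration, here is a minimal sketch of how the new parser is used and what it produces. It is not part of the commit: the sample `.test` content and the names u4_add_test, u4_main and sample.test are made up, inferred only from the _TokenMatcher regexes above (TEST/MAIN markers, a brace-delimited body, indented ASSERT lines).

# Hypothetical sketch: feed a tiny, made-up .test file through TestParser
# and print the node types of the resulting tree.
from pathlib import Path
from tempfile import TemporaryDirectory

from test_parser import TestParser

SAMPLE = '''#include "pcode_test.h"

TEST u4_add_test
{
    ASSERT(1 + 1 == 2);
}

MAIN u4_main
'''

with TemporaryDirectory() as tmp:
    sample = Path(tmp) / 'sample.test'
    sample.write_text(SAMPLE)
    parsed = TestParser(sample)
    # Expected nodes: a 'text' node for the #include line, a 'test' node with
    # name='u4_add_test' and assert_num=1, and a 'main' node for 'u4_main'.
    for node in parsed.get_tree():
        print(node['type'], node.get('name', ''))
    print('main function:', parsed.get_main_function())

Each TEST block becomes a dict carrying its name, raw body text, and the number of ASSERT lines it contains, which is exactly what test_preprocessor.py (below) consumes.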
Ghidra/Extensions/SleighDevTools/pcodetest/test_preprocessor.py (new file, 191 lines)
@@ -0,0 +1,191 @@
#!/usr/bin/env python

# IP: GHIDRA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import sys
from pathlib import Path
from test_parser import TestParser


def print_err(*args, **kwargs):
    '''
    Print to stderr.
    '''
    print(*args, file=sys.stderr, **kwargs)


def create_test_entry(filepath: Path) -> bool:
    '''
    Create a file in `filepath` that serves as the entry for the tests.
    The entry function will contain a call to all of the main functions found
    in test files in the directory of the output file.
    '''
    if filepath.exists():
        print_err(f'ERROR: entry filename {filepath} exists\n')
        return False
    # Iterate testfiles in the same folder as filepath and collect entry names
    test_entries = []
    for testpath in filepath.parent.glob('*.test'):
        testdata = TestParser(testpath)
        test_entries.append(testdata.get_main_function())
    # Generate the file
    with filepath.open('w') as file:
        file.write('#include "pcode_test.h"\n')
        file.write('\n')
        for t in test_entries:
            file.write(f'extern void {t}(TestInfo* info);\n')
        file.write('\n')
        file.write('void main(void) {\n')
        file.write(' TestInfo info;\n')
        file.write('\n')
        for t in test_entries:
            file.write(f' {t}(&info);\n')
        file.write('\n')
        file.write('#ifdef BUILD_EXE\n')
        file.write(' exit(0);\n')
        file.write('#endif\n')
        file.write('}\n')
    return True


def _write_test_body_element(outfile, element):
    if element['type'] == 'text':
        outfile.write(element['body'])
        return True
    elif element['type'] == 'test':
        outfile.write(f'\n')
        outfile.write(
            f'#define {element["name"]}_NUMB {element["assert_num"]}\n')
        outfile.write(
            f'static const char {element["name"]}_NAME [] = "{element["name"]}";\n')
        outfile.write(f'static void {element["name"]}()\n')
        outfile.write(f'{{\n')
        outfile.write(
            f' noteTestMain(__FILE__, __LINE__, {element["name"]}_NAME);\n')
        outfile.write(f' {{\n')
        outfile.write(f'{element["body"]}')
        outfile.write(f' }}\n')
        outfile.write(
            f' breakOnSubDone(__FILE__, __LINE__, {element["name"]}_NAME);\n')
        outfile.write(f'}}\n')
        return True
    elif element['type'] == 'main':
        outfile.write(f'\n')
        outfile.write(f'extern void {element["name"]}(TestInfo*);\n')
        outfile.write(f'#define {element["name"]}_NUMB 0\n')
        outfile.write(
            f'static const char {element["name"]}_NAME [] = "{element["name"]}";\n')
        return True
    elif element['type'] == 'if':
        outfile.write(f'\n')
        outfile.write(element['body'])
        for c in element['children']:
            if not _write_test_body_element(outfile, c):
                return False
        outfile.write(f'#endif\n')
        return True
    else:
        print_err(f'ERROR: Unrecognized tree entry {element} in test!')
        return False


def _write_test_info_table_element(outfile, element):
    if element['type'] == 'test':
        outfile.write(
            f' {{ {element["name"]}_NAME, (testFuncPtr) &{element["name"]}, {element["name"]}_NUMB }},\n')
        return True
    elif element['type'] == 'if':
        outfile.write(element['body'])
        for el in element['children']:
            _write_test_info_table_element(outfile, el)
        outfile.write(f'#endif\n')
        return True
    return True


def create_test_file(filepath: Path) -> bool:
    '''
    Parse the test file in `filepath` and generate the corresponding
    C source test file.
    '''
    if filepath.suffix != '.test':
        print_err(f'ERROR: filename {filepath} must end with .test\n')
        return False
    testdata = TestParser(filepath)
    with filepath.with_suffix('.c').open('w') as outfile:
        # Write testfile body
        for element in testdata.get_tree():
            if not _write_test_body_element(outfile, element):
                return False

        # Now write the function info
        main = testdata.get_main_function()
        outfile.write(f'\nstatic FunctionInfo fi[] = {{\n')
        outfile.write(
            f' {{ {main}_NAME, (testFuncPtr) &{main}, {main}_NUMB }},\n')
        for element in testdata.get_tree():
            if not _write_test_info_table_element(outfile, element):
                return False
        outfile.write(f' {{ 0, 0, 0 }}\n')
        outfile.write(f'}};\n')

        # Now write the boilerplate...
        outfile.write(f'\n')
        outfile.write(f'static GroupInfo Info = {{\n')
        outfile.write(
            f' {{\'a\', \'B\', \'c\', \'D\', \'e\', \'f\', \'G\', \'h\'}},\n')
        outfile.write(f' fi\n')
        outfile.write(f'}};\n\n')
        outfile.write(
            f'/* Function exists to make sure that the GroupInfo structure does not\n')
        outfile.write(f' * get optimized away.\n')
        outfile.write(f' **/\n\n')
        outfile.write(f'GroupInfo *{main}_Force() {{\n')
        outfile.write(f' return &Info;\n')
        outfile.write(f'}}\n\n')
        outfile.write(f'void {main}(TestInfo* not_used) {{\n')
        outfile.write(f' i4 i = 0;\n')
        outfile.write(f' int numTest = 0;\n')
        outfile.write(f' TestInfo_reset();\n')
        outfile.write(f' for (i = 1; Info.funcTable[i].name; i++)\n')
        outfile.write(f' Info.funcTable[i].func();\n')
        outfile.write(f' breakOnDone(__FILE__, __LINE__, {main}_NAME);\n')
        outfile.write(f'}}\n')
    return True


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Precompile test file')
    parser.add_argument('--entry', type=Path, default=None,
                        help='Create a main entry file containing calls to all test functions')
    parser.add_argument('test_file', type=Path, default=None, nargs='*',
                        help='Test file to preprocess, must end with .test')
    args = parser.parse_args()

    if (args.test_file is None or len(args.test_file) == 0) and args.entry is None:
        parser.print_help()
        sys.exit(1)

    print(args.test_file)

    if args.test_file is not None:
        for test_file in args.test_file:
            if not create_test_file(test_file):
                sys.exit(1)

    if args.entry is not None:
        if not create_test_entry(args.entry):
            sys.exit(1)
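The preprocessor covers the same two jobs as the old tpp.py below: turning each .test source into a .c file, and generating an entry source whose main() calls every MAIN function found in a directory. A brief usage sketch follows (not from the commit; c_src/foo.test and c_src/pcode_main.c are placeholder names), driving the two public functions directly; the argparse front end exposes the same operations via the positional test_file arguments and the --entry flag.

# Hypothetical driver for test_preprocessor.py; the paths are placeholders.
from pathlib import Path

from test_preprocessor import create_test_entry, create_test_file

# Equivalent to: python test_preprocessor.py c_src/foo.test
# Writes c_src/foo.c next to the input.
if not create_test_file(Path('c_src/foo.test')):
    raise SystemExit('preprocessing failed')

# Equivalent to: python test_preprocessor.py --entry c_src/pcode_main.c
# Scans c_src/*.test for MAIN functions; fails if the output already exists.
if not create_test_entry(Path('c_src/pcode_main.c')):
    raise SystemExit('entry generation failed')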
Ghidra/Extensions/SleighDevTools/pcodetest/tpp.py (deleted file, 236 lines)
@@ -1,236 +0,0 @@
#!/usr/bin/python

## ###
# IP: GHIDRA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

import argparse
import re
import sys
from pathlib import Path


def print_err(*args, **kwargs):
    '''
    Print to stderr.
    '''
    print(*args, file=sys.stderr, **kwargs)


def create_test_entry(filepath: Path) -> bool:
    '''
    Create a file in `filepath` that serves as the entry for the tests.
    The entry function will contain a call to all of the main functions found
    in test files in the directory of the output file.
    '''
    if filepath.exists():
        print_err(f'WARNING: entry filename {filepath} exists\n')
        return False
    # Iterate testfiles in the same folder as filepath and collect entry names
    test_entries = []
    for testpath in filepath.parent.glob('*.test'):
        with testpath.open('r') as testfile:
            for line in testfile:
                m = re.match(r'MAIN\s+(\w*).*', line)
                if m:
                    test_entries.append(m.group(1))
    # Generate the file
    with filepath.open('w') as file:
        file.write('#include "pcode_test.h"\n')
        file.write('\n')
        for t in test_entries:
            file.write(f'extern void {t}(TestInfo* info);\n')
        file.write('\n')
        file.write('void main(void) {\n')
        file.write(' TestInfo info;\n')
        file.write('\n')
        for t in test_entries:
            file.write(f' {t}(&info);\n')
        file.write('\n')
        file.write('#ifdef BUILD_EXE\n')
        file.write(' exit(0);\n')
        file.write('#endif\n')
        file.write('}\n')
    return True


class tpp:

    def __init__(self, fname):
        self.data = {'name':'', 'ifdef':'', 'main':'', 'body':'', 'num':''}
        self.info = []
        self.c_file = None
        self.line_num = 0
        self.fname = fname

    def c_write(self, line):
        if not self.c_file: self.c_write(line)
        else: self.c_file.write(line + '\n')

    def test_hdr(self, line):
        self.c_write(line);

    def test_test(self, name):
        self.data['name'] = name

    def test_if(self, line):
        if self.data['name']: self.test_body(line)
        elif self.data['ifdef']:
            print_err(f'ERROR: nested ifdef not allowed in file {self.fname} at line {self.line_num}\n')
            sys.exit(1)
        else: self.data['ifdef'] = line.strip()

    def test_endif(self, line):
        if self.data['name']: self.test_body(line)

    def test_open_brace(self):
        self.data['body'] = ''

    def test_main(self, main):
        self.data['main'] = main
        self.c_write('''
extern void %(main)s(TestInfo*);
#define %(main)s_NUMB 0
static const char %(main)s_NAME [] = "%(main)s";
''' % self.data)
        self.data['name'] = ''
        self.data['ifdef'] = ''
        self.data['body'] = ''
        self.data['num'] = ''

    def test_close_brace(self):
        if not self.data['name']: return
        self.c_write('')
        if self.data['ifdef']: self.c_write(self.data['ifdef'])
        self.data['num'] = str(len(re.findall(r'^\s+ASSERT', self.data['body'], flags=re.MULTILINE)))
        self.c_write('''#define %(name)s_NUMB %(num)s
static const char %(name)s_NAME [] = "%(name)s";
static void %(name)s()
{
noteTestMain(__FILE__, __LINE__, %(name)s_NAME);
{
%(body)s\t}
breakOnSubDone(__FILE__, __LINE__, %(name)s_NAME);
}''' % self.data)

        if self.data['ifdef']: self.c_write('#endif /* %(ifdef)s */\n' % self.data)
        self.info += [(self.data['name'], self.data['ifdef'])]

        # clear this test
        self.data['name'] = ''
        self.data['ifdef'] = ''
        self.data['body'] = ''

    def test_body(self, line):
        if self.data['name']:
            # add an indentation
            if line[0] == '\t': line = '\t' + line
            self.data['body'] += line
        else:
            self.c_write(line)

    def test_fi(self):
        self.c_write('static FunctionInfo fi[] = {')

        if self.data['main']: self.c_write('\t{ %(main)s_NAME, (testFuncPtr) &%(main)s, %(main)s_NUMB },' % self.data)

        for (e, f) in self.info:
            if f: self.c_write(f)
            self.c_write('\t{ %s_NAME, (testFuncPtr) &%s, %s_NUMB },' % (e, e, e))
            if f: self.c_write('#endif /* %s */' % f)

        self.c_write('\t{ 0, 0, 0 }')
        self.c_write('};')

    # This is boilerplate, supplying the main, etc

    def test_boilerplate(self):
        self.c_write('''
static GroupInfo Info = {
{\'a\', \'B\', \'c\', \'D\', \'e\', \'f\', \'G\', \'h\'},
fi
};

/* Function exists to make sure that the GroupInfo structure does not
* get optimized away.
**/

GroupInfo *%(main)s_Force() {
return &Info;
}

void %(main)s(TestInfo* not_used) {
i4 i = 0;
int numTest = 0;

TestInfo_reset();

for (i = 1; Info.funcTable[i].name; i++) Info.funcTable[i].func();

breakOnDone(__FILE__, __LINE__, %(main)s_NAME);
}''' % self.data)

    def match(self, rexp, line):
        self.m = re.match(rexp, line)
        return self.m

    # parse the test file

    def parse(self):

        if not self.fname.endswith('.test'):
            print_err(f'ERROR: filename {self.fname} must end with .test\n')
            sys.exit(1)

        self.c_file = open(re.sub('[.]test', '.c', self.fname), "w")

        self.line_num = 0
        for line in open(self.fname):
            self.line_num += 1
            if self.match(r'TEST\s+(\w*).*', line):
                self.test_test(self.m.group(1))
            elif self.match(r'(?:#include)\s+.*', line):
                self.test_hdr(line)
            elif self.match(r'(?:#if|#ifdef)\s+.*', line):
                self.test_if(line)
            elif self.match(r'#endif.*', line):
                self.test_endif(line)
            elif self.match(r'{\s*(.*)', line):
                self.test_open_brace()
            elif self.match(r'MAIN\s+(\w*).*', line):
                self.test_main(self.m.group(1))
            elif self.match(r'}.*', line):
                self.test_close_brace()
            else:
                self.test_body(line)

        self.test_fi()
        self.test_boilerplate()
        self.c_file.close()
        self.c_file = False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Precompile test file')
    parser.add_argument('--entry', type=Path, default='', help='Create a main entry file contianing calls all test functions')
    parser.add_argument('test_file', nargs='*', help='Test file to preprocess, must end with .test')
    args = parser.parse_args()

    if args.test_file:
        for test_file in args.test_file:
            tpp(test_file).parse()

    if args.entry:
        create_test_entry(args.entry)