Merge pull request #41 from robotpy/moar-coverage

Moar coverage
This commit is contained in:
Dustin Spicuzza 2022-12-09 10:59:28 -05:00 committed by GitHub
commit 19c0604603
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 358 additions and 49 deletions

View File

@ -1,9 +1,11 @@
import argparse
import dataclasses
import inspect
import re
import subprocess
import typing
from .errors import CxxParseError
from .options import ParserOptions
from .simple import parse_string, ParsedData
@ -47,7 +49,7 @@ def nondefault_repr(data: ParsedData) -> str:
return _inner_repr(data)
def gentest(infile: str, name: str, outfile: str, verbose: bool) -> None:
def gentest(infile: str, name: str, outfile: str, verbose: bool, fail: bool) -> None:
# Goal is to allow making a unit test as easy as running this dumper
# on a file and copy/pasting this into a test
@ -56,23 +58,42 @@ def gentest(infile: str, name: str, outfile: str, verbose: bool) -> None:
options = ParserOptions(verbose=verbose)
try:
data = parse_string(content, options=options)
if fail:
raise ValueError("did not fail")
except CxxParseError as e:
if not fail:
raise
# do it again, but strip the content so the error message matches
try:
parse_string(content.strip(), options=options)
except CxxParseError as e2:
err = str(e2)
if not fail:
stmt = nondefault_repr(data)
stmt = f"""
data = parse_string(content, cleandoc=True)
assert data == {stmt}
"""
else:
stmt = f"""
err = {repr(err)}
with pytest.raises(CxxParseError, match=re.escape(err)):
parse_string(content, cleandoc=True)
"""
content = ("\n" + content.strip()).replace("\n", "\n ")
content = "\n".join(l.rstrip() for l in content.splitlines())
stmt = inspect.cleandoc(
f'''
def test_{name}() -> None:
content = """{content}
"""
data = parse_string(content, cleandoc=True)
assert data == {stmt}
{stmt.strip()}
'''
)
@ -94,6 +115,9 @@ if __name__ == "__main__":
parser.add_argument("name", nargs="?", default="TODO")
parser.add_argument("-v", "--verbose", default=False, action="store_true")
parser.add_argument("-o", "--output", default="-")
parser.add_argument(
"-x", "--fail", default=False, action="store_true", help="Expect failure"
)
args = parser.parse_args()
gentest(args.header, args.name, args.output, args.verbose)
gentest(args.header, args.name, args.output, args.verbose, args.fail)

View File

@ -474,7 +474,7 @@ class Lexer:
self.lookahead.extendleft(reversed(toks))
if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
try:
lex.runmain(lexer=Lexer(None))
except EOFError:

View File

@ -156,21 +156,21 @@ class CxxParser:
raise self._parse_error(tok, "' or '".join(tokenTypes))
return tok
def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken:
tok = self.lex.token()
if tok.type not in tokenTypes:
raise self._parse_error(tok, "' or '".join(sorted(tokenTypes)))
return tok
# def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken:
# tok = self.lex.token()
# if tok.type not in tokenTypes:
# raise self._parse_error(tok, "' or '".join(sorted(tokenTypes)))
# return tok
def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# includes the last token
get_token = self.lex.token
while True:
tok = get_token()
rtoks.append(tok)
if tok.type in token_types:
break
return rtoks
# def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# # includes the last token
# get_token = self.lex.token
# while True:
# tok = get_token()
# rtoks.append(tok)
# if tok.type in token_types:
# break
# return rtoks
def _consume_until(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# does not include the found token
@ -230,16 +230,24 @@ class CxxParser:
if tok.type in self._end_balanced_tokens:
expected = match_stack.pop()
if tok.type != expected:
# hack: ambiguous right-shift issues here, really
# should be looking at the context
if tok.type == ">":
tok = self.lex.token_if(">")
if tok:
consumed.append(tok)
# hack: we only claim to parse correct code, so if this
# is less than or greater than, assume that the code is
# doing math and so this unexpected item is correct.
#
# If one of the other items on the stack match, pop back
# to that. Otherwise, ignore it and hope for the best
if tok.type != ">" and expected != ">":
raise self._parse_error(tok, expected)
for i, maybe in enumerate(reversed(match_stack)):
if tok.type == maybe:
for _ in range(i + 1):
match_stack.pop()
break
else:
match_stack.append(expected)
continue
raise self._parse_error(tok, expected)
if len(match_stack) == 0:
return consumed
@ -284,6 +292,7 @@ class CxxParser:
"alignas": self._consume_attribute_specifier_seq,
"extern": self._parse_extern,
"friend": self._parse_friend_decl,
"inline": self._parse_inline,
"namespace": self._parse_namespace,
"private": self._process_access_specifier,
"protected": self._process_access_specifier,
@ -398,9 +407,12 @@ class CxxParser:
tok = self._next_token_must_be("NAME")
if inline and len(names) > 1:
raise CxxParseError("a nested namespace definition cannot be inline")
# TODO: namespace_alias_definition
ns = NamespaceDecl(names, inline)
ns = NamespaceDecl(names, inline, doxygen)
state = self._push_state(NamespaceBlockState, ns)
state.location = location
self.visitor.on_namespace_start(state)
@ -444,12 +456,6 @@ class CxxParser:
else:
self._parse_declarations(tok, doxygen)
def _parse_mutable(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
if not isinstance(self.state, ClassBlockState):
raise self._parse_error(tok)
self._parse_declarations(tok, doxygen)
def _parse_typedef(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
tok = self.lex.token()
self._parse_declarations(tok, doxygen, is_typedef=True)
@ -1647,7 +1653,7 @@ class CxxParser:
if self.lex.token_if("throw"):
tok = self._next_token_must_be("(")
fn.throw = self._create_value(self._consume_balanced_tokens(tok))
fn.throw = self._create_value(self._consume_balanced_tokens(tok)[1:-1])
elif self.lex.token_if("noexcept"):
toks = []

View File

@ -1,7 +1,7 @@
import typing
if typing.TYPE_CHECKING:
from .visitor import CxxVisitor
from .visitor import CxxVisitor # pragma: nocover
from .errors import CxxParseError
from .lexer import LexToken, Location

View File

@ -91,6 +91,8 @@ class NamespaceScope:
"""
name: str = ""
inline: bool = False
doxygen: typing.Optional[str] = None
classes: typing.List["ClassScope"] = field(default_factory=list)
enums: typing.List[EnumDecl] = field(default_factory=list)
@ -248,6 +250,10 @@ class SimpleCxxVisitor:
assert ns is not None
# only set inline/doxygen on inner namespace
ns.inline = state.namespace.inline
ns.doxygen = state.namespace.doxygen
self.block = ns
self.namespace = ns

View File

@ -47,7 +47,7 @@ def tokfmt(toks: typing.List[Token]) -> str:
return "".join(vals)
if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
import argparse
parser = argparse.ArgumentParser()

View File

@ -56,6 +56,9 @@ class NamespaceDecl:
names: typing.List[str]
inline: bool = False
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass
class DecltypeSpecifier:
@ -511,7 +514,12 @@ class Function:
template: typing.Optional[TemplateDecl] = None
#: Value of any throw specification for this function. The value omits the
#: outer parentheses.
throw: typing.Optional[Value] = None
#: Value of any noexcept specification for this function. The value omits
#: the outer parentheses.
noexcept: typing.Optional[Value] = None
#: Only set if an MSVC calling convention (__stdcall, etc) is explictly

View File

@ -4,7 +4,7 @@ import typing
if sys.version_info >= (3, 8):
from typing import Protocol
else:
Protocol = object
Protocol = object # pragma: no cover
from .types import (
@ -65,7 +65,9 @@ class CxxVisitor(Protocol):
"""
def on_empty_block_end(self, state: EmptyBlockState) -> None:
...
"""
Called when an empty block ends
"""
def on_extern_block_start(self, state: ExternBlockState) -> None:
"""
@ -78,7 +80,9 @@ class CxxVisitor(Protocol):
"""
def on_extern_block_end(self, state: ExternBlockState) -> None:
...
"""
Called when an extern block ends
"""
def on_namespace_start(self, state: NamespaceBlockState) -> None:
"""
@ -101,10 +105,14 @@ class CxxVisitor(Protocol):
"""
def on_variable(self, state: State, v: Variable) -> None:
...
"""
Called when a global variable is encountered
"""
def on_function(self, state: State, fn: Function) -> None:
...
"""
Called when a function is encountered that isn't part of a class
"""
def on_method_impl(self, state: State, method: Method) -> None:
"""

View File

@ -290,3 +290,42 @@ def test_doxygen_var_after() -> None:
]
)
)
def test_doxygen_namespace() -> None:
    """Doxygen comments preceding a namespace are attached to that namespace.

    For a nested definition (``namespace a::b::c``) the comment is attached
    to the innermost namespace only.
    """
    content = """
    /**
    * x is a mysterious namespace
    */
    namespace x {}

    /**
    * c is also a mysterious namespace
    */
    namespace a::b::c {}
    """

    expected = ParsedData(
        namespace=NamespaceScope(
            namespaces={
                "x": NamespaceScope(
                    name="x", doxygen="/**\n* x is a mysterious namespace\n*/"
                ),
                "a": NamespaceScope(
                    name="a",
                    namespaces={
                        "b": NamespaceScope(
                            name="b",
                            namespaces={
                                "c": NamespaceScope(
                                    name="c",
                                    doxygen="/**\n* c is also a mysterious namespace\n*/",
                                )
                            },
                        )
                    },
                ),
            }
        )
    )

    assert parse_string(content, cleandoc=True) == expected

View File

@ -1045,3 +1045,99 @@ def test_msvc_conventions() -> None:
],
)
)
def test_throw_empty() -> None:
    """``throw()`` with no exception types yields an empty throw Value."""
    content = """
    void foo() throw() { throw std::runtime_error("foo"); }
    """
    actual = parse_string(content, cleandoc=True)

    void_type = Type(typename=PQName(segments=[FundamentalSpecifier(name="void")]))
    assert actual == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=void_type,
                    name=PQName(segments=[NameSpecifier(name="foo")]),
                    parameters=[],
                    has_body=True,
                    # empty parens -> empty token list, not None
                    throw=Value(tokens=[]),
                )
            ]
        )
    )
def test_throw_dynamic() -> None:
    """A dynamic exception specification keeps its type tokens, without parens."""
    content = """
    void foo() throw(std::exception) { throw std::runtime_error("foo"); }
    """
    actual = parse_string(content, cleandoc=True)

    throw_tokens = [Token(value=v) for v in ("std", "::", "exception")]
    assert actual == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="foo")]),
                    parameters=[],
                    has_body=True,
                    throw=Value(tokens=throw_tokens),
                )
            ]
        )
    )
def test_noexcept_empty() -> None:
    """A bare ``noexcept`` specifier yields a noexcept Value with no tokens."""
    content = """
    void foo() noexcept;
    """
    actual = parse_string(content, cleandoc=True)

    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        noexcept=Value(tokens=[]),
    )
    assert actual == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )
def test_noexcept_contents() -> None:
    """``noexcept(expr)`` keeps the expression tokens, without the parens."""
    content = """
    void foo() noexcept(false);
    """
    actual = parse_string(content, cleandoc=True)

    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        noexcept=Value(tokens=[Token(value="false")]),
    )
    assert actual == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )

View File

@ -1,6 +1,8 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.errors import CxxParseError
from cxxheaderparser.types import (
ForwardDecl,
FundamentalSpecifier,
NameSpecifier,
PQName,
@ -15,6 +17,9 @@ from cxxheaderparser.simple import (
ParsedData,
)
import pytest
import re
def test_dups_in_different_ns() -> None:
content = """
@ -119,3 +124,47 @@ def test_correct_ns() -> None:
}
)
)
def test_inline_namespace() -> None:
    """An ``inline namespace`` nested in a regular one sets inline=True only
    on the inner scope."""
    content = """
    namespace Lib {
    inline namespace Lib_1 {
    class A;
    }
    }
    """
    actual = parse_string(content, cleandoc=True)

    inner = NamespaceScope(
        name="Lib_1",
        inline=True,
        forward_decls=[
            ForwardDecl(
                typename=PQName(
                    segments=[NameSpecifier(name="A")],
                    classkey="class",
                )
            )
        ],
    )
    outer = NamespaceScope(name="Lib", namespaces={"Lib_1": inner})
    assert actual == ParsedData(
        namespace=NamespaceScope(namespaces={"Lib": outer})
    )
def test_invalid_inline_namespace() -> None:
    """A nested namespace definition may not be declared inline; the parser
    must raise CxxParseError with a precise message."""
    content = """
    inline namespace a::b {}
    """
    expected_err = (
        "<str>:1: parse error evaluating 'inline': "
        "a nested namespace definition cannot be inline"
    )
    with pytest.raises(CxxParseError, match=re.escape(expected_err)):
        parse_string(content, cleandoc=True)

View File

@ -1,6 +1,6 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.errors import CxxParseError
from cxxheaderparser.types import (
Array,
ClassDecl,
@ -21,6 +21,9 @@ from cxxheaderparser.types import (
)
from cxxheaderparser.simple import ClassScope, NamespaceScope, ParsedData, parse_string
import pytest
import re
def test_var_unixwiz_ridiculous() -> None:
# http://unixwiz.net/techtips/reading-cdecl.html
@ -766,3 +769,73 @@ def test_var_extern() -> None:
]
)
)
def test_balanced_with_gt() -> None:
    """Tests _consume_balanced_tokens handling of mismatched gt tokens: the
    ``>>`` in an arithmetic expression must not be treated as closing
    brackets."""
    content = """
    int x = (1 >> 2);
    """
    actual = parse_string(content, cleandoc=True)

    value_tokens = [Token(value=v) for v in ("(", "1", ">", ">", "2", ")")]
    assert actual == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=Value(tokens=value_tokens),
                )
            ]
        )
    )
def test_balanced_with_lt() -> None:
    """Tests _consume_balanced_tokens handling of mismatched lt tokens: a
    ``<`` used as less-than must not open a template bracket pair."""
    content = """
    bool z = (i < 4);
    """
    actual = parse_string(content, cleandoc=True)

    value_tokens = [Token(value=v) for v in ("(", "i", "<", "4", ")")]
    assert actual == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="z")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="bool")])
                    ),
                    value=Value(tokens=value_tokens),
                )
            ]
        )
    )
def test_balanced_bad_mismatch() -> None:
    """A genuinely mismatched closing bracket still raises a parse error."""
    content = """
    bool z = (12 ]);
    """
    expected_err = "<str>:1: parse error evaluating ']': unexpected ']', expected ')'"
    with pytest.raises(CxxParseError, match=re.escape(expected_err)):
        parse_string(content, cleandoc=True)