diff --git a/cxxheaderparser/lexer.py b/cxxheaderparser/lexer.py index c160eef..9650190 100644 --- a/cxxheaderparser/lexer.py +++ b/cxxheaderparser/lexer.py @@ -474,7 +474,7 @@ class Lexer: self.lookahead.extendleft(reversed(toks)) -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover try: lex.runmain(lexer=Lexer(None)) except EOFError: diff --git a/cxxheaderparser/parser.py b/cxxheaderparser/parser.py index d0da4db..6bd8ba5 100644 --- a/cxxheaderparser/parser.py +++ b/cxxheaderparser/parser.py @@ -156,21 +156,21 @@ class CxxParser: raise self._parse_error(tok, "' or '".join(tokenTypes)) return tok - def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken: - tok = self.lex.token() - if tok.type not in tokenTypes: - raise self._parse_error(tok, "' or '".join(sorted(tokenTypes))) - return tok + # def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken: + # tok = self.lex.token() + # if tok.type not in tokenTypes: + # raise self._parse_error(tok, "' or '".join(sorted(tokenTypes))) + # return tok - def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList: - # includes the last token - get_token = self.lex.token - while True: - tok = get_token() - rtoks.append(tok) - if tok.type in token_types: - break - return rtoks + # def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList: + # # includes the last token + # get_token = self.lex.token + # while True: + # tok = get_token() + # rtoks.append(tok) + # if tok.type in token_types: + # break + # return rtoks def _consume_until(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList: # does not include the found token @@ -448,12 +448,6 @@ class CxxParser: else: self._parse_declarations(tok, doxygen) - def _parse_mutable(self, tok: LexToken, doxygen: typing.Optional[str]) -> None: - if not isinstance(self.state, ClassBlockState): - raise self._parse_error(tok) - - self._parse_declarations(tok, doxygen) - def 
_parse_typedef(self, tok: LexToken, doxygen: typing.Optional[str]) -> None: tok = self.lex.token() self._parse_declarations(tok, doxygen, is_typedef=True) diff --git a/cxxheaderparser/parserstate.py b/cxxheaderparser/parserstate.py index 89c892d..b68deba 100644 --- a/cxxheaderparser/parserstate.py +++ b/cxxheaderparser/parserstate.py @@ -1,7 +1,7 @@ import typing if typing.TYPE_CHECKING: - from .visitor import CxxVisitor + from .visitor import CxxVisitor # pragma: no cover from .errors import CxxParseError from .lexer import LexToken, Location diff --git a/cxxheaderparser/tokfmt.py b/cxxheaderparser/tokfmt.py index 94b241e..3fa1bf2 100644 --- a/cxxheaderparser/tokfmt.py +++ b/cxxheaderparser/tokfmt.py @@ -47,7 +47,7 @@ def tokfmt(toks: typing.List[Token]) -> str: return "".join(vals) -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover import argparse parser = argparse.ArgumentParser() diff --git a/cxxheaderparser/visitor.py b/cxxheaderparser/visitor.py index 15d2100..c6d0d81 100644 --- a/cxxheaderparser/visitor.py +++ b/cxxheaderparser/visitor.py @@ -4,7 +4,7 @@ import typing if sys.version_info >= (3, 8): from typing import Protocol else: - Protocol = object + Protocol = object # pragma: no cover from .types import ( @@ -65,7 +65,9 @@ class CxxVisitor(Protocol): """ def on_empty_block_end(self, state: EmptyBlockState) -> None: - ... + """ + Called when an empty block ends + """ def on_extern_block_start(self, state: ExternBlockState) -> None: """ @@ -78,7 +80,9 @@ class CxxVisitor(Protocol): """ def on_extern_block_end(self, state: ExternBlockState) -> None: - ... + """ + Called when an extern block ends + """ def on_namespace_start(self, state: NamespaceBlockState) -> None: """ @@ -101,10 +105,14 @@ class CxxVisitor(Protocol): """ def on_variable(self, state: State, v: Variable) -> None: - ... + """ + Called when a global variable is encountered + """ def on_function(self, state: State, fn: Function) -> None: - ... 
+ """ + Called when a function is encountered that isn't part of a class + """ def on_method_impl(self, state: State, method: Method) -> None: """