Remove unused pieces from coverage reporting

This commit is contained in:
Dustin Spicuzza 2022-12-09 01:29:59 -05:00
parent ff645920b8
commit b08d8783d4
5 changed files with 30 additions and 28 deletions

View File

@@ -474,7 +474,7 @@ class Lexer:
self.lookahead.extendleft(reversed(toks))
if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
try:
lex.runmain(lexer=Lexer(None))
except EOFError:

View File

@@ -156,21 +156,21 @@ class CxxParser:
raise self._parse_error(tok, "' or '".join(tokenTypes))
return tok
def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken:
tok = self.lex.token()
if tok.type not in tokenTypes:
raise self._parse_error(tok, "' or '".join(sorted(tokenTypes)))
return tok
# def _next_token_in_set(self, tokenTypes: typing.Set[str]) -> LexToken:
# tok = self.lex.token()
# if tok.type not in tokenTypes:
# raise self._parse_error(tok, "' or '".join(sorted(tokenTypes)))
# return tok
def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# includes the last token
get_token = self.lex.token
while True:
tok = get_token()
rtoks.append(tok)
if tok.type in token_types:
break
return rtoks
# def _consume_up_to(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# # includes the last token
# get_token = self.lex.token
# while True:
# tok = get_token()
# rtoks.append(tok)
# if tok.type in token_types:
# break
# return rtoks
def _consume_until(self, rtoks: LexTokenList, *token_types: str) -> LexTokenList:
# does not include the found token
@@ -448,12 +448,6 @@ class CxxParser:
else:
self._parse_declarations(tok, doxygen)
def _parse_mutable(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
if not isinstance(self.state, ClassBlockState):
raise self._parse_error(tok)
self._parse_declarations(tok, doxygen)
def _parse_typedef(self, tok: LexToken, doxygen: typing.Optional[str]) -> None:
tok = self.lex.token()
self._parse_declarations(tok, doxygen, is_typedef=True)

View File

@@ -1,7 +1,7 @@
import typing
if typing.TYPE_CHECKING:
from .visitor import CxxVisitor
from .visitor import CxxVisitor # pragma: no cover
from .errors import CxxParseError
from .lexer import LexToken, Location

View File

@@ -47,7 +47,7 @@ def tokfmt(toks: typing.List[Token]) -> str:
return "".join(vals)
if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
import argparse
parser = argparse.ArgumentParser()

View File

@@ -4,7 +4,7 @@ import typing
if sys.version_info >= (3, 8):
from typing import Protocol
else:
Protocol = object
Protocol = object # pragma: no cover
from .types import (
@@ -65,7 +65,9 @@ class CxxVisitor(Protocol):
"""
def on_empty_block_end(self, state: EmptyBlockState) -> None:
...
"""
Called when an empty block ends
"""
def on_extern_block_start(self, state: ExternBlockState) -> None:
"""
@@ -78,7 +80,9 @@ class CxxVisitor(Protocol):
"""
def on_extern_block_end(self, state: ExternBlockState) -> None:
...
"""
Called when an extern block ends
"""
def on_namespace_start(self, state: NamespaceBlockState) -> None:
"""
@@ -101,10 +105,14 @@ class CxxVisitor(Protocol):
"""
def on_variable(self, state: State, v: Variable) -> None:
...
"""
Called when a global variable is encountered
"""
def on_function(self, state: State, fn: Function) -> None:
...
"""
Called when a function is encountered that isn't part of a class
"""
def on_method_impl(self, state: State, method: Method) -> None:
"""