import pytest

from cxxheaderparser.lexer import PlyLexer, LexerTokenStream
from cxxheaderparser.tokfmt import tokfmt
from cxxheaderparser.types import Token


@pytest.mark.parametrize(
    "instr",
    [
        "int",
        "unsigned int",
        "::uint8_t",
        "void *",
        "void * *",
        "const char *",
        "const char[]",
        "void * (*)()",
        "void (*)(void * buf, int buflen)",
        "void (* fnType)(void * buf, int buflen)",
        "TypeName& x",
        "vector&",
        "std::vector *",
        "Alpha::Omega",
        "Convoluted::Nested::Mixin",
        "std::function",
        "std::shared_ptr>",
        "tr1::shared_ptr>",
        "std::map>>",
        "std::is_base_of::value",
        "const char&&",
        "something{1, 2, 3}",
        "operator-=",
        "operator[]",
        "operator*",
        "operator>=",
    ],
)
def test_tokfmt(instr: str) -> None:
    """
    Each input string is exactly what the output of tokfmt should be
    """
    toks = []
    lexer = PlyLexer("")
    lexer.input(instr)

    while True:
        tok = lexer.token()
        if not tok:
            break

        if tok.type not in LexerTokenStream._discard_types:
            toks.append(Token(tok.value, tok.type))

    assert tokfmt(toks) == instr