Compare commits


No commits in common. "main" and "1.0.0" have entirely different histories.
main ... 1.0.0

19 changed files with 121 additions and 2603 deletions

View File

@ -112,10 +112,6 @@ jobs:
- name: Install test dependencies - name: Install test dependencies
run: python -m pip --disable-pip-version-check install -r tests/requirements.txt run: python -m pip --disable-pip-version-check install -r tests/requirements.txt
- name: Setup MSVC compiler
uses: ilammy/msvc-dev-cmd@v1
if: matrix.os == 'windows-latest'
- name: Test wheel - name: Test wheel
shell: bash shell: bash
run: | run: |

View File

@ -1,15 +0,0 @@
version: 2
sphinx:
configuration: docs/conf.py
build:
os: ubuntu-22.04
tools:
python: "3.11"
python:
install:
- requirements: docs/requirements.txt
- method: pip
path: .

View File

@@ -17,7 +17,7 @@ if sys.version_info >= (3, 8):
else:
    Protocol = object
-_line_re = re.compile(r'^\#[\t ]*(line)? (\d+) "(.*)"')
+_line_re = re.compile(r'^\#[\t ]*line (\d+) "(.*)"')
_multicomment_re = re.compile("\n[\\s]+\\*")
@ -83,11 +83,8 @@ class PlyLexer:
"char16_t", "char16_t",
"char32_t", "char32_t",
"class", "class",
"concept",
"const", "const",
"constexpr", "constexpr",
"consteval",
"constinit",
"const_cast", "const_cast",
"continue", "continue",
"decltype", "decltype",
@ -124,7 +121,6 @@ class PlyLexer:
"public", "public",
"register", "register",
"reinterpret_cast", "reinterpret_cast",
"requires",
"return", "return",
"short", "short",
"signed", "signed",
@ -190,7 +186,6 @@ class PlyLexer:
"DBL_RBRACKET", "DBL_RBRACKET",
"DBL_COLON", "DBL_COLON",
"DBL_AMP", "DBL_AMP",
"DBL_PIPE",
"ARROW", "ARROW",
"SHIFT_LEFT", "SHIFT_LEFT",
] + list(keywords) ] + list(keywords)
@@ -453,8 +448,8 @@ class PlyLexer:
        # handle line macros
        m = _line_re.match(t.value)
        if m:
-            self.filename = m.group(3)
-            self.line_offset = 1 + self.lex.lineno - int(m.group(2))
+            self.filename = m.group(2)
+            self.line_offset = 1 + self.lex.lineno - int(m.group(1))
            return None
        # ignore C++23 warning directive
        if t.value.startswith("#warning"):
@ -476,7 +471,6 @@ class PlyLexer:
t_DBL_RBRACKET = r"\]\]" t_DBL_RBRACKET = r"\]\]"
t_DBL_COLON = r"::" t_DBL_COLON = r"::"
t_DBL_AMP = r"&&" t_DBL_AMP = r"&&"
t_DBL_PIPE = r"\|\|"
t_ARROW = r"->" t_ARROW = r"->"
t_SHIFT_LEFT = r"<<" t_SHIFT_LEFT = r"<<"
# SHIFT_RIGHT introduces ambiguity # SHIFT_RIGHT introduces ambiguity

View File

@@ -2,7 +2,7 @@ from dataclasses import dataclass
from typing import Callable, Optional
#: arguments are (filename, content)
-PreprocessorFunction = Callable[[str, Optional[str]], str]
+PreprocessorFunction = Callable[[str, str], str]
@dataclass

View File

@ -22,10 +22,8 @@ from .types import (
AutoSpecifier, AutoSpecifier,
BaseClass, BaseClass,
ClassDecl, ClassDecl,
Concept,
DecltypeSpecifier, DecltypeSpecifier,
DecoratedType, DecoratedType,
DeductionGuide,
EnumDecl, EnumDecl,
Enumerator, Enumerator,
Field, Field,
@ -43,7 +41,6 @@ from .types import (
Parameter, Parameter,
PQName, PQName,
Pointer, Pointer,
PointerToMember,
Reference, Reference,
TemplateArgument, TemplateArgument,
TemplateDecl, TemplateDecl,
@@ -77,10 +74,9 @@ class CxxParser:
    def __init__(
        self,
        filename: str,
-        content: typing.Optional[str],
+        content: str,
        visitor: CxxVisitor,
        options: typing.Optional[ParserOptions] = None,
-        encoding: typing.Optional[str] = None,
    ) -> None:
        self.visitor = visitor
        self.filename = filename
@ -89,13 +85,6 @@ class CxxParser:
if options and options.preprocessor is not None: if options and options.preprocessor is not None:
content = options.preprocessor(filename, content) content = options.preprocessor(filename, content)
if content is None:
if encoding is None:
encoding = "utf-8-sig"
with open(filename, "r", encoding=encoding) as fp:
content = fp.read()
self.lex: lexer.TokenStream = lexer.LexerTokenStream(filename, content) self.lex: lexer.TokenStream = lexer.LexerTokenStream(filename, content)
global_ns = NamespaceDecl([], False) global_ns = NamespaceDecl([], False)
@@ -540,7 +529,7 @@ class CxxParser:
        self._finish_class_decl(old_state)
    #
-    # Template and concept parsing
+    # Template parsing
    #
    def _parse_template_type_parameter(
@@ -608,13 +597,9 @@ class CxxParser:
                lex.return_token(ptok)
                param = self._parse_template_type_parameter(tok, None)
            else:
-                param, _ = self._parse_parameter(
-                    ptok, TemplateNonTypeParam, False, ">"
-                )
+                param = self._parse_parameter(ptok, TemplateNonTypeParam, ">")
        else:
-            param, _ = self._parse_parameter(
-                tok, TemplateNonTypeParam, concept_ok=False, end=">"
-            )
+            param = self._parse_parameter(tok, TemplateNonTypeParam, ">")
        params.append(param)
@ -647,11 +632,6 @@ class CxxParser:
self._parse_using(tok, doxygen, template) self._parse_using(tok, doxygen, template)
elif tok.type == "friend": elif tok.type == "friend":
self._parse_friend_decl(tok, doxygen, template) self._parse_friend_decl(tok, doxygen, template)
elif tok.type == "concept":
self._parse_concept(tok, doxygen, template)
elif tok.type == "requires":
template.raw_requires_pre = self._parse_requires(tok)
self._parse_declarations(self.lex.token(), doxygen, template)
else: else:
self._parse_declarations(tok, doxygen, template) self._parse_declarations(tok, doxygen, template)
@ -762,117 +742,6 @@ class CxxParser:
self.state, TemplateInst(typename, extern, doxygen) self.state, TemplateInst(typename, extern, doxygen)
) )
def _parse_concept(
self,
tok: LexToken,
doxygen: typing.Optional[str],
template: TemplateDecl,
) -> None:
name = self._next_token_must_be("NAME")
self._next_token_must_be("=")
# not trying to understand this for now
raw_constraint = self._create_value(self._consume_value_until([], ",", ";"))
state = self.state
if isinstance(state, ClassBlockState):
raise CxxParseError("concept cannot be defined in a class")
self.visitor.on_concept(
state,
Concept(
template=template,
name=name.value,
raw_constraint=raw_constraint,
doxygen=doxygen,
),
)
# fmt: off
_expr_operators = {
"<", ">", "|", "%", "^", "!", "*", "-", "+", "&", "=",
"&&", "||", "<<"
}
# fmt: on
def _parse_requires(
self,
tok: LexToken,
) -> Value:
tok = self.lex.token()
rawtoks: typing.List[LexToken] = []
# The easier case -- requires requires
if tok.type == "requires":
rawtoks.append(tok)
for tt in ("(", "{"):
tok = self._next_token_must_be(tt)
rawtoks.extend(self._consume_balanced_tokens(tok))
# .. and that's it?
# this is either a parenthesized expression or a primary clause
elif tok.type == "(":
rawtoks.extend(self._consume_balanced_tokens(tok))
else:
while True:
if tok.type == "(":
rawtoks.extend(self._consume_balanced_tokens(tok))
else:
tok = self._parse_requires_segment(tok, rawtoks)
# If this is not an operator of some kind, we don't know how
# to proceed so let the next parser figure it out
if tok.value not in self._expr_operators:
break
rawtoks.append(tok)
# check once more for compound operator?
tok = self.lex.token()
if tok.value in self._expr_operators:
rawtoks.append(tok)
tok = self.lex.token()
self.lex.return_token(tok)
return self._create_value(rawtoks)
def _parse_requires_segment(
self, tok: LexToken, rawtoks: typing.List[LexToken]
) -> LexToken:
# first token could be a name or ::
if tok.type == "DBL_COLON":
rawtoks.append(tok)
tok = self.lex.token()
while True:
# This token has to be a name or some other valid name-like thing
if tok.value == "decltype":
rawtoks.append(tok)
tok = self._next_token_must_be("(")
rawtoks.extend(self._consume_balanced_tokens(tok))
elif tok.type == "NAME":
rawtoks.append(tok)
else:
# not sure what I expected, but I didn't find it
raise self._parse_error(tok)
tok = self.lex.token()
# Maybe there's a specialization
if tok.value == "<":
rawtoks.extend(self._consume_balanced_tokens(tok))
tok = self.lex.token()
# Maybe we keep trying to parse this name
if tok.type == "DBL_COLON":
tok = self.lex.token()
continue
# Let the caller decide
return tok
# #
# Attributes # Attributes
# #
@@ -994,9 +863,7 @@ class CxxParser:
        self.visitor.on_using_namespace(state, names)
-    def _parse_using_declaration(
-        self, tok: LexToken, doxygen: typing.Optional[str]
-    ) -> None:
+    def _parse_using_declaration(self, tok: LexToken) -> None:
        """
        using_declaration: "using" ["typename"] ["::"] nested_name_specifier unqualified_id ";"
                         | "using" "::" unqualified_id ";"
@@ -1008,15 +875,12 @@ class CxxParser:
        typename, _ = self._parse_pqname(
            tok, fn_ok=True, compound_ok=True, fund_ok=True
        )
-        decl = UsingDecl(typename, self._current_access, doxygen)
+        decl = UsingDecl(typename, self._current_access)
        self.visitor.on_using_declaration(self.state, decl)
    def _parse_using_typealias(
-        self,
-        id_tok: LexToken,
-        template: typing.Optional[TemplateDecl],
-        doxygen: typing.Optional[str],
+        self, id_tok: LexToken, template: typing.Optional[TemplateDecl]
    ) -> None:
        """
        alias_declaration: "using" IDENTIFIER "=" type_id ";"
@@ -1030,7 +894,7 @@ class CxxParser:
        dtype = self._parse_cv_ptr(parsed_type)
-        alias = UsingAlias(id_tok.value, dtype, template, self._current_access, doxygen)
+        alias = UsingAlias(id_tok.value, dtype, template, self._current_access)
        self.visitor.on_using_alias(self.state, alias)
@@ -1059,9 +923,9 @@ class CxxParser:
                raise CxxParseError(
                    "unexpected using-declaration when parsing alias-declaration", tok
                )
-            self._parse_using_declaration(tok, doxygen)
+            self._parse_using_declaration(tok)
        else:
-            self._parse_using_typealias(tok, template, doxygen)
+            self._parse_using_typealias(tok, template)
        # All using things end with a semicolon
        self._next_token_must_be(";")
@@ -1599,7 +1463,6 @@ class CxxParser:
        fn_ok: bool = False,
        compound_ok: bool = False,
        fund_ok: bool = False,
-        ptr_to_member_ok: bool = False,
    ) -> typing.Tuple[PQName, typing.Optional[str]]:
        """
        Parses a possibly qualified function name or a type name, returns when
@@ -1727,12 +1590,7 @@
            if not self.lex.token_if("DBL_COLON"):
                break
-            tok = self._next_token_must_be("NAME", "operator", "template", "decltype", "*")
-            if tok.value == '*':
-                if not ptr_to_member_ok:
-                    raise self._parse_error(tok)
-                return name, 'PTR_TO_MEMBER'
+            tok = self._next_token_must_be("NAME", "operator", "template", "decltype")
        pqname = PQName(segments, classkey, has_typename)
@@ -1749,43 +1607,23 @@ class CxxParser:
    #
    def _parse_parameter(
-        self,
-        tok: typing.Optional[LexToken],
-        cls: typing.Type[PT],
-        concept_ok: bool,
-        end: str = ")",
-    ) -> typing.Tuple[PT, typing.Optional[Type]]:
+        self, tok: typing.Optional[LexToken], cls: typing.Type[PT], end: str = ")"
+    ) -> PT:
        """
        Parses a single parameter (excluding vararg parameters). Also used
        to parse template non-type parameters
-        Returns parameter type, abbreviated template type
        """
        param_name = None
        default = None
        param_pack = False
-        parsed_type: typing.Optional[Type]
-        at_type: typing.Optional[Type] = None
-        if not tok:
-            tok = self.lex.token()
-        # placeholder type, skip typename
-        if tok.type == "auto":
-            at_type = parsed_type = Type(PQName([AutoSpecifier()]))
-        else:
-            # required typename + decorators
-            parsed_type, mods = self._parse_type(tok)
-            if parsed_type is None:
-                raise self._parse_error(None)
-            mods.validate(var_ok=False, meth_ok=False, msg="parsing parameter")
-            # Could be a concept
-            if concept_ok and self.lex.token_if("auto"):
-                at_type = Type(parsed_type.typename)
-                parsed_type.typename = PQName([AutoSpecifier()])
+        # required typename + decorators
+        parsed_type, mods = self._parse_type(tok)
+        if parsed_type is None:
+            raise self._parse_error(None)
+        mods.validate(var_ok=False, meth_ok=False, msg="parsing parameter")
        dtype = self._parse_cv_ptr(parsed_type)
@@ -1799,31 +1637,10 @@ class CxxParser:
            toks = self._consume_balanced_tokens(tok)
            self.lex.return_tokens(toks[1:-1])
        # optional name
-        tok = self.lex.token_if("NAME", "final", "DBL_COLON")
+        tok = self.lex.token_if("NAME", "final")
        if tok:
-            pqname, op = self._parse_pqname(tok, fn_ok=True, ptr_to_member_ok=True)
-            while op == 'PTR_TO_MEMBER':
-                dtype = PointerToMember(base_type=Type(typename=pqname), ptr_to=dtype, const=dtype.const, volatile=dtype.volatile)
-                # dtype = self._parse_cv_ptr(dtype)
-                tok = self.lex.token_if("NAME", "final", "DBL_COLON")
-                if tok:
-                    pqname, op = self._parse_pqname(tok, fn_ok=True, ptr_to_member_ok=True)
-                else:
-                    pqname = None
-                    op = None
-            if pqname:
-                if len(pqname.segments) != 1:
-                    raise self._parse_error(None)
-                param_name = pqname.segments[0].name
-            if self.lex.token_if("("):
-                if isinstance(dtype, PointerToMember):
-                    params, vararg, at_params = self._parse_parameters(False)
-                    dtype.ptr_to = FunctionType(return_type=dtype.ptr_to, parameters=params, vararg=vararg)
-                else:
-                    assert(False) # TODO
+            param_name = tok.value
        # optional array parameter
        tok = self.lex.token_if("[")
@@ -1834,32 +1651,23 @@ class CxxParser:
        if self.lex.token_if("="):
            default = self._create_value(self._consume_value_until([], ",", end))
-        # abbreviated template pack
-        if at_type and self.lex.token_if("ELLIPSIS"):
-            param_pack = True
        param = cls(type=dtype, name=param_name, default=default, param_pack=param_pack)
        self.debug_print("parameter: %s", param)
-        return param, at_type
+        return param
-    def _parse_parameters(
-        self, concept_ok: bool
-    ) -> typing.Tuple[typing.List[Parameter], bool, typing.List[TemplateParam]]:
+    def _parse_parameters(self) -> typing.Tuple[typing.List[Parameter], bool]:
        """
-        Consumes function parameters and returns them, and vararg if found, and
-        promotes abbreviated template parameters to actual template parameters
-        if concept_ok is True
+        Consumes function parameters and returns them, and vararg if found
        """
        # starting at a (
        # special case: zero parameters
        if self.lex.token_if(")"):
-            return [], False, []
+            return [], False
        params: typing.List[Parameter] = []
        vararg = False
-        at_params: typing.List[TemplateParam] = []
        while True:
            if self.lex.token_if("ELLIPSIS"):
@@ -1867,17 +1675,8 @@ class CxxParser:
                self._next_token_must_be(")")
                break
-            param, at_type = self._parse_parameter(None, Parameter, concept_ok)
+            param = self._parse_parameter(None, Parameter)
            params.append(param)
-            if at_type:
-                at_params.append(
-                    TemplateNonTypeParam(
-                        type=at_type,
-                        param_idx=len(params) - 1,
-                        param_pack=param.param_pack,
-                    )
-                )
            tok = self._next_token_must_be(",", ")")
            if tok.value == ")":
                break
@@ -1892,14 +1691,15 @@ class CxxParser:
        ):
            params = []
-        return params, vararg, at_params
+        return params, vararg
    _auto_return_typename = PQName([AutoSpecifier()])
    def _parse_trailing_return_type(
-        self, return_type: typing.Optional[DecoratedType]
-    ) -> DecoratedType:
+        self, fn: typing.Union[Function, FunctionType]
+    ) -> None:
        # entry is "->"
+        return_type = fn.return_type
        if not (
            isinstance(return_type, Type)
            and not return_type.const
@@ -1918,7 +1718,8 @@ class CxxParser:
        dtype = self._parse_cv_ptr(parsed_type)
-        return dtype
+        fn.has_trailing_return = True
+        fn.return_type = dtype
def _parse_fn_end(self, fn: Function) -> None: def _parse_fn_end(self, fn: Function) -> None:
""" """
@@ -1936,22 +1737,12 @@ class CxxParser:
        if otok:
            toks = self._consume_balanced_tokens(otok)[1:-1]
            fn.noexcept = self._create_value(toks)
-        else:
-            rtok = self.lex.token_if("requires")
-            if rtok:
-                # requires on a function must always be accompanied by a template
-                if fn.template is None:
-                    raise self._parse_error(rtok)
-                fn.raw_requires = self._parse_requires(rtok)
-        if self.lex.token_if("ARROW"):
-            return_type = self._parse_trailing_return_type(fn.return_type)
-            fn.has_trailing_return = True
-            fn.return_type = return_type
        if self.lex.token_if("{"):
            self._discard_contents("{", "}")
            fn.has_body = True
+        elif self.lex.token_if("ARROW"):
+            self._parse_trailing_return_type(fn)
def _parse_method_end(self, method: Method) -> None: def _parse_method_end(self, method: Method) -> None:
""" """
@@ -1995,12 +1786,7 @@ class CxxParser:
            elif tok_value in ("&", "&&"):
                method.ref_qualifier = tok_value
            elif tok_value == "->":
-                return_type = self._parse_trailing_return_type(method.return_type)
-                method.has_trailing_return = True
-                method.return_type = return_type
-                if self.lex.token_if("{"):
-                    self._discard_contents("{", "}")
-                    method.has_body = True
+                self._parse_trailing_return_type(method)
                break
            elif tok_value == "throw":
                tok = self._next_token_must_be("(")
tok = self._next_token_must_be("(") tok = self._next_token_must_be("(")
@ -2011,8 +1797,6 @@ class CxxParser:
if otok: if otok:
toks = self._consume_balanced_tokens(otok)[1:-1] toks = self._consume_balanced_tokens(otok)[1:-1]
method.noexcept = self._create_value(toks) method.noexcept = self._create_value(toks)
elif tok_value == "requires":
method.raw_requires = self._parse_requires(tok)
else: else:
self.lex.return_token(tok) self.lex.return_token(tok)
break break
@ -2031,7 +1815,6 @@ class CxxParser:
is_friend: bool, is_friend: bool,
is_typedef: bool, is_typedef: bool,
msvc_convention: typing.Optional[LexToken], msvc_convention: typing.Optional[LexToken],
is_guide: bool = False,
) -> bool: ) -> bool:
""" """
Assumes the caller has already consumed the return type and name, this consumes the Assumes the caller has already consumed the return type and name, this consumes the
@@ -2055,16 +1838,7 @@ class CxxParser:
        state.location = location
        is_class_block = isinstance(state, ClassBlockState)
-        params, vararg, at_params = self._parse_parameters(True)
-        # Promote abbreviated template parameters
-        if at_params:
-            if template is None:
-                template = TemplateDecl(at_params)
-            elif isinstance(template, TemplateDecl):
-                template.params.extend(at_params)
-            else:
-                template[-1].params.extend(at_params)
+        params, vararg = self._parse_parameters()
        # A method outside of a class has multiple name segments
        multiple_name_segments = len(pqname.segments) > 1
@ -2108,21 +1882,7 @@ class CxxParser:
self.visitor.on_method_impl(state, method) self.visitor.on_method_impl(state, method)
return method.has_body or method.has_trailing_return return method.has_body or method.has_trailing_return
elif is_guide:
assert isinstance(state, (ExternBlockState, NamespaceBlockState))
if not self.lex.token_if("ARROW"):
raise self._parse_error(None, expected="Trailing return type")
return_type = self._parse_trailing_return_type(
Type(PQName([AutoSpecifier()]))
)
guide = DeductionGuide(
return_type,
name=pqname,
parameters=params,
doxygen=doxygen,
)
self.visitor.on_deduction_guide(state, guide)
return False
else: else:
assert return_type is not None assert return_type is not None
fn = Function( fn = Function(
@ -2148,8 +1908,6 @@ class CxxParser:
if fn.constexpr: if fn.constexpr:
raise CxxParseError("typedef function may not be constexpr") raise CxxParseError("typedef function may not be constexpr")
if fn.consteval:
raise CxxParseError("typedef function may not be consteval")
if fn.extern: if fn.extern:
raise CxxParseError("typedef function may not be extern") raise CxxParseError("typedef function may not be extern")
if fn.static: if fn.static:
@@ -2253,14 +2011,12 @@ class CxxParser:
            toks = self._consume_balanced_tokens(gtok)
            self.lex.return_tokens(toks[1:-1])
-            fn_params, vararg, _ = self._parse_parameters(False)
+            fn_params, vararg = self._parse_parameters()
            assert not isinstance(dtype, FunctionType)
            dtype = dtype_fn = FunctionType(dtype, fn_params, vararg)
            if self.lex.token_if("ARROW"):
-                return_type = self._parse_trailing_return_type(dtype_fn.return_type)
-                dtype_fn.has_trailing_return = True
-                dtype_fn.return_type = return_type
+                self._parse_trailing_return_type(dtype_fn)
        else:
            msvc_convention = None
@@ -2283,7 +2039,7 @@ class CxxParser:
            assert not isinstance(dtype, FunctionType)
            dtype = self._parse_array_type(aptok, dtype)
        elif aptok.type == "(":
-            fn_params, vararg, _ = self._parse_parameters(False)
+            fn_params, vararg = self._parse_parameters()
            # the type we already have is the return type of the function pointer
            assert not isinstance(dtype, FunctionType)
@@ -2320,7 +2076,7 @@ class CxxParser:
        return dtype
    # Applies to variables and return values
-    _type_kwd_both = {"const", "consteval", "constexpr", "constinit", "extern", "inline", "static"}
+    _type_kwd_both = {"const", "constexpr", "extern", "inline", "static"}
    # Only found on methods
    _type_kwd_meth = {"explicit", "virtual"}
@ -2441,7 +2197,6 @@ class CxxParser:
destructor = False destructor = False
op = None op = None
msvc_convention = None msvc_convention = None
is_guide = False
# If we have a leading (, that's either an obnoxious grouping # If we have a leading (, that's either an obnoxious grouping
# paren or it's a constructor # paren or it's a constructor
@@ -2492,32 +2247,15 @@ class CxxParser:
                # grouping paren like "void (name(int x));"
                toks = self._consume_balanced_tokens(tok)
-                # check to see if the next token is an arrow, and thus a trailing return
-                if self.lex.token_peek_if("ARROW"):
-                    self.lex.return_tokens(toks)
-                    # the leading name of the class/ctor has been parsed as a type before the parens
-                    pqname = parsed_type.typename
-                    is_guide = True
-                else:
-                    # .. not sure what it's grouping, so put it back?
-                    self.lex.return_tokens(toks[1:-1])
+                # .. not sure what it's grouping, so put it back?
+                self.lex.return_tokens(toks[1:-1])
        if dtype:
            msvc_convention = self.lex.token_if_val(*self._msvc_conventions)
            tok = self.lex.token_if_in_set(self._pqname_start_tokens)
            if tok:
-                pqname, op = self._parse_pqname(tok, fn_ok=True, ptr_to_member_ok=True)
-                while op == 'PTR_TO_MEMBER':
-                    dtype = PointerToMember(base_type=Type(typename=pqname), ptr_to=dtype, const=dtype.const, volatile=dtype.volatile)
-                    # dtype = self._parse_cv_ptr(dtype)
-                    tok = self.lex.token_if_in_set(self._pqname_start_tokens)
-                    if tok:
-                        pqname, op = self._parse_pqname(tok, fn_ok=True, ptr_to_member_ok=True)
-                    else:
-                        pqname = None
-                        op = None
+                pqname, op = self._parse_pqname(tok, fn_ok=True)
# TODO: "type fn(x);" is ambiguous here. Because this is a header # TODO: "type fn(x);" is ambiguous here. Because this is a header
# parser, we assume it's a function, not a variable declaration # parser, we assume it's a function, not a variable declaration
@@ -2528,25 +2266,20 @@ class CxxParser:
            if not pqname:
                raise self._parse_error(None)
-            if isinstance(dtype, PointerToMember):
-                params, vararg, at_params = self._parse_parameters(False)
-                dtype.ptr_to = FunctionType(return_type=dtype.ptr_to, parameters=params, vararg=vararg)
-            else:
-                return self._parse_function(
-                    mods,
-                    dtype,
-                    pqname,
-                    op,
-                    template,
-                    doxygen,
-                    location,
-                    constructor,
-                    destructor,
-                    is_friend,
-                    is_typedef,
-                    msvc_convention,
-                    is_guide,
-                )
+            return self._parse_function(
+                mods,
+                dtype,
+                pqname,
+                op,
+                template,
+                doxygen,
+                location,
+                constructor,
+                destructor,
+                is_friend,
+                is_typedef,
+                msvc_convention,
+            )
        elif msvc_convention:
            raise self._parse_error(msvc_convention)

View File

@@ -1,248 +1,37 @@
"""
-Contains optional preprocessor support functions
+Contains optional preprocessor support via pcpp
"""
import io
import re
import os
-import subprocess
-import sys
-import tempfile
import typing
from .options import PreprocessorFunction
+from pcpp import Preprocessor, OutputDirective, Action
class PreprocessorError(Exception):
    pass
-#
-# GCC preprocessor support
-#
+class _CustomPreprocessor(Preprocessor):
+    def __init__(self, encoding: typing.Optional[str]):
+        Preprocessor.__init__(self)
+        self.errors: typing.List[str] = []
+        self.assume_encoding = encoding
+    def on_error(self, file, line, msg):
+        self.errors.append(f"{file}:{line} error: {msg}")
+    def on_include_not_found(self, *ignored):
+        raise OutputDirective(Action.IgnoreAndPassThrough)
+    def on_comment(self, *ignored):
+        return True
-def _gcc_filter(fname: str, fp: typing.TextIO) -> str:
+def _filter_self(fname: str, fp: typing.TextIO) -> str:
-    new_output = io.StringIO()
-    keep = True
-    fname = fname.replace("\\", "\\\\")
-    for line in fp:
-        if line.startswith("# "):
-            last_quote = line.rfind('"')
-            if last_quote != -1:
-                keep = line[:last_quote].endswith(fname)
-        if keep:
-            new_output.write(line)
-    new_output.seek(0)
-    return new_output.read()
def make_gcc_preprocessor(
*,
defines: typing.List[str] = [],
include_paths: typing.List[str] = [],
retain_all_content: bool = False,
encoding: typing.Optional[str] = None,
gcc_args: typing.List[str] = ["g++"],
print_cmd: bool = True,
) -> PreprocessorFunction:
"""
Creates a preprocessor function that uses g++ to preprocess the input text.
gcc is a high performance and accurate precompiler, but if an #include
directive can't be resolved or other oddity exists in your input it will
throw an error.
:param defines: list of #define macros specified as "key value"
:param include_paths: list of directories to search for included files
:param retain_all_content: If False, only the parsed file content will be retained
:param encoding: If specified any include files are opened with this encoding
:param gcc_args: This is the path to G++ and any extra args you might want
:param print_cmd: Prints the gcc command as its executed
.. code-block:: python
pp = make_gcc_preprocessor()
options = ParserOptions(preprocessor=pp)
parse_file(content, options=options)
"""
if not encoding:
encoding = "utf-8"
def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
cmd = gcc_args + ["-w", "-E", "-C"]
for p in include_paths:
cmd.append(f"-I{p}")
for d in defines:
cmd.append(f"-D{d.replace(' ', '=')}")
kwargs = {"encoding": encoding}
if filename == "<str>":
cmd.append("-")
filename = "<stdin>"
if content is None:
raise PreprocessorError("no content specified for stdin")
kwargs["input"] = content
else:
cmd.append(filename)
if print_cmd:
print("+", " ".join(cmd), file=sys.stderr)
result: str = subprocess.check_output(cmd, **kwargs) # type: ignore
if not retain_all_content:
result = _gcc_filter(filename, io.StringIO(result))
return result
return _preprocess_file
#
# Microsoft Visual Studio preprocessor support
#
def _msvc_filter(fp: typing.TextIO) -> str:
# MSVC outputs the original file as the very first #line directive
# so we just use that
new_output = io.StringIO()
keep = True
first = fp.readline()
assert first.startswith("#line")
fname = first[first.find('"') :]
for line in fp:
if line.startswith("#line"):
keep = line.endswith(fname)
if keep:
new_output.write(line)
new_output.seek(0)
return new_output.read()
def make_msvc_preprocessor(
*,
defines: typing.List[str] = [],
include_paths: typing.List[str] = [],
retain_all_content: bool = False,
encoding: typing.Optional[str] = None,
msvc_args: typing.List[str] = ["cl.exe"],
print_cmd: bool = True,
) -> PreprocessorFunction:
"""
Creates a preprocessor function that uses cl.exe from Microsoft Visual Studio
to preprocess the input text. cl.exe is not typically on the path, so you
may need to open the correct developer tools shell or pass in the correct path
to cl.exe in the `msvc_args` parameter.
cl.exe will throw an error if a file referenced by an #include directive is not found.
:param defines: list of #define macros specified as "key value"
:param include_paths: list of directories to search for included files
:param retain_all_content: If False, only the parsed file content will be retained
:param encoding: If specified any include files are opened with this encoding
:param msvc_args: This is the path to cl.exe and any extra args you might want
:param print_cmd: Prints the command as its executed
.. code-block:: python
pp = make_msvc_preprocessor()
options = ParserOptions(preprocessor=pp)
parse_file(content, options=options)
"""
if not encoding:
encoding = "utf-8"
def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
cmd = msvc_args + ["/nologo", "/E", "/C"]
for p in include_paths:
cmd.append(f"/I{p}")
for d in defines:
cmd.append(f"/D{d.replace(' ', '=')}")
tfpname = None
try:
kwargs = {"encoding": encoding}
if filename == "<str>":
if content is None:
raise PreprocessorError("no content specified for stdin")
tfp = tempfile.NamedTemporaryFile(
mode="w", encoding=encoding, suffix=".h", delete=False
)
tfpname = tfp.name
tfp.write(content)
tfp.close()
cmd.append(tfpname)
else:
cmd.append(filename)
if print_cmd:
print("+", " ".join(cmd), file=sys.stderr)
result: str = subprocess.check_output(cmd, **kwargs) # type: ignore
if not retain_all_content:
result = _msvc_filter(io.StringIO(result))
finally:
if tfpname:
os.unlink(tfpname)
return result
return _preprocess_file
#
# PCPP preprocessor support (not installed by default)
#
try:
import pcpp
from pcpp import Preprocessor, OutputDirective, Action
class _CustomPreprocessor(Preprocessor):
def __init__(
self,
encoding: typing.Optional[str],
passthru_includes: typing.Optional["re.Pattern"],
):
Preprocessor.__init__(self)
self.errors: typing.List[str] = []
self.assume_encoding = encoding
self.passthru_includes = passthru_includes
def on_error(self, file, line, msg):
self.errors.append(f"{file}:{line} error: {msg}")
def on_include_not_found(self, *ignored):
raise OutputDirective(Action.IgnoreAndPassThrough)
def on_comment(self, *ignored):
return True
except ImportError:
pcpp = None
def _pcpp_filter(fname: str, fp: typing.TextIO) -> str:
# the output of pcpp includes the contents of all the included files, which # the output of pcpp includes the contents of all the included files, which
# isn't what a typical user of cxxheaderparser would want, so we strip out # isn't what a typical user of cxxheaderparser would want, so we strip out
# the line directives and any content that isn't in our original file # the line directives and any content that isn't in our original file
@ -269,22 +58,12 @@ def make_pcpp_preprocessor(
include_paths: typing.List[str] = [], include_paths: typing.List[str] = [],
retain_all_content: bool = False, retain_all_content: bool = False,
encoding: typing.Optional[str] = None, encoding: typing.Optional[str] = None,
passthru_includes: typing.Optional["re.Pattern"] = None,
) -> PreprocessorFunction: ) -> PreprocessorFunction:
""" """
Creates a preprocessor function that uses pcpp (which must be installed Creates a preprocessor function that uses pcpp (which must be installed
separately) to preprocess the input text. separately) to preprocess the input text.
If missing #include files are encountered, this preprocessor will ignore the
error. This preprocessor is pure python so it's very portable, and is a good
choice if performance isn't critical.
:param defines: list of #define macros specified as "key value"
:param include_paths: list of directories to search for included files
:param retain_all_content: If False, only the parsed file content will be retained
:param encoding: If specified any include files are opened with this encoding :param encoding: If specified any include files are opened with this encoding
:param passthru_includes: If specified any #include directives that match the
compiled regex pattern will be part of the output.
.. code-block:: python .. code-block:: python
@@ -295,11 +74,8 @@ def make_pcpp_preprocessor(
    """
-    if pcpp is None:
-        raise PreprocessorError("pcpp is not installed")
-    def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
-        pp = _CustomPreprocessor(encoding, passthru_includes)
+    def _preprocess_file(filename: str, content: str) -> str:
+        pp = _CustomPreprocessor(encoding)
        if include_paths:
            for p in include_paths:
                pp.add_path(p)
@ -310,10 +86,6 @@ def make_pcpp_preprocessor(
if not retain_all_content: if not retain_all_content:
pp.line_directive = "#line" pp.line_directive = "#line"
if content is None:
with open(filename, "r", encoding=encoding) as fp:
content = fp.read()
pp.parse(content, filename) pp.parse(content, filename)
if pp.errors: if pp.errors:
@@ -339,6 +111,6 @@ def make_pcpp_preprocessor(
            filename = filename.replace(os.sep, "/")
            break
-        return _pcpp_filter(filename, fp)
+        return _filter_self(filename, fp)
    return _preprocess_file

View File

@ -34,8 +34,6 @@ from dataclasses import dataclass, field
from .types import ( from .types import (
ClassDecl, ClassDecl,
Concept,
DeductionGuide,
EnumDecl, EnumDecl,
Field, Field,
ForwardDecl, ForwardDecl,
@ -115,18 +113,12 @@ class NamespaceScope:
using_alias: typing.List[UsingAlias] = field(default_factory=list) using_alias: typing.List[UsingAlias] = field(default_factory=list)
ns_alias: typing.List[NamespaceAlias] = field(default_factory=list) ns_alias: typing.List[NamespaceAlias] = field(default_factory=list)
#: Concepts
concepts: typing.List[Concept] = field(default_factory=list)
#: Explicit template instantiations #: Explicit template instantiations
template_insts: typing.List[TemplateInst] = field(default_factory=list) template_insts: typing.List[TemplateInst] = field(default_factory=list)
#: Child namespaces #: Child namespaces
namespaces: typing.Dict[str, "NamespaceScope"] = field(default_factory=dict) namespaces: typing.Dict[str, "NamespaceScope"] = field(default_factory=dict)
#: Deduction guides
deduction_guides: typing.List[DeductionGuide] = field(default_factory=list)
Block = typing.Union[ClassScope, NamespaceScope] Block = typing.Union[ClassScope, NamespaceScope]
@ -251,9 +243,6 @@ class SimpleCxxVisitor:
def on_namespace_end(self, state: SNamespaceBlockState) -> None: def on_namespace_end(self, state: SNamespaceBlockState) -> None:
pass pass
def on_concept(self, state: SNonClassBlockState, concept: Concept) -> None:
state.user_data.concepts.append(concept)
def on_namespace_alias( def on_namespace_alias(
self, state: SNonClassBlockState, alias: NamespaceAlias self, state: SNonClassBlockState, alias: NamespaceAlias
) -> None: ) -> None:
@ -321,11 +310,6 @@ class SimpleCxxVisitor:
def on_class_end(self, state: SClassBlockState) -> None: def on_class_end(self, state: SClassBlockState) -> None:
pass pass
def on_deduction_guide(
self, state: SNonClassBlockState, guide: DeductionGuide
) -> None:
state.user_data.deduction_guides.append(guide)
def parse_string( def parse_string(
content: str, content: str,
@@ -364,10 +348,7 @@ def parse_file(
    if filename == "-":
        content = sys.stdin.read()
    else:
-        content = None
-    visitor = SimpleCxxVisitor()
-    parser = CxxParser(filename, content, visitor, options)
-    parser.parse()
-    return visitor.data
+        with open(filename, encoding=encoding) as fp:
+            content = fp.read()
+    return parse_string(content, filename=filename, options=options)

View File

@@ -306,7 +306,7 @@ class Array:
    """
    #: The type that this is an array of
-    array_of: typing.Union["Array", "Pointer", "PointerToMember", Type]
+    array_of: typing.Union["Array", "Pointer", Type]
    #: Size of the array
    #:
@@ -332,7 +332,7 @@ class Pointer:
    """
    #: Thing that this points to
-    ptr_to: typing.Union[Array, FunctionType, "Pointer", "PointerToMember", Type]
+    ptr_to: typing.Union[Array, FunctionType, "Pointer", Type]
    const: bool = False
    volatile: bool = False
@ -356,39 +356,6 @@ class Pointer:
else: else:
return f"{ptr_to.format()}*{c}{v} {name}" return f"{ptr_to.format()}*{c}{v} {name}"
@dataclass
class PointerToMember:
"""
Pointer to a class member. (``Class::* int``)
"""
#: Thing that this points to
base_type: Type
ptr_to: typing.Union[Array, FunctionType, "Pointer", "PointerToMember", Type]
const: bool = False
volatile: bool = False
def format(self) -> str:
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"({self.base_type.format()}::*{c}{v})")
else:
return f"{ptr_to.format()} {self.base_type.format()}::*{c}{v}"
def format_decl(self, name: str):
"""Format as a named declaration"""
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"({self.base_type.format()}::*{c}{v} {name})")
else:
return f"{ptr_to.format()} {self.base_type.format()}::*{c}{v} {name}"
@dataclass @dataclass
class Reference: class Reference:
@@ -396,7 +363,7 @@ class Reference:
    A lvalue (``&``) reference
    """
-    ref_to: typing.Union[Array, FunctionType, Pointer, PointerToMember, Type]
+    ref_to: typing.Union[Array, FunctionType, Pointer, Type]
    def format(self) -> str:
        ref_to = self.ref_to
@@ -421,7 +388,7 @@ class MoveReference:
    An rvalue (``&&``) reference
    """
-    moveref_to: typing.Union[Array, FunctionType, Pointer, PointerToMember, Type]
+    moveref_to: typing.Union[Array, FunctionType, Pointer, Type]
    def format(self) -> str:
        return f"{self.moveref_to.format()}&&"
@@ -435,7 +402,7 @@ class MoveReference:
#:
#: .. note:: There can only be one of FunctionType or Type in a DecoratedType
#: chain
-DecoratedType = typing.Union[Array, Pointer, PointerToMember, MoveReference, Reference, Type]
+DecoratedType = typing.Union[Array, Pointer, MoveReference, Reference, Type]
@dataclass
@ -487,19 +454,12 @@ class TemplateNonTypeParam:
template <auto T> template <auto T>
~~~~~~ ~~~~~~
// abbreviated template parameters are converted to this and param_idx is set
void fn(C auto p)
~~~~~~
""" """
type: DecoratedType type: DecoratedType
name: typing.Optional[str] = None name: typing.Optional[str] = None
default: typing.Optional[Value] = None default: typing.Optional[Value] = None
#: If this was promoted, the parameter index that this corresponds with
param_idx: typing.Optional[int] = None
#: Contains a ``...`` #: Contains a ``...``
param_pack: bool = False param_pack: bool = False
@ -553,12 +513,6 @@ class TemplateDecl:
params: typing.List[TemplateParam] = field(default_factory=list) params: typing.List[TemplateParam] = field(default_factory=list)
# Currently don't interpret requires, if that changes in the future
# then this API will change.
#: template <typename T> requires ...
raw_requires_pre: typing.Optional[Value] = None
#: If no template, this is None. This is a TemplateDecl if this there is a single #: If no template, this is None. This is a TemplateDecl if this there is a single
#: declaration: #: declaration:
@ -597,31 +551,6 @@ class TemplateInst:
doxygen: typing.Optional[str] = None doxygen: typing.Optional[str] = None
@dataclass
class Concept:
"""
Preliminary support for consuming headers that contain concepts, but
not trying to actually make sense of them at this time. If this is
something you care about, pull requests are welcomed!
.. code-block:: c++
template <class T>
concept Meowable = is_meowable<T>;
template<typename T>
concept Addable = requires (T x) { x + x; };
"""
template: TemplateDecl
name: str
#: In the future this will be removed if we fully parse the expression
raw_constraint: Value
doxygen: typing.Optional[str] = None
@dataclass @dataclass
class ForwardDecl: class ForwardDecl:
""" """
@ -721,7 +650,6 @@ class Function:
doxygen: typing.Optional[str] = None doxygen: typing.Optional[str] = None
constexpr: bool = False constexpr: bool = False
consteval: bool = False
extern: typing.Union[bool, str] = False extern: typing.Union[bool, str] = False
static: bool = False static: bool = False
inline: bool = False inline: bool = False
@ -761,13 +689,6 @@ class Function:
#: is the string "conversion" and the full Type is found in return_type #: is the string "conversion" and the full Type is found in return_type
operator: typing.Optional[str] = None operator: typing.Optional[str] = None
#: A requires constraint following the function declaration. If you need the
#: prior, look at TemplateDecl.raw_requires_pre. At the moment this is just
#: a raw value, if we interpret it in the future this will change.
#:
#: template <typename T> int main() requires ...
raw_requires: typing.Optional[Value] = None
@dataclass @dataclass
class Method(Function): class Method(Function):
@ -858,7 +779,6 @@ class Variable:
value: typing.Optional[Value] = None value: typing.Optional[Value] = None
constexpr: bool = False constexpr: bool = False
constinit: bool = False
extern: typing.Union[bool, str] = False extern: typing.Union[bool, str] = False
static: bool = False static: bool = False
inline: bool = False inline: bool = False
@ -885,10 +805,8 @@ class Field:
bits: typing.Optional[int] = None bits: typing.Optional[int] = None
constexpr: bool = False constexpr: bool = False
constinit: bool = False
mutable: bool = False mutable: bool = False
static: bool = False static: bool = False
inline: bool = False
doxygen: typing.Optional[str] = None doxygen: typing.Optional[str] = None
@ -906,9 +824,6 @@ class UsingDecl:
#: If within a class, the access level for this decl #: If within a class, the access level for this decl
access: typing.Optional[str] = None access: typing.Optional[str] = None
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass @dataclass
class UsingAlias: class UsingAlias:
@ -929,24 +844,3 @@ class UsingAlias:
#: If within a class, the access level for this decl #: If within a class, the access level for this decl
access: typing.Optional[str] = None access: typing.Optional[str] = None
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass
class DeductionGuide:
"""
.. code-block:: c++
template <class T>
MyClass(T) -> MyClass(int);
"""
#: Only constructors and destructors don't have a return type
result_type: typing.Optional[DecoratedType]
name: PQName
parameters: typing.List[Parameter]
doxygen: typing.Optional[str] = None

View File

@ -8,8 +8,6 @@ else:
from .types import ( from .types import (
Concept,
DeductionGuide,
EnumDecl, EnumDecl,
Field, Field,
ForwardDecl, ForwardDecl,
@ -91,14 +89,6 @@ class CxxVisitor(Protocol):
Called when a ``namespace`` alias is encountered Called when a ``namespace`` alias is encountered
""" """
def on_concept(self, state: NonClassBlockState, concept: Concept) -> None:
"""
.. code-block:: c++
template <class T>
concept Meowable = is_meowable<T>;
"""
def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None: def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
""" """
Called when a forward declaration is encountered Called when a forward declaration is encountered
@ -237,13 +227,6 @@ class CxxVisitor(Protocol):
``on_variable`` for each instance declared. ``on_variable`` for each instance declared.
""" """
def on_deduction_guide(
self, state: NonClassBlockState, guide: DeductionGuide
) -> None:
"""
Called when a deduction guide is encountered
"""
class NullVisitor: class NullVisitor:
""" """
@ -271,9 +254,6 @@ class NullVisitor:
def on_namespace_end(self, state: NamespaceBlockState) -> None: def on_namespace_end(self, state: NamespaceBlockState) -> None:
return None return None
def on_concept(self, state: NonClassBlockState, concept: Concept) -> None:
return None
def on_namespace_alias( def on_namespace_alias(
self, state: NonClassBlockState, alias: NamespaceAlias self, state: NonClassBlockState, alias: NamespaceAlias
) -> None: ) -> None:
@ -326,10 +306,5 @@ class NullVisitor:
def on_class_end(self, state: ClassBlockState) -> None: def on_class_end(self, state: ClassBlockState) -> None:
return None return None
def on_deduction_guide(
self, state: NonClassBlockState, guide: DeductionGuide
) -> None:
return None
null_visitor = NullVisitor() null_visitor = NullVisitor()

View File

@@ -12,19 +12,27 @@ import pkg_resources
# -- Project information -----------------------------------------------------
project = "cxxheaderparser"
-copyright = "2020-2023, Dustin Spicuzza"
+copyright = "2020-2021, Dustin Spicuzza"
author = "Dustin Spicuzza"
# The full version, including alpha/beta/rc tags
release = pkg_resources.get_distribution("cxxheaderparser").version
+# -- RTD configuration ------------------------------------------------
+# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
+on_rtd = os.environ.get("READTHEDOCS", None) == "True"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = ["sphinx.ext.autodoc", "sphinx_autodoc_typehints", "sphinx_rtd_theme"]
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx_autodoc_typehints",
+]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@@ -39,7 +47,13 @@ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
+if not on_rtd: # only import and set the theme if we're building docs locally
+    import sphinx_rtd_theme
-html_theme = "sphinx_rtd_theme"
+    html_theme = "sphinx_rtd_theme"
+    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+else:
+    html_theme = "default"
always_document_param_types = True

View File

@ -1,353 +0,0 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
#
# Tests various aspects of abbreviated function templates
#
from cxxheaderparser.simple import NamespaceScope, ParsedData, parse_string
from cxxheaderparser.types import (
AutoSpecifier,
Function,
FundamentalSpecifier,
NameSpecifier,
PQName,
Parameter,
Pointer,
Reference,
TemplateDecl,
TemplateNonTypeParam,
Type,
)
def test_abv_template_f1() -> None:
content = """
void f1(auto); // same as template<class T> void f1(T)
void f1p(auto p);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f1")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f1p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f2() -> None:
content = """
void f2(C1 auto); // same as template<C1 T> void f2(T), if C1 is a concept
void f2p(C1 auto p);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f2")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C1")])
),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f2p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C1")])
),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f3() -> None:
content = """
void f3(C2 auto...); // same as template<C2... Ts> void f3(Ts...), if C2 is a
// concept
void f3p(C2 auto p...);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f3")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_pack=True,
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
param_pack=True,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f3p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
param_pack=True,
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
param_pack=True,
)
]
),
),
]
)
)
def test_abv_template_f4() -> None:
content = """
void f4(C2 auto, ...); // same as template<C2 T> void f4(T...), if C2 is a concept
void f4p(C2 auto p,...);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f4")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
vararg=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f4p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
vararg=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f5() -> None:
content = """
void f5(const C3 auto *, C4 auto &); // same as template<C3 T, C4 U> void f5(const T*, U&);
void f5p(const C3 auto * p1, C4 auto &p2);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f5")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[AutoSpecifier()],
),
const=True,
)
)
),
Parameter(
type=Reference(
ref_to=Type(typename=PQName(segments=[AutoSpecifier()]))
)
),
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="C3")]
),
),
param_idx=0,
),
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C4")])
),
param_idx=1,
),
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f5p")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[AutoSpecifier()],
),
const=True,
)
),
name="p1",
),
Parameter(
type=Reference(
ref_to=Type(typename=PQName(segments=[AutoSpecifier()]))
),
name="p2",
),
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="C3")]
),
),
param_idx=0,
),
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C4")])
),
param_idx=1,
),
]
),
),
]
)
)

View File

@ -3336,40 +3336,3 @@ def test_constructor_outside_class() -> None:
] ]
) )
) )
def test_class_inline_static() -> None:
content = """
struct X {
inline static bool Foo = 1;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="X")], classkey="struct"
)
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="bool")]
)
),
name="Foo",
value=Value(tokens=[Token(value="1")]),
static=True,
inline=True,
)
],
)
]
)
)

View File

@ -1,878 +0,0 @@
from cxxheaderparser.simple import ClassScope, NamespaceScope, ParsedData, parse_string
from cxxheaderparser.tokfmt import Token
from cxxheaderparser.types import (
AutoSpecifier,
ClassDecl,
Concept,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
PQName,
Parameter,
TemplateArgument,
TemplateDecl,
TemplateNonTypeParam,
TemplateSpecialization,
TemplateTypeParam,
Type,
Value,
Variable,
)
def test_concept_basic_constraint() -> None:
content = """
template <class T, class U>
concept Derived = std::is_base_of<U, T>::value;
template <Derived<Base> T> void f(T); // T is constrained by Derived<T, Base>
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
)
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Derived",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Base"
)
]
)
)
)
]
),
)
]
)
),
name="T",
)
]
),
)
],
concepts=[
Concept(
template=TemplateDecl(
params=[
TemplateTypeParam(typekey="class", name="T"),
TemplateTypeParam(typekey="class", name="U"),
]
),
name="Derived",
raw_constraint=Value(
tokens=[
Token(value="std"),
Token(value="::"),
Token(value="is_base_of"),
Token(value="<"),
Token(value="U"),
Token(value=","),
Token(value="T"),
Token(value=">"),
Token(value="::"),
Token(value="value"),
]
),
)
],
)
)
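
A small consumer-side sketch, assuming the Concept/Value layout asserted above; joining the raw tokens with spaces is only a display convenience, not a faithful re-rendering:

from cxxheaderparser.simple import parse_string

content = """
template <class T, class U>
concept Derived = std::is_base_of<U, T>::value;
"""
data = parse_string(content)
for c in data.namespace.concepts:
    # raw_constraint is kept as unparsed tokens; rebuild a readable string
    text = " ".join(tok.value for tok in c.raw_constraint.tokens)
    print(f"concept {c.name} = {text}")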
def test_concept_basic_constraint2() -> None:
content = """
template <class T> constexpr bool is_meowable = true;
template <class T> constexpr bool is_cat = true;
template <class T>
concept Meowable = is_meowable<T>;
template <class T>
concept BadMeowableCat = is_meowable<T> && is_cat<T>;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="is_meowable")]),
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="bool")])
),
value=Value(tokens=[Token(value="true")]),
constexpr=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="is_cat")]),
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="bool")])
),
value=Value(tokens=[Token(value="true")]),
constexpr=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
),
],
concepts=[
Concept(
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
name="Meowable",
raw_constraint=Value(
tokens=[
Token(value="is_meowable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
Concept(
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
name="BadMeowableCat",
raw_constraint=Value(
tokens=[
Token(value="is_meowable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="&&"),
Token(value="is_cat"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
],
)
)
def test_concept_basic_requires() -> None:
content = """
template <typename T>
concept Hashable = requires(T a) {
{ std::hash<T>{}(a) } -> std::convertible_to<std::size_t>;
};
template <Hashable T> void f(T) {}
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
)
)
],
has_body=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="Hashable")]
)
),
name="T",
)
]
),
)
],
concepts=[
Concept(
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
name="Hashable",
raw_constraint=Value(
tokens=[
Token(value="requires"),
Token(value="("),
Token(value="T"),
Token(value="a"),
Token(value=")"),
Token(value="{"),
Token(value="{"),
Token(value="std"),
Token(value="::"),
Token(value="hash"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="{"),
Token(value="}"),
Token(value="("),
Token(value="a"),
Token(value=")"),
Token(value="}"),
Token(value="->"),
Token(value="std"),
Token(value="::"),
Token(value="convertible_to"),
Token(value="<"),
Token(value="std"),
Token(value="::"),
Token(value="size_t"),
Token(value=">"),
Token(value=";"),
Token(value="}"),
]
),
)
],
)
)
def test_concept_nested_requirements() -> None:
content = """
template<class T>
concept Semiregular = DefaultConstructible<T> &&
CopyConstructible<T> && CopyAssignable<T> && Destructible<T> &&
requires(T a, std::size_t n)
{
requires Same<T*, decltype(&a)>; // nested: "Same<...> evaluates to true"
{ a.~T() } noexcept; // compound: "a.~T()" is a valid expression that doesn't throw
requires Same<T*, decltype(new T)>; // nested: "Same<...> evaluates to true"
requires Same<T*, decltype(new T[n])>; // nested
{ delete new T }; // compound
{ delete new T[n] }; // compound
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
concepts=[
Concept(
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
name="Semiregular",
raw_constraint=Value(
tokens=[
Token(value="DefaultConstructible"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="&&"),
Token(value="CopyConstructible"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="&&"),
Token(value="CopyAssignable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="&&"),
Token(value="Destructible"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="&&"),
Token(value="requires"),
Token(value="("),
Token(value="T"),
Token(value="a"),
Token(value=","),
Token(value="std"),
Token(value="::"),
Token(value="size_t"),
Token(value="n"),
Token(value=")"),
Token(value="{"),
Token(value="requires"),
Token(value="Same"),
Token(value="<"),
Token(value="T"),
Token(value="*"),
Token(value=","),
Token(value="decltype"),
Token(value="("),
Token(value="&"),
Token(value="a"),
Token(value=")"),
Token(value=">"),
Token(value=";"),
Token(value="{"),
Token(value="a"),
Token(value="."),
Token(value="~T"),
Token(value="("),
Token(value=")"),
Token(value="}"),
Token(value="noexcept"),
Token(value=";"),
Token(value="requires"),
Token(value="Same"),
Token(value="<"),
Token(value="T"),
Token(value="*"),
Token(value=","),
Token(value="decltype"),
Token(value="("),
Token(value="new"),
Token(value="T"),
Token(value=")"),
Token(value=">"),
Token(value=";"),
Token(value="requires"),
Token(value="Same"),
Token(value="<"),
Token(value="T"),
Token(value="*"),
Token(value=","),
Token(value="decltype"),
Token(value="("),
Token(value="new"),
Token(value="T"),
Token(value="["),
Token(value="n"),
Token(value="]"),
Token(value=")"),
Token(value=">"),
Token(value=";"),
Token(value="{"),
Token(value="delete"),
Token(value="new"),
Token(value="T"),
Token(value="}"),
Token(value=";"),
Token(value="{"),
Token(value="delete"),
Token(value="new"),
Token(value="T"),
Token(value="["),
Token(value="n"),
Token(value="]"),
Token(value="}"),
Token(value=";"),
Token(value="}"),
]
),
)
]
)
)
def test_concept_requires_class() -> None:
content = """
// clang-format off
template <typename T>
concept Number = std::integral<T> || std::floating_point<T>;
template <typename T>
requires Number<T>
struct WrappedNumber {};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="WrappedNumber")],
classkey="struct",
),
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="Number"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
)
)
],
concepts=[
Concept(
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
name="Number",
raw_constraint=Value(
tokens=[
Token(value="std"),
Token(value="::"),
Token(value="integral"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="||"),
Token(value="std"),
Token(value="::"),
Token(value="floating_point"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
)
],
)
)
def test_requires_last_elem() -> None:
content = """
template<typename T>
void f(T&&) requires Eq<T>; // can appear as the last element of a function declarator
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=MoveReference(
moveref_to=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
)
)
)
],
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
raw_requires=Value(
tokens=[
Token(value="Eq"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
)
]
)
)
def test_requires_first_elem1() -> None:
content = """
template<typename T> requires Addable<T> // or right after a template parameter list
T add(T a, T b) { return a + b; }
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="a",
),
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="b",
),
],
has_body=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="Addable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
)
]
)
)
def test_requires_first_elem2() -> None:
content = """
template<typename T> requires std::is_arithmetic_v<T>
T add(T a, T b) { return a + b; }
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="a",
),
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="b",
),
],
has_body=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="std"),
Token(value="is_arithmetic_v"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
)
]
)
)
def test_requires_compound() -> None:
content = """
template<typename T> requires Addable<T> || Subtractable<T>
T add(T a, T b) { return a + b; }
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="a",
),
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="b",
),
],
has_body=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="Addable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="||"),
Token(value="Subtractable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
)
]
)
)
def test_requires_ad_hoc() -> None:
content = """
template<typename T>
requires requires (T x) { x + x; } // ad-hoc constraint, note keyword used twice
T add(T a, T b) { return a + b; }
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="a",
),
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="b",
),
],
has_body=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="requires"),
Token(value="("),
Token(value="T"),
Token(value="x"),
Token(value=")"),
Token(value="{"),
Token(value="x"),
Token(value="+"),
Token(value="x"),
Token(value=";"),
Token(value="}"),
]
),
),
)
]
)
)
def test_requires_both() -> None:
content = """
// clang-format off
template<typename T>
requires Addable<T>
auto f1(T a, T b) requires Subtractable<T>;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(typename=PQName(segments=[AutoSpecifier()])),
name=PQName(segments=[NameSpecifier(name="f1")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="a",
),
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
),
name="b",
),
],
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")],
raw_requires_pre=Value(
tokens=[
Token(value="Addable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
),
raw_requires=Value(
tokens=[
Token(value="Subtractable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
]
),
)
]
)
)
def test_requires_paren() -> None:
content = """
// clang-format off
template<class T>
void h(T) requires (is_purrable<T>());
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="h")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
)
)
],
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
raw_requires=Value(
tokens=[
Token(value="("),
Token(value="is_purrable"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value="("),
Token(value=")"),
Token(value=")"),
]
),
)
]
)
)
def test_non_template_requires() -> None:
content = """
// clang-format off
template <class T>
struct Payload
{
constexpr Payload(T v)
requires(std::is_pod_v<T>)
: Value(v)
{
}
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Payload")], classkey="struct"
),
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T")]
),
),
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="Payload")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="T")]
)
),
name="v",
)
],
constexpr=True,
has_body=True,
raw_requires=Value(
tokens=[
Token(value="("),
Token(value="std"),
Token(value="::"),
Token(value="is_pod_v"),
Token(value="<"),
Token(value="T"),
Token(value=">"),
Token(value=")"),
]
),
access="public",
constructor=True,
)
],
)
]
)
)
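
As the tests above show, a requires-clause can be recorded in two places, so a consumer has to check both. A hedged sketch, assuming the raw_requires_pre and raw_requires fields used in the expected data:

from cxxheaderparser.simple import parse_string

content = """
template<typename T>
requires Addable<T>
auto f1(T a, T b) requires Subtractable<T>;
"""
fn = parse_string(content).namespace.functions[0]

def fmt(value):
    # assumption: space-joined tokens are good enough for logging
    return " ".join(t.value for t in value.tokens) if value else None

print("after template params:", fmt(fn.template.raw_requires_pre))  # Addable < T >
print("trailing clause:", fmt(fn.raw_requires))  # Subtractable < T >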

View File

@ -1,104 +0,0 @@
def test_constinit_consteval() -> None:
content = """
struct S
{
static constinit int i = 5;
static consteval int func(int i) { return i*i; }
};
template<std::size_t numBits>
consteval auto getUintType()
{
if constexpr (numBits == 8) {
return std::uint8_t{};
}
else if constexpr (numBits == 16) {
return std::uint16_t{};
}
else if constexpr (numBits == 32) {
return std::uint32_t{};
}
else if constexpr (numBits == 64) {
return std::uint64_t{};
}
}
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="i",
value=Value(tokens=[Token(value="5")]),
constinit=True,
static=True,
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="func")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="i",
)
],
consteval=True,
static=True,
has_body=True,
access="public",
)
],
)
],
functions=[
Function(
return_type=Type(typename=PQName(segments=[AutoSpecifier()])),
name=PQName(segments=[NameSpecifier(name="getUintType")]),
parameters=[],
consteval=True,
has_body=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="numBits",
)
]
),
)
],
)
)
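
A short sketch that filters the parsed members of S by the C++20 specifiers asserted above (constinit on fields, consteval on methods); it assumes those boolean attributes as shown in the expected data:

from cxxheaderparser.simple import parse_string

content = """
struct S {
    static constinit int i = 5;
    static consteval int func(int i) { return i*i; }
};
"""
cls = parse_string(content, cleandoc=True).namespace.classes[0]
print([f.name for f in cls.fields if f.constinit])                     # ['i']
print([m.name.segments[-1].name for m in cls.methods if m.consteval])  # ['func']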

View File

@ -26,7 +26,6 @@ from cxxheaderparser.types import (
Type, Type,
Typedef, Typedef,
UsingDecl, UsingDecl,
UsingAlias,
Value, Value,
Variable, Variable,
) )
@ -437,53 +436,3 @@ def test_doxygen_attribute() -> None:
] ]
) )
) )
def test_doxygen_using_decl() -> None:
content = """
// clang-format off
/// Comment
using ns::ClassName;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
using=[
UsingDecl(
typename=PQName(
segments=[
NameSpecifier(name="ns"),
NameSpecifier(name="ClassName"),
]
),
doxygen="/// Comment",
)
]
)
)
def test_doxygen_using_alias() -> None:
content = """
// clang-format off
/// Comment
using alias = sometype;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
using_alias=[
UsingAlias(
alias="alias",
type=Type(
typename=PQName(segments=[NameSpecifier(name="sometype")])
),
doxygen="/// Comment",
)
]
)
)
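
A tiny sketch, assuming doxygen comments attach to using declarations and aliases as asserted above:

from cxxheaderparser.simple import parse_string

content = "/// Comment\nusing alias = sometype;"
ua = parse_string(content).namespace.using_alias[0]
print(ua.alias, ua.doxygen)  # alias /// Comment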

View File

@ -1194,67 +1194,3 @@ def test_auto_decltype_return() -> None:
] ]
) )
) )
def test_fn_trailing_return_with_body() -> None:
content = """
auto test() -> void
{
}
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="test")]),
parameters=[],
has_body=True,
has_trailing_return=True,
)
]
)
)
def test_method_trailing_return_with_body() -> None:
content = """
struct X {
auto test() -> void
{
}
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="X")], classkey="struct"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="test")]),
parameters=[],
has_body=True,
has_trailing_return=True,
access="public",
)
],
)
]
)
)
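
A one-line check sketch, assuming the has_trailing_return flag asserted above, to tell "auto test() -> void" apart from a conventionally spelled declaration:

from cxxheaderparser.simple import parse_string

fn = parse_string("auto test() -> void {}").namespace.functions[0]
# the trailing return type is resolved to void and the spelling is flagged
print(fn.has_trailing_return, fn.return_type.typename.segments[0].name)  # True void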

View File

@ -1,188 +0,0 @@
def test_pointer_to_member() -> None:
content = """
class Class
{
};
int Class::* intPtr;
int (Class::* intReturnFuncPtr)();
void (Class::* intParamFuncPtr)(int);
void (Class::* varargFuncPtr)(...);
template<typename... TArgs>
int takesFunc(void (*func)(TArgs...));
template<typename TObject, typename... TArgs>
int takesMemberFunc(TObject& object, void (TObject::* func)(TArgs...));
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Class")], classkey="class"
)
)
)
],
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
name=PQName(segments=[NameSpecifier(name="takesFunc")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="TArgs")
]
)
),
param_pack=True,
)
],
)
),
name="func",
)
],
template=TemplateDecl(
params=[
TemplateTypeParam(
typekey="typename", name="TArgs", param_pack=True
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
name=PQName(segments=[NameSpecifier(name="takesMemberFunc")]),
parameters=[
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="TObject")]
)
)
),
name="object",
),
Parameter(
type=PointerToMember(
base_type=Type(typename=NameSpecifier(name="TObject")),
ptr_to=FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="TArgs")
]
)
),
param_pack=True,
)
],
),
),
name="func",
),
],
template=TemplateDecl(
params=[
TemplateTypeParam(typekey="typename", name="TObject"),
TemplateTypeParam(
typekey="typename", name="TArgs", param_pack=True
),
]
),
),
],
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="intPtr")]),
type=PointerToMember(
base_type=Type(typename=NameSpecifier(name="Class")),
ptr_to=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="intReturnFuncPtr")]),
type=PointerToMember(
base_type=Type(typename=NameSpecifier(name="Class")),
ptr_to=FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
parameters=[],
),
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="intParamFuncPtr")]),
type=PointerToMember(
base_type=Type(typename=NameSpecifier(name="Class")),
ptr_to=FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
),
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="varargFuncPtr")]),
type=PointerToMember(
base_type=Type(typename=NameSpecifier(name="Class")),
ptr_to=FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
parameters=[],
vararg=True,
),
),
),
],
)
)
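
A sketch of walking pointer-to-member variables, assuming the base_type and ptr_to fields used in the expected data above; the class and variable names are taken from the test content:

from cxxheaderparser.simple import parse_string
from cxxheaderparser.types import FunctionType, PointerToMember

content = """
class Class {};
int Class::* intPtr;
int (Class::* intReturnFuncPtr)();
"""
data = parse_string(content, cleandoc=True)
for var in data.namespace.variables:
    if isinstance(var.type, PointerToMember):
        owner = "::".join(s.name for s in var.type.base_type.typename.segments)
        kind = "function" if isinstance(var.type.ptr_to, FunctionType) else "data"
        print(f"{var.name.segments[-1].name}: {kind} member pointer of {owner}")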

View File

@ -1,20 +1,9 @@
import os import os
import pathlib import pathlib
import pytest
import re
import shutil
import subprocess
import typing
from cxxheaderparser.options import ParserOptions, PreprocessorFunction from cxxheaderparser.options import ParserOptions
from cxxheaderparser import preprocessor from cxxheaderparser.preprocessor import make_pcpp_preprocessor
from cxxheaderparser.simple import ( from cxxheaderparser.simple import NamespaceScope, ParsedData, parse_file, parse_string
NamespaceScope,
ParsedData,
parse_file,
parse_string,
Include,
)
from cxxheaderparser.types import ( from cxxheaderparser.types import (
FundamentalSpecifier, FundamentalSpecifier,
NameSpecifier, NameSpecifier,
@ -26,39 +15,12 @@ from cxxheaderparser.types import (
) )
@pytest.fixture(params=["gcc", "msvc", "pcpp"]) def test_basic_preprocessor() -> None:
def make_pp(request) -> typing.Callable[..., PreprocessorFunction]:
param = request.param
if param == "gcc":
gcc_path = shutil.which("g++")
if not gcc_path:
pytest.skip("g++ not found")
subprocess.run([gcc_path, "--version"])
return preprocessor.make_gcc_preprocessor
elif param == "msvc":
gcc_path = shutil.which("cl.exe")
if not gcc_path:
pytest.skip("cl.exe not found")
return preprocessor.make_msvc_preprocessor
elif param == "pcpp":
if preprocessor.pcpp is None:
pytest.skip("pcpp not installed")
return preprocessor.make_pcpp_preprocessor
else:
assert False
def test_basic_preprocessor(
make_pp: typing.Callable[..., PreprocessorFunction]
) -> None:
content = """ content = """
#define X 1 #define X 1
int x = X; int x = X;
""" """
options = ParserOptions(preprocessor=make_pcpp_preprocessor())
options = ParserOptions(preprocessor=make_pp())
data = parse_string(content, cleandoc=True, options=options) data = parse_string(content, cleandoc=True, options=options)
assert data == ParsedData( assert data == ParsedData(
@ -76,10 +38,7 @@ def test_basic_preprocessor(
) )
def test_preprocessor_omit_content( def test_preprocessor_omit_content(tmp_path: pathlib.Path) -> None:
make_pp: typing.Callable[..., PreprocessorFunction],
tmp_path: pathlib.Path,
) -> None:
"""Ensure that content in other headers is omitted""" """Ensure that content in other headers is omitted"""
h_content = '#include "t2.h"' "\n" "int x = X;\n" h_content = '#include "t2.h"' "\n" "int x = X;\n"
h2_content = "#define X 2\n" "int omitted = 1;\n" h2_content = "#define X 2\n" "int omitted = 1;\n"
@ -90,7 +49,7 @@ def test_preprocessor_omit_content(
with open(tmp_path / "t2.h", "w") as fp: with open(tmp_path / "t2.h", "w") as fp:
fp.write(h2_content) fp.write(h2_content)
options = ParserOptions(preprocessor=make_pp()) options = ParserOptions(preprocessor=make_pcpp_preprocessor())
data = parse_file(tmp_path / "t1.h", options=options) data = parse_file(tmp_path / "t1.h", options=options)
assert data == ParsedData( assert data == ParsedData(
@ -108,10 +67,7 @@ def test_preprocessor_omit_content(
) )
def test_preprocessor_omit_content2( def test_preprocessor_omit_content2(tmp_path: pathlib.Path) -> None:
make_pp: typing.Callable[..., PreprocessorFunction],
tmp_path: pathlib.Path,
) -> None:
""" """
Ensure that content in other headers is omitted while handling pcpp Ensure that content in other headers is omitted while handling pcpp
relative path quirk relative path quirk
@ -128,7 +84,9 @@ def test_preprocessor_omit_content2(
with open(tmp_path2 / "t2.h", "w") as fp: with open(tmp_path2 / "t2.h", "w") as fp:
fp.write(h2_content) fp.write(h2_content)
options = ParserOptions(preprocessor=make_pp(include_paths=[str(tmp_path)])) options = ParserOptions(
preprocessor=make_pcpp_preprocessor(include_paths=[str(tmp_path)])
)
# Weirdness happens here # Weirdness happens here
os.chdir(tmp_path) os.chdir(tmp_path)
@ -149,9 +107,7 @@ def test_preprocessor_omit_content2(
) )
def test_preprocessor_encoding( def test_preprocessor_encoding(tmp_path: pathlib.Path) -> None:
make_pp: typing.Callable[..., PreprocessorFunction], tmp_path: pathlib.Path
) -> None:
"""Ensure we can handle alternate encodings""" """Ensure we can handle alternate encodings"""
h_content = b"// \xa9 2023 someone\n" b'#include "t2.h"' b"\n" b"int x = X;\n" h_content = b"// \xa9 2023 someone\n" b'#include "t2.h"' b"\n" b"int x = X;\n"
@ -163,7 +119,7 @@ def test_preprocessor_encoding(
with open(tmp_path / "t2.h", "wb") as fp: with open(tmp_path / "t2.h", "wb") as fp:
fp.write(h2_content) fp.write(h2_content)
options = ParserOptions(preprocessor=make_pp(encoding="cp1252")) options = ParserOptions(preprocessor=make_pcpp_preprocessor(encoding="cp1252"))
data = parse_file(tmp_path / "t1.h", options=options, encoding="cp1252") data = parse_file(tmp_path / "t1.h", options=options, encoding="cp1252")
assert data == ParsedData( assert data == ParsedData(
@ -179,26 +135,3 @@ def test_preprocessor_encoding(
] ]
) )
) )
@pytest.mark.skipif(preprocessor.pcpp is None, reason="pcpp not installed")
def test_preprocessor_passthru_includes(tmp_path: pathlib.Path) -> None:
"""Ensure that all #include pass through"""
h_content = '#include "t2.h"\n'
with open(tmp_path / "t1.h", "w") as fp:
fp.write(h_content)
with open(tmp_path / "t2.h", "w") as fp:
fp.write("")
options = ParserOptions(
preprocessor=preprocessor.make_pcpp_preprocessor(
passthru_includes=re.compile(".+")
)
)
data = parse_file(tmp_path / "t1.h", options=options)
assert data == ParsedData(
namespace=NamespaceScope(), includes=[Include(filename='"t2.h"')]
)
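
For reference, a minimal sketch of the pcpp-backed preprocessor path exercised above: macros are expanded before parsing, and content pulled in from other headers is omitted.

from cxxheaderparser.options import ParserOptions
from cxxheaderparser.preprocessor import make_pcpp_preprocessor
from cxxheaderparser.simple import parse_string

content = """
#define X 1
int x = X;
"""
options = ParserOptions(preprocessor=make_pcpp_preprocessor())
data = parse_string(content, cleandoc=True, options=options)
x = data.namespace.variables[0]
print(x.value.tokens[0].value)  # prints 1, the macro was expanded by pcpp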

View File

@ -5,7 +5,6 @@ from cxxheaderparser.types import (
BaseClass, BaseClass,
ClassDecl, ClassDecl,
DecltypeSpecifier, DecltypeSpecifier,
DeductionGuide,
Field, Field,
ForwardDecl, ForwardDecl,
Function, Function,
@ -2164,86 +2163,3 @@ def test_member_class_template_specialization() -> None:
] ]
) )
) )
def test_template_deduction_guide() -> None:
content = """
template <class CharT, class Traits = std::char_traits<CharT>>
Error(std::basic_string_view<CharT, Traits>) -> Error<std::string>;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
deduction_guides=[
DeductionGuide(
result_type=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Error",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="string"
),
]
)
)
)
]
),
)
]
)
),
name=PQName(segments=[NameSpecifier(name="Error")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="basic_string_view",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="CharT"
)
]
)
)
),
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Traits"
)
]
)
)
),
]
),
),
]
)
)
)
],
)
]
)
)
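
Finally, a short sketch showing that deduction guides land in their own list on the namespace scope rather than among the functions (assuming the deduction_guides field asserted above):

from cxxheaderparser.simple import parse_string

content = """
template <class CharT, class Traits = std::char_traits<CharT>>
Error(std::basic_string_view<CharT, Traits>) -> Error<std::string>;
"""
data = parse_string(content, cleandoc=True)
guide = data.namespace.deduction_guides[0]
print(guide.name.segments[-1].name, len(guide.parameters))  # Error 1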