159 Commits

Author SHA1 Message Date
98b68265cc Fixed PointerToMember format functions. 2024-01-05 14:37:46 +01:00
efa7f5eaad Fixed operator*(). 2024-01-05 14:36:06 +01:00
83a0fb805d Added basic parsing of pointer-to-member types. 2024-01-05 14:24:47 +01:00
Dustin Spicuzza
29b71ab2ed Merge pull request #87 from justinboswell/ctad
Added support for template deduction guides
2023-12-05 17:51:52 -05:00
Justin Boswell
88a7048513 Added support for template deduction guides
* Added DeductionGuide as a language element
2023-12-05 17:49:14 -05:00
Dustin Spicuzza
64c5290318 Merge pull request #89 from robotpy/fn-constraints
Move non-template requires to the function
2023-12-03 01:04:11 -05:00
Dustin Spicuzza
85f93ec09e Move non-template requires to the function
- Methods can have a requires() that refer to the class template without
  an explicit function template
- This is a breaking change, but since the values aren't parsed yet I
  can't imagine anyone is using it
2023-12-02 04:51:00 -05:00
Dustin Spicuzza
04ba4bffae Merge pull request #88 from robotpy/more-using
Retain doxygen comments for using declarations and type aliases
2023-12-02 04:45:11 -05:00
Dustin Spicuzza
73a81d3107 Retain doxygen comments for using declarations and type aliases 2023-12-02 04:24:10 -05:00
Dustin Spicuzza
f1708bf9b8 Merge pull request #85 from robotpy/static-inline
Allow fields to be marked inline
2023-11-19 12:50:09 -05:00
Dustin Spicuzza
cafb594179 Allow fields to be marked inline
- Fixes #84
2023-11-19 12:47:09 -05:00
Dustin Spicuzza
0e732f1d43 Merge pull request #82 from robotpy/trailing-return-type-body
Consume function body if present after trailing return type
2023-11-13 23:27:15 -05:00
Dustin Spicuzza
42bc6b60ad Consume function body if present after trailing return type
- Fixes #81
2023-11-13 23:23:40 -05:00
Dustin Spicuzza
9883a4e247 Merge pull request #80 from robotpy/cpp20
Add various C++20 features
2023-10-13 02:37:46 -04:00
Dustin Spicuzza
37cd3abee9 Parse C++20 requirement constraints for functions/classes
Co-authored-by: David Vo <auscompgeek@users.noreply.github.com>
2023-10-13 02:34:21 -04:00
Dustin Spicuzza
2957e70823 Add support for C++20 abbreviated function templates
Co-authored-by: David Vo <auscompgeek@users.noreply.github.com>
2023-10-13 02:34:17 -04:00
Dustin Spicuzza
e935959ad3 Implement basic concept parsing
- requires clauses are collected into a Value and not interpreted at
  this time
2023-10-10 03:45:55 -04:00
Dustin Spicuzza
e23c4db96d Merge pull request #78 from robotpy/msvc-preprocessor
Add MSVC compatible preprocessing function
2023-10-08 23:06:01 -04:00
Dustin Spicuzza
196e88b85e Add MSVC preprocessor support 2023-10-08 01:51:13 -04:00
Dustin Spicuzza
3d23375190 Make content optional
- Some preprocessors read the file directly
2023-10-08 01:21:31 -04:00
Dustin Spicuzza
d94df61c63 Merge pull request #77 from robotpy/gcc-preprocessor
Add GCC compatible preprocessing function
2023-10-08 01:07:13 -04:00
Dustin Spicuzza
8f9e8626af Add GCC compatible preprocessing function 2023-10-08 01:01:18 -04:00
Dustin Spicuzza
9dd573e433 Make pcpp more optional 2023-10-08 00:56:25 -04:00
Dustin Spicuzza
2a17b27225 Merge pull request #75 from seladb/do-not-skip-headers-in-preprocessor
Add option to retain #include directives in preprocessor
2023-10-06 02:38:34 -04:00
seladb
312f6fba6b Add option to retain #include directives in preprocessor 2023-10-06 02:34:06 -04:00
Dustin Spicuzza
26da91836a Update sphinx configuration 2023-10-06 02:09:45 -04:00
Dustin Spicuzza
458d3e0795 Fix RTD configuration 2023-10-06 02:03:29 -04:00
Dustin Spicuzza
e9df106bee Add RTFD configuration 2023-10-06 01:59:25 -04:00
Dustin Spicuzza
9c587e9414 Prepare to publish 2023-10-05 02:53:55 -04:00
Dustin Spicuzza
93bdfee934 Add preprocessor-only mode to the dump command 2023-10-05 02:48:22 -04:00
Dustin Spicuzza
955214cef4 Merge pull request #73 from robotpy/multiple-template-declarations
Support multiple template declarations on a class or function
2023-10-05 02:35:13 -04:00
Dustin Spicuzza
51d29a0791 Support multiple template declarations on a class or function
- Fixes #20
2023-10-05 02:29:25 -04:00
Dustin Spicuzza
175815525f Merge pull request #6 from robotpy/fmt-types
Add pretty formatting for types
2023-10-04 04:04:03 -04:00
Dustin Spicuzza
6ac0bdb4e3 Merge pull request #72 from robotpy/auto-decltype-fn
Parse auto functions with trailing return
2023-10-04 03:33:49 -04:00
Dustin Spicuzza
e30c117b62 Parse auto functions with trailing return 2023-10-04 03:29:27 -04:00
Dustin Spicuzza
a5ce9a24e6 Add pretty formatting for types and template specializations
- Fixes #3
2023-10-04 03:16:33 -04:00
seladb
1f5ba1b3ca Various fixes after running pylint (#71) 2023-10-04 02:57:34 -04:00
Dustin Spicuzza
0280e8d7e1 Merge pull request #70 from robotpy/fwd-template-method
Allow forward declared template methods
2023-10-02 01:48:52 -04:00
Dustin Spicuzza
4d16552544 Allow forward declared template methods 2023-10-02 01:45:51 -04:00
Dustin Spicuzza
cc47d6785a Merge pull request #69 from robotpy/fix-pp-again
Handle pcpp relative path quirk
2023-10-01 03:10:10 -04:00
Dustin Spicuzza
ab99f2fa72 Handle pcpp relative path quirk 2023-10-01 03:07:26 -04:00
Dustin Spicuzza
e6908386ea Add documentation clarifying FundamentalSpecifier 2023-09-30 16:28:42 -04:00
Dustin Spicuzza
bdcee6f9c0 Merge pull request #68 from robotpy/simplify-blocks
breaking change: remove empty block state and update visitor types
2023-09-28 21:01:26 -04:00
Dustin Spicuzza
bf54be5bbb Constrain visitor state types further to valid combinations 2023-09-27 11:37:48 -04:00
Dustin Spicuzza
326da6112c Remove EmptyBlockState
- Turns out you can't put an empty block except in code
2023-09-27 01:14:21 -04:00
Dustin Spicuzza
8c69970857 Merge pull request #66 from robotpy/fn-operator
breaking change: Remove operator type and make part of Function
2023-09-24 22:01:10 -04:00
Dustin Spicuzza
c3fbe4c16c breaking change: Remove operator type and make part of Function
- Unifies free function operators and method operators -- otherwise users
  would need to parse the operator themselves to deal with free function operators
- We don't have any releases yet, so there are no stability guarantees
2023-09-23 21:45:30 -04:00
Dustin Spicuzza
09eb5af3a3 Add tokfmt test for void** 2023-09-19 00:19:46 -04:00
Dustin Spicuzza
67c1f2b17d Reorder enum in file 2023-09-19 00:19:39 -04:00
Dustin Spicuzza
960ed68785 Merge pull request #65 from robotpy/skip-sections
Skip sections
2023-09-09 23:14:05 -04:00
Dustin Spicuzza
8e2d9909fa Provide mechanism to skip entire blocks
- Simplifies usage of visitor logic considerably
- Add null_visitor to help with this
2023-09-09 23:10:17 -04:00
Dustin Spicuzza
56278abe33 Fix gentest pcpp option 2023-09-09 22:53:23 -04:00
Dustin Spicuzza
f836f81baf Merge pull request #64 from robotpy/state-user-data
Allow parser visitors to leverage the parser's state instead of creating their own stacks
2023-09-09 11:27:22 -04:00
Dustin Spicuzza
3bae95f2a7 Allow parser visitors to leverage the parser's state instead of creating their own stacks 2023-09-03 19:49:40 -04:00
Dustin Spicuzza
15ec31b84f Actually call empty block visitor function 2023-09-03 19:30:00 -04:00
Dustin Spicuzza
a110a5508b Merge pull request #63 from robotpy/pp-encoding
Variety of preprocessor related fixes
2023-09-03 18:54:28 -04:00
Dustin Spicuzza
a13cdf4f67 Provide mechanism to specify preprocessor file encoding 2023-09-03 18:46:31 -04:00
Dustin Spicuzza
de4d06defe Fix preprocessor option to retain content 2023-09-03 18:46:31 -04:00
Dustin Spicuzza
a60bb7fd18 Add basic preprocessor test 2023-09-03 18:46:31 -04:00
Dustin Spicuzza
4ab7b3fd16 Adjust simple parse_file to accept a Path or string as filename 2023-09-03 18:46:24 -04:00
Dustin Spicuzza
acc2b27332 Merge pull request #61 from robotpy/preprocessor
Add easy to use preprocessor support
2023-08-23 22:57:57 -04:00
Dustin Spicuzza
34d7b4561b Add easy to use preprocessor support via pcpp
- Fixes #60
2023-08-22 11:20:31 -04:00
Dustin Spicuzza
1ba625a13b Merge pull request #62 from robotpy/fix-line-directives
Fix #line directives
2023-08-22 10:09:19 -04:00
Dustin Spicuzza
4febbe5d0d Fix #line directives 2023-08-22 10:02:22 -04:00
Dustin Spicuzza
dffcbf9034 Merge pull request #59 from robotpy/decorated-conversion-operator
Parse extra type information for conversion operators
2023-08-19 20:04:22 -04:00
Dustin Spicuzza
f597b691f7 Parse extra type information for conversion operators
- Fixes #57
2023-08-19 20:02:03 -04:00
Dustin Spicuzza
6ab0d00c2f Merge pull request #56 from robotpy/remove-pp-and-handle-continuations
Remove pp and handle continuations
2023-08-19 19:45:25 -04:00
Dustin Spicuzza
bd9907ad79 Restructure preprocessor support
- Remove partial support for #define and other PP directives
- Allow pragma to span multiple lines
- Pragma now emits a list of tokens instead of a single string
- Ignore #warning directive if present
2023-08-19 19:42:05 -04:00
Dustin Spicuzza
b07e1f81a6 Discard line continuations
- Fixes #54
2023-07-23 17:02:32 -04:00
Tomaž Šuštar
3938d0ffef fix #49 for anonymous unions (#50)
* fix #49 for anonymous unions

* fix formatting

* fix mypy and tests

* fix for structs

* fix formatting
2023-06-14 13:31:10 -04:00
Dustin Spicuzza
9bf99db87b Add bug report, fix parser error template 2023-05-29 09:09:50 -04:00
Dustin Spicuzza
edb1b1909a Add parser error issue template 2023-05-29 09:05:45 -04:00
Dustin Spicuzza
44ab824ffd Merge pull request #51 from robotpy/update-ci
Update CI to latest ubuntu runners
2023-05-05 12:59:40 -05:00
Dustin Spicuzza
05f2dc39eb Update CI
- Update ubuntu runner version
- Use latest setup-python and checkout actions
2023-05-05 13:46:24 -04:00
Dustin Spicuzza
7d302fdf91 Merge pull request #47 from tomazsustar/conditional-operator-fix
bugfix #46 adding question mark to lexer
2023-03-22 11:23:35 -05:00
Tomaž Šuštar
6461158422 bugfix #46 adding question mark to lexer 2023-03-22 16:15:48 +01:00
Dustin Spicuzza
adc8273e16 Update README.md 2023-03-17 01:39:39 -04:00
Dustin Spicuzza
01c93956b5 Merge pull request #44 from robotpy/ns-alias
Parse namespace alias and emit it
2023-03-17 01:27:24 -04:00
Dustin Spicuzza
2fe350bf35 Parse namespace alias and emit it
- Fixes #43
2023-03-17 01:24:23 -04:00
Dustin Spicuzza
1aa9e72ca1 Upgrade black to 2023.1 2023-03-16 18:45:34 -04:00
Dustin Spicuzza
296272fd39 Merge pull request #42 from robotpy/lexer-updates
Significant lexer overhaul
2022-12-15 21:29:32 -05:00
Dustin Spicuzza
e5295070a0 Add support for parsing user defined literals 2022-12-15 02:55:07 -05:00
Dustin Spicuzza
1eaa85ae8d Split the lexer into PlyLexer and TokenStream components
- There are two types of token streams: file based, and list based
- I think this has better component separation
- Doxygen parsing is a bit weirder, but I think it's more straightforward to see all the pieces?
2022-12-15 02:38:44 -05:00
Dustin Spicuzza
40bf05b384 Add additional doxygen related testcases to make sure things don't accidentally break 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
2ba5c3c829 Support extracting doxygen comments when declspec or attributes are present 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
079d643c67 Update PLY package to include version 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
b54c807824 Remove unused lexer.filenames 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
aee776072e Grab string/character lexer constants from pycparser 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
03c24a2074 Better lexer error handling 2022-12-15 01:49:49 -05:00
Dustin Spicuzza
b47eb7ce10 Use lex.TOKEN decorator for lexer tokens instead of docstrings
- Allows usage with -OO
2022-12-10 14:08:59 -05:00
Dustin Spicuzza
19c0604603 Merge pull request #41 from robotpy/moar-coverage
Moar coverage
2022-12-09 10:59:28 -05:00
Dustin Spicuzza
ace1d09d9d Add throw/noexcept tests 2022-12-09 03:28:50 -05:00
Dustin Spicuzza
8a0568c0f5 Change balanced token handling to allow mismatched gt/lt tokens
- These can be used for math, so we just assume the code is doing that
2022-12-09 03:26:16 -05:00
Dustin Spicuzza
b08d8783d4 Remove unused pieces from coverage reporting 2022-12-09 01:29:59 -05:00
Dustin Spicuzza
ff645920b8 Capture doxygen comments for namespaces 2022-12-09 01:16:54 -05:00
Dustin Spicuzza
dc76328bd5 Add support for inline namespaces 2022-12-09 01:16:12 -05:00
Dustin Spicuzza
305312b095 Add ability to generate failure testcases 2022-12-09 01:10:39 -05:00
Dustin Spicuzza
a5277d951a Merge pull request #40 from robotpy/method-fixes
Method fixes
2022-12-08 09:58:55 -05:00
Dustin Spicuzza
919e35e45f Correctly parse constructor implementations outside of a class
- Fixes #32
2022-12-08 02:00:05 -05:00
Dustin Spicuzza
15e4e0b879 Add support for parsing method implementations outside of a class
- Fixes #10
2022-12-08 01:26:16 -05:00
Dustin Spicuzza
2c238058d3 Merge pull request #39 from robotpy/more-cppheaderparser-tests
More cppheaderparser tests
2022-12-07 23:03:58 -05:00
Dustin Spicuzza
ddad7cb6b1 Add support for parsing explicit template instantiation 2022-12-07 08:36:31 -05:00
Dustin Spicuzza
f1c1f2e6af Add misc tests added to CppHeaderParser 2022-12-06 07:52:51 -05:00
Dustin Spicuzza
2192a92003 Adjust testcase generator to remove unneeded whitespace 2022-12-06 07:46:16 -05:00
Dustin Spicuzza
ba4222560f Checkout full history to allow git describe to work 2022-05-02 12:51:11 -04:00
Dustin Spicuzza
a68f05a840 Add live demo link
- Fixes #4
2022-04-30 02:16:01 -04:00
Dustin Spicuzza
1b308ff0ab Ignore .nojekyll file on deploy 2022-04-30 02:12:54 -04:00
Dustin Spicuzza
b525f18f6b Copy latest commit to interactive demo 2022-04-30 02:06:38 -04:00
Dustin Spicuzza
944c9daf10 Merge pull request #35 from robotpy/mypy-madness
Check codebase using mypy
2022-01-02 21:55:40 -05:00
Dustin Spicuzza
977fe133d0 Check codebase using mypy in github actions 2022-01-02 21:53:22 -05:00
Dustin Spicuzza
f29c1d5a30 mypy: address issues parsing const/volatile/*/& items 2022-01-02 21:52:36 -05:00
Dustin Spicuzza
83aa34d06e mypy: functions should be a list of functions 2022-01-02 21:51:49 -05:00
Dustin Spicuzza
ce4124d5dd mypy: add temporary variables to ease type checking 2022-01-02 21:51:19 -05:00
Dustin Spicuzza
b05b1b16c1 mypy: add assertions/logic to ensure that ClassScope/NamespaceScope are correct 2022-01-02 21:48:49 -05:00
Dustin Spicuzza
2eb13496fa mypy: function return types marked as optional
Only methods can have optional return types, but it has to be applied to function
as well as method, otherwise mypy complains.

I considered making a special return type for constructors/destructors, but it
seemed to just make things too messy. Time will tell if this was the right
decision.
2022-01-02 21:45:06 -05:00
Dustin Spicuzza
36d23c37bd mypy: add trivial annotations to functions that don't change logic 2022-01-02 21:41:34 -05:00
Dustin Spicuzza
9756025e2d mypy: add None return types to functions that don't return a value 2022-01-02 21:30:41 -05:00
Dustin Spicuzza
cd6d4f23f3 parse errors should use standard filename:lineno prefix when possible 2021-12-17 00:22:21 -05:00
Dustin Spicuzza
8f7455f3d3 Enable test with duplicate name 2021-12-16 23:03:18 -05:00
Dustin Spicuzza
5d564687c3 Add support for conversion operators
- Fixes #9
2021-11-29 22:56:43 -05:00
Dustin Spicuzza
92f8e72779 Support parsing MSVC calling conventions
- Fixes #5
- Fixes #33
2021-11-27 21:40:10 -05:00
Dustin Spicuzza
a67c9c4afe Add test for #21
- Fixes #21
2021-11-26 10:19:48 -05:00
Dustin Spicuzza
bcc57c72c0 Merge pull request #30 from robotpy/fix-12
Support __declspec (and other attributes) when parsing a name
2021-11-26 10:09:11 -05:00
Dustin Spicuzza
8d506e5700 Support __declspec (and other attributes) when parsing a name
- Sometimes this will lead to accepting invalid code, but "dont do that"
- Fixes #12
2021-11-26 10:03:26 -05:00
Dustin Spicuzza
88dab86a9e Merge pull request #29 from robotpy/allow-fn-typedef
Allow typedefs of function definitions
2021-11-26 01:15:01 -05:00
Dustin Spicuzza
3c5174b52f Allow typedefs of function definitions
- Fixes #13
2021-11-26 01:11:50 -05:00
Dustin Spicuzza
e750418d08 Merge pull request #28 from robotpy/fix-final-as-name
Allow using 'final' as a name in some contexts
2021-11-26 00:50:37 -05:00
Dustin Spicuzza
b611b9189a Allow using 'final' as a name in some contexts
- Strictly speaking, this fix will allow 'final' in some contexts where it isn't actually
  allowed. However, cxxheaderparser "will not reject all invalid C++ headers!" so this is ok
- Fixes #14
2021-11-26 00:28:54 -05:00
Dustin Spicuzza
7f6ae6bcbf Merge pull request #27 from robotpy/fix-fnptr-w-refs
Handle function pointer parameters where the return value is a reference
2021-11-26 00:27:41 -05:00
Dustin Spicuzza
c7d480e8c3 Handle function pointer parameters where the return value is a reference
- Fixes #17
2021-11-26 00:11:36 -05:00
Dustin Spicuzza
b1808f77ad Merge pull request #25 from robotpy/fixes
Add tests for bugs that were already fixed
2021-11-25 23:41:04 -05:00
Dustin Spicuzza
7094a0c677 Add python 3.10 to github actions 2021-11-25 23:38:52 -05:00
Dustin Spicuzza
5f57509568 Format with latest black 2021-11-25 22:51:08 -05:00
Spicuzza
420f293ead Add test for inline + volatile function
- Fixes #18
2021-11-21 12:59:54 -05:00
Spicuzza
8591e3114f Add test for mutable
- Fixes #19
2021-11-21 12:56:26 -05:00
Dustin Spicuzza
4521e0d594 Merge pull request #24 from michelp/main
allow single hyphen to represent stdin for filename.
2021-11-07 23:17:09 -05:00
Michel Pelletier
83bb290379 allow single hyphen to represent stdin for filename. 2021-11-06 08:30:55 -07:00
Dustin Spicuzza
f08ad8dee6 Merge pull request #23 from robotpy/add-volatile-test
Add test for volatile field
2021-09-08 09:23:06 -04:00
Dustin Spicuzza
65e05bfb90 Add test for volatile field
- Fixes #16
2021-09-08 09:21:01 -04:00
Dustin Spicuzza
f9e19de5af Merge pull request #22 from robotpy/fix-volatile
Support volatile keyword correctly
2021-08-16 16:09:42 -04:00
Dustin Spicuzza
6435b47744 Support volatile keyword correctly 2021-08-16 16:07:13 -04:00
Dustin Spicuzza
a25bb077d7 Merge pull request #8 from robotpy/typename-type
Support using 'typename' qualifier before type names
2021-01-09 13:08:29 -05:00
Dustin Spicuzza
3c51a30efe Support using 'typename' qualifier before type names
- Fixes #7
2021-01-09 12:54:45 -05:00
Dustin Spicuzza
07b63127f7 Don't run CI on pushes unless part of a PR 2021-01-04 22:00:24 -05:00
Dustin Spicuzza
1b2e622418 Merge pull request #2 from robotpy/trailing-return-type
Add support for C++17 trailing return types
2021-01-04 21:59:14 -05:00
Dustin Spicuzza
8c9f8fd8ba Add support for C++17 trailing return types 2021-01-04 21:55:29 -05:00
Dustin Spicuzza
43431f3fe4 Use utf-8-sig to open files by default 2021-01-02 19:22:22 -05:00
Dustin Spicuzza
23b95f09f2 Make mypy happier 2021-01-01 15:49:43 -05:00
Dustin Spicuzza
0329268622 Remove static content from docs 2020-12-31 02:07:28 -05:00
Dustin Spicuzza
1392fceeb5 Add documentation 2020-12-31 01:55:29 -05:00
Dustin Spicuzza
e361d4443a Add additional template specialization tests 2020-12-31 00:40:42 -05:00
Dustin Spicuzza
80fb4982ba Fix formatting 2020-12-31 00:25:08 -05:00
Dustin Spicuzza
a6977325b9 Fix doxygen tests 2020-12-31 00:24:45 -05:00
Dustin Spicuzza
3f9c03a37b Ensure that lexers don't share state 2020-12-31 00:24:01 -05:00
Dustin Spicuzza
09194f599b Check for doxygen at the end of a statement 2020-12-30 23:44:12 -05:00
Dustin Spicuzza
d97ed6c99c Fix template specialization unit tests 2020-12-30 03:22:29 -05:00
Dustin Spicuzza
cd110bbe29 Parse template specialization arguments
- Falls back to Value when it can't figure it out
2020-12-30 03:14:35 -05:00
Dustin Spicuzza
0c9c49b7e3 Add lexer support for parsing a specified group of tokens 2020-12-30 02:51:00 -05:00
Dustin Spicuzza
2de33946c2 Fix CI 2020-12-28 03:48:17 -05:00
58 changed files with 9684 additions and 1452 deletions

21
.github/ISSUE_TEMPLATE/bug-report.yml vendored Normal file
View File

@@ -0,0 +1,21 @@
name: Other bug Report
description: File an issue about the Python API or other non-parsing issues
title: "[BUG]: "
body:
- type: textarea
id: description
attributes:
label: Problem description
placeholder: >-
Provide a short description, state the expected behavior and what
actually happens.
validations:
required: true
- type: textarea
id: code
attributes:
label: Reproducible example code
placeholder: >-
Minimal code to reproduce this issue
render: text

22
.github/ISSUE_TEMPLATE/parser-error.yml vendored Normal file
View File

@@ -0,0 +1,22 @@
name: C++ parsing error
description: cxxheaderparser fails to parse valid C/C++ code
title: "[PARSE BUG]: "
body:
- type: textarea
id: description
attributes:
label: Problem description
placeholder: >-
Provide a short description
validations:
required: true
- type: textarea
id: code
attributes:
label: C++ code that can't be parsed correctly (please double-check that https://robotpy.github.io/cxxheaderparser/ has the same error)
placeholder: >-
Paste header here
render: text
validations:
required: true

34
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,34 @@
# deploy to github pages
name: Build and Deploy
on:
push:
branches:
- main
jobs:
deploy:
concurrency: ci-${{ github.ref }}
runs-on: ubuntu-latest
steps:
- name: Checkout 🛎️
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Build
run: |
echo "__version__ = '$(git describe --tags)'" > cxxheaderparser/version.py
mkdir build
cp -r cxxheaderparser build
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@v4.3.3
with:
branch: gh-pages
folder: build
clean: true
clean-exclude: |
.nojekyll
index.html
_index.py

View File

@@ -1,65 +1,120 @@
---
name: dist
on: [push, pull_request]
on:
pull_request:
push:
branches:
- main
tags:
- '*'
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- uses: psf/black@stable
# check-doc:
# runs-on: ubuntu-18.04
check-mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
# - uses: jpetrucciani/mypy-check@0.930
# .. can't use that because we need to install pytest
- uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Install requirements
run: |
pip --disable-pip-version-check install mypy pytest pcpp
- name: Run mypy
run: |
mypy .
# steps:
# - uses: actions/checkout@v2
# with:
# submodules: recursive
# fetch-depth: 0
# - uses: actions/setup-python@v2
# with:
# python-version: 3.8
# - name: Sphinx
# run: |
# pip --disable-pip-version-check install -e .
# pip --disable-pip-version-check install -r docs/requirements.txt
# cd docs && make clean html SPHINXOPTS="-W --keep-going"
test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, macos-latest, ubuntu-18.04]
python_version: [3.6, 3.7, 3.8, 3.9]
architecture: [x86, x64]
exclude:
- os: macos-latest
architecture: x86
- os: ubuntu-18.04
architecture: x86
check-doc:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Sphinx
run: |
pip --disable-pip-version-check install -e .
pip --disable-pip-version-check install -r docs/requirements.txt
cd docs && make clean html SPHINXOPTS="-W --keep-going"
#
# Build a wheel
#
build:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v4
with:
python-version: 3.8
- run: pipx run build
- name: Upload build artifacts
uses: actions/upload-artifact@v3
with:
name: dist
path: dist
test:
needs: [build]
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, macos-latest, ubuntu-20.04]
python_version: [3.6, 3.7, 3.8, 3.9, "3.10", "3.11", "3.12"]
architecture: [x86, x64]
exclude:
- os: macos-latest
architecture: x86
- os: ubuntu-20.04
architecture: x86
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python_version }}
architecture: ${{ matrix.architecture }}
- name: Install build dependencies
run: python -m pip --disable-pip-version-check install wheel
- name: Build wheel
run: python setup.py bdist_wheel
- name: Download build artifacts
uses: actions/download-artifact@v3
with:
name: dist
path: dist
- name: Install test dependencies
run: python -m pip --disable-pip-version-check install pytest
run: python -m pip --disable-pip-version-check install -r tests/requirements.txt
- name: Setup MSVC compiler
uses: ilammy/msvc-dev-cmd@v1
if: matrix.os == 'windows-latest'
- name: Test wheel
shell: bash
@@ -71,24 +126,17 @@ jobs:
publish:
runs-on: ubuntu-latest
needs: [check, check-doc, test]
needs: [check, check-mypy, check-doc, test]
permissions:
id-token: write
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
steps:
- uses: actions/checkout@v2
- name: Download build artifacts
uses: actions/download-artifact@v3
with:
submodules: recursive
fetch-depth: 0
name: dist
path: dist
- uses: actions/setup-python@v2
with:
python-version: 3.8
- run: pip --disable-pip-version-check install wheel
- name: Build packages
run: python setup.py sdist bdist_wheel
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.PYPI_PASSWORD }}
uses: pypa/gh-action-pypi-publish@release/v1

15
.readthedocs.yml Normal file
View File

@@ -0,0 +1,15 @@
version: 2
sphinx:
configuration: docs/conf.py
build:
os: ubuntu-22.04
tools:
python: "3.11"
python:
install:
- requirements: docs/requirements.txt
- method: pip
path: .

View File

@@ -1,6 +1,6 @@
cxxheaderparser license:
Copyright (c) 2020 Dustin Spicuzza <dustin@virtualroadside.com>
Copyright (c) 2020-2022 Dustin Spicuzza <dustin@virtualroadside.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -102,3 +102,31 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-----------------------------------------------------------------------------
pycparser -- A C parser in Python
Copyright (c) 2008-2022, Eli Bendersky
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,9 +1,6 @@
cxxheaderparser
===============
**Note**: This is still a work in progress, but should be stable in a few weeks
once I port robotpy-build over to use it
A pure python C++ header parser that parses C++ headers in a mildly naive
manner that allows it to handle many C++ constructs, including many modern
(C++11 and beyond) features.
@@ -31,6 +28,7 @@ Non-goals:
headers that contain macros, you should preprocess your code using the
excellent pure python preprocessor [pcpp](https://github.com/ned14/pcpp)
or your favorite compiler
* See `cxxheaderparser.preprocessor` for how to use
* Probably won't be able to parse most IOCCC entries
There are two APIs available:
@@ -38,17 +36,24 @@ There are two APIs available:
* A visitor-style interface to build up your own custom data structures
* A simple visitor that stores everything in a giant data structure
Live Demo
---------
A pyodide-powered interactive demo is at https://robotpy.github.io/cxxheaderparser/
Documentation
-------------
TODO: documentation site
Documentation can be found at https://cxxheaderparser.readthedocs.io
Install
-------
Requires Python 3.6+, no non-stdlib dependencies if using Python 3.7+.
TODO: distribute on pip
```
pip install cxxheaderparser
```
Usage
-----

View File

@@ -1,4 +1,4 @@
try:
from .version import __version__
from .version import __version__ # type: ignore
except ImportError:
__version__ = "master"

View File

@@ -0,0 +1,5 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)
# https://github.com/dabeaz/ply
__version__ = "2022.10.27"

View File

@@ -2,7 +2,7 @@
# -----------------------------------------------------------------------------
# ply: lex.py
#
# Copyright (C) 2001-2020
# Copyright (C) 2001-2022
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#

View File

@@ -9,8 +9,7 @@ from .options import ParserOptions
from .simple import parse_file
def dumpmain():
def dumpmain() -> None:
parser = argparse.ArgumentParser()
parser.add_argument("header")
parser.add_argument(
@@ -22,13 +21,33 @@ def dumpmain():
)
parser.add_argument("-v", "--verbose", default=False, action="store_true")
parser.add_argument(
"--mode", choices=["json", "pprint", "repr", "brepr"], default="pprint"
"--mode",
choices=["json", "pprint", "repr", "brepr", "pponly"],
default="pprint",
)
parser.add_argument(
"--pcpp", default=False, action="store_true", help="Use pcpp preprocessor"
)
parser.add_argument(
"--encoding", default=None, help="Use this encoding to open the file"
)
args = parser.parse_args()
options = ParserOptions(verbose=args.verbose)
data = parse_file(args.header, options=options)
preprocessor = None
if args.pcpp or args.mode == "pponly":
from .preprocessor import make_pcpp_preprocessor
preprocessor = make_pcpp_preprocessor(encoding=args.encoding)
if args.mode == "pponly":
with open(args.header, "r", encoding=args.encoding) as fp:
pp_content = preprocessor(args.header, fp.read())
sys.stdout.write(pp_content)
sys.exit(0)
options = ParserOptions(verbose=args.verbose, preprocessor=preprocessor)
data = parse_file(args.header, encoding=args.encoding, options=options)
if args.mode == "pprint":
ddata = dataclasses.asdict(data)

View File

@@ -1,5 +1,7 @@
import typing
from .lexer import LexToken
if typing.TYPE_CHECKING:
from .lexer import LexToken
class CxxParseError(Exception):
@@ -7,6 +9,6 @@ class CxxParseError(Exception):
Exception raised when a parsing error occurs
"""
def __init__(self, msg: str, tok: typing.Optional[LexToken] = None) -> None:
def __init__(self, msg: str, tok: typing.Optional["LexToken"] = None) -> None:
Exception.__init__(self, msg)
self.tok = tok

View File

@@ -2,12 +2,15 @@ import argparse
import dataclasses
import inspect
import subprocess
import typing
from .errors import CxxParseError
from .preprocessor import make_pcpp_preprocessor
from .options import ParserOptions
from .simple import parse_string
from .simple import parse_string, ParsedData
def nondefault_repr(data):
def nondefault_repr(data: ParsedData) -> str:
"""
Similar to the default dataclass repr, but exclude any
default parameters or parameters with compare=False
@@ -17,7 +20,7 @@ def nondefault_repr(data):
get_fields = dataclasses.fields
MISSING = dataclasses.MISSING
def _inner_repr(o) -> str:
def _inner_repr(o: typing.Any) -> str:
if is_dataclass(o):
vals = []
for f in get_fields(o):
@@ -46,32 +49,62 @@ def nondefault_repr(data):
return _inner_repr(data)
def gentest(infile: str, name: str, outfile: str, verbose: bool):
def gentest(
infile: str, name: str, outfile: str, verbose: bool, fail: bool, pcpp: bool
) -> None:
# Goal is to allow making a unit test as easy as running this dumper
# on a file and copy/pasting this into a test
with open(infile, "r") as fp:
content = fp.read()
maybe_options = ""
popt = ""
options = ParserOptions(verbose=verbose)
if pcpp:
options.preprocessor = make_pcpp_preprocessor()
maybe_options = "options = ParserOptions(preprocessor=make_pcpp_preprocessor())"
popt = ", options=options"
try:
data = parse_string(content, options=options)
if fail:
raise ValueError("did not fail")
except CxxParseError:
if not fail:
raise
# do it again, but strip the content so the error message matches
try:
parse_string(content.strip(), options=options)
except CxxParseError as e2:
err = str(e2)
if not fail:
stmt = nondefault_repr(data)
stmt = f"""
{maybe_options}
data = parse_string(content, cleandoc=True{popt})
content = content.replace("\n", "\n ")
assert data == {stmt}
"""
else:
stmt = f"""
{maybe_options}
err = {repr(err)}
with pytest.raises(CxxParseError, match=re.escape(err)):
parse_string(content, cleandoc=True{popt})
"""
content = ("\n" + content.strip()).replace("\n", "\n ")
content = "\n".join(l.rstrip() for l in content.splitlines())
stmt = inspect.cleandoc(
f'''
def test_{name}():
content = """
{content}
def test_{name}() -> None:
content = """{content}
"""
data = parse_string(content, cleandoc=True)
assert data == {stmt}
{stmt.strip()}
'''
)
@@ -91,8 +124,12 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("header")
parser.add_argument("name", nargs="?", default="TODO")
parser.add_argument("--pcpp", default=False, action="store_true")
parser.add_argument("-v", "--verbose", default=False, action="store_true")
parser.add_argument("-o", "--output", default="-")
parser.add_argument(
"-x", "--fail", default=False, action="store_true", help="Expect failure"
)
args = parser.parse_args()
gentest(args.header, args.name, args.output, args.verbose)
gentest(args.header, args.name, args.output, args.verbose, args.fail, args.pcpp)

View File

@@ -1,18 +1,23 @@
from collections import deque
import re
import typing
import sys
from ._ply import lex
from ._ply.lex import TOKEN
from .errors import CxxParseError
class LexError(CxxParseError):
pass
if sys.version_info >= (3, 8):
Protocol = typing.Protocol
from typing import Protocol
else:
Protocol = object
_line_re = re.compile(r'^#line (\d+) "(.*)"')
_line_re = re.compile(r'^\#[\t ]*(line)? (\d+) "(.*)"')
_multicomment_re = re.compile("\n[\\s]+\\*")
@@ -42,8 +47,26 @@ class LexToken(Protocol):
#: Location token was found at
location: Location
#: private
lexer: lex.Lexer
lexmatch: "re.Match"
class Lexer:
PhonyEnding: LexToken = lex.LexToken() # type: ignore
PhonyEnding.type = "PLACEHOLDER"
PhonyEnding.value = ""
PhonyEnding.lineno = 0
PhonyEnding.lexpos = 0
class PlyLexer:
"""
This lexer is a combination of pieces from the PLY lexers that CppHeaderParser
and pycparser have.
This tokenizes the input into tokens. The other lexer classes do more complex
things with the tokens.
"""
keywords = {
"__attribute__",
@@ -60,6 +83,7 @@ class Lexer:
"char16_t",
"char32_t",
"class",
"concept",
"const",
"constexpr",
"const_cast",
@@ -98,6 +122,7 @@ class Lexer:
"public",
"register",
"reinterpret_cast",
"requires",
"return",
"short",
"signed",
@@ -127,21 +152,44 @@ class Lexer:
}
tokens = [
"NUMBER",
"FLOAT_NUMBER",
# constants
"FLOAT_CONST",
"HEX_FLOAT_CONST",
"INT_CONST_HEX",
"INT_CONST_BIN",
"INT_CONST_OCT",
"INT_CONST_DEC",
"INT_CONST_CHAR",
"CHAR_CONST",
"WCHAR_CONST",
"U8CHAR_CONST",
"U16CHAR_CONST",
"U32CHAR_CONST",
# String literals
"STRING_LITERAL",
"WSTRING_LITERAL",
"U8STRING_LITERAL",
"U16STRING_LITERAL",
"U32STRING_LITERAL",
#
"NAME",
# Comments
"COMMENT_SINGLELINE",
"COMMENT_MULTILINE",
"PRECOMP_MACRO",
"PRAGMA_DIRECTIVE",
"INCLUDE_DIRECTIVE",
"PP_DIRECTIVE",
# misc
"DIVIDE",
"CHAR_LITERAL",
"STRING_LITERAL",
"NEWLINE",
"WHITESPACE",
"ELLIPSIS",
"DBL_LBRACKET",
"DBL_RBRACKET",
"DBL_COLON",
"DBL_AMP",
"DBL_PIPE",
"ARROW",
"SHIFT_LEFT",
] + list(keywords)
@@ -169,205 +217,399 @@ class Lexer:
"=",
"'",
".",
"?",
]
t_ignore = " \t\r?@\f"
t_NUMBER = r"[0-9][0-9XxA-Fa-f]*"
t_FLOAT_NUMBER = r"[-+]?[0-9]*\.[0-9]+([eE][-+]?[0-9]+)?"
#
# Regexes for use in tokens (taken from pycparser)
#
def t_NAME(self, t):
r"[A-Za-z_~][A-Za-z0-9_]*"
hex_prefix = "0[xX]"
hex_digits = "[0-9a-fA-F]+"
bin_prefix = "0[bB]"
bin_digits = "[01]+"
# integer constants (K&R2: A.2.5.1)
integer_suffix_opt = (
r"(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?"
)
decimal_constant = (
"(0" + integer_suffix_opt + ")|([1-9][0-9]*" + integer_suffix_opt + ")"
)
octal_constant = "0[0-7]*" + integer_suffix_opt
hex_constant = hex_prefix + hex_digits + integer_suffix_opt
bin_constant = bin_prefix + bin_digits + integer_suffix_opt
bad_octal_constant = "0[0-7]*[89]"
# character constants (K&R2: A.2.5.2)
# Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
# directives with Windows paths as filenames (..\..\dir\file)
# For the same reason, decimal_escape allows all digit sequences. We want to
# parse all correct code, even if it means to sometimes parse incorrect
# code.
#
# The original regexes were taken verbatim from the C syntax definition,
# and were later modified to avoid worst-case exponential running time.
#
# simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
# decimal_escape = r"""(\d+)"""
# hex_escape = r"""(x[0-9a-fA-F]+)"""
# bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
#
# The following modifications were made to avoid the ambiguity that allowed backtracking:
# (https://github.com/eliben/pycparser/issues/61)
#
# - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape.
# - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex
# - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal
# - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
#
# Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
# e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
decimal_escape = r"""(\d+)(?!\d)"""
hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
escape_sequence = (
r"""(\\(""" + simple_escape + "|" + decimal_escape + "|" + hex_escape + "))"
)
# This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed
# 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to
escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
cconst_char = r"""([^'\\\n]|""" + escape_sequence + ")"
char_const = "'" + cconst_char + "'"
wchar_const = "L" + char_const
u8char_const = "u8" + char_const
u16char_const = "u" + char_const
u32char_const = "U" + char_const
multicharacter_constant = "'" + cconst_char + "{2,4}'"
unmatched_quote = "('" + cconst_char + "*\\n)|('" + cconst_char + "*$)"
bad_char_const = (
r"""('"""
+ cconst_char
+ """[^'\n]+')|('')|('"""
+ bad_escape
+ r"""[^'\n]*')"""
)
# string literals (K&R2: A.2.6)
string_char = r"""([^"\\\n]|""" + escape_sequence_start_in_string + ")"
string_literal = '"' + string_char + '*"'
wstring_literal = "L" + string_literal
u8string_literal = "u8" + string_literal
u16string_literal = "u" + string_literal
u32string_literal = "U" + string_literal
bad_string_literal = '"' + string_char + "*" + bad_escape + string_char + '*"'
# floating constants (K&R2: A.2.5.3)
exponent_part = r"""([eE][-+]?[0-9]+)"""
fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
floating_constant = (
"(((("
+ fractional_constant
+ ")"
+ exponent_part
+ "?)|([0-9]+"
+ exponent_part
+ "))[FfLl]?)"
)
binary_exponent_part = r"""([pP][+-]?[0-9]+)"""
hex_fractional_constant = (
"(((" + hex_digits + r""")?\.""" + hex_digits + ")|(" + hex_digits + r"""\.))"""
)
hex_floating_constant = (
"("
+ hex_prefix
+ "("
+ hex_digits
+ "|"
+ hex_fractional_constant
+ ")"
+ binary_exponent_part
+ "[FfLl]?)"
)
t_WHITESPACE = "[ \t]+"
t_ignore = "\r"
# The following floating and integer constants are defined as
# functions to impose a strict order (otherwise, decimal
# is placed before the others because its regex is longer,
# and this is bad)
#
@TOKEN(floating_constant)
def t_FLOAT_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(hex_floating_constant)
def t_HEX_FLOAT_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(hex_constant)
def t_INT_CONST_HEX(self, t: LexToken) -> LexToken:
return t
@TOKEN(bin_constant)
def t_INT_CONST_BIN(self, t: LexToken) -> LexToken:
return t
@TOKEN(bad_octal_constant)
def t_BAD_CONST_OCT(self, t: LexToken) -> None:
msg = "Invalid octal constant"
self._error(msg, t)
@TOKEN(octal_constant)
def t_INT_CONST_OCT(self, t: LexToken) -> LexToken:
return t
@TOKEN(decimal_constant)
def t_INT_CONST_DEC(self, t: LexToken) -> LexToken:
return t
# Must come before bad_char_const, to prevent it from
# catching valid char constants as invalid
#
@TOKEN(multicharacter_constant)
def t_INT_CONST_CHAR(self, t: LexToken) -> LexToken:
return t
@TOKEN(char_const)
def t_CHAR_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(wchar_const)
def t_WCHAR_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(u8char_const)
def t_U8CHAR_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(u16char_const)
def t_U16CHAR_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(u32char_const)
def t_U32CHAR_CONST(self, t: LexToken) -> LexToken:
return t
@TOKEN(unmatched_quote)
def t_UNMATCHED_QUOTE(self, t: LexToken) -> None:
msg = "Unmatched '"
self._error(msg, t)
@TOKEN(bad_char_const)
def t_BAD_CHAR_CONST(self, t: LexToken) -> None:
msg = "Invalid char constant %s" % t.value
self._error(msg, t)
@TOKEN(wstring_literal)
def t_WSTRING_LITERAL(self, t: LexToken) -> LexToken:
return t
@TOKEN(u8string_literal)
def t_U8STRING_LITERAL(self, t: LexToken) -> LexToken:
return t
@TOKEN(u16string_literal)
def t_U16STRING_LITERAL(self, t: LexToken) -> LexToken:
return t
@TOKEN(u32string_literal)
def t_U32STRING_LITERAL(self, t: LexToken) -> LexToken:
return t
# unmatched string literals are caught by the preprocessor
@TOKEN(bad_string_literal)
def t_BAD_STRING_LITERAL(self, t):
msg = "String contains invalid escape code"
self._error(msg, t)
@TOKEN(r"[A-Za-z_~][A-Za-z0-9_]*")
def t_NAME(self, t: LexToken) -> LexToken:
if t.value in self.keywords:
t.type = t.value
return t
def t_PRECOMP_MACRO(self, t):
r"\#.*"
@TOKEN(r"\#[\t ]*pragma")
def t_PRAGMA_DIRECTIVE(self, t: LexToken) -> LexToken:
return t
@TOKEN(r"\#[\t ]*include (.*)")
def t_INCLUDE_DIRECTIVE(self, t: LexToken) -> LexToken:
return t
@TOKEN(r"\#(.*)")
def t_PP_DIRECTIVE(self, t: LexToken):
# handle line macros
m = _line_re.match(t.value)
if m:
filename = m.group(2)
if filename not in self._filenames_set:
self.filenames.append(filename)
self._filenames_set.add(filename)
self.filename = filename
self.line_offset = 1 + self.lex.lineno - int(m.group(1))
self.filename = m.group(3)
self.line_offset = 1 + self.lex.lineno - int(m.group(2))
return None
# ignore C++23 warning directive
if t.value.startswith("#warning"):
return
if "define" in t.value:
msgtype = "#define"
else:
return t
def t_COMMENT_SINGLELINE(self, t):
r"\/\/.*\n?"
if t.value.startswith("///") or t.value.startswith("//!"):
self.comments.append(t.value.lstrip("\t ").rstrip("\n"))
t.lexer.lineno += t.value.count("\n")
return t
msgtype = "preprocessor"
self._error(
"cxxheaderparser does not support "
+ msgtype
+ " directives, please use a C++ preprocessor first",
t,
)
t_DIVIDE = r"/(?!/)"
t_CHAR_LITERAL = "'.'"
t_ELLIPSIS = r"\.\.\."
t_DBL_LBRACKET = r"\[\["
t_DBL_RBRACKET = r"\]\]"
t_DBL_COLON = r"::"
t_DBL_AMP = r"&&"
t_DBL_PIPE = r"\|\|"
t_ARROW = r"->"
t_SHIFT_LEFT = r"<<"
# SHIFT_RIGHT introduces ambiguity
# found at http://wordaligned.org/articles/string-literals-and-regular-expressions
# TODO: This does not work with the string "bla \" bla"
t_STRING_LITERAL = r'"([^"\\]|\\.)*"'
t_STRING_LITERAL = string_literal
# Found at http://ostermiller.org/findcomment.html
def t_COMMENT_MULTILINE(self, t):
r"/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/\n?"
if t.value.startswith("/**") or t.value.startswith("/*!"):
# not sure why, but get double new lines
v = t.value.replace("\n\n", "\n")
# strip prefixing whitespace
v = _multicomment_re.sub("\n*", v)
self.comments = v.splitlines()
@TOKEN(r"\/\/.*\n?")
def t_COMMENT_SINGLELINE(self, t: LexToken) -> LexToken:
t.lexer.lineno += t.value.count("\n")
return t
def t_NEWLINE(self, t):
r"\n+"
t.lexer.lineno += len(t.value)
del self.comments[:]
# Found at http://ostermiller.org/findcomment.html
@TOKEN(r"/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/\n?")
def t_COMMENT_MULTILINE(self, t: LexToken) -> LexToken:
t.lexer.lineno += t.value.count("\n")
return t
def t_error(self, v):
print("Lex error: ", v)
@TOKEN(r"\n+")
def t_NEWLINE(self, t: LexToken) -> LexToken:
t.lexer.lineno += len(t.value)
return t
def t_error(self, t: LexToken) -> None:
self._error(f"Illegal character {t.value!r}", t)
def _error(self, msg: str, tok: LexToken):
tok.location = self.current_location()
raise LexError(msg, tok)
_lexer = None
lex: lex.Lexer
def __new__(cls, *args, **kwargs):
def __new__(cls, *args, **kwargs) -> "PlyLexer":
# only build the lexer once
inst = super().__new__(cls)
if cls._lexer is None:
cls._lexer = lex.lex(module=inst)
inst.lex = cls._lexer.clone(inst)
inst.lex.begin("INITIAL")
return inst
def __init__(self, filename: typing.Optional[str] = None):
self.input = self.lex.input
self.input: typing.Callable[[str], None] = self.lex.input
self.token: typing.Callable[[], LexToken] = self.lex.token
# For tracking current file/line position
self.filename = filename
self.line_offset = 0
self.filenames = []
self._filenames_set = set()
def current_location(self) -> Location:
return Location(self.filename, self.lex.lineno - self.line_offset)
if self.filename:
self.filenames.append(filename)
self._filenames_set.add(filename)
# Doxygen comments
self.comments = []
class TokenStream:
"""
Provides access to a stream of tokens
"""
self.lookahead = typing.Deque[LexToken]()
tokbuf: typing.Deque[LexToken]
def _fill_tokbuf(self, tokbuf: typing.Deque[LexToken]) -> bool:
"""
Fills tokbuf with tokens from the next line. Return True if at least
one token was added to the buffer
"""
raise NotImplementedError
def current_location(self) -> Location:
if self.lookahead:
return self.lookahead[0].location
return Location(self.filename, self.lex.lineno - self.line_offset)
raise NotImplementedError
def get_doxygen(self) -> typing.Optional[str]:
"""
This should be called after the first element of something has
been consumed.
It will lookahead for comments that come after the item, if prior
comments don't exist.
This is called at the point that you want doxygen information
"""
raise NotImplementedError
# Assumption: This function is either called at the beginning of a
# statement or at the end of a statement
def get_doxygen_after(self) -> typing.Optional[str]:
"""
This is called to retrieve doxygen information after a statement
"""
raise NotImplementedError
if self.comments:
comments = self.comments
else:
comments = []
# only look for comments until a newline (including lookahead)
for tok in self.lookahead:
if tok.type == "NEWLINE":
return None
_discard_types = {
"NEWLINE",
"COMMENT_SINGLELINE",
"COMMENT_MULTILINE",
"WHITESPACE",
}
while True:
tok = self.lex.token()
comments.extend(self.comments)
if tok is None:
break
tok.location = Location(self.filename, tok.lineno - self.line_offset)
ttype = tok.type
if ttype == "NEWLINE":
self.lookahead.append(tok)
break
if ttype not in self._discard_types:
self.lookahead.append(tok)
if ttype == "NAME":
break
del self.comments[:]
comments = "\n".join(comments)
del self.comments[:]
if comments:
return comments
return None
_discard_types = {"NEWLINE", "COMMENT_SINGLELINE", "COMMENT_MULTILINE"}
_discard_types_except_newline = {
"COMMENT_SINGLELINE",
"COMMENT_MULTILINE",
"WHITESPACE",
}
def token(self) -> LexToken:
tok = None
while self.lookahead:
tok = self.lookahead.popleft()
tokbuf = self.tokbuf
while True:
while tokbuf:
tok = tokbuf.popleft()
if tok.type not in self._discard_types:
return tok
while True:
tok = self.lex.token()
if tok is None:
if not self._fill_tokbuf(tokbuf):
raise EOFError("unexpected end of file")
if tok.type not in self._discard_types:
tok.location = Location(self.filename, tok.lineno - self.line_offset)
break
return tok
def token_eof_ok(self) -> typing.Optional[LexToken]:
tok = None
while self.lookahead:
tok = self.lookahead.popleft()
if tok.type not in self._discard_types:
return tok
tokbuf = self.tokbuf
while True:
tok = self.lex.token()
if tok is None:
break
while tokbuf:
tok = tokbuf.popleft()
if tok.type not in self._discard_types:
tok.location = Location(self.filename, tok.lineno - self.line_offset)
break
return tok
if not self._fill_tokbuf(tokbuf):
return None
def token_newline_eof_ok(self) -> typing.Optional[LexToken]:
tokbuf = self.tokbuf
while True:
while tokbuf:
tok = tokbuf.popleft()
if tok.type not in self._discard_types_except_newline:
return tok
if not self._fill_tokbuf(tokbuf):
return None
def token_if(self, *types: str) -> typing.Optional[LexToken]:
tok = self.token_eof_ok()
if tok is None:
return None
if tok.type not in types:
# put it back on the left in case it was retrieved
# from the lookahead buffer
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
return None
return tok
@@ -376,9 +618,7 @@ class Lexer:
if tok is None:
return None
if tok.type not in types:
# put it back on the left in case it was retrieved
# from the lookahead buffer
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
return None
return tok
@@ -387,9 +627,7 @@ class Lexer:
if tok is None:
return None
if tok.value not in vals:
# put it back on the left in case it was retrieved
# from the lookahead buffer
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
return None
return tok
@@ -398,9 +636,7 @@ class Lexer:
if tok is None:
return None
if tok.type in types:
# put it back on the left in case it was retrieved
# from the lookahead buffer
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
return None
return tok
@@ -408,18 +644,219 @@ class Lexer:
tok = self.token_eof_ok()
if not tok:
return False
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
return tok.type in types
def return_token(self, tok: LexToken) -> None:
self.lookahead.appendleft(tok)
self.tokbuf.appendleft(tok)
def return_tokens(self, toks: typing.Iterable[LexToken]) -> None:
self.lookahead.extendleft(reversed(toks))
def return_tokens(self, toks: typing.Sequence[LexToken]) -> None:
self.tokbuf.extendleft(reversed(toks))
if __name__ == "__main__":
class LexerTokenStream(TokenStream):
    """
    Provides tokens from using PlyLexer on the given input text
    """

    # Token types that may be immediately followed by a NAME beginning with
    # an underscore, forming a C++ user defined literal (e.g. 10_km, "s"_sv)
    _user_defined_literal_start = {
        "FLOAT_CONST",
        "HEX_FLOAT_CONST",
        "INT_CONST_HEX",
        "INT_CONST_BIN",
        "INT_CONST_OCT",
        "INT_CONST_DEC",
        "INT_CONST_CHAR",
        "CHAR_CONST",
        "WCHAR_CONST",
        "U8CHAR_CONST",
        "U16CHAR_CONST",
        "U32CHAR_CONST",
        # String literals
        "STRING_LITERAL",
        "WSTRING_LITERAL",
        "U8STRING_LITERAL",
        "U16STRING_LITERAL",
        "U32STRING_LITERAL",
    }

    def __init__(self, filename: typing.Optional[str], content: str) -> None:
        # Feed the whole content to the PLY-based lexer up front; tokens are
        # pulled into self.tokbuf lazily, one source line at a time
        self._lex = PlyLexer(filename)
        self._lex.input(content)
        self.tokbuf = typing.Deque[LexToken]()

    def _fill_tokbuf(self, tokbuf: typing.Deque[LexToken]) -> bool:
        """
        Fills the token buffer with tokens up to and including the next
        NEWLINE. Returns True if at least one token was added.
        """
        get_token = self._lex.token
        # NOTE(review): this rebinding shadows the tokbuf parameter; every
        # caller in this class passes self.tokbuf so behavior is unchanged,
        # but the parameter is effectively ignored
        tokbuf = self.tokbuf
        tok = get_token()
        if tok is None:
            # end of input reached, nothing buffered
            return False
        udl_start = self._user_defined_literal_start
        while True:
            tok.location = self._lex.current_location()
            tokbuf.append(tok)
            if tok.type == "NEWLINE":
                # detect/remove line continuations
                if len(tokbuf) > 2 and tokbuf[-2].type == "\\":
                    tokbuf.pop()
                    tokbuf.pop()
                else:
                    break
            # detect/combine user defined literals
            if tok.type in udl_start:
                tok2 = get_token()
                if tok2 is None:
                    break
                if tok2.type != "NAME" or tok2.value[0] != "_":
                    # not a UDL suffix: process tok2 as a normal token
                    tok = tok2
                    continue
                # merge the literal and its suffix into one UD_* token
                tok.value = tok.value + tok2.value
                tok.type = f"UD_{tok.type}"
            tok = get_token()
            if tok is None:
                break
        return True

    def current_location(self) -> Location:
        # prefer the location of the next buffered token; otherwise ask
        # the underlying lexer where it currently is
        if self.tokbuf:
            return self.tokbuf[0].location
        return self._lex.current_location()

    def get_doxygen(self) -> typing.Optional[str]:
        tokbuf = self.tokbuf
        # fill the token buffer if it's empty (which indicates a newline)
        if not tokbuf and not self._fill_tokbuf(tokbuf):
            return None
        comments: typing.List[LexToken] = []
        # retrieve any comments in the stream right before
        # the first non-discard element
        keep_going = True
        while True:
            while tokbuf:
                tok = tokbuf.popleft()
                if tok.type == "NEWLINE":
                    # a blank line detaches preceding comments from the item
                    comments.clear()
                elif tok.type == "WHITESPACE":
                    pass
                elif tok.type in ("COMMENT_SINGLELINE", "COMMENT_MULTILINE"):
                    comments.append(tok)
                else:
                    # first real token: put it back and stop scanning
                    tokbuf.appendleft(tok)
                    keep_going = False
                    break
            if not keep_going:
                break
            if not self._fill_tokbuf(tokbuf):
                break
        if comments:
            return self._extract_comments(comments)
        return None

    def get_doxygen_after(self) -> typing.Optional[str]:
        tokbuf = self.tokbuf
        # if there's a newline directly after a statement, we're done
        if not tokbuf:
            return None
        # retrieve comments after non-discard elements
        comments: typing.List[LexToken] = []
        new_tokbuf = typing.Deque[LexToken]()
        # This is different: we only extract tokens here
        while tokbuf:
            tok = tokbuf.popleft()
            if tok.type == "NEWLINE":
                break
            elif tok.type == "WHITESPACE":
                new_tokbuf.append(tok)
            elif tok.type in ("COMMENT_SINGLELINE", "COMMENT_MULTILINE"):
                comments.append(tok)
            else:
                new_tokbuf.append(tok)
                if comments:
                    # stop once a real token follows the gathered comments
                    break
        # put the non-comment tokens back for normal consumption
        new_tokbuf.extend(tokbuf)
        self.tokbuf = new_tokbuf
        if comments:
            return self._extract_comments(comments)
        return None

    def _extract_comments(
        self, comments: typing.List[LexToken]
    ) -> typing.Optional[str]:
        # Now we have comments, need to extract the text from them
        comment_lines: typing.List[str] = []
        for c in comments:
            text = c.value
            if c.type == "COMMENT_SINGLELINE":
                # only doxygen-style single line comments are retained
                if text.startswith("///") or text.startswith("//!"):
                    comment_lines.append(text.rstrip("\n"))
            else:
                # only doxygen-style multiline comments are retained; note
                # that a multiline comment replaces (not extends) prior lines
                if text.startswith("/**") or text.startswith("/*!"):
                    # not sure why, but get double new lines
                    text = text.replace("\n\n", "\n")
                    # strip prefixing whitespace
                    text = _multicomment_re.sub("\n*", text)
                    comment_lines = text.splitlines()
        comment_str = "\n".join(comment_lines)
        if comment_str:
            return comment_str
        return None
class BoundedTokenStream(TokenStream):
    """
    A token stream backed by a fixed, pre-collected group of tokens --
    for example the contents of a paren grouping, or tokens gathered
    for some lookahead case. Once exhausted it cannot be refilled.
    """

    def __init__(self, toks: typing.List[LexToken]) -> None:
        self.tokbuf = typing.Deque[LexToken](toks)

    def has_tokens(self) -> bool:
        # truthiness of the deque == "anything left to consume"
        return bool(self.tokbuf)

    def _fill_tokbuf(self, tokbuf: typing.Deque[LexToken]) -> bool:
        # there is no backing lexer to pull additional tokens from
        raise CxxParseError("no more tokens left in this group")

    def current_location(self) -> Location:
        if not self.tokbuf:
            raise ValueError("internal error")
        return self.tokbuf[0].location

    def get_doxygen(self) -> typing.Optional[str]:
        # comment tokens aren't going to be in this stream
        return None

    def get_doxygen_after(self) -> typing.Optional[str]:
        return None
if __name__ == "__main__": # pragma: no cover
try:
lex.runmain(lexer=Lexer(None))
lex.runmain(lexer=PlyLexer(None))
except EOFError:
pass

View File

@@ -1,4 +1,8 @@
from dataclasses import dataclass
from typing import Callable, Optional
#: arguments are (filename, content)
PreprocessorFunction = Callable[[str, Optional[str]], str]
@dataclass
@@ -12,3 +16,7 @@ class ParserOptions:
#: If true, converts a single void parameter to zero parameters
convert_void_to_zero_params: bool = True
#: A function that will preprocess the header before parsing. See
#: :py:mod:`cxxheaderparser.preprocessor` for available preprocessors
preprocessor: Optional[PreprocessorFunction] = None

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
import typing
if typing.TYPE_CHECKING:
from .visitor import CxxVisitor
from .visitor import CxxVisitor # pragma: nocover
from .errors import CxxParseError
from .lexer import LexToken, Location
@@ -28,58 +28,72 @@ class ParsedTypeModifiers(typing.NamedTuple):
raise CxxParseError(f"{msg}: unexpected '{tok.value}'")
class State:
#: custom user data for this state type
T = typing.TypeVar("T")
#: type of custom user data for a parent state
PT = typing.TypeVar("PT")
class BaseState(typing.Generic[T, PT]):
#: Uninitialized user data available for use by visitor implementations. You
#: should set this in a ``*_start`` method.
user_data: T
#: parent state
parent: typing.Optional["State"]
def __init__(self, parent: typing.Optional["State"]) -> None:
#: Approximate location that the parsed element was found at
location: Location
#: internal detail used by parser
_prior_visitor: "CxxVisitor"
def __init__(self, parent: typing.Optional["State"], location: Location) -> None:
self.parent = parent
self.location = location
def _finish(self, visitor: "CxxVisitor") -> None:
pass
class BlockState(State):
#: Approximate location that the parsed element was found at
location: Location
class EmptyBlockState(BlockState):
def _finish(self, visitor: "CxxVisitor") -> None:
visitor.on_empty_block_end(self)
class ExternBlockState(BlockState):
class ExternBlockState(BaseState[T, PT]):
parent: "NonClassBlockState"
#: The linkage for this extern block
linkage: str
def __init__(self, parent: typing.Optional[State], linkage: str) -> None:
super().__init__(parent)
def __init__(
self, parent: "NonClassBlockState", location: Location, linkage: str
) -> None:
super().__init__(parent, location)
self.linkage = linkage
def _finish(self, visitor: "CxxVisitor"):
def _finish(self, visitor: "CxxVisitor") -> None:
visitor.on_extern_block_end(self)
class NamespaceBlockState(BlockState):
class NamespaceBlockState(BaseState[T, PT]):
parent: "NonClassBlockState"
#: The incremental namespace for this block
namespace: NamespaceDecl
def __init__(
self, parent: typing.Optional[State], namespace: NamespaceDecl
self,
parent: typing.Optional["NonClassBlockState"],
location: Location,
namespace: NamespaceDecl,
) -> None:
super().__init__(parent)
super().__init__(parent, location)
self.namespace = namespace
def _finish(self, visitor: "CxxVisitor") -> None:
visitor.on_namespace_end(self)
class ClassBlockState(BlockState):
class ClassBlockState(BaseState[T, PT]):
parent: "State"
#: class decl block being processed
class_decl: ClassDecl
@@ -95,13 +109,14 @@ class ClassBlockState(BlockState):
def __init__(
self,
parent: typing.Optional[State],
parent: typing.Optional["State"],
location: Location,
class_decl: ClassDecl,
access: str,
typedef: bool,
mods: ParsedTypeModifiers,
) -> None:
super().__init__(parent)
super().__init__(parent, location)
self.class_decl = class_decl
self.access = access
self.typedef = typedef
@@ -112,3 +127,9 @@ class ClassBlockState(BlockState):
def _finish(self, visitor: "CxxVisitor") -> None:
visitor.on_class_end(self)
State = typing.Union[
NamespaceBlockState[T, PT], ExternBlockState[T, PT], ClassBlockState[T, PT]
]
NonClassBlockState = typing.Union[ExternBlockState[T, PT], NamespaceBlockState[T, PT]]

View File

@@ -0,0 +1,344 @@
"""
Contains optional preprocessor support functions
"""
import io
import re
import os
import subprocess
import sys
import tempfile
import typing
from .options import PreprocessorFunction
class PreprocessorError(Exception):
    """Raised when a preprocessor invocation fails or is misconfigured"""

    pass
#
# GCC preprocessor support
#
def _gcc_filter(fname: str, fp: typing.TextIO) -> str:
    """
    Strips out preprocessed content that gcc pulled in from other files,
    keeping only the regions whose line markers point at *fname*.
    """
    # gcc escapes backslashes in the paths it emits in line markers
    marker_name = fname.replace("\\", "\\\\")
    kept_lines = []
    emitting = True
    for line in fp:
        # lines of the form '# <lineno> "<file>" ...' switch the active file
        if line.startswith("# "):
            quote_idx = line.rfind('"')
            if quote_idx != -1:
                emitting = line[:quote_idx].endswith(marker_name)
        if emitting:
            kept_lines.append(line)
    return "".join(kept_lines)
def make_gcc_preprocessor(
    *,
    defines: typing.List[str] = [],
    include_paths: typing.List[str] = [],
    retain_all_content: bool = False,
    encoding: typing.Optional[str] = None,
    gcc_args: typing.List[str] = ["g++"],
    print_cmd: bool = True,
) -> PreprocessorFunction:
    """
    Builds a preprocessor function that shells out to g++ to preprocess
    the input text.

    gcc is a fast and accurate preprocessor, but be aware that it raises
    an error whenever an #include directive cannot be resolved (or some
    other oddity exists in your input).

    :param defines: list of #define macros specified as "key value"
    :param include_paths: list of directories to search for included files
    :param retain_all_content: If False, only the parsed file content will be retained
    :param encoding: If specified any include files are opened with this encoding
    :param gcc_args: This is the path to G++ and any extra args you might want
    :param print_cmd: Prints the gcc command as its executed

    .. code-block:: python

        pp = make_gcc_preprocessor()
        options = ParserOptions(preprocessor=pp)
        parse_file(content, options=options)
    """
    if not encoding:
        encoding = "utf-8"

    def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
        # -w silences warnings, -E stops after preprocessing, -C keeps comments
        cmd = gcc_args + ["-w", "-E", "-C"]
        cmd.extend(f"-I{p}" for p in include_paths)
        cmd.extend(f"-D{d.replace(' ', '=')}" for d in defines)

        kwargs = {"encoding": encoding}
        if filename == "<str>":
            # content was given directly: feed it to gcc via stdin
            if content is None:
                raise PreprocessorError("no content specified for stdin")
            cmd.append("-")
            filename = "<stdin>"
            kwargs["input"] = content
        else:
            cmd.append(filename)

        if print_cmd:
            print("+", " ".join(cmd), file=sys.stderr)

        result: str = subprocess.check_output(cmd, **kwargs)  # type: ignore
        if not retain_all_content:
            # drop content that came from #included files
            result = _gcc_filter(filename, io.StringIO(result))
        return result

    return _preprocess_file
#
# Microsoft Visual Studio preprocessor support
#
def _msvc_filter(fp: typing.TextIO) -> str:
    """
    Strips out preprocessed content that cl.exe pulled in from other files.

    MSVC outputs the original file as the very first #line directive, so we
    just use that: content between that file's #line markers is kept, and
    everything originating from other files is dropped.

    :param fp: stream of ``cl.exe /E`` output
    :raises PreprocessorError: if the output does not start with a #line
        directive
    """
    new_output = io.StringIO()
    keep = True
    first = fp.readline()
    # was an assert, but asserts are stripped under `python -O`; raise an
    # explicit error so malformed preprocessor output is always detected
    if not first.startswith("#line"):
        raise PreprocessorError(
            "msvc preprocessor output did not start with a #line directive"
        )
    # the quoted filename (including the trailing newline) identifies lines
    # belonging to the original file
    fname = first[first.find('"') :]
    for line in fp:
        if line.startswith("#line"):
            # only emit content while the markers point at the original file
            keep = line.endswith(fname)
        if keep:
            new_output.write(line)
    new_output.seek(0)
    return new_output.read()
def make_msvc_preprocessor(
    *,
    defines: typing.List[str] = [],
    include_paths: typing.List[str] = [],
    retain_all_content: bool = False,
    encoding: typing.Optional[str] = None,
    msvc_args: typing.List[str] = ["cl.exe"],
    print_cmd: bool = True,
) -> PreprocessorFunction:
    """
    Builds a preprocessor function that invokes cl.exe from Microsoft Visual
    Studio to preprocess the input text. cl.exe is not typically on the path,
    so you may need to open the correct developer tools shell or pass in the
    correct path to cl.exe in the `msvc_args` parameter.

    Note that cl.exe raises an error whenever a file referenced by an
    #include directive is not found.

    :param defines: list of #define macros specified as "key value"
    :param include_paths: list of directories to search for included files
    :param retain_all_content: If False, only the parsed file content will be retained
    :param encoding: If specified any include files are opened with this encoding
    :param msvc_args: This is the path to cl.exe and any extra args you might want
    :param print_cmd: Prints the command as its executed

    .. code-block:: python

        pp = make_msvc_preprocessor()
        options = ParserOptions(preprocessor=pp)
        parse_file(content, options=options)
    """
    if not encoding:
        encoding = "utf-8"

    def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
        # /nologo hides the banner, /E stops after preprocessing, /C keeps comments
        cmd = msvc_args + ["/nologo", "/E", "/C"]
        cmd.extend(f"/I{p}" for p in include_paths)
        cmd.extend(f"/D{d.replace(' ', '=')}" for d in defines)

        tmpname = None
        try:
            kwargs = {"encoding": encoding}
            if filename == "<str>":
                # cl.exe has no stdin mode here: write content to a temp .h file
                if content is None:
                    raise PreprocessorError("no content specified for stdin")
                tmp = tempfile.NamedTemporaryFile(
                    mode="w", encoding=encoding, suffix=".h", delete=False
                )
                tmpname = tmp.name
                tmp.write(content)
                tmp.close()
                cmd.append(tmpname)
            else:
                cmd.append(filename)

            if print_cmd:
                print("+", " ".join(cmd), file=sys.stderr)

            result: str = subprocess.check_output(cmd, **kwargs)  # type: ignore
            if not retain_all_content:
                # drop content that came from #included files
                result = _msvc_filter(io.StringIO(result))
        finally:
            # never leave the temporary header behind
            if tmpname:
                os.unlink(tmpname)

        return result

    return _preprocess_file
#
# PCPP preprocessor support (not installed by default)
#
# pcpp is an optional dependency: when it is missing, the name is set to
# None and make_pcpp_preprocessor reports the error at creation time
try:
    import pcpp
    from pcpp import Preprocessor, OutputDirective, Action

    class _CustomPreprocessor(Preprocessor):
        """
        pcpp Preprocessor subclass that accumulates errors instead of
        printing them, passes missing #includes through unchanged, and
        keeps comments in the output (so doxygen content survives)
        """

        def __init__(
            self,
            encoding: typing.Optional[str],
            passthru_includes: typing.Optional["re.Pattern"],
        ):
            Preprocessor.__init__(self)
            # error messages collected during parsing; checked by the
            # caller after parse() and raised as a PreprocessorError
            self.errors: typing.List[str] = []
            self.assume_encoding = encoding
            self.passthru_includes = passthru_includes

        def on_error(self, file, line, msg):
            # record the error instead of pcpp's default stderr output
            self.errors.append(f"{file}:{line} error: {msg}")

        def on_include_not_found(self, *ignored):
            # emit the #include directive as-is rather than failing
            raise OutputDirective(Action.IgnoreAndPassThrough)

        def on_comment(self, *ignored):
            # NOTE(review): returning a truthy value appears to tell pcpp to
            # retain the comment in the output -- confirm against pcpp docs
            return True

except ImportError:
    pcpp = None
def _pcpp_filter(fname: str, fp: typing.TextIO) -> str:
    """
    Reduces pcpp output to just the content of the original file.

    pcpp's output interleaves the contents of every #included file, which
    isn't what a typical user of cxxheaderparser wants, so drop every
    section whose #line directive names a different file.
    """
    marker = f'{fname}"\n'

    emitting = True
    kept: typing.List[str] = []
    for line in fp:
        if line.startswith("#line"):
            # keep emitting only while inside a section from the original file
            emitting = line.endswith(marker)
        if emitting:
            kept.append(line)
    return "".join(kept)
def make_pcpp_preprocessor(
    *,
    defines: typing.List[str] = [],
    include_paths: typing.List[str] = [],
    retain_all_content: bool = False,
    encoding: typing.Optional[str] = None,
    passthru_includes: typing.Optional["re.Pattern"] = None,
) -> PreprocessorFunction:
    """
    Creates a preprocessor function that preprocesses the input text using
    pcpp (which must be installed separately).

    If missing #include files are encountered, this preprocessor will ignore the
    error. This preprocessor is pure python so it's very portable, and is a good
    choice if performance isn't critical.

    :param defines: list of #define macros specified as "key value"
    :param include_paths: list of directories to search for included files
    :param retain_all_content: If False, only the parsed file content will be retained
    :param encoding: If specified any include files are opened with this encoding
    :param passthru_includes: If specified any #include directives that match the
                              compiled regex pattern will be part of the output.

    .. code-block:: python

        pp = make_pcpp_preprocessor()
        options = ParserOptions(preprocessor=pp)

        parse_file(content, options=options)

    """
    if pcpp is None:
        raise PreprocessorError("pcpp is not installed")

    def _preprocess_file(filename: str, content: typing.Optional[str]) -> str:
        preproc = _CustomPreprocessor(encoding, passthru_includes)
        for path in include_paths:
            preproc.add_path(path)
        for define in defines:
            preproc.define(define)

        if not retain_all_content:
            # emit #line markers so the original content can be extracted later
            preproc.line_directive = "#line"

        if content is None:
            with open(filename, "r", encoding=encoding) as fp:
                content = fp.read()

        preproc.parse(content, filename)

        if preproc.errors:
            raise PreprocessorError("\n".join(preproc.errors))
        elif preproc.return_code:
            raise PreprocessorError("failed with exit code %d" % preproc.return_code)

        buf = io.StringIO()
        preproc.write(buf)
        buf.seek(0)

        if retain_all_content:
            return buf.read()

        # pcpp emits the #line directive using the filename you pass in
        # but will rewrite it if it's on the include path it uses. This
        # is copied from pcpp:
        abssource = os.path.abspath(filename)
        for rewrite in preproc.rewrite_paths:
            rewritten = re.sub(rewrite[0], rewrite[1], abssource)
            if rewritten != abssource:
                filename = rewritten
                if os.sep != "/":
                    filename = filename.replace(os.sep, "/")
                break

        return _pcpp_filter(filename, buf)

    return _preprocess_file

0
cxxheaderparser/py.typed Normal file
View File

View File

@@ -7,8 +7,25 @@ your own parser listener, but you can accomplish most things with it.
cxxheaderparser's unit tests predominantly use the simple API for parsing,
so you can expect it to be pretty stable.
The :func:`parse_string` and :func:`parse_file` functions are a great place
to start:
.. code-block:: python
from cxxheaderparser.simple import parse_string
content = '''
int x;
'''
parsed_data = parse_string(content)
See below for the contents of the returned :class:`ParsedData`.
"""
import os
import sys
import inspect
import typing
@@ -17,21 +34,24 @@ from dataclasses import dataclass, field
from .types import (
ClassDecl,
Concept,
DeductionGuide,
EnumDecl,
Field,
ForwardDecl,
FriendDecl,
Function,
Method,
NamespaceAlias,
TemplateInst,
Typedef,
UsingAlias,
UsingDecl,
Variable,
Value,
)
from .parserstate import (
State,
EmptyBlockState,
ClassBlockState,
ExternBlockState,
NamespaceBlockState,
@@ -46,7 +66,11 @@ from .options import ParserOptions
@dataclass
class ClassScope:
"""
Contains all data collected for a single C++ class
"""
#: Information about the class declaration is here
class_decl: ClassDecl
#: Nested classes
@@ -64,35 +88,52 @@ class ClassScope:
@dataclass
class NamespaceScope:
"""
Contains all data collected for a single namespace. Content for child
namespaces are found in the ``namespaces`` attribute.
"""
name: str = ""
inline: bool = False
doxygen: typing.Optional[str] = None
classes: typing.List["ClassScope"] = field(default_factory=list)
enums: typing.List[EnumDecl] = field(default_factory=list)
functions: typing.List[Method] = field(default_factory=list)
#: Function declarations (with or without body)
functions: typing.List[Function] = field(default_factory=list)
#: Method implementations outside of a class (must have a body)
method_impls: typing.List[Method] = field(default_factory=list)
typedefs: typing.List[Typedef] = field(default_factory=list)
variables: typing.List[Variable] = field(default_factory=list)
forward_decls: typing.List[ForwardDecl] = field(default_factory=list)
using: typing.List[UsingDecl] = field(default_factory=list)
using_ns: typing.List[UsingDecl] = field(default_factory=list)
using_ns: typing.List["UsingNamespace"] = field(default_factory=list)
using_alias: typing.List[UsingAlias] = field(default_factory=list)
ns_alias: typing.List[NamespaceAlias] = field(default_factory=list)
#: Concepts
concepts: typing.List[Concept] = field(default_factory=list)
#: Explicit template instantiations
template_insts: typing.List[TemplateInst] = field(default_factory=list)
#: Child namespaces
namespaces: typing.Dict[str, "NamespaceScope"] = field(default_factory=dict)
#: Deduction guides
deduction_guides: typing.List[DeductionGuide] = field(default_factory=list)
Block = typing.Union[ClassScope, NamespaceScope]
@dataclass
class Define:
content: str
@dataclass
class Pragma:
content: str
content: Value
@dataclass
@@ -108,23 +149,9 @@ class UsingNamespace:
@dataclass
class ParsedData:
namespace: NamespaceScope = field(default_factory=lambda: NamespaceScope())
defines: typing.List[Define] = field(default_factory=list)
pragmas: typing.List[Pragma] = field(default_factory=list)
includes: typing.List[Include] = field(default_factory=list)
#
# Visitor implementation
#
class SimpleCxxVisitor:
"""
A simple visitor that stores all of the C++ elements passed to it
in an "easy" to use data structure
Container for information parsed by the :func:`parse_file` and
:func:`parse_string` functions.
.. warning:: Names are not resolved, so items are stored in the scope that
they are found. For example:
@@ -144,47 +171,60 @@ class SimpleCxxVisitor:
namespace instead of the 'N' namespace.
"""
#: Global namespace
namespace: NamespaceScope = field(default_factory=lambda: NamespaceScope())
#: Any ``#pragma`` directives encountered
pragmas: typing.List[Pragma] = field(default_factory=list)
#: Any ``#include`` directives encountered
includes: typing.List[Include] = field(default_factory=list)
#
# Visitor implementation
#
# define what user data we store in each state type
SClassBlockState = ClassBlockState[ClassScope, Block]
SExternBlockState = ExternBlockState[NamespaceScope, NamespaceScope]
SNamespaceBlockState = NamespaceBlockState[NamespaceScope, NamespaceScope]
SState = typing.Union[SClassBlockState, SExternBlockState, SNamespaceBlockState]
SNonClassBlockState = typing.Union[SExternBlockState, SNamespaceBlockState]
class SimpleCxxVisitor:
"""
A simple visitor that stores all of the C++ elements passed to it
in an "easy" to use data structure
You probably don't want to use this directly, use :func:`parse_file`
or :func:`parse_string` instead.
"""
data: ParsedData
namespace: NamespaceScope
block: Block
def __init__(self):
self.namespace = NamespaceScope("")
self.block = self.namespace
def on_parse_start(self, state: SNamespaceBlockState) -> None:
ns = NamespaceScope("")
self.data = ParsedData(ns)
state.user_data = ns
self.ns_stack = typing.Deque[NamespaceScope]()
self.block_stack = typing.Deque[Block]()
self.data = ParsedData(self.namespace)
def on_define(self, state: State, content: str) -> None:
self.data.defines.append(Define(content))
def on_pragma(self, state: State, content: str) -> None:
def on_pragma(self, state: SState, content: Value) -> None:
self.data.pragmas.append(Pragma(content))
def on_include(self, state: State, filename: str) -> None:
def on_include(self, state: SState, filename: str) -> None:
self.data.includes.append(Include(filename))
def on_empty_block_start(self, state: EmptyBlockState) -> None:
# this matters for some scope/resolving purposes, but you're
# probably going to want to use clang if you care about that
# level of detail
def on_extern_block_start(self, state: SExternBlockState) -> typing.Optional[bool]:
state.user_data = state.parent.user_data
return None
def on_extern_block_end(self, state: SExternBlockState) -> None:
pass
def on_empty_block_end(self, state: EmptyBlockState) -> None:
pass
def on_extern_block_start(self, state: ExternBlockState) -> None:
pass # TODO
def on_extern_block_end(self, state: ExternBlockState) -> None:
pass
def on_namespace_start(self, state: NamespaceBlockState) -> None:
parent_ns = self.namespace
self.block_stack.append(parent_ns)
self.ns_stack.append(parent_ns)
def on_namespace_start(self, state: SNamespaceBlockState) -> typing.Optional[bool]:
parent_ns = state.parent.user_data
ns = None
names = state.namespace.names
@@ -199,69 +239,98 @@ class SimpleCxxVisitor:
parent_ns.namespaces[name] = ns
parent_ns = ns
self.block = ns
self.namespace = ns
assert ns is not None
def on_namespace_end(self, state: NamespaceBlockState) -> None:
self.block = self.block_stack.pop()
self.namespace = self.ns_stack.pop()
# only set inline/doxygen on inner namespace
ns.inline = state.namespace.inline
ns.doxygen = state.namespace.doxygen
def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
self.block.forward_decls.append(fdecl)
state.user_data = ns
return None
def on_variable(self, state: State, v: Variable) -> None:
self.block.variables.append(v)
def on_namespace_end(self, state: SNamespaceBlockState) -> None:
pass
def on_function(self, state: State, fn: Function) -> None:
self.block.functions.append(fn)
def on_concept(self, state: SNonClassBlockState, concept: Concept) -> None:
state.user_data.concepts.append(concept)
def on_typedef(self, state: State, typedef: Typedef) -> None:
self.block.typedefs.append(typedef)
def on_namespace_alias(
self, state: SNonClassBlockState, alias: NamespaceAlias
) -> None:
state.user_data.ns_alias.append(alias)
def on_using_namespace(self, state: State, namespace: typing.List[str]) -> None:
def on_forward_decl(self, state: SState, fdecl: ForwardDecl) -> None:
state.user_data.forward_decls.append(fdecl)
def on_template_inst(self, state: SState, inst: TemplateInst) -> None:
assert isinstance(state.user_data, NamespaceScope)
state.user_data.template_insts.append(inst)
def on_variable(self, state: SState, v: Variable) -> None:
assert isinstance(state.user_data, NamespaceScope)
state.user_data.variables.append(v)
def on_function(self, state: SNonClassBlockState, fn: Function) -> None:
state.user_data.functions.append(fn)
def on_method_impl(self, state: SNonClassBlockState, method: Method) -> None:
state.user_data.method_impls.append(method)
def on_typedef(self, state: SState, typedef: Typedef) -> None:
state.user_data.typedefs.append(typedef)
def on_using_namespace(
self, state: SNonClassBlockState, namespace: typing.List[str]
) -> None:
ns = UsingNamespace("::".join(namespace))
self.block.using_ns.append(ns)
state.user_data.using_ns.append(ns)
def on_using_alias(self, state: State, using: UsingAlias):
self.block.using_alias.append(using)
def on_using_alias(self, state: SState, using: UsingAlias) -> None:
state.user_data.using_alias.append(using)
def on_using_declaration(self, state: State, using: UsingDecl) -> None:
self.block.using.append(using)
def on_using_declaration(self, state: SState, using: UsingDecl) -> None:
state.user_data.using.append(using)
#
# Enums
#
def on_enum(self, state: State, enum: EnumDecl) -> None:
self.block.enums.append(enum)
def on_enum(self, state: SState, enum: EnumDecl) -> None:
state.user_data.enums.append(enum)
#
# Class/union/struct
#
def on_class_start(self, state: ClassBlockState) -> None:
def on_class_start(self, state: SClassBlockState) -> typing.Optional[bool]:
parent = state.parent.user_data
block = ClassScope(state.class_decl)
self.block.classes.append(block)
self.block_stack.append(self.block)
self.block = block
parent.classes.append(block)
state.user_data = block
return None
def on_class_field(self, state: State, f: Field) -> None:
self.block.fields.append(f)
def on_class_field(self, state: SClassBlockState, f: Field) -> None:
state.user_data.fields.append(f)
def on_class_method(self, state: ClassBlockState, method: Method) -> None:
self.block.methods.append(method)
def on_class_method(self, state: SClassBlockState, method: Method) -> None:
state.user_data.methods.append(method)
def on_class_friend(self, state: ClassBlockState, friend: FriendDecl):
self.block.friends.append(friend)
def on_class_friend(self, state: SClassBlockState, friend: FriendDecl) -> None:
state.user_data.friends.append(friend)
def on_class_end(self, state: ClassBlockState) -> None:
self.block = self.block_stack.pop()
def on_class_end(self, state: SClassBlockState) -> None:
pass
def on_deduction_guide(
self, state: SNonClassBlockState, guide: DeductionGuide
) -> None:
state.user_data.deduction_guides.append(guide)
def parse_string(
content: str,
*,
filename="<str>",
filename: str = "<str>",
options: typing.Optional[ParserOptions] = None,
cleandoc: bool = False,
) -> ParsedData:
@@ -279,7 +348,7 @@ def parse_string(
def parse_file(
filename: str,
filename: typing.Union[str, os.PathLike],
encoding: typing.Optional[str] = None,
*,
options: typing.Optional[ParserOptions] = None,
@@ -287,8 +356,18 @@ def parse_file(
"""
Simple function to parse a header from a file and return a data structure
"""
filename = os.fsdecode(filename)
with open(filename, encoding=encoding) as fp:
content = fp.read()
if encoding is None:
encoding = "utf-8-sig"
return parse_string(content, filename=filename, options=options)
if filename == "-":
content = sys.stdin.read()
else:
content = None
visitor = SimpleCxxVisitor()
parser = CxxParser(filename, content, visitor, options)
parser.parse()
return visitor.data

View File

@@ -1,15 +1,28 @@
from dataclasses import dataclass, field
import typing
from .lexer import Lexer
from .types import Token
from .lexer import LexToken, PlyLexer, LexerTokenStream
# key: token type, value: (left spacing, right spacing)
_want_spacing = {
"NUMBER": (2, 2),
"FLOAT_NUMBER": (2, 2),
"FLOAT_CONST": (2, 2),
"HEX_FLOAT_CONST": (2, 2),
"INT_CONST_HEX": (2, 2),
"INT_CONST_BIN": (2, 2),
"INT_CONST_OCT": (2, 2),
"INT_CONST_DEC": (2, 2),
"INT_CONST_CHAR": (2, 2),
"NAME": (2, 2),
"CHAR_LITERAL": (2, 2),
"CHAR_CONST": (2, 2),
"WCHAR_CONST": (2, 2),
"U8CHAR_CONST": (2, 2),
"U16CHAR_CONST": (2, 2),
"U32CHAR_CONST": (2, 2),
"STRING_LITERAL": (2, 2),
"WSTRING_LITERAL": (2, 2),
"U8STRING_LITERAL": (2, 2),
"U16STRING_LITERAL": (2, 2),
"U32STRING_LITERAL": (2, 2),
"ELLIPSIS": (2, 2),
">": (0, 2),
")": (0, 1),
@@ -19,7 +32,28 @@ _want_spacing = {
"&": (0, 2),
}
_want_spacing.update(dict.fromkeys(Lexer.keywords, (2, 2)))
_want_spacing.update(dict.fromkeys(PlyLexer.keywords, (2, 2)))
@dataclass
class Token:
"""
In an ideal world, this Token class would not be exposed via the user
visible API. Unfortunately, getting to that point would take a significant
amount of effort.
It is not expected that these will change, but they might.
At the moment, the only supported use of Token objects are in conjunction
with the ``tokfmt`` function. As this library matures, we'll try to clarify
the expectations around these. File an issue on github if you have ideas!
"""
#: Raw value of the token
value: str
#: Lex type of the token
type: str = field(repr=False, compare=False, default="")
def tokfmt(toks: typing.List[Token]) -> str:
@@ -47,18 +81,18 @@ def tokfmt(toks: typing.List[Token]) -> str:
return "".join(vals)
if __name__ == "__main__":
if __name__ == "__main__": # pragma: no cover
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("header")
args = parser.parse_args()
lexer = Lexer(args.header)
with open(lexer.filename) as fp:
lexer.input(fp.read())
filename: str = args.header
with open(filename) as fp:
lexer = LexerTokenStream(filename, fp.read())
toks = []
toks: typing.List[Token] = []
while True:
tok = lexer.token_eof_ok()
if not tok:
@@ -68,7 +102,7 @@ if __name__ == "__main__":
print(tokfmt(toks))
toks = []
else:
toks.append(tok)
toks.append(Token(tok.value, tok.type))
print(toks)
print(tokfmt(toks))

View File

@@ -1,26 +1,7 @@
import typing
from dataclasses import dataclass, field
@dataclass
class Token:
"""
In an ideal world, this Token class would not be exposed via the user
visible API. Unfortunately, getting to that point would take a significant
amount of effort.
It is not expected that these will change, but they might.
At the moment, the only supported use of Token objects are in conjunction
with the ``tokfmt`` function. As this library matures, we'll try to clarify
the expectations around these. File an issue on github if you have ideas!
"""
#: Raw value of the token
value: str
#: Lex type of the token
type: str = field(repr=False, compare=False, default="")
from .tokfmt import tokfmt, Token
@dataclass
@@ -37,6 +18,29 @@ class Value:
#: Tokens corresponding to the value
tokens: typing.List[Token]
def format(self) -> str:
return tokfmt(self.tokens)
@dataclass
class NamespaceAlias:
"""
A namespace alias
.. code-block:: c++
namespace ANS = my::ns;
~~~ ~~~~~~
"""
alias: str
#: These are the names (split by ::) for the namespace that this alias
#: refers to, but does not include any parent namespace names. It may
#: include a leading "::", but does not include a following :: string.
names: typing.List[str]
@dataclass
class NamespaceDecl:
@@ -56,6 +60,9 @@ class NamespaceDecl:
names: typing.List[str]
inline: bool = False
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass
class DecltypeSpecifier:
@@ -71,15 +78,25 @@ class DecltypeSpecifier:
#: Unparsed tokens within the decltype
tokens: typing.List[Token]
def format(self) -> str:
return f"decltype({tokfmt(self.tokens)})"
@dataclass
class FundamentalSpecifier:
"""
A specifier that only contains fundamental types
A specifier that only contains fundamental types.
Fundamental types include various combinations of the following: unsigned,
signed, short, int, long, float, double, char, bool, char16_t, char32_t,
nullptr_t, wchar_t, void
"""
name: str
def format(self) -> str:
return self.name
@dataclass
class NameSpecifier:
@@ -95,7 +112,13 @@ class NameSpecifier:
name: str
specialization: typing.Optional[typing.List["TemplateSpecialization"]] = None
specialization: typing.Optional["TemplateSpecialization"] = None
def format(self) -> str:
if self.specialization:
return f"{self.name}{self.specialization.format()}"
else:
return self.name
@dataclass
@@ -106,6 +129,9 @@ class AutoSpecifier:
name: str = "auto"
def format(self) -> str:
return self.name
@dataclass
class AnonymousName:
@@ -118,6 +144,10 @@ class AnonymousName:
#: Unique id associated with this name (only unique per parser instance!)
id: int
def format(self) -> str:
# TODO: not sure what makes sense here, subject to change
return f"<<id={self.id}>>"
PQNameSegment = typing.Union[
AnonymousName, FundamentalSpecifier, NameSpecifier, DecltypeSpecifier, AutoSpecifier
@@ -140,6 +170,273 @@ class PQName:
#: Set if the name starts with class/enum/struct
classkey: typing.Optional[str] = None
#: Set to true if the type was preceded with 'typename'
has_typename: bool = False
def format(self) -> str:
tn = "typename " if self.has_typename else ""
if self.classkey:
return f"{tn}{self.classkey} {'::'.join(seg.format() for seg in self.segments)}"
else:
return tn + "::".join(seg.format() for seg in self.segments)
@dataclass
class TemplateArgument:
"""
A single argument for a template specialization
.. code-block:: c++
Foo<int, Bar...>
~~~
"""
#: If this argument is a type, it is stored here as a DecoratedType,
#: otherwise it's stored as an unparsed set of values
arg: typing.Union["DecoratedType", "FunctionType", Value]
param_pack: bool = False
def format(self) -> str:
if self.param_pack:
return f"{self.arg.format()}..."
else:
return self.arg.format()
@dataclass
class TemplateSpecialization:
"""
Contains the arguments of a template specialization
.. code-block:: c++
Foo<int, Bar...>
~~~~~~~~~~~
"""
args: typing.List[TemplateArgument]
def format(self) -> str:
return f"<{', '.join(arg.format() for arg in self.args)}>"
@dataclass
class FunctionType:
"""
A function type, currently only used in a function pointer
.. note:: There can only be one of FunctionType or Type in a DecoratedType
chain
"""
return_type: "DecoratedType"
parameters: typing.List["Parameter"]
#: If a member function pointer
# TODO classname: typing.Optional[PQName]
#: Set to True if ends with ``...``
vararg: bool = False
#: True if function has a trailing return type (``auto foo() -> int``).
#: In this case, the 'auto' return type is removed and replaced with
#: whatever the trailing return type was
has_trailing_return: bool = False
noexcept: typing.Optional[Value] = None
#: Only set if an MSVC calling convention (__stdcall, etc) is explicitly
#: specified.
#:
#: .. note:: If your code contains things like WINAPI, you will need to
#: use a preprocessor to transform it to the appropriate
#: calling convention
msvc_convention: typing.Optional[str] = None
def format(self) -> str:
vararg = "..." if self.vararg else ""
params = ", ".join(p.format() for p in self.parameters)
if self.has_trailing_return:
return f"auto ({params}{vararg}) -> {self.return_type.format()}"
else:
return f"{self.return_type.format()} ({params}{vararg})"
def format_decl(self, name: str) -> str:
"""Format as a named declaration"""
vararg = "..." if self.vararg else ""
params = ", ".join(p.format() for p in self.parameters)
if self.has_trailing_return:
return f"auto {name}({params}{vararg}) -> {self.return_type.format()}"
else:
return f"{self.return_type.format()} {name}({params}{vararg})"
@dataclass
class Type:
"""
A type with a name associated with it
"""
typename: PQName
const: bool = False
volatile: bool = False
def format(self) -> str:
c = "const " if self.const else ""
v = "volatile " if self.volatile else ""
return f"{c}{v}{self.typename.format()}"
def format_decl(self, name: str):
"""Format as a named declaration"""
c = "const " if self.const else ""
v = "volatile " if self.volatile else ""
return f"{c}{v}{self.typename.format()} {name}"
@dataclass
class Array:
"""
Information about an array. Multidimensional arrays are represented as
an array of array.
"""
#: The type that this is an array of
array_of: typing.Union["Array", "Pointer", "PointerToMember", Type]
#: Size of the array
#:
#: .. code-block:: c++
#:
#: int x[10];
#: ~~
size: typing.Optional[Value]
def format(self) -> str:
s = self.size.format() if self.size else ""
return f"{self.array_of.format()}[{s}]"
def format_decl(self, name: str) -> str:
s = self.size.format() if self.size else ""
return f"{self.array_of.format()} {name}[{s}]"
@dataclass
class Pointer:
"""
A pointer
"""
#: Thing that this points to
ptr_to: typing.Union[Array, FunctionType, "Pointer", "PointerToMember", Type]
const: bool = False
volatile: bool = False
def format(self) -> str:
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"(*{c}{v})")
else:
return f"{ptr_to.format()}*{c}{v}"
def format_decl(self, name: str):
"""Format as a named declaration"""
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"(*{c}{v} {name})")
else:
return f"{ptr_to.format()}*{c}{v} {name}"
@dataclass
class PointerToMember:
"""
Pointer to a class member. (``Class::* int``)
"""
#: Thing that this points to
base_type: Type
ptr_to: typing.Union[Array, FunctionType, "Pointer", "PointerToMember", Type]
const: bool = False
volatile: bool = False
def format(self) -> str:
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"({self.base_type.format()}::*{c}{v})")
else:
return f"{ptr_to.format()} {self.base_type.format()}::*{c}{v}"
def format_decl(self, name: str):
"""Format as a named declaration"""
c = " const" if self.const else ""
v = " volatile" if self.volatile else ""
ptr_to = self.ptr_to
if isinstance(ptr_to, (Array, FunctionType)):
return ptr_to.format_decl(f"({self.base_type.format()}::*{c}{v} {name})")
else:
return f"{ptr_to.format()} {self.base_type.format()}::*{c}{v} {name}"
@dataclass
class Reference:
"""
A lvalue (``&``) reference
"""
ref_to: typing.Union[Array, FunctionType, Pointer, PointerToMember, Type]
def format(self) -> str:
ref_to = self.ref_to
if isinstance(ref_to, Array):
return ref_to.format_decl("(&)")
else:
return f"{ref_to.format()}&"
def format_decl(self, name: str):
"""Format as a named declaration"""
ref_to = self.ref_to
if isinstance(ref_to, Array):
return ref_to.format_decl(f"(& {name})")
else:
return f"{ref_to.format()}& {name}"
@dataclass
class MoveReference:
"""
An rvalue (``&&``) reference
"""
moveref_to: typing.Union[Array, FunctionType, Pointer, PointerToMember, Type]
def format(self) -> str:
return f"{self.moveref_to.format()}&&"
def format_decl(self, name: str):
"""Format as a named declaration"""
return f"{self.moveref_to.format()}&& {name}"
#: A type or function type that is decorated with various things
#:
#: .. note:: There can only be one of FunctionType or Type in a DecoratedType
#: chain
DecoratedType = typing.Union[Array, Pointer, PointerToMember, MoveReference, Reference, Type]
@dataclass
class Enumerator:
@@ -176,142 +473,6 @@ class EnumDecl:
access: typing.Optional[str] = None
@dataclass
class TemplateArgument:
"""
A single argument for a template specialization
.. code-block:: c++
Foo<int, Bar...>
~~~
"""
#: This contains unparsed arbitrary expressions, including additional
#: specializations or decltypes or whatever
tokens: typing.List[Token]
@dataclass
class TemplateSpecialization:
"""
Contains the arguments of a template specialization
.. code-block:: c++
Foo<int, Bar...>
~~~~~~~~~~~
"""
args: typing.List[TemplateArgument]
#: If True, indicates a parameter pack (...) on the last parameter
param_pack: bool = False
@dataclass
class FunctionType:
"""
A function type, currently only used in a function pointer
.. note:: There can only be one of FunctionType or Type in a DecoratedType
chain
"""
return_type: "DecoratedType"
parameters: typing.List["Parameter"]
#: If a member function pointer
# TODO classname: typing.Optional[PQName]
#: Set to True if ends with ``...``
vararg: bool = False
@dataclass
class Type:
""""""
typename: PQName
const: bool = False
volatile: bool = False
def get_type(self) -> "Type":
return self
@dataclass
class Array:
"""
Information about an array. Multidimensional arrays are represented as
an array of array.
"""
#: The type that this is an array of
array_of: typing.Union["Array", "Pointer", Type]
#: Size of the array
#:
#: .. code-block:: c++
#:
#: int x[10];
#: ~~
size: typing.Optional[Value]
def get_type(self) -> Type:
return self.array_of.get_type()
@dataclass
class Pointer:
"""
A pointer
"""
#: Thing that this points to
ptr_to: typing.Union[Array, FunctionType, "Pointer", Type]
const: bool = False
volatile: bool = False
def get_type(self) -> Type:
return self.ptr_to.get_type()
@dataclass
class Reference:
"""
A lvalue (``&``) reference
"""
ref_to: typing.Union[Array, Pointer, Type]
def get_type(self) -> Type:
return self.ref_to.get_type()
@dataclass
class MoveReference:
"""
An rvalue (``&&``) reference
"""
moveref_to: typing.Union[Array, Pointer, Type]
def get_type(self) -> Type:
return self.moveref_to.get_type()
#: A type or function type that is decorated with various things
#:
#: .. note:: There can only be one of FunctionType or Type in a DecoratedType
#: chain
DecoratedType = typing.Union[Array, Pointer, MoveReference, Reference, Type]
@dataclass
class TemplateNonTypeParam:
"""
@@ -326,12 +487,19 @@ class TemplateNonTypeParam:
template <auto T>
~~~~~~
// abbreviated template parameters are converted to this and param_idx is set
void fn(C auto p)
~~~~~~
"""
type: DecoratedType
name: typing.Optional[str] = None
default: typing.Optional[Value] = None
#: If this was promoted, the parameter index that this corresponds with
param_idx: typing.Optional[int] = None
#: Contains a ``...``
param_pack: bool = False
@@ -385,6 +553,74 @@ class TemplateDecl:
params: typing.List[TemplateParam] = field(default_factory=list)
# Currently don't interpret requires, if that changes in the future
# then this API will change.
#: template <typename T> requires ...
raw_requires_pre: typing.Optional[Value] = None
#: If no template, this is None. This is a TemplateDecl if this there is a single
#: declaration:
#:
#: .. code-block:: c++
#:
#: template <typename T>
#: struct C {};
#:
#: If there are multiple template declarations, then this is a list of
#: declarations in the order that they're encountered:
#:
#: .. code-block:: c++
#:
#: template<>
#: template<class U>
#: struct A<char>::C {};
#:
TemplateDeclTypeVar = typing.Union[None, TemplateDecl, typing.List[TemplateDecl]]
@dataclass
class TemplateInst:
"""
Explicit template instantiation
.. code-block:: c++
template class MyClass<1,2>;
extern template class MyClass<2,3>;
"""
typename: PQName
extern: bool
doxygen: typing.Optional[str] = None
@dataclass
class Concept:
"""
Preliminary support for consuming headers that contain concepts, but
not trying to actually make sense of them at this time. If this is
something you care about, pull requests are welcomed!
.. code-block:: c++
template <class T>
concept Meowable = is_meowable<T>;
template<typename T>
concept Addable = requires (T x) { x + x; };
"""
template: TemplateDecl
name: str
#: In the future this will be removed if we fully parse the expression
raw_constraint: Value
doxygen: typing.Optional[str] = None
@dataclass
class ForwardDecl:
@@ -393,7 +629,7 @@ class ForwardDecl:
"""
typename: PQName
template: typing.Optional[TemplateDecl] = None
template: TemplateDeclTypeVar = None
doxygen: typing.Optional[str] = None
#: Set if this is a forward declaration of an enum and it has a base
@@ -431,7 +667,7 @@ class ClassDecl:
typename: PQName
bases: typing.List[BaseClass] = field(default_factory=list)
template: typing.Optional[TemplateDecl] = None
template: TemplateDeclTypeVar = None
explicit: bool = False
final: bool = False
@@ -442,7 +678,7 @@ class ClassDecl:
access: typing.Optional[str] = None
@property
def classkey(self) -> str:
def classkey(self) -> typing.Optional[str]:
return self.typename.classkey
@@ -457,6 +693,15 @@ class Parameter:
default: typing.Optional[Value] = None
param_pack: bool = False
def format(self) -> str:
default = f" = {self.default.format()}" if self.default else ""
pp = "... " if self.param_pack else ""
name = self.name
if name:
return f"{self.type.format_decl(f'{pp}{name}')}{default}"
else:
return f"{self.type.format()}{pp}{default}"
@dataclass
class Function:
@@ -464,7 +709,9 @@ class Function:
A function declaration, potentially with the function body
"""
return_type: DecoratedType
#: Only constructors and destructors don't have a return type
return_type: typing.Optional[DecoratedType]
name: PQName
parameters: typing.List[Parameter]
@@ -481,11 +728,45 @@ class Function:
#: If true, the body of the function is present
has_body: bool = False
template: typing.Optional[TemplateDecl] = None
#: True if function has a trailing return type (``auto foo() -> int``).
#: In this case, the 'auto' return type is removed and replaced with
#: whatever the trailing return type was
has_trailing_return: bool = False
template: TemplateDeclTypeVar = None
#: Value of any throw specification for this function. The value omits the
#: outer parentheses.
throw: typing.Optional[Value] = None
#: Value of any noexcept specification for this function. The value omits
#: the outer parentheses.
noexcept: typing.Optional[Value] = None
#: Only set if an MSVC calling convention (__stdcall, etc) is explictly
#: specified.
#:
#: .. note:: If your code contains things like WINAPI, you will need to
#: use a preprocessor to transform it to the appropriate
#: calling convention
msvc_convention: typing.Optional[str] = None
#: The operator type (+, +=, etc).
#:
#: If this object is a Function, then this is a free operator function. If
#: this object is a Method, then it is an operator method.
#:
#: In the case of a conversion operator (such as 'operator bool'), this
#: is the string "conversion" and the full Type is found in return_type
operator: typing.Optional[str] = None
#: A requires constraint following the function declaration. If you need the
#: prior, look at TemplateDecl.raw_requires_pre. At the moment this is just
#: a raw value, if we interpret it in the future this will change.
#:
#: template <typename T> int main() requires ...
raw_requires: typing.Optional[Value] = None
@dataclass
class Method(Function):
@@ -493,10 +774,8 @@ class Method(Function):
A method declaration, potentially with the method body
"""
#: constructors and destructors don't have a return type
return_type: typing.Optional[DecoratedType]
access: str = ""
#: If parsed within a class, the access level for this method
access: typing.Optional[str] = None
const: bool = False
volatile: bool = False
@@ -523,11 +802,6 @@ class Method(Function):
override: bool = False
@dataclass
class Operator(Method):
operator: str = ""
@dataclass
class FriendDecl:
"""
@@ -551,13 +825,13 @@ class Typedef:
"""
#: The aliased type
#: The aliased type or function type
#:
#: .. code-block:: c++
#:
#: typedef type *pname;
#: ~~~~~~
type: DecoratedType
type: typing.Union[DecoratedType, FunctionType]
#: The alias introduced for the specified type
#:
@@ -611,6 +885,7 @@ class Field:
constexpr: bool = False
mutable: bool = False
static: bool = False
inline: bool = False
doxygen: typing.Optional[str] = None
@@ -628,6 +903,9 @@ class UsingDecl:
#: If within a class, the access level for this decl
access: typing.Optional[str] = None
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass
class UsingAlias:
@@ -648,3 +926,24 @@ class UsingAlias:
#: If within a class, the access level for this decl
access: typing.Optional[str] = None
#: Documentation if present
doxygen: typing.Optional[str] = None
@dataclass
class DeductionGuide:
"""
.. code-block:: c++
template <class T>
MyClass(T) -> MyClass(int);
"""
#: Only constructors and destructors don't have a return type
result_type: typing.Optional[DecoratedType]
name: PQName
parameters: typing.List[Parameter]
doxygen: typing.Optional[str] = None

View File

@@ -2,29 +2,35 @@ import sys
import typing
if sys.version_info >= (3, 8):
Protocol = typing.Protocol
from typing import Protocol
else:
Protocol = object
Protocol = object # pragma: no cover
from .types import (
Concept,
DeductionGuide,
EnumDecl,
Field,
ForwardDecl,
FriendDecl,
Function,
Method,
NamespaceAlias,
TemplateInst,
Typedef,
UsingAlias,
UsingDecl,
Variable,
Value,
)
from .parserstate import (
State,
EmptyBlockState,
ClassBlockState,
ExternBlockState,
NamespaceBlockState,
NonClassBlockState,
)
@@ -33,14 +39,12 @@ class CxxVisitor(Protocol):
Defines the interface used by the parser to emit events
"""
def on_define(self, state: State, content: str) -> None:
def on_parse_start(self, state: NamespaceBlockState) -> None:
"""
.. warning:: cxxheaderparser intentionally does not have a C preprocessor
implementation. If you are parsing code with macros in it,
use a conforming preprocessor like ``pcpp``
Called when parsing begins
"""
def on_pragma(self, state: State, content: str) -> None:
def on_pragma(self, state: State, content: Value) -> None:
"""
Called once for each ``#pragma`` directive encountered
"""
@@ -50,22 +54,7 @@ class CxxVisitor(Protocol):
Called once for each ``#include`` directive encountered
"""
def on_empty_block_start(self, state: EmptyBlockState) -> None:
"""
Called when a ``{`` is encountered that isn't associated with or
consumed by other declarations.
.. code-block:: c++
{
// stuff
}
"""
def on_empty_block_end(self, state: EmptyBlockState) -> None:
...
def on_extern_block_start(self, state: ExternBlockState) -> None:
def on_extern_block_start(self, state: ExternBlockState) -> typing.Optional[bool]:
"""
.. code-block:: c++
@@ -73,14 +62,21 @@ class CxxVisitor(Protocol):
}
If this function returns False, the visitor will not be called for any
items inside this block (including on_extern_block_end)
"""
def on_extern_block_end(self, state: ExternBlockState) -> None:
...
"""
Called when an extern block ends
"""
def on_namespace_start(self, state: NamespaceBlockState) -> None:
def on_namespace_start(self, state: NamespaceBlockState) -> typing.Optional[bool]:
"""
Called when a ``namespace`` directive is encountered
If this function returns False, the visitor will not be called for any
items inside this namespace (including on_namespace_end)
"""
def on_namespace_end(self, state: NamespaceBlockState) -> None:
@@ -88,16 +84,56 @@ class CxxVisitor(Protocol):
Called at the end of a ``namespace`` block
"""
def on_namespace_alias(
self, state: NonClassBlockState, alias: NamespaceAlias
) -> None:
"""
Called when a ``namespace`` alias is encountered
"""
def on_concept(self, state: NonClassBlockState, concept: Concept) -> None:
"""
.. code-block:: c++
template <class T>
concept Meowable = is_meowable<T>;
"""
def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
"""
Called when a forward declaration is encountered
"""
def on_variable(self, state: State, v: Variable) -> None:
...
def on_template_inst(self, state: State, inst: TemplateInst) -> None:
"""
Called when an explicit template instantiation is encountered
"""
def on_function(self, state: State, fn: Function) -> None:
...
def on_variable(self, state: State, v: Variable) -> None:
"""
Called when a global variable is encountered
"""
def on_function(self, state: NonClassBlockState, fn: Function) -> None:
"""
Called when a function is encountered that isn't part of a class
"""
def on_method_impl(self, state: NonClassBlockState, method: Method) -> None:
"""
Called when a method implementation is encountered outside of a class
declaration. For example:
.. code-block:: c++
void MyClass::fn() {
// does something
}
.. note:: The above implementation is ambiguous, as it technically could
be a function in a namespace. We emit this instead as it's
more likely to be the case in common code.
"""
def on_typedef(self, state: State, typedef: Typedef) -> None:
"""
@@ -111,14 +147,16 @@ class CxxVisitor(Protocol):
once for ``*PT``
"""
def on_using_namespace(self, state: State, namespace: typing.List[str]) -> None:
def on_using_namespace(
self, state: NonClassBlockState, namespace: typing.List[str]
) -> None:
"""
.. code-block:: c++
using namespace std;
"""
def on_using_alias(self, state: State, using: UsingAlias):
def on_using_alias(self, state: State, using: UsingAlias) -> None:
"""
.. code-block:: c++
@@ -150,7 +188,7 @@ class CxxVisitor(Protocol):
# Class/union/struct
#
def on_class_start(self, state: ClassBlockState) -> None:
def on_class_start(self, state: ClassBlockState) -> typing.Optional[bool]:
"""
Called when a class/struct/union is encountered
@@ -163,6 +201,9 @@ class CxxVisitor(Protocol):
This is called first, followed by on_typedef for each typedef instance
encountered. The compound type object is passed as the type to the
typedef.
If this function returns False, the visitor will not be called for any
items inside this class (including on_class_end)
"""
def on_class_field(self, state: ClassBlockState, f: Field) -> None:
@@ -170,14 +211,14 @@ class CxxVisitor(Protocol):
Called when a field of a class is encountered
"""
def on_class_friend(self, state: ClassBlockState, friend: FriendDecl):
def on_class_friend(self, state: ClassBlockState, friend: FriendDecl) -> None:
"""
Called when a friend declaration is encountered
"""
def on_class_method(self, state: ClassBlockState, method: Method) -> None:
"""
Called when a method of a class is encountered
Called when a method of a class is encountered inside of a class
"""
def on_class_end(self, state: ClassBlockState) -> None:
@@ -195,3 +236,100 @@ class CxxVisitor(Protocol):
Then ``on_class_start``, .. ``on_class_end`` are emitted, along with
``on_variable`` for each instance declared.
"""
def on_deduction_guide(
self, state: NonClassBlockState, guide: DeductionGuide
) -> None:
"""
Called when a deduction guide is encountered
"""
class NullVisitor:
"""
This visitor does nothing
"""
def on_parse_start(self, state: NamespaceBlockState) -> None:
return None
def on_pragma(self, state: State, content: Value) -> None:
return None
def on_include(self, state: State, filename: str) -> None:
return None
def on_extern_block_start(self, state: ExternBlockState) -> typing.Optional[bool]:
return None
def on_extern_block_end(self, state: ExternBlockState) -> None:
return None
def on_namespace_start(self, state: NamespaceBlockState) -> typing.Optional[bool]:
return None
def on_namespace_end(self, state: NamespaceBlockState) -> None:
return None
def on_concept(self, state: NonClassBlockState, concept: Concept) -> None:
return None
def on_namespace_alias(
self, state: NonClassBlockState, alias: NamespaceAlias
) -> None:
return None
def on_forward_decl(self, state: State, fdecl: ForwardDecl) -> None:
return None
def on_template_inst(self, state: State, inst: TemplateInst) -> None:
return None
def on_variable(self, state: State, v: Variable) -> None:
return None
def on_function(self, state: NonClassBlockState, fn: Function) -> None:
return None
def on_method_impl(self, state: NonClassBlockState, method: Method) -> None:
return None
def on_typedef(self, state: State, typedef: Typedef) -> None:
return None
def on_using_namespace(
self, state: NonClassBlockState, namespace: typing.List[str]
) -> None:
return None
def on_using_alias(self, state: State, using: UsingAlias) -> None:
return None
def on_using_declaration(self, state: State, using: UsingDecl) -> None:
return None
def on_enum(self, state: State, enum: EnumDecl) -> None:
return None
def on_class_start(self, state: ClassBlockState) -> typing.Optional[bool]:
return None
def on_class_field(self, state: ClassBlockState, f: Field) -> None:
return None
def on_class_friend(self, state: ClassBlockState, friend: FriendDecl) -> None:
return None
def on_class_method(self, state: ClassBlockState, method: Method) -> None:
return None
def on_class_end(self, state: ClassBlockState) -> None:
return None
def on_deduction_guide(
self, state: NonClassBlockState, guide: DeductionGuide
) -> None:
return None
null_visitor = NullVisitor()

1
docs/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/_build

20
docs/Makefile Normal file
View File

@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

45
docs/conf.py Normal file
View File

@@ -0,0 +1,45 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
import os
import pkg_resources
# -- Project information -----------------------------------------------------
project = "cxxheaderparser"
copyright = "2020-2023, Dustin Spicuzza"
author = "Dustin Spicuzza"
# The full version, including alpha/beta/rc tags
release = pkg_resources.get_distribution("cxxheaderparser").version
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc", "sphinx_autodoc_typehints", "sphinx_rtd_theme"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
always_document_param_types = True

44
docs/custom.rst Normal file
View File

@@ -0,0 +1,44 @@
Custom parsing
==============
For many users, the data provided by the simple API is enough. In some advanced
cases you may find it necessary to use this more customizable parsing mechanism.
First, define a visitor that implements the :class:`CxxVisitor` protocol. Then
you can create an instance of it and pass it to the :class:`CxxParser`.
.. code-block:: python
visitor = MyVisitor()
parser = CxxParser(filename, content, visitor)
parser.parse()
# do something with the data collected by the visitor
Your visitor should do something with the data as the various callbacks are
called. See the :class:`SimpleCxxVisitor` for inspiration.
API
---
.. automodule:: cxxheaderparser.parser
:members:
:undoc-members:
.. automodule:: cxxheaderparser.visitor
:members:
:undoc-members:
Parser state
------------
.. automodule:: cxxheaderparser.parserstate
:members:
:undoc-members:
Preprocessor
------------
.. automodule:: cxxheaderparser.preprocessor
:members:
:undoc-members:

38
docs/index.rst Normal file
View File

@@ -0,0 +1,38 @@
.. cxxheaderparser documentation master file, created by
sphinx-quickstart on Thu Dec 31 00:46:02 2020.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
cxxheaderparser
===============
A pure python C++ header parser that parses C++ headers in a mildly naive
manner that allows it to handle many C++ constructs, including many modern
(C++11 and beyond) features.
.. warning:: cxxheaderparser intentionally does not use a C preprocessor by
default. If you are parsing code with macros in it, you need to
provide a preprocessor function in :py:class:`.ParserOptions`.
.. seealso:: :py:attr:`cxxheaderparser.options.ParserOptions.preprocessor`
.. _pcpp: https://github.com/ned14/pcpp
.. toctree::
:maxdepth: 2
:caption: Contents:
tools
simple
custom
types
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

35
docs/make.bat Normal file
View File

@@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

4
docs/requirements.txt Normal file
View File

@@ -0,0 +1,4 @@
sphinx >= 3.0
sphinx-rtd-theme
sphinx-autodoc-typehints
pcpp

12
docs/simple.rst Normal file
View File

@@ -0,0 +1,12 @@
.. _simple:
Simple API
==========
.. automodule:: cxxheaderparser.simple
:members:
:undoc-members:
.. automodule:: cxxheaderparser.options
:members:
:undoc-members:

41
docs/tools.rst Normal file
View File

@@ -0,0 +1,41 @@
Tools
=====
There are a variety of command line tools provided by the cxxheaderparser
project.
dump tool
---------
Dump data from a header to stdout
.. code-block:: sh
# pprint format
python -m cxxheaderparser myheader.h
# JSON format
python -m cxxheaderparser --mode=json myheader.h
# dataclasses repr format
python -m cxxheaderparser --mode=repr myheader.h
# dataclasses repr format (formatted with black)
python -m cxxheaderparser --mode=brepr myheader.h
Anything more than that and you should use the python API, start with the
:ref:`simple API <simple>` first.
test generator
--------------
To generate a unit test for cxxheaderparser:
* Put the C++ header content in a file
* Run the following:
.. code-block:: sh
python -m cxxheaderparser.gentest FILENAME.h TESTNAME
You can copy/paste the stdout to one of the test files in the tests directory.

16
docs/types.rst Normal file
View File

@@ -0,0 +1,16 @@
Types
=====
parser types
------------
.. automodule:: cxxheaderparser.types
:members:
:undoc-members:
exceptions
----------
.. automodule:: cxxheaderparser.errors
:members:
:undoc-members:

8
mypy.ini Normal file
View File

@@ -0,0 +1,8 @@
[mypy]
exclude = setup\.py|docs
[mypy-pcpp.*]
ignore_missing_imports = True
[mypy-cxxheaderparser._ply.*]
ignore_errors = True

View File

@@ -68,9 +68,11 @@ setup(
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
install_requires=["dataclasses; python_version < '3.7'"],
extras_require={"pcpp": ["pcpp~=1.30"]},
license="BSD",
platforms="Platform Independent",
packages=find_packages(),
package_data={"cxxheaderparser": ["py.typed"]},
keywords="c++ header parser ply",
python_requires=">= 3.6",
classifiers=CLASSIFIERS,

2
tests/requirements.txt Normal file
View File

@@ -0,0 +1,2 @@
pytest
pcpp~=1.30

353
tests/test_abv_template.py Normal file
View File

@@ -0,0 +1,353 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
#
# Tests various aspects of abbreviated function templates
#
from cxxheaderparser.simple import NamespaceScope, ParsedData, parse_string
from cxxheaderparser.types import (
AutoSpecifier,
Function,
FundamentalSpecifier,
NameSpecifier,
PQName,
Parameter,
Pointer,
Reference,
TemplateDecl,
TemplateNonTypeParam,
Type,
)
def test_abv_template_f1() -> None:
content = """
void f1(auto); // same as template<class T> void f1(T)
void f1p(auto p);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f1")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f1p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f2() -> None:
content = """
void f2(C1 auto); // same as template<C1 T> void f2(T), if C1 is a concept
void f2p(C1 auto p);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f2")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C1")])
),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f2p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C1")])
),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f3() -> None:
content = """
void f3(C2 auto...); // same as template<C2... Ts> void f3(Ts...), if C2 is a
// concept
void f3p(C2 auto p...);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f3")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
param_pack=True,
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
param_pack=True,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f3p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
param_pack=True,
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
param_pack=True,
)
]
),
),
]
)
)
def test_abv_template_f4() -> None:
content = """
void f4(C2 auto, ...); // same as template<C2 T> void f4(T...), if C2 is a concept
void f4p(C2 auto p,...);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f4")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()]))
)
],
vararg=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
)
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f4p")]),
parameters=[
Parameter(
type=Type(typename=PQName(segments=[AutoSpecifier()])),
name="p",
)
],
vararg=True,
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C2")])
),
param_idx=0,
)
]
),
),
]
)
)
def test_abv_template_f5() -> None:
content = """
void f5(const C3 auto *, C4 auto &); // same as template<C3 T, C4 U> void f5(const T*, U&);
void f5p(const C3 auto * p1, C4 auto &p2);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f5")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[AutoSpecifier()],
),
const=True,
)
)
),
Parameter(
type=Reference(
ref_to=Type(typename=PQName(segments=[AutoSpecifier()]))
)
),
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="C3")]
),
),
param_idx=0,
),
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C4")])
),
param_idx=1,
),
]
),
),
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f5p")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[AutoSpecifier()],
),
const=True,
)
),
name="p1",
),
Parameter(
type=Reference(
ref_to=Type(typename=PQName(segments=[AutoSpecifier()]))
),
name="p2",
),
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="C3")]
),
),
param_idx=0,
),
TemplateNonTypeParam(
type=Type(
typename=PQName(segments=[NameSpecifier(name="C4")])
),
param_idx=1,
),
]
),
),
]
)
)

View File

@@ -5,11 +5,15 @@ from cxxheaderparser.types import (
EnumDecl,
Enumerator,
Field,
FriendDecl,
Function,
FundamentalSpecifier,
Method,
NameSpecifier,
PQName,
Pointer,
TemplateDecl,
TemplateTypeParam,
Token,
Type,
Typedef,
@@ -24,7 +28,7 @@ from cxxheaderparser.simple import (
)
def test_attributes_everywhere():
def test_attributes_everywhere() -> None:
# TODO: someday we'll actually support storing attributes,
# but for now just make sure they don't get in the way
@@ -120,7 +124,7 @@ def test_attributes_everywhere():
)
def test_attributes_gcc_enum_packed():
def test_attributes_gcc_enum_packed() -> None:
content = """
enum Wheat {
w1,
@@ -146,3 +150,84 @@ def test_attributes_gcc_enum_packed():
]
)
)
def test_friendly_declspec() -> None:
content = """
struct D {
friend __declspec(dllexport) void my_friend();
static __declspec(dllexport) void static_declspec();
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="D")], classkey="struct"
)
),
friends=[
FriendDecl(
fn=Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="my_friend")]),
parameters=[],
access="public",
)
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(
segments=[NameSpecifier(name="static_declspec")]
),
parameters=[],
static=True,
access="public",
)
],
)
]
)
)
def test_declspec_template() -> None:
content = """
template <class T2>
__declspec(deprecated("message"))
static T2 fn() { return T2(); }
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="T2")])
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
static=True,
has_body=True,
template=TemplateDecl(
params=[TemplateTypeParam(typekey="class", name="T2")]
),
)
]
)
)

File diff suppressed because it is too large Load Diff

View File

@@ -21,7 +21,7 @@ from cxxheaderparser.simple import (
)
def test_class_private_base():
def test_class_private_base() -> None:
content = """
namespace Citrus
{
@@ -108,7 +108,7 @@ def test_class_private_base():
)
def test_class_virtual_base():
def test_class_virtual_base() -> None:
content = """
class BaseMangoClass {};
class MangoClass : virtual public BaseMangoClass {};
@@ -148,7 +148,7 @@ def test_class_virtual_base():
)
def test_class_multiple_base_with_virtual():
def test_class_multiple_base_with_virtual() -> None:
content = """
class BlueJay : public Bird, public virtual Food {
public:
@@ -193,7 +193,7 @@ def test_class_multiple_base_with_virtual():
)
def test_class_base_specialized():
def test_class_base_specialized() -> None:
content = """
class Pea : public Vegetable<Green> {
int i;
@@ -220,7 +220,15 @@ def test_class_base_specialized():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[Token(value="Green")]
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Green"
)
]
)
)
)
]
),

878
tests/test_concepts.py Normal file
View File

@@ -0,0 +1,878 @@
from cxxheaderparser.simple import ClassScope, NamespaceScope, ParsedData, parse_string
from cxxheaderparser.tokfmt import Token
from cxxheaderparser.types import (
AutoSpecifier,
ClassDecl,
Concept,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
PQName,
Parameter,
TemplateArgument,
TemplateDecl,
TemplateNonTypeParam,
TemplateSpecialization,
TemplateTypeParam,
Type,
Value,
Variable,
)
def test_concept_basic_constraint() -> None:
content = """
template <class T, class U>
concept Derived = std::is_base_of<U, T>::value;
template <Derived<Base> T> void f(T); // T is constrained by Derived<T, Base>
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[NameSpecifier(name="T")])
)
)
],
template=TemplateDecl(
params=[
TemplateNonTypeParam(
type=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Derived",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Base"
)
]
)
)
)
]
),
)
]
)
),
name="T",
)
]
),
)
],
concepts=[
Concept(
template=TemplateDecl(
params=[
TemplateTypeParam(typekey="class", name="T"),
TemplateTypeParam(typekey="class", name="U"),
]
),
name="Derived",
raw_constraint=Value(
tokens=[
Token(value="std"),
Token(value="::"),
Token(value="is_base_of"),
Token(value="<"),
Token(value="U"),
Token(value=","),
Token(value="T"),
Token(value=">"),
Token(value="::"),
Token(value="value"),
]
),
)
],
)
)
def test_concept_basic_constraint2() -> None:
    """Concepts whose constraints use constexpr variable templates; the second is a && conjunction."""
    content = """
template <class T> constexpr bool is_meowable = true;
template <class T> constexpr bool is_cat = true;
template <class T>
concept Meowable = is_meowable<T>;
template <class T>
concept BadMeowableCat = is_meowable<T> && is_cat<T>;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            # the two constexpr variable templates parse as templated variables
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="is_meowable")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="bool")])
                    ),
                    value=Value(tokens=[Token(value="true")]),
                    constexpr=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="class", name="T")]
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="is_cat")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="bool")])
                    ),
                    value=Value(tokens=[Token(value="true")]),
                    constexpr=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="class", name="T")]
                    ),
                ),
            ],
            # each concept's constraint expression is retained as raw tokens
            concepts=[
                Concept(
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="class", name="T")]
                    ),
                    name="Meowable",
                    raw_constraint=Value(
                        tokens=[
                            Token(value="is_meowable"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                        ]
                    ),
                ),
                Concept(
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="class", name="T")]
                    ),
                    name="BadMeowableCat",
                    raw_constraint=Value(
                        tokens=[
                            Token(value="is_meowable"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="&&"),
                            Token(value="is_cat"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                        ]
                    ),
                ),
            ],
        )
    )
def test_concept_basic_requires() -> None:
    """A concept defined by a requires-expression; its tokens are kept verbatim.

    Also checks that using the concept as a type-constraint (``template <Hashable T>``)
    is represented as a TemplateNonTypeParam whose type names the concept.
    """
    content = """
template <typename T>
concept Hashable = requires(T a) {
{ std::hash<T>{}(a) } -> std::convertible_to<std::size_t>;
};
template <Hashable T> void f(T) {}
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="f")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            )
                        )
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[
                            TemplateNonTypeParam(
                                type=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Hashable")]
                                    )
                                ),
                                name="T",
                            )
                        ]
                    ),
                )
            ],
            concepts=[
                Concept(
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                    name="Hashable",
                    # the entire requires-expression, token by token
                    raw_constraint=Value(
                        tokens=[
                            Token(value="requires"),
                            Token(value="("),
                            Token(value="T"),
                            Token(value="a"),
                            Token(value=")"),
                            Token(value="{"),
                            Token(value="{"),
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="hash"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="{"),
                            Token(value="}"),
                            Token(value="("),
                            Token(value="a"),
                            Token(value=")"),
                            Token(value="}"),
                            Token(value="->"),
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="convertible_to"),
                            Token(value="<"),
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="size_t"),
                            Token(value=">"),
                            Token(value=";"),
                            Token(value="}"),
                        ]
                    ),
                )
            ],
        )
    )
def test_concept_nested_requirements() -> None:
    """A concept combining conjunctions with a requires-expression containing
    nested requirements and compound requirements; everything is captured as
    one flat raw token stream."""
    content = """
template<class T>
concept Semiregular = DefaultConstructible<T> &&
CopyConstructible<T> && CopyAssignable<T> && Destructible<T> &&
requires(T a, std::size_t n)
{
requires Same<T*, decltype(&a)>; // nested: "Same<...> evaluates to true"
{ a.~T() } noexcept; // compound: "a.~T()" is a valid expression that doesn't throw
requires Same<T*, decltype(new T)>; // nested: "Same<...> evaluates to true"
requires Same<T*, decltype(new T[n])>; // nested
{ delete new T }; // compound
{ delete new T[n] }; // compound
};
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            concepts=[
                Concept(
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="class", name="T")]
                    ),
                    name="Semiregular",
                    # note: comments are dropped by the lexer, only code tokens remain
                    raw_constraint=Value(
                        tokens=[
                            Token(value="DefaultConstructible"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="&&"),
                            Token(value="CopyConstructible"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="&&"),
                            Token(value="CopyAssignable"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="&&"),
                            Token(value="Destructible"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="&&"),
                            Token(value="requires"),
                            Token(value="("),
                            Token(value="T"),
                            Token(value="a"),
                            Token(value=","),
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="size_t"),
                            Token(value="n"),
                            Token(value=")"),
                            Token(value="{"),
                            Token(value="requires"),
                            Token(value="Same"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value="*"),
                            Token(value=","),
                            Token(value="decltype"),
                            Token(value="("),
                            Token(value="&"),
                            Token(value="a"),
                            Token(value=")"),
                            Token(value=">"),
                            Token(value=";"),
                            Token(value="{"),
                            Token(value="a"),
                            Token(value="."),
                            Token(value="~T"),
                            Token(value="("),
                            Token(value=")"),
                            Token(value="}"),
                            Token(value="noexcept"),
                            Token(value=";"),
                            Token(value="requires"),
                            Token(value="Same"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value="*"),
                            Token(value=","),
                            Token(value="decltype"),
                            Token(value="("),
                            Token(value="new"),
                            Token(value="T"),
                            Token(value=")"),
                            Token(value=">"),
                            Token(value=";"),
                            Token(value="requires"),
                            Token(value="Same"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value="*"),
                            Token(value=","),
                            Token(value="decltype"),
                            Token(value="("),
                            Token(value="new"),
                            Token(value="T"),
                            Token(value="["),
                            Token(value="n"),
                            Token(value="]"),
                            Token(value=")"),
                            Token(value=">"),
                            Token(value=";"),
                            Token(value="{"),
                            Token(value="delete"),
                            Token(value="new"),
                            Token(value="T"),
                            Token(value="}"),
                            Token(value=";"),
                            Token(value="{"),
                            Token(value="delete"),
                            Token(value="new"),
                            Token(value="T"),
                            Token(value="["),
                            Token(value="n"),
                            Token(value="]"),
                            Token(value="}"),
                            Token(value=";"),
                            Token(value="}"),
                        ]
                    ),
                )
            ]
        )
    )
def test_concept_requires_class() -> None:
    """A requires-clause between the template parameter list and a class
    definition lands in TemplateDecl.raw_requires_pre."""
    content = """
// clang-format off
template <typename T>
concept Number = std::integral<T> || std::floating_point<T>;
template <typename T>
requires Number<T>
struct WrappedNumber {};
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="WrappedNumber")],
                            classkey="struct",
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="typename", name="T")],
                            # the clause before the class body
                            raw_requires_pre=Value(
                                tokens=[
                                    Token(value="Number"),
                                    Token(value="<"),
                                    Token(value="T"),
                                    Token(value=">"),
                                ]
                            ),
                        ),
                    )
                )
            ],
            concepts=[
                Concept(
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                    name="Number",
                    raw_constraint=Value(
                        tokens=[
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="integral"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                            Token(value="||"),
                            Token(value="std"),
                            Token(value="::"),
                            Token(value="floating_point"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                        ]
                    ),
                )
            ],
        )
    )
def test_requires_last_elem() -> None:
    """A requires-clause may appear as the last element of a function declarator;
    it is stored on the Function as raw_requires."""
    content = """
template<typename T>
void f(T&&) requires Eq<T>; // can appear as the last element of a function declarator
"""
    data = parse_string(content, cleandoc=True)

    # build the expectation piecewise for readability
    rvalue_param = Parameter(
        type=MoveReference(
            moveref_to=Type(typename=PQName(segments=[NameSpecifier(name="T")]))
        )
    )
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="f")]),
        parameters=[rvalue_param],
        template=TemplateDecl(
            params=[TemplateTypeParam(typekey="typename", name="T")]
        ),
        raw_requires=Value(
            tokens=[Token(value=tok) for tok in ("Eq", "<", "T", ">")]
        ),
    )

    assert data == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )
def test_requires_first_elem1() -> None:
    """A requires-clause directly after the template parameter list is stored
    in TemplateDecl.raw_requires_pre."""
    content = """
template<typename T> requires Addable<T> // or right after a template parameter list
T add(T a, T b) { return a + b; }
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="add")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="a",
                        ),
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="b",
                        ),
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")],
                        raw_requires_pre=Value(
                            tokens=[
                                Token(value="Addable"),
                                Token(value="<"),
                                Token(value="T"),
                                Token(value=">"),
                            ]
                        ),
                    ),
                )
            ]
        )
    )
def test_requires_first_elem2() -> None:
    """A requires-clause after the template parameter list using a qualified trait.

    Fixed: the expected token stream was missing the ``::`` scope token between
    ``std`` and ``is_arithmetic_v``. The lexer emits ``::`` as its own token,
    as every other qualified-name expectation in this file shows.
    """
    content = """
template<typename T> requires std::is_arithmetic_v<T>
T add(T a, T b) { return a + b; }
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="add")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="a",
                        ),
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="b",
                        ),
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")],
                        raw_requires_pre=Value(
                            tokens=[
                                Token(value="std"),
                                Token(value="::"),  # scope token was missing
                                Token(value="is_arithmetic_v"),
                                Token(value="<"),
                                Token(value="T"),
                                Token(value=">"),
                            ]
                        ),
                    ),
                )
            ]
        )
    )
def test_requires_compound() -> None:
    """A compound (||) requires-clause after the template parameter list is
    captured whole in raw_requires_pre."""
    content = """
template<typename T> requires Addable<T> || Subtractable<T>
T add(T a, T b) { return a + b; }
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="add")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="a",
                        ),
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="b",
                        ),
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")],
                        raw_requires_pre=Value(
                            tokens=[
                                Token(value="Addable"),
                                Token(value="<"),
                                Token(value="T"),
                                Token(value=">"),
                                Token(value="||"),
                                Token(value="Subtractable"),
                                Token(value="<"),
                                Token(value="T"),
                                Token(value=">"),
                            ]
                        ),
                    ),
                )
            ]
        )
    )
def test_requires_ad_hoc() -> None:
    """An ad-hoc constraint (``requires requires (...) {...}``); the second
    ``requires`` keyword starts the expression and is kept in the token stream."""
    content = """
template<typename T>
requires requires (T x) { x + x; } // ad-hoc constraint, note keyword used twice
T add(T a, T b) { return a + b; }
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="add")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="a",
                        ),
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="b",
                        ),
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")],
                        raw_requires_pre=Value(
                            tokens=[
                                Token(value="requires"),
                                Token(value="("),
                                Token(value="T"),
                                Token(value="x"),
                                Token(value=")"),
                                Token(value="{"),
                                Token(value="x"),
                                Token(value="+"),
                                Token(value="x"),
                                Token(value=";"),
                                Token(value="}"),
                            ]
                        ),
                    ),
                )
            ]
        )
    )
def test_requires_both() -> None:
    """Both requires positions at once: raw_requires_pre on the template and
    raw_requires trailing the declarator are stored independently."""
    content = """
// clang-format off
template<typename T>
requires Addable<T>
auto f1(T a, T b) requires Subtractable<T>;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(typename=PQName(segments=[AutoSpecifier()])),
                    name=PQName(segments=[NameSpecifier(name="f1")]),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="a",
                        ),
                        Parameter(
                            type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name="b",
                        ),
                    ],
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")],
                        raw_requires_pre=Value(
                            tokens=[
                                Token(value="Addable"),
                                Token(value="<"),
                                Token(value="T"),
                                Token(value=">"),
                            ]
                        ),
                    ),
                    raw_requires=Value(
                        tokens=[
                            Token(value="Subtractable"),
                            Token(value="<"),
                            Token(value="T"),
                            Token(value=">"),
                        ]
                    ),
                )
            ]
        )
    )
def test_requires_paren() -> None:
    """A parenthesized requires-clause expression keeps its parentheses in the
    raw token stream."""
    content = """
// clang-format off
template<class T>
void h(T) requires (is_purrable<T>());
"""
    data = parse_string(content, cleandoc=True)

    clause_tokens = [
        Token(value=tok)
        for tok in ("(", "is_purrable", "<", "T", ">", "(", ")", ")")
    ]
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="h")]),
        parameters=[
            Parameter(
                type=Type(typename=PQName(segments=[NameSpecifier(name="T")]))
            )
        ],
        template=TemplateDecl(
            params=[TemplateTypeParam(typekey="class", name="T")]
        ),
        raw_requires=Value(tokens=clause_tokens),
    )

    assert data == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )
def test_non_template_requires() -> None:
    """A constructor may carry a requires-clause referring to the class template
    without its own function template; the clause attaches to the Method."""
    content = """
// clang-format off
template <class T>
struct Payload
{
constexpr Payload(T v)
requires(std::is_pod_v<T>)
: Value(v)
{
}
};
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Payload")], classkey="struct"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="class", name="T")]
                        ),
                    ),
                    methods=[
                        Method(
                            return_type=None,  # constructors have no return type
                            name=PQName(segments=[NameSpecifier(name="Payload")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="T")]
                                        )
                                    ),
                                    name="v",
                                )
                            ],
                            constexpr=True,
                            has_body=True,
                            # parenthesized requires expression, parens retained
                            raw_requires=Value(
                                tokens=[
                                    Token(value="("),
                                    Token(value="std"),
                                    Token(value="::"),
                                    Token(value="is_pod_v"),
                                    Token(value="<"),
                                    Token(value="T"),
                                    Token(value=">"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                            constructor=True,
                        )
                    ],
                )
            ]
        )
    )

View File

@@ -14,7 +14,6 @@ from cxxheaderparser.types import (
Method,
MoveReference,
NameSpecifier,
Operator,
PQName,
Parameter,
Pointer,
@@ -27,6 +26,7 @@ from cxxheaderparser.types import (
Type,
Typedef,
UsingDecl,
UsingAlias,
Value,
Variable,
)
@@ -37,91 +37,109 @@ from cxxheaderparser.simple import (
ParsedData,
)
r"""
class SampleClass: public BaseSampleClass
{
public:
enum Elephant
{
EL_ONE = 1,
EL_TWO = 2,
EL_NINE = 9,
EL_TEN,
def test_doxygen_class() -> None:
content = """
// clang-format off
/// cls comment
class
C {
/// member comment
void fn();
/// var above
int var_above;
int var_after; /// var after
};
"""
data = parse_string(content, cleandoc=True)
SampleClass();
/*!
* Method 1
*/
string meth1();
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="C")], classkey="class"
),
doxygen="/// cls comment",
),
fields=[
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="var_above",
doxygen="/// var above",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="var_after",
doxygen="/// var after",
),
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
doxygen="/// member comment",
access="private",
)
],
)
]
)
)
///
/// Method 2 description
///
/// @param v1 Variable 1
///
int meth2(int v1);
/**
* Method 3 description
*
* \param v1 Variable 1 with a really long
* wrapping description
* \param v2 Variable 2
*/
void meth3(const string & v1, vector<string> & v2);
def test_doxygen_class_template() -> None:
content = """
// clang-format off
/**********************************
* Method 4 description
*
* @return Return value
*********************************/
unsigned int meth4();
private:
void * meth5(){return NULL;}
/// template comment
template <typename T>
class C2 {};
"""
data = parse_string(content, cleandoc=True)
/// prop1 description
string prop1;
//! prop5 description
int prop5;
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="C2")], classkey="class"
),
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
doxygen="/// template comment",
)
)
]
)
)
bool prop6; /*!< prop6 description */
double prop7; //!< prop7 description
//!< with two lines
def test_doxygen_enum() -> None:
content = """
// clang-format off
/// prop8 description
int prop8;
};
namespace Alpha
{
class AlphaClass
{
public:
AlphaClass();
void alphaMethod();
string alphaString;
protected:
typedef enum
{
Z_A,
Z_B = 0x2B,
Z_C = 'j',
Z_D,
} Zebra;
};
namespace Omega
{
class OmegaClass
{
public:
OmegaClass();
string omegaString;
protected:
///
/// @brief Rino Numbers, not that that means anything
///
@@ -134,88 +152,338 @@ namespace Alpha
/// item four
RI_FOUR,
} Rino;
};
};
}
"""
data = parse_string(content, cleandoc=True)
"""
assert data == ParsedData(
namespace=NamespaceScope(
enums=[
EnumDecl(
typename=PQName(segments=[AnonymousName(id=1)], classkey="enum"),
values=[
Enumerator(name="RI_ZERO", doxygen="/// item zero"),
Enumerator(name="RI_ONE", doxygen="/** item one */"),
Enumerator(name="RI_TWO", doxygen="//!< item two"),
Enumerator(name="RI_THREE"),
Enumerator(name="RI_FOUR", doxygen="/// item four"),
],
doxygen="///\n/// @brief Rino Numbers, not that that means anything\n///",
)
],
typedefs=[
Typedef(
type=Type(
typename=PQName(segments=[AnonymousName(id=1)], classkey="enum")
),
name="Rino",
)
],
)
)
# def test_doxygen_messy():
# content = """
# // clang-format off
def test_doxygen_fn_3slash() -> None:
content = """
// clang-format off
# /// fn comment
# void
# fn();
/// fn comment
void
fn();
# /// var comment
# int
# v1 = 0;
"""
data = parse_string(content, cleandoc=True)
# int
# v2 = 0; /// var2 comment
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
doxygen="/// fn comment",
)
]
)
)
# /// cls comment
# class
# C {};
# /// template comment
# template <typename T>
# class
# C2 {};
# """
# data = parse_string(content, cleandoc=True)
def test_doxygen_fn_cstyle1() -> None:
content = """
/**
* fn comment
*/
void
fn();
"""
data = parse_string(content, cleandoc=True)
# assert data == ParsedData(
# namespace=NamespaceScope(
# classes=[
# ClassScope(
# class_decl=ClassDecl(
# typename=PQName(
# segments=[NameSpecifier(name="C")], classkey="class"
# ),
# doxygen="/// cls comment",
# )
# ),
# ClassScope(
# class_decl=ClassDecl(
# typename=PQName(
# segments=[NameSpecifier(name="C2")], classkey="class"
# ),
# template=TemplateDecl(
# params=[TemplateTypeParam(typekey="typename", name="T")]
# ),
# doxygen="/// template comment",
# )
# ),
# ],
# functions=[
# Function(
# return_type=Type(
# typename=PQName(segments=[FundamentalSpecifier(name="void")])
# ),
# name=PQName(segments=[NameSpecifier(name="fn")]),
# parameters=[],
# doxygen="/// fn comment",
# )
# ],
# variables=[
# Variable(
# name=PQName(segments=[NameSpecifier(name="v1")]),
# type=Type(
# typename=PQName(segments=[FundamentalSpecifier(name="int")])
# ),
# value=Value(tokens=[Token(value="0")]),
# doxygen="/// var comment",
# ),
# Variable(
# name=PQName(segments=[NameSpecifier(name="v2")]),
# type=Type(
# typename=PQName(segments=[FundamentalSpecifier(name="int")])
# ),
# value=Value(tokens=[Token(value="0")]),
# ),
# ],
# )
# )
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
doxygen="/**\n* fn comment\n*/",
)
]
)
)
def test_doxygen_fn_cstyle2() -> None:
    """A Qt-style /*! ... */ block above a function is captured as its doxygen."""
    content = """
/*!
* fn comment
*/
void
fn();
"""
    data = parse_string(content, cleandoc=True)

    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="fn")]),
        parameters=[],
        doxygen="/*!\n* fn comment\n*/",
    )

    assert data == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )
def test_doxygen_var_above() -> None:
    """A /// comment on the line above a variable is attached as its doxygen."""
    content = """
// clang-format off
/// var comment
int
v1 = 0;
"""
    data = parse_string(content, cleandoc=True)

    expected_var = Variable(
        name=PQName(segments=[NameSpecifier(name="v1")]),
        type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="int")])
        ),
        value=Value(tokens=[Token(value="0")]),
        doxygen="/// var comment",
    )

    assert data == ParsedData(
        namespace=NamespaceScope(variables=[expected_var])
    )
def test_doxygen_var_after() -> None:
    """A trailing /// comment on the same line as a variable binds to it."""
    content = """
// clang-format off
int
v2 = 0; /// var2 comment
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="v2")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=Value(tokens=[Token(value="0")]),
                    doxygen="/// var2 comment",
                )
            ]
        )
    )
def test_doxygen_multiple_variables() -> None:
    """Trailing comments bind to their own variable; a following /// line on
    its own continues the previous variable's comment (see y)."""
    content = """
int x; /// this is x
int y; /// this is y
/// this is also y
int z; /// this is z
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    doxygen="/// this is x",
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="y")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    # both trailing and continuation comment lines are joined
                    doxygen="/// this is y\n/// this is also y",
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="z")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    doxygen="/// this is z",
                ),
            ]
        )
    )
def test_doxygen_namespace() -> None:
    """Doxygen on a namespace; for ``namespace a::b::c`` the comment attaches
    to the innermost namespace only."""
    content = """
/**
* x is a mysterious namespace
*/
namespace x {}
/**
* c is also a mysterious namespace
*/
namespace a::b::c {}
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            namespaces={
                "x": NamespaceScope(
                    name="x", doxygen="/**\n* x is a mysterious namespace\n*/"
                ),
                "a": NamespaceScope(
                    name="a",
                    namespaces={
                        "b": NamespaceScope(
                            name="b",
                            namespaces={
                                # only "c" receives the doxygen comment
                                "c": NamespaceScope(
                                    name="c",
                                    doxygen="/**\n* c is also a mysterious namespace\n*/",
                                )
                            },
                        )
                    },
                ),
            }
        )
    )
def test_doxygen_declspec() -> None:
    """A doxygen comment survives an intervening __declspec(...) specifier."""
    content = """
/// declspec comment
__declspec(thread) int i = 1;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="i")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=Value(tokens=[Token(value="1")]),
                    doxygen="/// declspec comment",
                )
            ]
        )
    )
def test_doxygen_attribute() -> None:
    """A doxygen comment survives an intervening [[attribute]] on a function."""
    content = """
/// hasattr comment
[[nodiscard]]
int hasattr();
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="hasattr")]),
                    parameters=[],
                    doxygen="/// hasattr comment",
                )
            ]
        )
    )
def test_doxygen_using_decl() -> None:
    """A doxygen comment is retained on a using-declaration."""
    content = """
// clang-format off
/// Comment
using ns::ClassName;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            using=[
                UsingDecl(
                    typename=PQName(
                        segments=[
                            NameSpecifier(name="ns"),
                            NameSpecifier(name="ClassName"),
                        ]
                    ),
                    doxygen="/// Comment",
                )
            ]
        )
    )
def test_doxygen_using_alias() -> None:
    """A doxygen comment is retained on a type alias (``using x = y``)."""
    content = """
// clang-format off
/// Comment
using alias = sometype;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            using_alias=[
                UsingAlias(
                    alias="alias",
                    type=Type(
                        typename=PQName(segments=[NameSpecifier(name="sometype")])
                    ),
                    doxygen="/// Comment",
                )
            ]
        )
    )

View File

@@ -25,7 +25,7 @@ from cxxheaderparser.simple import (
)
def test_basic_enum():
def test_basic_enum() -> None:
content = """
enum Foo {
A,
@@ -48,7 +48,7 @@ def test_basic_enum():
)
def test_enum_w_expr():
def test_enum_w_expr() -> None:
content = """
enum Foo {
A = (1 / 2),
@@ -85,7 +85,7 @@ def test_enum_w_expr():
)
def test_enum_w_multiline_expr():
def test_enum_w_multiline_expr() -> None:
content = r"""
// clang-format off
enum Author
@@ -116,17 +116,14 @@ def test_enum_w_multiline_expr():
Token(value="<<"),
Token(value="24"),
Token(value="|"),
Token(value="\\"),
Token(value="'A'"),
Token(value="<<"),
Token(value="16"),
Token(value="|"),
Token(value="\\"),
Token(value="'S'"),
Token(value="<<"),
Token(value="8"),
Token(value="|"),
Token(value="\\"),
Token(value="'H'"),
Token(value=")"),
]
@@ -139,7 +136,7 @@ def test_enum_w_multiline_expr():
)
def test_basic_enum_class():
def test_basic_enum_class() -> None:
content = """
enum class BE { BEX };
"""
@@ -159,7 +156,7 @@ def test_basic_enum_class():
)
def test_basic_enum_struct():
def test_basic_enum_struct() -> None:
content = """
enum struct BE { BEX };
"""
@@ -179,7 +176,7 @@ def test_basic_enum_struct():
)
def test_enum_base():
def test_enum_base() -> None:
content = """
enum class E : int {};
"""
@@ -203,7 +200,7 @@ def test_enum_base():
# instances
def test_enum_instance_1():
def test_enum_instance_1() -> None:
content = """
enum class BE { BEX } be1;
"""
@@ -233,7 +230,7 @@ def test_enum_instance_1():
)
def test_enum_instance_2():
def test_enum_instance_2() -> None:
content = """
enum class BE { BEX } be1, *be2;
"""
@@ -277,7 +274,7 @@ def test_enum_instance_2():
# bases in namespaces
def test_enum_base_in_ns():
def test_enum_base_in_ns() -> None:
content = """
namespace EN {
typedef int EINT;
@@ -322,7 +319,7 @@ def test_enum_base_in_ns():
# forward declarations
def test_enum_fwd():
def test_enum_fwd() -> None:
content = """
enum class BE1;
enum class BE2 : EN::EINT;
@@ -350,7 +347,7 @@ def test_enum_fwd():
)
def test_enum_private_in_class():
def test_enum_private_in_class() -> None:
content = """
class C {
@@ -383,7 +380,7 @@ def test_enum_private_in_class():
)
def test_enum_public_in_class():
def test_enum_public_in_class() -> None:
content = """
class C {
@@ -417,7 +414,7 @@ def test_enum_public_in_class():
)
def test_default_enum():
def test_default_enum() -> None:
content = """
class A {
enum {
@@ -497,7 +494,7 @@ def test_default_enum():
)
def test_enum_template_vals():
def test_enum_template_vals() -> None:
content = """
enum {
IsRandomAccess = std::is_base_of<std::random_access_iterator_tag,
@@ -559,7 +556,7 @@ def test_enum_template_vals():
)
def test_enum_fn():
def test_enum_fn() -> None:
content = """
enum E {
VALUE,

View File

@@ -3,8 +3,14 @@
from cxxheaderparser.types import (
Array,
AutoSpecifier,
ClassDecl,
DecltypeSpecifier,
Field,
Function,
FunctionType,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
PQName,
Parameter,
@@ -16,15 +22,18 @@ from cxxheaderparser.types import (
TemplateTypeParam,
Token,
Type,
Typedef,
Value,
)
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
parse_string,
ParsedData,
)
def test_fn_returns_class():
def test_fn_returns_class() -> None:
content = """
class X *fn1();
struct Y fn2();
@@ -70,7 +79,62 @@ def test_fn_returns_class():
)
def test_fn_pointer_params():
def test_fn_returns_typename() -> None:
content = """
typename ns::X fn();
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name="ns"),
NameSpecifier(name="X"),
],
has_typename=True,
)
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
)
]
)
)
def test_fn_returns_typename_const() -> None:
content = """
const typename ns::X fn();
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name="ns"),
NameSpecifier(name="X"),
],
has_typename=True,
),
const=True,
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
)
]
)
)
def test_fn_pointer_params() -> None:
content = """
int fn1(int *);
int fn2(int *p);
@@ -139,7 +203,7 @@ def test_fn_pointer_params():
)
def test_fn_void_is_no_params():
def test_fn_void_is_no_params() -> None:
content = """
int fn(void);
"""
@@ -160,7 +224,7 @@ def test_fn_void_is_no_params():
)
def test_fn_array_param():
def test_fn_array_param() -> None:
content = """
void fn(int array[]);
"""
@@ -193,7 +257,59 @@ def test_fn_array_param():
)
def test_fn_weird_refs():
def test_fn_typename_param() -> None:
content = """
void MethodA(const mynamespace::SomeObject &x,
typename mynamespace::SomeObject * = 0);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="void")])
),
name=PQName(segments=[NameSpecifier(name="MethodA")]),
parameters=[
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="mynamespace"),
NameSpecifier(name="SomeObject"),
]
),
const=True,
)
),
name="x",
),
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="mynamespace"),
NameSpecifier(name="SomeObject"),
],
has_typename=True,
)
)
),
default=Value(tokens=[Token(value="0")]),
),
],
)
]
)
)
def test_fn_weird_refs() -> None:
content = """
int aref(int(&x));
void ptr_ref(int(*&name));
@@ -268,7 +384,7 @@ def test_fn_weird_refs():
)
def test_fn_too_many_parens():
def test_fn_too_many_parens() -> None:
content = """
int fn1(int (x));
void (fn2 (int (*const (name))));
@@ -325,7 +441,7 @@ void (__stdcall * fn)
"""
def test_fn_same_line():
def test_fn_same_line() -> None:
# multiple functions on the same line
content = """
void fn1(), fn2();
@@ -373,7 +489,7 @@ def test_fn_same_line():
)
def test_fn_auto_template():
def test_fn_auto_template() -> None:
content = """
template<class T, class U>
auto add(T t, U u) { return t + u; }
@@ -413,7 +529,7 @@ def test_fn_auto_template():
)
def test_fn_template_ptr():
def test_fn_template_ptr() -> None:
content = """
std::vector<Pointer *> *fn(std::vector<Pointer *> *ps);
"""
@@ -433,11 +549,18 @@ def test_fn_template_ptr():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[
Token(value="Pointer"),
Token(value="*"),
arg=Pointer(
ptr_to=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Pointer"
)
]
)
)
)
)
]
),
),
@@ -458,11 +581,18 @@ def test_fn_template_ptr():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[
Token(value="Pointer"),
Token(value="*"),
arg=Pointer(
ptr_to=Type(
typename=PQName(
segments=[
NameSpecifier(
name="Pointer"
)
]
)
)
)
)
]
),
),
@@ -479,7 +609,7 @@ def test_fn_template_ptr():
)
def test_fn_with_impl():
def test_fn_with_impl() -> None:
content = """
// clang-format off
void termite(void)
@@ -504,3 +634,627 @@ def test_fn_with_impl():
]
)
)
def test_fn_return_std_function() -> None:
    """std::function<void(int)> as a return type parses its template argument
    as a FunctionType; redundant parens around the parameter list do not
    change the result."""
    content = """
std::function<void(int)> fn();
"""
    data1 = parse_string(content, cleandoc=True)

    content = """
std::function<void((int))> fn();
"""
    data2 = parse_string(content, cleandoc=True)

    # both spellings must produce the same parse
    expected = ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="std"),
                                NameSpecifier(
                                    name="function",
                                    specialization=TemplateSpecialization(
                                        args=[
                                            TemplateArgument(
                                                arg=FunctionType(
                                                    return_type=Type(
                                                        typename=PQName(
                                                            segments=[
                                                                FundamentalSpecifier(
                                                                    name="void"
                                                                )
                                                            ]
                                                        )
                                                    ),
                                                    parameters=[
                                                        Parameter(
                                                            type=Type(
                                                                typename=PQName(
                                                                    segments=[
                                                                        FundamentalSpecifier(
                                                                            name="int"
                                                                        )
                                                                    ]
                                                                )
                                                            )
                                                        )
                                                    ],
                                                )
                                            )
                                        ]
                                    ),
                                ),
                            ]
                        )
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn")]),
                    parameters=[],
                )
            ]
        )
    )

    assert data1 == expected
    assert data2 == expected
def test_fn_return_std_function_trailing() -> None:
    """std::function<auto(int)->int>: the trailing return type inside the
    specialization is resolved and flagged via has_trailing_return."""
    content = """
std::function<auto(int)->int> fn();
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="std"),
                                NameSpecifier(
                                    name="function",
                                    specialization=TemplateSpecialization(
                                        args=[
                                            TemplateArgument(
                                                arg=FunctionType(
                                                    return_type=Type(
                                                        typename=PQName(
                                                            segments=[
                                                                FundamentalSpecifier(
                                                                    name="int"
                                                                )
                                                            ]
                                                        )
                                                    ),
                                                    parameters=[
                                                        Parameter(
                                                            type=Type(
                                                                typename=PQName(
                                                                    segments=[
                                                                        FundamentalSpecifier(
                                                                            name="int"
                                                                        )
                                                                    ]
                                                                )
                                                            )
                                                        )
                                                    ],
                                                    has_trailing_return=True,
                                                )
                                            )
                                        ]
                                    ),
                                ),
                            ]
                        )
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn")]),
                    parameters=[],
                )
            ]
        )
    )
def test_fn_trailing_return_simple() -> None:
    """``auto fn() -> int``: the trailing type becomes the return type and
    has_trailing_return is set."""
    content = """
auto fn() -> int;
"""
    data = parse_string(content, cleandoc=True)

    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="int")])
        ),
        name=PQName(segments=[NameSpecifier(name="fn")]),
        parameters=[],
        has_trailing_return=True,
    )

    assert data == ParsedData(
        namespace=NamespaceScope(functions=[expected_fn])
    )
def test_fn_trailing_return_std_function() -> None:
    """A trailing return type that is itself a std::function specialization;
    has_trailing_return is set on the outer function only."""
    content = """
auto fn() -> std::function<int()>;
"""
    data = parse_string(content, cleandoc=True)

    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="std"),
                                NameSpecifier(
                                    name="function",
                                    specialization=TemplateSpecialization(
                                        args=[
                                            TemplateArgument(
                                                arg=FunctionType(
                                                    return_type=Type(
                                                        typename=PQName(
                                                            segments=[
                                                                FundamentalSpecifier(
                                                                    name="int"
                                                                )
                                                            ]
                                                        )
                                                    ),
                                                    parameters=[],
                                                )
                                            )
                                        ]
                                    ),
                                ),
                            ]
                        )
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn")]),
                    parameters=[],
                    has_trailing_return=True,
                )
            ]
        )
    )
def test_inline_volatile_fn() -> None:
    """An inline function taking a pointer to a volatile int."""
    content = """
    inline int Standard_Atomic_Increment (volatile int* theValue);
    """
    data = parse_string(content, cleandoc=True)
    int_t = PQName(segments=[FundamentalSpecifier(name="int")])
    value_param = Parameter(
        type=Pointer(ptr_to=Type(typename=int_t, volatile=True)),
        name="theValue",
    )
    expected_fn = Function(
        return_type=Type(typename=int_t),
        name=PQName(segments=[NameSpecifier(name="Standard_Atomic_Increment")]),
        parameters=[value_param],
        inline=True,
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_method_w_reference() -> None:
    """A method taking a pointer-to-function parameter (ostream manipulator idiom)."""
    content = """
    struct StreamBuffer
    {
    StreamBuffer &operator<<(std::ostream &(*fn)(std::ostream &))
    {
    return *this;
    }
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="StreamBuffer")],
                            classkey="struct",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Reference(
                                ref_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="StreamBuffer")]
                                    )
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="operator<<")]),
                            parameters=[
                                Parameter(
                                    # (*fn)(std::ostream &) -- pointer to a
                                    # function taking and returning ostream&
                                    type=Pointer(
                                        ptr_to=FunctionType(
                                            return_type=Reference(
                                                ref_to=Type(
                                                    typename=PQName(
                                                        segments=[
                                                            NameSpecifier(name="std"),
                                                            NameSpecifier(
                                                                name="ostream"
                                                            ),
                                                        ]
                                                    )
                                                )
                                            ),
                                            parameters=[
                                                Parameter(
                                                    type=Reference(
                                                        ref_to=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="std"
                                                                    ),
                                                                    NameSpecifier(
                                                                        name="ostream"
                                                                    ),
                                                                ]
                                                            )
                                                        )
                                                    )
                                                )
                                            ],
                                        )
                                    ),
                                    name="fn",
                                )
                            ],
                            has_body=True,
                            access="public",
                            operator="<<",
                        )
                    ],
                )
            ]
        )
    )
def test_fn_w_mvreference() -> None:
    """An unnamed function-pointer parameter whose return type is an rvalue reference."""
    content = """
    void fn1(int && (*)(int));
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn1")]),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=FunctionType(
                                    # "int &&" return -> MoveReference
                                    return_type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[
                                                    FundamentalSpecifier(name="int")
                                                ]
                                            )
                                        )
                                    ),
                                    parameters=[
                                        Parameter(
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(
                                                            name="int"
                                                        )
                                                    ]
                                                )
                                            )
                                        )
                                    ],
                                )
                            )
                        )
                    ],
                )
            ]
        )
    )
def test_msvc_conventions() -> None:
    """MSVC calling conventions on a free function and a function-pointer typedef."""
    content = """
    void __cdecl fn();
    typedef const char* (__stdcall *wglGetExtensionsStringARB_t)(HDC theDeviceContext);
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn")]),
                    parameters=[],
                    # __cdecl is retained on the function itself
                    msvc_convention="__cdecl",
                )
            ],
            typedefs=[
                Typedef(
                    type=Pointer(
                        ptr_to=FunctionType(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="char")]
                                    ),
                                    const=True,
                                )
                            ),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="HDC")]
                                        )
                                    ),
                                    name="theDeviceContext",
                                )
                            ],
                            # __stdcall lands on the pointed-to function type
                            msvc_convention="__stdcall",
                        )
                    ),
                    name="wglGetExtensionsStringARB_t",
                )
            ],
        )
    )
def test_throw_empty() -> None:
    """An empty dynamic exception specification (throw()) yields an empty Value."""
    content = """
    void foo() throw() { throw std::runtime_error("foo"); }
    """
    data = parse_string(content, cleandoc=True)
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        has_body=True,
        throw=Value(tokens=[]),
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_throw_dynamic() -> None:
    """A dynamic exception specification keeps its raw tokens."""
    content = """
    void foo() throw(std::exception) { throw std::runtime_error("foo"); }
    """
    data = parse_string(content, cleandoc=True)
    throw_spec = Value(tokens=[Token(value=t) for t in ("std", "::", "exception")])
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        has_body=True,
        throw=throw_spec,
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_noexcept_empty() -> None:
    """A bare noexcept specifier is recorded as an empty Value."""
    content = """
    void foo() noexcept;
    """
    data = parse_string(content, cleandoc=True)
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        noexcept=Value(tokens=[]),
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_noexcept_contents() -> None:
    """A conditional noexcept keeps the tokens of its expression."""
    content = """
    void foo() noexcept(false);
    """
    data = parse_string(content, cleandoc=True)
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="foo")]),
        parameters=[],
        noexcept=Value(tokens=[Token(value="false")]),
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_auto_decltype_return() -> None:
    """An auto method with a decltype(member) trailing return type."""
    content = """
    class C {
    public:
    int x;
    auto GetSelected() -> decltype(x);
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="x",
                        )
                    ],
                    methods=[
                        Method(
                            # decltype(x) is kept unevaluated as raw tokens
                            return_type=Type(
                                typename=PQName(
                                    segments=[
                                        DecltypeSpecifier(tokens=[Token(value="x")])
                                    ]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="GetSelected")]),
                            parameters=[],
                            has_trailing_return=True,
                            access="public",
                        )
                    ],
                )
            ]
        )
    )
def test_fn_trailing_return_with_body() -> None:
    """A free function with trailing return type followed by a body is consumed."""
    content = """
    auto test() -> void
    {
    }
    """
    data = parse_string(content, cleandoc=True)
    expected_fn = Function(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="void")])
        ),
        name=PQName(segments=[NameSpecifier(name="test")]),
        parameters=[],
        has_body=True,
        has_trailing_return=True,
    )
    assert data == ParsedData(namespace=NamespaceScope(functions=[expected_fn]))
def test_method_trailing_return_with_body() -> None:
    """A method with a trailing return type followed by an inline body."""
    content = """
    struct X {
    auto test() -> void
    {
    }
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="X")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="test")]),
                            parameters=[],
                            has_body=True,
                            has_trailing_return=True,
                            access="public",
                        )
                    ],
                )
            ]
        )
    )

View File

@@ -22,8 +22,9 @@ from cxxheaderparser.simple import (
ParsedData,
)
# friends
def test_various_friends():
def test_various_friends() -> None:
content = """
class FX {
public:
@@ -170,7 +171,7 @@ def test_various_friends():
)
def test_more_friends():
def test_more_friends() -> None:
content = """
template <typename T> struct X { static int x; };
@@ -285,7 +286,7 @@ def test_more_friends():
)
def test_friend_type_no_class():
def test_friend_type_no_class() -> None:
content = """
class DogClass;
class CatClass {
@@ -327,7 +328,7 @@ def test_friend_type_no_class():
)
def test_friend_with_impl():
def test_friend_with_impl() -> None:
content = """
// clang-format off
class Garlic {

View File

@@ -1,56 +1,49 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.errors import CxxParseError
from cxxheaderparser.types import (
BaseClass,
ClassDecl,
Function,
FundamentalSpecifier,
NameSpecifier,
PQName,
Parameter,
Token,
Type,
Value,
Variable,
)
from cxxheaderparser.simple import (
ClassScope,
Include,
NamespaceScope,
Pragma,
parse_string,
ParsedData,
Define,
)
import pytest
#
# minimal preprocessor support
#
def test_define():
def test_includes() -> None:
content = """
#define simple
#define complex(thing) stuff(thing)
# define spaced
#include <global.h>
#include "local.h"
# include "space.h"
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
defines=[
Define(content="simple"),
Define(content="complex(thing) stuff(thing)"),
Define(content="spaced"),
],
includes=[Include("<global.h>"), Include('"local.h"'), Include('"space.h"')]
)
def test_includes():
content = """
#include <global.h>
#include "local.h"
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(includes=[Include("<global.h>"), Include('"local.h"')])
def test_pragma():
def test_pragma() -> None:
content = """
#pragma once
@@ -58,7 +51,62 @@ def test_pragma():
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(pragmas=[Pragma(content="once")])
assert data == ParsedData(
pragmas=[Pragma(content=Value(tokens=[Token(value="once")]))]
)
def test_pragma_more() -> None:
content = """
#pragma (some content here)
#pragma (even \
more \
content here)
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
pragmas=[
Pragma(
content=Value(
tokens=[
Token(value="("),
Token(value="some"),
Token(value="content"),
Token(value="here"),
Token(value=")"),
]
)
),
Pragma(
content=Value(
tokens=[
Token(value="("),
Token(value="even"),
Token(value="more"),
Token(value="content"),
Token(value="here"),
Token(value=")"),
]
)
),
]
)
def test_line_and_define() -> None:
content = """
// this should work + change line number of error
#line 40 "filename.h"
// this should fail
#define 1
"""
with pytest.raises(CxxParseError) as e:
parse_string(content, cleandoc=True)
assert "filename.h:41" in str(e.value)
#
@@ -66,7 +114,7 @@ def test_pragma():
#
def test_extern_c():
def test_extern_c() -> None:
content = """
extern "C" {
int x;
@@ -96,7 +144,7 @@ def test_extern_c():
)
def test_misc_extern_inline():
def test_misc_extern_inline() -> None:
content = """
extern "C++" {
inline HAL_Value HAL_GetSimValue(HAL_SimValueHandle handle) {
@@ -138,7 +186,7 @@ def test_misc_extern_inline():
#
def test_static_assert_1():
def test_static_assert_1() -> None:
# static_assert should be ignored
content = """
static_assert(x == 1);
@@ -148,7 +196,7 @@ def test_static_assert_1():
assert data == ParsedData()
def test_static_assert_2():
def test_static_assert_2() -> None:
# static_assert should be ignored
content = """
static_assert(sizeof(int) == 4,
@@ -160,7 +208,7 @@ def test_static_assert_2():
assert data == ParsedData()
def test_comment_eof():
def test_comment_eof() -> None:
content = """
namespace a {} // namespace a"""
data = parse_string(content, cleandoc=True)
@@ -168,3 +216,138 @@ def test_comment_eof():
assert data == ParsedData(
namespace=NamespaceScope(namespaces={"a": NamespaceScope(name="a")})
)
def test_final() -> None:
content = """
// ok here
int fn(const int final);
// ok here
int final = 2;
// but it's a keyword here
struct B final : A {};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="B")], classkey="struct"
),
bases=[
BaseClass(
access="public",
typename=PQName(segments=[NameSpecifier(name="A")]),
)
],
final=True,
)
)
],
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
),
const=True,
),
name="final",
)
],
)
],
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="final")]),
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
value=Value(tokens=[Token(value="2")]),
)
],
)
)
#
# User defined literals
#
def test_user_defined_literal() -> None:
content = """
units::volt_t v = 1_V;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="v")]),
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="units"),
NameSpecifier(name="volt_t"),
]
)
),
value=Value(tokens=[Token(value="1_V")]),
)
]
)
)
#
# Line continuation
#
def test_line_continuation() -> None:
content = """
static int \
variable;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="variable")]),
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
static=True,
)
]
)
)
#
# #warning (C++23)
#
def test_warning_directive() -> None:
content = """
#warning "this is a warning"
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData()

View File

@@ -1,7 +1,10 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.errors import CxxParseError
from cxxheaderparser.types import (
ForwardDecl,
FundamentalSpecifier,
NamespaceAlias,
NameSpecifier,
PQName,
Token,
@@ -15,8 +18,11 @@ from cxxheaderparser.simple import (
ParsedData,
)
import pytest
import re
def test_dups_in_different_ns():
def test_dups_in_different_ns() -> None:
content = """
namespace {
@@ -58,7 +64,7 @@ def test_dups_in_different_ns():
)
def test_correct_ns():
def test_correct_ns() -> None:
content = """
namespace a::b::c {
int i1;
@@ -119,3 +125,73 @@ def test_correct_ns():
}
)
)
def test_inline_namespace() -> None:
content = """
namespace Lib {
inline namespace Lib_1 {
class A;
}
}
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
namespaces={
"Lib": NamespaceScope(
name="Lib",
namespaces={
"Lib_1": NamespaceScope(
name="Lib_1",
inline=True,
forward_decls=[
ForwardDecl(
typename=PQName(
segments=[NameSpecifier(name="A")],
classkey="class",
)
)
],
)
},
)
}
)
)
def test_invalid_inline_namespace() -> None:
content = """
inline namespace a::b {}
"""
err = "<str>:1: parse error evaluating 'inline': a nested namespace definition cannot be inline"
with pytest.raises(CxxParseError, match=re.escape(err)):
parse_string(content, cleandoc=True)
def test_ns_alias() -> None:
content = """
namespace ANS = my::ns;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
ns_alias=[NamespaceAlias(alias="ANS", names=["my", "ns"])]
)
)
def test_ns_alias_global() -> None:
content = """
namespace ANS = ::my::ns;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
ns_alias=[NamespaceAlias(alias="ANS", names=["::", "my", "ns"])]
)
)

View File

@@ -2,9 +2,12 @@
from cxxheaderparser.types import (
ClassDecl,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
Operator,
Pointer,
PQName,
Parameter,
Reference,
@@ -18,7 +21,7 @@ from cxxheaderparser.simple import (
)
def test_class_operators():
def test_class_operators() -> None:
content = r"""
class OperatorClass {
public:
@@ -76,7 +79,7 @@ def test_class_operators():
)
),
methods=[
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -100,7 +103,7 @@ def test_class_operators():
access="public",
operator="=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -124,7 +127,7 @@ def test_class_operators():
access="public",
operator="-=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -135,7 +138,7 @@ def test_class_operators():
access="public",
operator="+=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -146,7 +149,7 @@ def test_class_operators():
access="public",
operator="[]",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="bool")]
@@ -171,7 +174,7 @@ def test_class_operators():
access="public",
operator="==",
),
Operator(
Method(
return_type=Reference(
ref_to=Type(
typename=PQName(
@@ -184,7 +187,7 @@ def test_class_operators():
access="public",
operator="+",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -195,7 +198,7 @@ def test_class_operators():
access="public",
operator="-",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -206,7 +209,7 @@ def test_class_operators():
access="public",
operator="*",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -217,7 +220,7 @@ def test_class_operators():
access="public",
operator="\\",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -228,7 +231,7 @@ def test_class_operators():
access="public",
operator="%",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -239,7 +242,7 @@ def test_class_operators():
access="public",
operator="^",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -250,7 +253,7 @@ def test_class_operators():
access="public",
operator="|",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -261,7 +264,7 @@ def test_class_operators():
access="public",
operator="&",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -272,7 +275,7 @@ def test_class_operators():
access="public",
operator="~",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -283,7 +286,7 @@ def test_class_operators():
access="public",
operator="<<",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -294,7 +297,7 @@ def test_class_operators():
access="public",
operator=">>",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -305,7 +308,7 @@ def test_class_operators():
access="public",
operator="!=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -316,7 +319,7 @@ def test_class_operators():
access="public",
operator="<",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -327,7 +330,7 @@ def test_class_operators():
access="public",
operator=">",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -338,7 +341,7 @@ def test_class_operators():
access="public",
operator=">=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -349,7 +352,7 @@ def test_class_operators():
access="public",
operator="<=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -360,7 +363,7 @@ def test_class_operators():
access="public",
operator="!",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -371,7 +374,7 @@ def test_class_operators():
access="public",
operator="&&",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -382,7 +385,7 @@ def test_class_operators():
access="public",
operator="||",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -393,7 +396,7 @@ def test_class_operators():
access="public",
operator="+=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -404,7 +407,7 @@ def test_class_operators():
access="public",
operator="-=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -415,7 +418,7 @@ def test_class_operators():
access="public",
operator="*=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -426,7 +429,7 @@ def test_class_operators():
access="public",
operator="\\=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -437,7 +440,7 @@ def test_class_operators():
access="public",
operator="%=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -448,7 +451,7 @@ def test_class_operators():
access="public",
operator="&=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -459,7 +462,7 @@ def test_class_operators():
access="public",
operator="|=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -470,7 +473,7 @@ def test_class_operators():
access="public",
operator="^=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -481,7 +484,7 @@ def test_class_operators():
access="public",
operator="<<=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -492,7 +495,7 @@ def test_class_operators():
access="public",
operator=">>=",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -503,7 +506,7 @@ def test_class_operators():
access="public",
operator="++",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -514,7 +517,7 @@ def test_class_operators():
access="public",
operator="--",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -525,7 +528,7 @@ def test_class_operators():
access="public",
operator="()",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -536,7 +539,7 @@ def test_class_operators():
access="public",
operator="->",
),
Operator(
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
@@ -552,3 +555,194 @@ def test_class_operators():
]
)
)
def test_conversion_operators() -> None:
content = """
class Foo
{
public:
operator Type1() const { return SomeMethod(); }
explicit operator Type2() const;
virtual operator bool() const;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Foo")], classkey="class"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="Type1")])
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
has_body=True,
access="public",
const=True,
operator="conversion",
),
Method(
return_type=Type(
typename=PQName(segments=[NameSpecifier(name="Type2")])
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
access="public",
const=True,
explicit=True,
operator="conversion",
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="bool")]
)
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
access="public",
const=True,
virtual=True,
operator="conversion",
),
],
)
]
)
)
def test_conversion_operators_decorated() -> None:
content = """
struct S {
operator const native_handle_t*() const;
operator const native_handle_t&() const;
operator const native_handle_t&&() const;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
methods=[
Method(
return_type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="native_handle_t")]
),
const=True,
)
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
access="public",
const=True,
operator="conversion",
),
Method(
return_type=Reference(
ref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="native_handle_t")]
),
const=True,
)
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
access="public",
const=True,
operator="conversion",
),
Method(
return_type=MoveReference(
moveref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="native_handle_t")]
),
const=True,
)
),
name=PQName(segments=[NameSpecifier(name="operator")]),
parameters=[],
access="public",
const=True,
operator="conversion",
),
],
)
]
)
)
def test_free_operator() -> None:
content = """
std::ostream& operator<<(std::ostream& os, const MyDate& dt);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
functions=[
Function(
return_type=Reference(
ref_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="ostream"),
]
)
)
),
name=PQName(segments=[NameSpecifier(name="operator<<")]),
parameters=[
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="ostream"),
]
)
)
),
name="os",
),
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="MyDate")]
),
const=True,
)
),
name="dt",
),
],
operator="<<",
)
]
)
)

View File

@@ -0,0 +1,188 @@
def test_pointer_to_member() -> None:
    """Pointer-to-member declarations: data members, member functions, and
    member-function pointers of a template parameter type.

    Fix: the base_type values originally held Type(typename=NameSpecifier(...)),
    but everywhere else in this suite a Type's typename is a PQName wrapping
    its specifiers -- a bare NameSpecifier could never compare equal to
    parser output, so each is normalized to PQName(segments=[...]).
    """
    content = """
    class Class
    {
    };
    int Class::* intPtr;
    int (Class::* intReturnFuncPtr)();
    void (Class::* intParamFuncPtr)(int);
    void (Class::* varargFuncPtr)(...);
    template<typename... TArgs>
    int takesFunc(void (*func)(TArgs...));
    template<typename TObject, typename... TArgs>
    int takesMemberFunc(TObject& object, void (TObject::* func)(TArgs...));
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Class")], classkey="class"
                        )
                    )
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="takesFunc")]),
                    parameters=[
                        Parameter(
                            # plain pointer-to-function taking a parameter pack
                            type=Pointer(
                                ptr_to=FunctionType(
                                    return_type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="void")
                                            ]
                                        )
                                    ),
                                    parameters=[
                                        Parameter(
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        NameSpecifier(name="TArgs")
                                                    ]
                                                )
                                            ),
                                            param_pack=True,
                                        )
                                    ],
                                )
                            ),
                            name="func",
                        )
                    ],
                    template=TemplateDecl(
                        params=[
                            TemplateTypeParam(
                                typekey="typename", name="TArgs", param_pack=True
                            )
                        ]
                    ),
                ),
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="takesMemberFunc")]),
                    parameters=[
                        Parameter(
                            type=Reference(
                                ref_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="TObject")]
                                    )
                                )
                            ),
                            name="object",
                        ),
                        Parameter(
                            # pointer to member function of the template
                            # parameter type TObject
                            type=PointerToMember(
                                base_type=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="TObject")]
                                    )
                                ),
                                ptr_to=FunctionType(
                                    return_type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="void")
                                            ]
                                        )
                                    ),
                                    parameters=[
                                        Parameter(
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        NameSpecifier(name="TArgs")
                                                    ]
                                                )
                                            ),
                                            param_pack=True,
                                        )
                                    ],
                                ),
                            ),
                            name="func",
                        ),
                    ],
                    template=TemplateDecl(
                        params=[
                            TemplateTypeParam(typekey="typename", name="TObject"),
                            TemplateTypeParam(
                                typekey="typename", name="TArgs", param_pack=True
                            ),
                        ]
                    ),
                ),
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="intPtr")]),
                    type=PointerToMember(
                        base_type=Type(
                            typename=PQName(segments=[NameSpecifier(name="Class")])
                        ),
                        ptr_to=Type(
                            typename=PQName(
                                segments=[FundamentalSpecifier(name="int")]
                            )
                        ),
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="intReturnFuncPtr")]),
                    type=PointerToMember(
                        base_type=Type(
                            typename=PQName(segments=[NameSpecifier(name="Class")])
                        ),
                        ptr_to=FunctionType(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            parameters=[],
                        ),
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="intParamFuncPtr")]),
                    type=PointerToMember(
                        base_type=Type(
                            typename=PQName(segments=[NameSpecifier(name="Class")])
                        ),
                        ptr_to=FunctionType(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="int")
                                            ]
                                        )
                                    )
                                )
                            ],
                        ),
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="varargFuncPtr")]),
                    type=PointerToMember(
                        base_type=Type(
                            typename=PQName(segments=[NameSpecifier(name="Class")])
                        ),
                        ptr_to=FunctionType(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            parameters=[],
                            # (...) C-style variadic member function
                            vararg=True,
                        ),
                    ),
                ),
            ],
        )
    )

204
tests/test_preprocessor.py Normal file
View File

@@ -0,0 +1,204 @@
import os
import pathlib
import pytest
import re
import shutil
import subprocess
import typing
from cxxheaderparser.options import ParserOptions, PreprocessorFunction
from cxxheaderparser import preprocessor
from cxxheaderparser.simple import (
NamespaceScope,
ParsedData,
parse_file,
parse_string,
Include,
)
from cxxheaderparser.types import (
FundamentalSpecifier,
NameSpecifier,
PQName,
Token,
Type,
Value,
Variable,
)
@pytest.fixture(params=["gcc", "msvc", "pcpp"])
def make_pp(request) -> typing.Callable[..., PreprocessorFunction]:
    """Parametrized fixture yielding a preprocessor factory function.

    Skips the test when the selected backend (g++, cl.exe, or pcpp) is not
    available in the current environment.
    """
    backend = request.param
    if backend == "gcc":
        gxx = shutil.which("g++")
        if not gxx:
            pytest.skip("g++ not found")
        # record the compiler version in the test output for debugging
        subprocess.run([gxx, "--version"])
        return preprocessor.make_gcc_preprocessor
    if backend == "msvc":
        if not shutil.which("cl.exe"):
            pytest.skip("cl.exe not found")
        return preprocessor.make_msvc_preprocessor
    if backend == "pcpp":
        if preprocessor.pcpp is None:
            pytest.skip("pcpp not installed")
        return preprocessor.make_pcpp_preprocessor
    assert False
def test_basic_preprocessor(
    make_pp: typing.Callable[..., PreprocessorFunction]
) -> None:
    """Macros are expanded before parsing when a preprocessor is configured."""
    content = """
    #define X 1
    int x = X;
    """
    options = ParserOptions(preprocessor=make_pp())
    data = parse_string(content, cleandoc=True, options=options)
    assert data == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    # X was expanded to its #define'd value
                    value=Value(tokens=[Token(value="1")]),
                )
            ]
        )
    )
def test_preprocessor_omit_content(
    make_pp: typing.Callable[..., PreprocessorFunction],
    tmp_path: pathlib.Path,
) -> None:
    """Ensure that content in other headers is omitted"""
    (tmp_path / "t1.h").write_text('#include "t2.h"\nint x = X;\n')
    (tmp_path / "t2.h").write_text("#define X 2\nint omitted = 1;\n")
    options = ParserOptions(preprocessor=make_pp())
    data = parse_file(tmp_path / "t1.h", options=options)
    # only the variable from t1.h survives; the macro value propagated,
    # but t2.h's own declarations were dropped
    expected_var = Variable(
        name=PQName(segments=[NameSpecifier(name="x")]),
        type=Type(typename=PQName(segments=[FundamentalSpecifier(name="int")])),
        value=Value(tokens=[Token(value="2")]),
    )
    assert data == ParsedData(namespace=NamespaceScope(variables=[expected_var]))
def test_preprocessor_omit_content2(
    make_pp: typing.Callable[..., PreprocessorFunction],
    tmp_path: pathlib.Path,
) -> None:
    """
    Ensure that content in other headers is omitted while handling pcpp
    relative path quirk
    """
    h_content = '#include "t2.h"' "\n" "int x = X;\n"
    h2_content = "#define X 2\n" "int omitted = 1;\n"
    tmp_path2 = tmp_path / "l1"
    tmp_path2.mkdir()
    with open(tmp_path2 / "t1.h", "w") as fp:
        fp.write(h_content)
    with open(tmp_path2 / "t2.h", "w") as fp:
        fp.write(h2_content)
    options = ParserOptions(preprocessor=make_pp(include_paths=[str(tmp_path)]))
    # Weirdness happens here: the preprocessor resolves paths relative to the
    # current directory. Fix: the original chdir was never undone, leaking the
    # working-directory change into every subsequently-run test -- restore it.
    prev_cwd = os.getcwd()
    os.chdir(tmp_path)
    try:
        data = parse_file(tmp_path2 / "t1.h", options=options)
    finally:
        os.chdir(prev_cwd)
    assert data == ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=Value(tokens=[Token(value="2")]),
                )
            ]
        )
    )
def test_preprocessor_encoding(
    make_pp: typing.Callable[..., PreprocessorFunction], tmp_path: pathlib.Path
) -> None:
    """Ensure we can handle alternate encodings"""
    # \xa9 is the copyright sign in cp1252, which is not valid UTF-8
    h_content = b"// \xa9 2023 someone\n" b'#include "t2.h"' b"\n" b"int x = X;\n"
    h2_content = b"// \xa9 2023 someone\n" b"#define X 3\n" b"int omitted = 1;\n"

    (tmp_path / "t1.h").write_bytes(h_content)
    (tmp_path / "t2.h").write_bytes(h2_content)

    options = ParserOptions(preprocessor=make_pp(encoding="cp1252"))
    data = parse_file(tmp_path / "t1.h", options=options, encoding="cp1252")

    expected = ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=Value(tokens=[Token(value="3")]),
                )
            ]
        )
    )
    assert data == expected
@pytest.mark.skipif(preprocessor.pcpp is None, reason="pcpp not installed")
def test_preprocessor_passthru_includes(tmp_path: pathlib.Path) -> None:
    """Ensure that all #include pass through"""
    (tmp_path / "t1.h").write_text('#include "t2.h"\n')
    (tmp_path / "t2.h").write_text("")

    # the catch-all pattern means every include directive is retained
    pp = preprocessor.make_pcpp_preprocessor(passthru_includes=re.compile(".+"))
    data = parse_file(tmp_path / "t1.h", options=ParserOptions(preprocessor=pp))

    assert data == ParsedData(
        namespace=NamespaceScope(), includes=[Include(filename='"t2.h"')]
    )

221
tests/test_skip.py Normal file
View File

@@ -0,0 +1,221 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
# .. and modified
import inspect
import typing
from cxxheaderparser.parser import CxxParser
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
ParsedData,
SClassBlockState,
SExternBlockState,
SNamespaceBlockState,
SimpleCxxVisitor,
)
from cxxheaderparser.types import (
ClassDecl,
Function,
FundamentalSpecifier,
Method,
NameSpecifier,
PQName,
Type,
)
#
# ensure extern block is skipped
#
class SkipExtern(SimpleCxxVisitor):
    # Visitor that always rejects extern blocks: returning False from the
    # start hook makes the parser skip the block's contents (see
    # test_skip_extern below, where fn2 does not appear in the result).
    def on_extern_block_start(self, state: SExternBlockState) -> typing.Optional[bool]:
        return False
def test_skip_extern() -> None:
    """Functions inside the extern "C" block are omitted; fn1/fn3 remain"""
    content = """
      void fn1();
      extern "C" {
        void fn2();
      }
      void fn3();
    """
    v = SkipExtern()
    parser = CxxParser("<str>", inspect.cleandoc(content), v)
    parser.parse()
    data = v.data

    # fn2 must not be present since the visitor skipped the extern block
    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn1")]),
                    parameters=[],
                ),
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn3")]),
                    parameters=[],
                ),
            ]
        )
    )
#
# ensure class block is skipped
#
class SkipClass(SimpleCxxVisitor):
    """Visitor that skips any class whose first name segment is "Skip"."""

    def on_class_start(self, state: SClassBlockState) -> typing.Optional[bool]:
        first_segment = state.class_decl.typename.segments[0]
        # getattr with a default: some segment kinds may not carry a "name"
        if getattr(first_segment, "name", None) == "Skip":
            return False
        return super().on_class_start(state)
def test_skip_class() -> None:
    """Contents of class Skip are omitted; class Yup and free functions remain"""
    content = """
      void fn1();
      class Skip {
        void fn2();
      };
      class Yup {
        void fn3();
      };
      void fn5();
    """
    v = SkipClass()
    parser = CxxParser("<str>", inspect.cleandoc(content), v)
    parser.parse()
    data = v.data
    # fn2 (inside Skip) must not appear anywhere in the parsed data
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Yup")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="fn3")]),
                            parameters=[],
                            access="private",
                        )
                    ],
                ),
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn1")]),
                    parameters=[],
                ),
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn5")]),
                    parameters=[],
                ),
            ],
        )
    )
#
# ensure namespace 'skip' is skipped
#
class SkipNamespace(SimpleCxxVisitor):
    """Visitor that skips namespaces whose first name contains "skip"."""

    def on_namespace_start(self, state: SNamespaceBlockState) -> typing.Optional[bool]:
        first_name = state.namespace.names[0]
        if "skip" in first_name:
            return False
        return super().on_namespace_start(state)
def test_skip_namespace() -> None:
    """namespace skip (and everything nested in it) is omitted; ok remains"""
    content = """
      void fn1();
      namespace skip {
        void fn2();
        namespace thistoo {
          void fn3();
        }
      }
      namespace ok {
        void fn4();
      }
      void fn5();
    """
    v = SkipNamespace()
    parser = CxxParser("<str>", inspect.cleandoc(content), v)
    parser.parse()
    data = v.data

    # fn2/fn3 are dropped with the skipped namespace; fn4 in "ok" survives
    assert data == ParsedData(
        namespace=NamespaceScope(
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn1")]),
                    parameters=[],
                ),
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="fn5")]),
                    parameters=[],
                ),
            ],
            namespaces={
                "ok": NamespaceScope(
                    name="ok",
                    functions=[
                        Function(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="fn4")]),
                            parameters=[],
                        )
                    ],
                ),
            },
        )
    )

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,8 @@
import pytest
from cxxheaderparser.lexer import Lexer
from cxxheaderparser.lexer import PlyLexer, LexerTokenStream
from cxxheaderparser.tokfmt import tokfmt
from cxxheaderparser.types import Token
@pytest.mark.parametrize(
@@ -11,6 +12,7 @@ from cxxheaderparser.tokfmt import tokfmt
"unsigned int",
"::uint8_t",
"void *",
"void * *",
"const char *",
"const char[]",
"void * (*)()",
@@ -34,19 +36,20 @@ from cxxheaderparser.tokfmt import tokfmt
"operator>=",
],
)
def test_tokfmt(instr):
def test_tokfmt(instr: str) -> None:
"""
Each input string is exactly what the output of tokfmt should be
"""
toks = []
lexer = Lexer("")
lexer = PlyLexer("")
lexer.input(instr)
while True:
tok = lexer.token_eof_ok()
tok = lexer.token()
if not tok:
break
toks.append(tok)
if tok.type not in LexerTokenStream._discard_types:
toks.append(Token(tok.value, tok.type))
assert tokfmt(toks) == instr

View File

@@ -26,7 +26,7 @@ from cxxheaderparser.types import (
from cxxheaderparser.simple import ClassScope, NamespaceScope, ParsedData, parse_string
def test_simple_typedef():
def test_simple_typedef() -> None:
content = """
typedef std::vector<int> IntVector;
"""
@@ -45,7 +45,15 @@ def test_simple_typedef():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[Token(value="int")]
arg=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
]
),
@@ -60,7 +68,7 @@ def test_simple_typedef():
)
def test_struct_typedef_1():
def test_struct_typedef_1() -> None:
content = """
typedef struct {
int m;
@@ -114,7 +122,7 @@ def test_struct_typedef_1():
)
def test_struct_typedef_2():
def test_struct_typedef_2() -> None:
content = """
typedef struct {
int m;
@@ -168,7 +176,7 @@ def test_struct_typedef_2():
)
def test_typedef_array():
def test_typedef_array() -> None:
content = """
typedef char TenCharArray[10];
"""
@@ -193,7 +201,7 @@ def test_typedef_array():
)
def test_typedef_array_of_struct():
def test_typedef_array_of_struct() -> None:
content = """
typedef struct{} tx[3], ty;
"""
@@ -235,7 +243,7 @@ def test_typedef_array_of_struct():
)
def test_typedef_class_w_base():
def test_typedef_class_w_base() -> None:
content = """
typedef class XX : public F {} G;
"""
@@ -272,7 +280,7 @@ def test_typedef_class_w_base():
)
def test_complicated_typedef():
def test_complicated_typedef() -> None:
content = """
typedef int int_t, *intp_t, (&fp)(int, ulong), arr_t[10];
"""
@@ -337,7 +345,7 @@ def test_complicated_typedef():
)
def test_typedef_c_struct_idiom():
def test_typedef_c_struct_idiom() -> None:
content = """
// common C idiom to avoid having to write "struct S"
typedef struct {int a; int b;} S, *pS;
@@ -399,7 +407,7 @@ def test_typedef_c_struct_idiom():
)
def test_typedef_struct_same_name():
def test_typedef_struct_same_name() -> None:
content = """
typedef struct Fig {
int a;
@@ -443,7 +451,7 @@ def test_typedef_struct_same_name():
)
def test_typedef_struct_w_enum():
def test_typedef_struct_w_enum() -> None:
content = """
typedef struct {
enum BeetEnum : int { FAIL = 0, PASS = 1 };
@@ -494,7 +502,7 @@ def test_typedef_struct_w_enum():
)
def test_typedef_union():
def test_typedef_union() -> None:
content = """
typedef union apricot_t {
int i;
@@ -561,7 +569,7 @@ def test_typedef_union():
)
def test_typedef_fnptr():
def test_typedef_fnptr() -> None:
content = """
typedef void *(*fndef)(int);
"""
@@ -598,7 +606,7 @@ def test_typedef_fnptr():
)
def test_typedef_const():
def test_typedef_const() -> None:
content = """
typedef int theint, *const ptheint;
"""
@@ -627,7 +635,7 @@ def test_typedef_const():
)
def test_enum_typedef_1():
def test_enum_typedef_1() -> None:
content = """
typedef enum {} E;
"""
@@ -653,7 +661,7 @@ def test_enum_typedef_1():
)
def test_enum_typedef_2():
def test_enum_typedef_2() -> None:
content = """
typedef enum { E1 } BE;
"""
@@ -679,7 +687,7 @@ def test_enum_typedef_2():
)
def test_enum_typedef_3():
def test_enum_typedef_3() -> None:
content = """
typedef enum { E1, E2, } E;
"""
@@ -705,7 +713,7 @@ def test_enum_typedef_3():
)
def test_enum_typedef_3():
def test_enum_typedef_3_1() -> None:
content = """
typedef enum { E1 } * PBE;
"""
@@ -735,7 +743,7 @@ def test_enum_typedef_3():
)
def test_enum_typedef_4():
def test_enum_typedef_4() -> None:
content = """
typedef enum { E1 } * PBE, BE;
"""
@@ -771,7 +779,7 @@ def test_enum_typedef_4():
)
def test_enum_typedef_5():
def test_enum_typedef_5() -> None:
content = """
typedef enum { E1 } BE, *PBE;
"""
@@ -807,7 +815,7 @@ def test_enum_typedef_5():
)
def test_enum_typedef_fwd():
def test_enum_typedef_fwd() -> None:
content = """
typedef enum BE BET;
"""
@@ -829,7 +837,7 @@ def test_enum_typedef_fwd():
)
def test_typedef_enum_expr():
def test_typedef_enum_expr() -> None:
content = """
typedef enum { StarFruit = (2 + 2) / 2 } Carambola;
"""
@@ -868,3 +876,115 @@ def test_typedef_enum_expr():
],
)
)
def test_volatile_typedef() -> None:
    """A volatile qualifier in a typedef is captured on the aliased type."""
    content = """
      typedef volatile signed short vint16;
    """
    data = parse_string(content, cleandoc=True)

    expected = ParsedData(
        namespace=NamespaceScope(
            typedefs=[
                Typedef(
                    type=Type(
                        typename=PQName(
                            segments=[FundamentalSpecifier(name="signed short")]
                        ),
                        volatile=True,
                    ),
                    name="vint16",
                )
            ]
        )
    )
    assert data == expected
def test_function_typedef() -> None:
    """Function-type typedefs (at namespace and class scope, and with a
    trailing return type) produce FunctionType typedefs."""
    content = """
      typedef void fn(int);
      typedef auto fntype(int) -> int;
      struct X {
        typedef void fn(int);
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="X")], classkey="struct"
                        )
                    ),
                    # the member typedef is public (struct default access)
                    typedefs=[
                        Typedef(
                            type=FunctionType(
                                return_type=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="void")]
                                    )
                                ),
                                parameters=[
                                    Parameter(
                                        type=Type(
                                            typename=PQName(
                                                segments=[
                                                    FundamentalSpecifier(name="int")
                                                ]
                                            )
                                        )
                                    )
                                ],
                            ),
                            name="fn",
                            access="public",
                        )
                    ],
                )
            ],
            typedefs=[
                Typedef(
                    type=FunctionType(
                        return_type=Type(
                            typename=PQName(
                                segments=[FundamentalSpecifier(name="void")]
                            )
                        ),
                        parameters=[
                            Parameter(
                                type=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            )
                        ],
                    ),
                    name="fn",
                ),
                # "auto fntype(int) -> int" sets has_trailing_return
                Typedef(
                    type=FunctionType(
                        return_type=Type(
                            typename=PQName(segments=[FundamentalSpecifier(name="int")])
                        ),
                        parameters=[
                            Parameter(
                                type=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            )
                        ],
                        has_trailing_return=True,
                    ),
                    name="fntype",
                ),
            ],
        )
    )

313
tests/test_typefmt.py Normal file
View File

@@ -0,0 +1,313 @@
import typing
import pytest
from cxxheaderparser.tokfmt import Token
from cxxheaderparser.types import (
Array,
DecoratedType,
FunctionType,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
PQName,
Parameter,
Pointer,
Reference,
TemplateArgument,
TemplateSpecialization,
TemplateDecl,
Type,
Value,
)
@pytest.mark.parametrize(
"pytype,typestr,declstr",
[
(
Type(typename=PQName(segments=[FundamentalSpecifier(name="int")])),
"int",
"int name",
),
(
Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")]), const=True
),
"const int",
"const int name",
),
(
Type(
typename=PQName(segments=[NameSpecifier(name="S")], classkey="struct")
),
"struct S",
"struct S name",
),
(
Pointer(
ptr_to=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
)
),
"int*",
"int* name",
),
(
Pointer(
ptr_to=Pointer(
ptr_to=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
)
)
),
"int**",
"int** name",
),
(
Reference(
ref_to=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
)
),
"int&",
"int& name",
),
(
Reference(
ref_to=Array(
array_of=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
size=Value(tokens=[Token(value="3")]),
)
),
"int (&)[3]",
"int (& name)[3]",
),
(
MoveReference(
moveref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="T"), NameSpecifier(name="T")]
)
)
),
"T::T&&",
"T::T&& name",
),
(
Pointer(
ptr_to=Array(
array_of=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
size=Value(tokens=[Token(value="3")]),
)
),
"int (*)[3]",
"int (* name)[3]",
),
(
Pointer(
ptr_to=Array(
array_of=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
size=Value(tokens=[Token(value="3")]),
),
const=True,
),
"int (* const)[3]",
"int (* const name)[3]",
),
(
FunctionType(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
)
)
],
),
"int (int)",
"int name(int)",
),
(
FunctionType(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
parameters=[
Parameter(
type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
)
)
],
has_trailing_return=True,
),
"auto (int) -> int",
"auto name(int) -> int",
),
(
FunctionType(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")],
),
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")],
),
),
name="a",
),
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")],
),
),
name="b",
),
],
),
"void (int a, int b)",
"void name(int a, int b)",
),
(
Pointer(
ptr_to=FunctionType(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
)
),
"int (*)(int)",
"int (* name)(int)",
),
(
Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="function",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=FunctionType(
return_type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
],
)
)
]
),
),
]
)
),
"std::function<int (int)>",
"std::function<int (int)> name",
),
(
Type(
typename=PQName(
segments=[
NameSpecifier(
name="foo",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name=""),
NameSpecifier(name="T"),
],
)
),
)
]
),
)
]
),
),
"foo<::T>",
"foo<::T> name",
),
(
Type(
typename=PQName(
segments=[
NameSpecifier(
name="foo",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name=""),
NameSpecifier(name="T"),
],
has_typename=True,
)
),
)
]
),
)
]
),
),
"foo<typename ::T>",
"foo<typename ::T> name",
),
],
)
def test_typefmt(
    pytype: typing.Union[DecoratedType, FunctionType], typestr: str, declstr: str
) -> None:
    """Each parametrized type must render as the expected string, both bare
    and as a declaration of a variable named "name"."""
    # basic formatting
    assert pytype.format() == typestr
    # as a type declaration
    assert pytype.format_decl("name") == declstr

View File

@@ -17,7 +17,7 @@ from cxxheaderparser.simple import (
)
def test_union_basic():
def test_union_basic() -> None:
content = """
struct HAL_Value {
@@ -86,7 +86,7 @@ def test_union_basic():
)
def test_union_anon_in_struct():
def test_union_anon_in_struct() -> None:
content = """
struct Outer {
union {
@@ -138,6 +138,14 @@ def test_union_anon_in_struct():
)
],
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="union"
)
),
),
Field(
access="public",
type=Type(
@@ -146,7 +154,7 @@ def test_union_anon_in_struct():
)
),
name="z",
)
),
],
)
]

View File

@@ -4,6 +4,7 @@ from cxxheaderparser.types import (
BaseClass,
ClassDecl,
Function,
FunctionType,
FundamentalSpecifier,
Method,
NameSpecifier,
@@ -29,7 +30,7 @@ from cxxheaderparser.simple import (
)
def test_using_namespace():
def test_using_namespace() -> None:
content = """
using namespace foo;
using namespace foo::bar;
@@ -51,7 +52,7 @@ def test_using_namespace():
)
def test_using_declaration():
def test_using_declaration() -> None:
content = """
using ::foo;
using foo::bar;
@@ -103,7 +104,7 @@ def test_using_declaration():
# alias-declaration
def test_alias_declaration_1():
def test_alias_declaration_1() -> None:
content = """
using alias = foo;
"""
@@ -121,7 +122,7 @@ def test_alias_declaration_1():
)
def test_alias_declaration_2():
def test_alias_declaration_2() -> None:
content = """
template <typename T> using alias = foo<T>;
"""
@@ -139,7 +140,15 @@ def test_alias_declaration_2():
name="foo",
specialization=TemplateSpecialization(
args=[
TemplateArgument(tokens=[Token(value="T")])
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name="T")
]
)
)
)
]
),
)
@@ -155,7 +164,7 @@ def test_alias_declaration_2():
)
def test_alias_declaration_3():
def test_alias_declaration_3() -> None:
content = """
using alias = ::foo::bar;
"""
@@ -181,7 +190,7 @@ def test_alias_declaration_3():
)
def test_alias_declaration_4():
def test_alias_declaration_4() -> None:
content = """
template <typename T> using alias = ::foo::bar<T>;
"""
@@ -201,7 +210,15 @@ def test_alias_declaration_4():
name="bar",
specialization=TemplateSpecialization(
args=[
TemplateArgument(tokens=[Token(value="T")])
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name="T")
]
)
)
)
]
),
),
@@ -217,7 +234,7 @@ def test_alias_declaration_4():
)
def test_alias_declaration_5():
def test_alias_declaration_5() -> None:
content = """
using alias = foo::bar;
"""
@@ -242,7 +259,7 @@ def test_alias_declaration_5():
)
def test_alias_declaration_6():
def test_alias_declaration_6() -> None:
content = """
template <typename T> using alias = foo<T>::bar;
"""
@@ -260,7 +277,15 @@ def test_alias_declaration_6():
name="foo",
specialization=TemplateSpecialization(
args=[
TemplateArgument(tokens=[Token(value="T")])
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(name="T")
]
)
)
)
]
),
),
@@ -277,7 +302,7 @@ def test_alias_declaration_6():
)
def test_using_many_things():
def test_using_many_things() -> None:
content = """
// clang-format off
@@ -410,12 +435,19 @@ def test_using_many_things():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[
Token(value="int"),
Token(value="("),
Token(value=")"),
arg=FunctionType(
return_type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
),
parameters=[],
)
)
]
),
),
@@ -491,12 +523,19 @@ def test_using_many_things():
specialization=TemplateSpecialization(
args=[
TemplateArgument(
tokens=[
Token(value="void"),
Token(value="("),
Token(value=")"),
arg=FunctionType(
return_type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="void"
)
]
)
),
parameters=[],
)
)
]
),
),
@@ -509,3 +548,171 @@ def test_using_many_things():
},
)
)
def test_using_template_in_class() -> None:
    """A templated using-alias inside a class keeps its TemplateDecl and
    gets the class's default (private) access."""
    content = """
      class X {
        template <typename T>
        using TT = U<T>;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="X")], classkey="class"
                        )
                    ),
                    using_alias=[
                        UsingAlias(
                            alias="TT",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="U",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="T"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ]
        )
    )
def test_using_typename_in_class() -> None:
    """using-aliases of dependent types ("typename f::TP<D>::A") set
    has_typename on the PQName; the alias access follows the section it
    appears in (private before "public:", public after)."""
    content = """
      template <class D> class P {
        using A = typename f::TP<D>::A;
      public:
        using State = typename f::TP<D>::S;
        P(State st);
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="P")], classkey="class"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="class", name="D")]
                        ),
                    ),
                    methods=[
                        # constructor: no return type, constructor=True
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="P")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="State")]
                                        )
                                    ),
                                    name="st",
                                )
                            ],
                            access="public",
                            constructor=True,
                        )
                    ],
                    using_alias=[
                        UsingAlias(
                            alias="A",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="f"),
                                        NameSpecifier(
                                            name="TP",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="D"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        ),
                                        NameSpecifier(name="A"),
                                    ],
                                    has_typename=True,
                                )
                            ),
                            access="private",
                        ),
                        UsingAlias(
                            alias="State",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="f"),
                                        NameSpecifier(
                                            name="TP",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="D"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        ),
                                        NameSpecifier(name="S"),
                                    ],
                                    has_typename=True,
                                )
                            ),
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )

View File

@@ -1,6 +1,6 @@
# Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.errors import CxxParseError
from cxxheaderparser.types import (
Array,
ClassDecl,
@@ -21,8 +21,11 @@ from cxxheaderparser.types import (
)
from cxxheaderparser.simple import ClassScope, NamespaceScope, ParsedData, parse_string
import pytest
import re
def test_var_unixwiz_ridiculous():
def test_var_unixwiz_ridiculous() -> None:
# http://unixwiz.net/techtips/reading-cdecl.html
#
# .. "we have no idea how this variable is useful, but at least we can
@@ -73,7 +76,7 @@ def test_var_unixwiz_ridiculous():
)
def test_var_ptr_to_array15_of_ptr_to_int():
def test_var_ptr_to_array15_of_ptr_to_int() -> None:
content = """
int *(*crocodile)[15];
"""
@@ -102,7 +105,7 @@ def test_var_ptr_to_array15_of_ptr_to_int():
)
def test_var_ref_to_array():
def test_var_ref_to_array() -> None:
content = """
int abase[3];
int (&aname)[3] = abase;
@@ -140,7 +143,7 @@ def test_var_ref_to_array():
)
def test_var_ptr_to_array():
def test_var_ptr_to_array() -> None:
content = """
int zz, (*aname)[3] = &abase;
"""
@@ -174,7 +177,7 @@ def test_var_ptr_to_array():
)
def test_var_multi_1():
def test_var_multi_1() -> None:
content = """
int zz, (&aname)[3] = abase;
"""
@@ -208,7 +211,7 @@ def test_var_multi_1():
)
def test_var_array_of_fnptr_varargs():
def test_var_array_of_fnptr_varargs() -> None:
content = """
void (*a3[3])(int, ...);
"""
@@ -249,7 +252,7 @@ def test_var_array_of_fnptr_varargs():
)
def test_var_double_fnptr_varargs():
def test_var_double_fnptr_varargs() -> None:
content = """
void (*(*a4))(int, ...);
"""
@@ -289,7 +292,7 @@ def test_var_double_fnptr_varargs():
)
def test_var_fnptr_voidstar():
def test_var_fnptr_voidstar() -> None:
content = """
void(*(*a5)(int));
"""
@@ -326,7 +329,7 @@ def test_var_fnptr_voidstar():
)
def test_var_fnptr_moreparens():
def test_var_fnptr_moreparens() -> None:
content = """
void (*x)(int(p1), int);
"""
@@ -384,7 +387,7 @@ def test_var_fnptr_moreparens():
# Means "const pointer to pointer to char"
def test_var_ptr_to_const_ptr_to_char():
def test_var_ptr_to_const_ptr_to_char() -> None:
content = """
char *const *p;
"""
@@ -411,7 +414,7 @@ def test_var_ptr_to_const_ptr_to_char():
)
def test_var_const_ptr_to_ptr_to_char():
def test_var_const_ptr_to_ptr_to_char() -> None:
content = """
char **const p;
"""
@@ -438,7 +441,7 @@ def test_var_const_ptr_to_ptr_to_char():
)
def test_var_array_initializer1():
def test_var_array_initializer1() -> None:
content = """
int x[3]{1, 2, 3};
"""
@@ -472,7 +475,7 @@ def test_var_array_initializer1():
)
def test_var_array_initializer2():
def test_var_array_initializer2() -> None:
content = """
int x[3] = {1, 2, 3};
"""
@@ -506,7 +509,7 @@ def test_var_array_initializer2():
)
def test_var_extern_c():
def test_var_extern_c() -> None:
content = """
extern "C" int x;
"""
@@ -528,7 +531,7 @@ def test_var_extern_c():
)
def test_var_ns_1():
def test_var_ns_1() -> None:
content = """
int N::x;
"""
@@ -550,7 +553,7 @@ def test_var_ns_1():
)
def test_var_ns_2():
def test_var_ns_2() -> None:
content = """
int N::x = 4;
"""
@@ -573,7 +576,7 @@ def test_var_ns_2():
)
def test_var_ns_3():
def test_var_ns_3() -> None:
content = """
int N::x{4};
"""
@@ -598,7 +601,7 @@ def test_var_ns_3():
)
def test_var_static_struct():
def test_var_static_struct() -> None:
content = """
constexpr static struct SS {} s;
"""
@@ -631,7 +634,7 @@ def test_var_static_struct():
)
def test_var_constexpr_enum():
def test_var_constexpr_enum() -> None:
content = """
constexpr enum E { EE } e = EE;
"""
@@ -663,7 +666,7 @@ def test_var_constexpr_enum():
)
def test_var_fnptr_in_class():
def test_var_fnptr_in_class() -> None:
content = """
struct DriverFuncs {
void *(*init)();
@@ -747,7 +750,7 @@ def test_var_fnptr_in_class():
)
def test_var_extern():
def test_var_extern() -> None:
content = """
extern int externVar;
"""
@@ -766,3 +769,73 @@ def test_var_extern():
]
)
)
def test_balanced_with_gt() -> None:
    """Tests _consume_balanced_tokens handling of mismatched gt tokens"""
    content = """
      int x = (1 >> 2);
    """
    data = parse_string(content, cleandoc=True)

    # ">>" is lexed as two separate ">" tokens in the initializer value
    expected_value = Value(
        tokens=[
            Token(value="("),
            Token(value="1"),
            Token(value=">"),
            Token(value=">"),
            Token(value="2"),
            Token(value=")"),
        ]
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="x")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    value=expected_value,
                )
            ]
        )
    )
    assert data == expected
def test_balanced_with_lt() -> None:
    """Tests _consume_balanced_tokens handling of mismatched lt tokens"""
    content = """
      bool z = (i < 4);
    """
    data = parse_string(content, cleandoc=True)

    expected_value = Value(
        tokens=[
            Token(value="("),
            Token(value="i"),
            Token(value="<"),
            Token(value="4"),
            Token(value=")"),
        ]
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="z")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="bool")])
                    ),
                    value=expected_value,
                )
            ]
        )
    )
    assert data == expected
def test_balanced_bad_mismatch() -> None:
    """A mismatched closing bracket inside an initializer raises CxxParseError."""
    content = """
      bool z = (12 ]);
    """
    # re.escape because the message contains regex metacharacters
    expected_msg = "<str>:1: parse error evaluating ']': unexpected ']', expected ')'"
    with pytest.raises(CxxParseError, match=re.escape(expected_msg)):
        parse_string(content, cleandoc=True)