Compare commits

...

3 Commits

31 changed files with 1240 additions and 59 deletions

220
.gitignore vendored
View File

@ -1 +1,219 @@
__pycache__ # Project files
/.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
# Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
# poetry.lock
# poetry.toml
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
# pdm.lock
# pdm.toml
.pdm-python
.pdm-build/
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
# pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# Redis
*.rdb
*.aof
*.pid
# RabbitMQ
mnesia/
rabbitmq/
rabbitmq-data/
# ActiveMQ
activemq-data/
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
# .idea/
# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
# Streamlit
.streamlit/secrets.toml

View File

@ -1,8 +1,10 @@
from collections.abc import Callable
import copy import copy
from dataclasses import dataclass, field from dataclasses import dataclass
import enum import enum
import glob import glob
import importlib.util
import inspect import inspect
import json import json
import multiprocessing import multiprocessing
@ -17,6 +19,18 @@ from typing import Any
import uuid import uuid
from SCons.Node import Node from SCons.Node import Node
from SCons.Script import *
# Make the bundled helper library importable and pull the S++ public
# interface into this SConscript's global namespace.
sys.path.append(os.path.join(Dir('.').abspath, 'lib'))
from spp import _init_interface
_init_interface(globals=globals())

# Version of the S++ build system itself (major, minor, patch).
_SPP_VERSION = (1, 1, 0)

# Per-topic debug output switches; consulted by _debug().
_DEBUG = {
    'addons': False
}
_GCC_CPU_FEATURES_MAP = { _GCC_CPU_FEATURES_MAP = {
'mmx': '-mmmx', 'mmx': '-mmmx',
@ -36,6 +50,7 @@ class TargetType(enum.Enum):
PROGRAM = 0 PROGRAM = 0
STATIC_LIBRARY = 1 STATIC_LIBRARY = 1
SHARED_LIBRARY = 2 SHARED_LIBRARY = 2
MISC = 3
class _VersionSpec: class _VersionSpec:
minimum_version = None minimum_version = None
@ -78,10 +93,16 @@ class _Target:
def _find_recipe(env: Environment, recipe_name: str): def _find_recipe(env: Environment, recipe_name: str):
if recipe_name in env['SPP_RECIPES']: if recipe_name in env['SPP_RECIPES']:
return env['SPP_RECIPES'][recipe_name] return env['SPP_RECIPES'][recipe_name]
import importlib.util
source_file = None source_file = None
if not env['SPP_RECIPES_FOLDERS']: if not env['SPP_RECIPES_FOLDERS']:
# for compatibility
if '_SPP_FALLBACK_RECIPE_REPO' in env:
repo_args: dict = env['_SPP_FALLBACK_RECIPE_REPO']
env.Warn('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
env.Warn(f'Falling back to default recipe repository ({repo_args["repo_name"]} at {repo_args["remote_url"]} ref={repo_args.get("git_ref", "master")}).')
env.RecipeRepo(**repo_args)
else:
env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).') env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
for folder in env['SPP_RECIPES_FOLDERS']: for folder in env['SPP_RECIPES_FOLDERS']:
from SCons import Node from SCons import Node
@ -293,16 +314,19 @@ def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'sta
return None return None
raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".') raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
def _debug(cond: str, msg: str) -> None:
    """Print *msg* to stdout when the debug topic *cond* is enabled in _DEBUG."""
    if _DEBUG.get(cond):
        print(f'[DEBUG] [{cond}] {msg}')
def _info(env: Environment, message: str) -> None: def _info(env: Environment|None, message: str) -> None:
if not GetOption('silent'): if not GetOption('silent'):
print(message) print(f'[INFO] {message}')
def _warn(env: Environment, message: str) -> None: def _warn(env: Environment|None, message: str) -> None:
print(message, file=sys.stderr) print(f'[WARN] {message}', file=sys.stderr)
def _error(env: Environment, message: str) -> None: def _error(env: Environment|None, message: str) -> None:
print(message, file=sys.stderr) print(f'[ERROR] {message}', file=sys.stderr)
Exit(1) Exit(1)
def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None': def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
@ -450,17 +474,20 @@ def _wrap_builder(builder, target_type: TargetType):
kwargs['LIBPATH'] = copy.copy(env['LIBPATH']) kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
if 'LIBS' not in kwargs and 'LIBS' in env: if 'LIBS' not in kwargs and 'LIBS' in env:
kwargs['LIBS'] = copy.copy(env['LIBS']) kwargs['LIBS'] = copy.copy(env['LIBS'])
if 'source' in kwargs:
source = kwargs['source'] def _fix_filearg(filearg: str|Entry|list[str|Entry]) -> list[Entry]:
if not isinstance(source, list): if not isinstance(filearg, list):
source = [source] filearg = (filearg,)
new_source = [] result = []
for src in source: for ele in filearg:
if isinstance(src, str): if isinstance(ele, str):
new_source.append(env.Entry(src)) result.append(env.Entry(ele))
else: else:
new_source.append(src) result.append(ele)
kwargs['source'] = new_source return result
if 'source' in kwargs:
kwargs['source'] = _fix_filearg(kwargs['source'])
target = _Target() target = _Target()
if 'name' in kwargs: if 'name' in kwargs:
@ -502,7 +529,7 @@ def _wrap_default(default):
def _wrap_depends(depends): def _wrap_depends(depends):
def _wrapped(env, dependant, dependency): def _wrapped(env, dependant, dependency):
if isinstance(dependant, _Target) or isinstance(dependency, _Target): if isinstance(dependant, _Target) or isinstance(dependency, _Target):
env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)]) env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency, depends)])
return return
elif isinstance(dependant, dict) and '_target' in dependant: elif isinstance(dependant, dict) and '_target' in dependant:
dependant = dependant['_target'] dependant = dependant['_target']
@ -527,7 +554,7 @@ def _build_target(target: _Target):
target.kwargs['LIBS'].remove(lib) target.kwargs['LIBS'].remove(lib)
target.kwargs['LIBS'].append(lib.target) target.kwargs['LIBS'].append(lib.target)
new_kwargs = target.kwargs.copy() new_kwargs = target.kwargs.copy()
if 'target' in new_kwargs: # there should always be a target, right? if 'target' in new_kwargs and target.target_type != TargetType.MISC: # there should always be a target, right?
new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}" new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
if os.name == 'nt' and 'PDB' not in new_kwargs: if os.name == 'nt' and 'PDB' not in new_kwargs:
new_kwargs['PDB'] = f'{new_kwargs["target"]}.pdb' new_kwargs['PDB'] = f'{new_kwargs["target"]}.pdb'
@ -566,12 +593,12 @@ def _finalize(env: Environment):
_build_target(target) _build_target(target)
for target in env['SPP_DEFAULT_TARGETS']: for target in env['SPP_DEFAULT_TARGETS']:
env.Default(target.target) env.Default(target.target)
for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']: for dependant, dependency, depends in env['SPP_TARGET_DEPENDENCIES']:
if isinstance(dependant, _Target): if isinstance(dependant, _Target):
dependant = dependant.target dependant = dependant.target
if isinstance(dependency, _Target): if isinstance(dependency, _Target):
dependency = dependency.target dependency = dependency.target
env.Depends(dependant, dependency) depends(dependant, dependency)
def _find_target(env: Environment, target_name: str) -> '_Target|None': def _find_target(env: Environment, target_name: str) -> '_Target|None':
for target in env['SPP_TARGETS']: for target in env['SPP_TARGETS']:
@ -915,6 +942,51 @@ def _dump() -> None:
print(dump_fn(data)) print(dump_fn(data))
Exit(0) Exit(0)
class _Hook:
def __init__(self) -> None:
self._funcs: list[Callable] = []
def add_func(self, func: Callable) -> None:
self._funcs.append(func)
def invoke(self, **kwargs) -> None:
for func in self._funcs:
func(**kwargs)
_hook_pre_environment = _Hook()
_hook_post_environment = _Hook()
def _load_addon(modname: str, modpath: pathlib.Path) -> None:
    """Import the addon module at *modpath* and register its hooks.

    An addon module may define:
      - available() -> bool: if present and falsy, the addon is skipped.
      - pre_environment(**kwargs): invoked before the SCons Environment exists.
      - post_environment(**kwargs): invoked after the Environment is configured.
    """
    _debug('addons', f'Loading addon {modname} from {modpath}.')
    # Load the module directly from its file path, without touching sys.path.
    spec = importlib.util.spec_from_file_location(modname, modpath)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    if hasattr(module, 'available') and not module.available():
        _debug('addons', f'Addon {modname} is not available and will not be loaded.')
        return
    if hasattr(module, 'pre_environment'):
        _hook_pre_environment.add_func(module.pre_environment)
        _debug('addons', f'Addon {modname} registered a pre_environment hook.')
    if hasattr(module, 'post_environment'):
        _hook_post_environment.add_func(module.post_environment)
        _debug('addons', f'Addon {modname} registered a post_environment hook.')
def _load_addons(folder: pathlib.Path) -> None:
    """Load every top-level Python file in *folder* as an addon module."""
    _debug('addons', f'Loading addons from {folder}.')
    for script_file in folder.glob('*.py'):
        # The module name is the file name without its '.py' suffix.
        _load_addon(script_file.stem, script_file)
_ALLOWED_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
def _sanitize_identifier(name: str) -> str:
chrs = []
for chr in name:
if chr in _ALLOWED_CHARS:
chrs.append(chr)
else:
chrs.append('_')
return ''.join(chrs)
Import('config') Import('config')
@ -925,7 +997,9 @@ if not config.get('CXX_STANDARD'):
if not config.get('CXX_NO_EXCEPTIONS'): if not config.get('CXX_NO_EXCEPTIONS'):
config['CXX_NO_EXCEPTIONS'] = False config['CXX_NO_EXCEPTIONS'] = False
if not config.get('PREPROCESSOR_PREFIX'): if not config.get('PREPROCESSOR_PREFIX'):
config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer? config['PREPROCESSOR_PREFIX'] = _sanitize_identifier(config['PROJECT_NAME']).upper() # TODO: may be nicer?
if not config.get('SPP_TARGET_VERSION'):
config['SPP_TARGET_VERSION'] = (1, 0, 0)
if 'COMPILATIONDB_FILTER_FILES' not in config: if 'COMPILATIONDB_FILTER_FILES' not in config:
config['COMPILATIONDB_FILTER_FILES'] = True config['COMPILATIONDB_FILTER_FILES'] = True
@ -1080,9 +1154,21 @@ if 'VARIABLES' in config:
for vardef in config['VARIABLES']: for vardef in config['VARIABLES']:
vars.Add(*vardef) vars.Add(*vardef)
tools = ['default', 'compilation_db', 'unity_build'] tools = ['default', 'compilation_db']
if 'TOOLS' in config: if 'TOOLS' in config:
assert isinstance(config['TOOLS'], list)
tools.extend(config['TOOLS']) tools.extend(config['TOOLS'])
# Collect addon directories: the built-in 'addons' folder next to this
# SConscript, plus any project-configured extras from config['ADDON_DIRS'].
addon_dirs = [pathlib.Path(Dir('.').abspath) / 'addons']
if 'ADDON_DIRS' in config:
    assert isinstance(config['ADDON_DIRS'], list)
    addon_dirs.extend(config['ADDON_DIRS'])
for addon_dir in addon_dirs:
    if not isinstance(addon_dir, pathlib.Path):
        # Accept plain path strings in ADDON_DIRS for convenience.
        addon_dir = pathlib.Path(addon_dir)
    _load_addons(addon_dir)

# Give addons a chance to adjust globals (e.g. the tools list) before the
# Environment is constructed.
_hook_pre_environment.invoke()
env = Environment(tools = tools, variables = vars, ENV = os.environ) env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['SPP_RECIPES_FOLDERS'] = [] env['SPP_RECIPES_FOLDERS'] = []
@ -1156,6 +1242,8 @@ env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
if variant: if variant:
env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}" env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
_hook_post_environment.invoke()
# create the cache dir # create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True) os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore' cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
@ -1324,17 +1412,23 @@ env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_info, 'Info') env.AddMethod(_info, 'Info')
env.AddMethod(_warn, 'Warn') env.AddMethod(_warn, 'Warn')
env.AddMethod(_error, 'Error') env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library') env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary') env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary') env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program') env.AddMethod(_wrap_builder(env.AstJson, TargetType.MISC), 'AstJson')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')
if 'unity_build' in tools:
env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram') env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary') env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary') env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary') env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')
env.AddMethod(_wrap_depends(env.Ignore), 'Ignore')
env.AddMethod(_wrap_depends(env.Requires), 'Requires')
env.AddMethod(_module, 'Module') env.AddMethod(_module, 'Module')
env.AddMethod(_module_config, 'ModuleConfig') env.AddMethod(_module_config, 'ModuleConfig')
env.AddMethod(_finalize, 'Finalize') env.AddMethod(_finalize, 'Finalize')
@ -1343,7 +1437,7 @@ env.AddMethod(_find_target, 'FindTarget')
if hasattr(env, 'Gch'): if hasattr(env, 'Gch'):
env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch') env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch')
for addon_file in env.Glob('addons/*.py'): for addon_file in env.Glob('addons/old/*.py'):
env = SConscript(addon_file, exports = 'env') env = SConscript(addon_file, exports = 'env')
Return('env') Return('env')

209
addons/astgen.py Normal file
View File

@ -0,0 +1,209 @@
import gzip
import json
import os.path
import pickle
import subprocess
from abc import ABC, abstractmethod
from typing import Callable, Any, Iterable, Self, Generator
from SCons.Script import *
from SCons.Node.FS import File
from spp import get_spp
spp = get_spp()
def post_environment(**kwargs) -> None:
    """Register the AST generation builder and wrapper on the project environment."""
    env: Environment = spp.globals['env']
    # Builder that dumps the clang AST of one source file to JSON; the output
    # encoding (pickle/.bin, gzip-JSON/.gz, plain JSON) is chosen by target suffix.
    ast_json_builder = Builder(
        action=_gen_ast_json
    )
    env.Append(BUILDERS = {'AstJson': ast_json_builder})
    # env.SetDefault(ASTJSONCOM = '$ASTJSON -Xclang -ast-dump=json -fsyntax-only -Wno-unknown-warning-option -DSPP_AST_GEN $CXXFLAGS $SOURCES > $TARGET')
    env.AddMethod(_ast_jinja, 'AstJinja')
def _gen_ast_json(target: list[File], source: list[File], env: Environment):
    """SCons action: dump the clang AST of source[0] as JSON into target[0].

    The output format is selected by the target suffix:
      .bin -> gzip-compressed pickle, .gz -> gzip-compressed JSON,
      anything else -> plain JSON text.
    """
    clang_exe = env.WhereIs('clang++')
    if not clang_exe:
        env.Error('clang++ not found in PATH; it is required for AST generation.')
        return
    cmd = [clang_exe, '-Xclang', '-ast-dump=json', '-fsyntax-only', '-Wno-unknown-warning-option',
           '-DSPP_AST_GEN', f'-std={env["CXX_STANDARD"]}']
    for define in env['CPPDEFINES']:
        cmd.append(f'-D{define}')
    for path in env['CPPPATH']:
        cmd.append(f'-I{path}')
    cmd.append(source[0].abspath)
    # Stream clang's stdout straight into the JSON parser -- AST dumps can be
    # very large, so avoid buffering the whole output as one string.
    with subprocess.Popen(cmd, text=True, stdout=subprocess.PIPE) as proc:
        parsed = json.load(proc.stdout)
    # NOTE: Popen never raises CalledProcessError (that is subprocess.run with
    # check=True); the exit status must be checked explicitly or clang failures
    # would be silently ignored.
    if proc.returncode != 0:
        env.Error(f'Clang exited with code {proc.returncode}.')
        return
    if target[0].suffix == '.bin':
        with gzip.open(target[0].abspath, 'wb') as f:
            pickle.dump(parsed, f)
    elif target[0].suffix == '.gz':
        with gzip.open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
    else:
        with open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
class ASTNode(ABC):
    """Wrapper over one or more raw clang AST declaration dicts."""

    @abstractmethod
    def _get_decls(self) -> Iterable[dict]:
        """Return the underlying declaration dicts this node represents."""
        ...

    def inner(self) -> Iterable[dict]:
        """Iterate the child declarations of all wrapped decls.

        Implemented as a plain generator expression: this module never imports
        itertools, so the original itertools.chain call relied on the name
        leaking through the SCons star-import and could raise NameError.
        """
        return (child for decl in self._get_decls() for child in decl['inner'])

    def inner_filtered(self, **kwargs) -> Iterable[dict]:
        """Iterate child declarations whose entries match every key=value pair."""
        def _applies(decl: dict) -> bool:
            for name, val in kwargs.items():
                if decl.get(name) != val:
                    return False
            return True
        return (decl for decl in self.inner() if _applies(decl))
class SimpleASTNode(ASTNode):
    """ASTNode backed by exactly one declaration dict."""

    def __init__(self, decl: dict) -> None:
        self._decl = decl

    def _get_decls(self) -> Iterable[dict]:
        # Single-decl node: expose just the one wrapped dict.
        return (self._decl,)
class Value(SimpleASTNode):
    """A raw value node (e.g. an argument of an annotate attribute)."""
    ...
class Annotation(SimpleASTNode):
    """A clang AnnotateAttr declaration attached to a method."""

    @property
    def values(self) -> Iterable[Value]:
        # The attribute's arguments are its child declarations.
        return (Value(decl) for decl in self.inner())
class Param(SimpleASTNode):
    """A function/method parameter (clang ParmVarDecl)."""

    @property
    def name(self) -> str:
        # Unnamed parameters carry no 'name' entry; report them as ''.
        return self._decl.get('name', '')

    @property
    def type(self) -> str:
        return self._decl['type']['qualType']
class Method(SimpleASTNode):
    """A clang CXXMethodDecl together with the access level it was declared under."""

    def __init__(self, decl: dict, access: str) -> None:
        super().__init__(decl)
        # 'public' / 'protected' / 'private', as tracked by Class.methods.
        self._access = access

    @property
    def access(self) -> str:
        return self._access

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def mangled_name(self) -> str:
        return self._decl['mangledName']

    @property
    def type(self) -> str:
        # Full function type as clang prints it, e.g. 'int (int, float)'.
        return self._decl['type']['qualType']

    @property
    def return_type(self) -> str:
        # Everything before the parameter list of the function type.
        return self.type.split('(', 1)[0].strip()

    @property
    def params(self) -> Iterable[Param]:
        return (Param(decl) for decl in self.inner_filtered(kind='ParmVarDecl'))

    @property
    def annotations(self) -> Iterable[Annotation]:
        return (Annotation(decl) for decl in self.inner_filtered(kind='AnnotateAttr'))
class Class(SimpleASTNode):
    """A complete clang CXXRecordDecl definition."""

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def tagUsed(self) -> str:
        # 'class' or 'struct'; determines the default member access below.
        return self._decl['tagUsed']

    @property
    def methods(self) -> Generator[Method]:
        """Yield explicit (non-compiler-generated) methods with their access level."""
        # Default access: private inside a 'class', public inside a 'struct'.
        access = 'private' if self.tagUsed == 'class' else 'public'
        for decl in self.inner():
            if decl['kind'] == 'AccessSpecDecl':
                # An access specifier changes the access of all following members.
                access = decl['access']
            elif decl['kind'] == 'CXXMethodDecl' and not decl.get('isImplicit', False):
                yield Method(decl, access)
class Namespace(ASTNode, ABC):
    """Queries shared by the translation-unit root and nested namespaces."""

    def get_namespace(self, ns_name: str) -> Self:
        # A namespace may be reopened several times in one TU; collect every decl.
        return InnerNamespace(list(self.inner_filtered(kind='NamespaceDecl', name=ns_name)))

    @property
    def classes(self) -> Iterable[Class]:
        # Only complete class definitions (not forward declarations, not structs).
        return (Class(decl) for decl in self.inner_filtered(kind='CXXRecordDecl', tagUsed='class', completeDefinition=True))
class InnerNamespace(Namespace):
    """A (possibly reopened) namespace: wraps all of its NamespaceDecl dicts."""

    def __init__(self, decls: list[dict]) -> None:
        self._decls = decls

    def _get_decls(self) -> Iterable[dict]:
        return self._decls
class Ast(Namespace):
    """Lazily-loaded root of an AST dump produced by the AstJson builder."""

    def __init__(self, file: File) -> None:
        self._file = file
        # Parsed AST dict; populated on first _get_decls() call.
        self._data_dict: dict|None = None

    def _get_decls(self) -> tuple[dict]:
        if self._data_dict is None:
            if not self._file.exists():
                # Missing dump (e.g. first build before AstJson ran):
                # behave as an empty translation unit.
                self._data_dict = {
                    'inner': []
                }
            elif self._file.suffix == '.bin':
                # Suffix encodings mirror _gen_ast_json's output formats.
                with gzip.open(self._file.abspath, 'rb') as f:
                    self._data_dict = pickle.load(f)
            elif self._file.suffix == '.gz':
                with gzip.open(self._file.abspath) as f:
                    self._data_dict = json.load(f)
            else:
                with open(self._file.abspath, 'r') as f:
                    self._data_dict = json.load(f)
        return (self._data_dict,)
def _ast_jinja(env: Environment, source: File, target: File, template: File, **kwargs):
    """Render *template* with Jinja, exposing the AST of *source* as 'ast'.

    The AST dump is cached under CACHE_DIR/ast_json as a pickled .bin file
    keyed by the source's project-relative path.
    """
    cache_dir = env['CACHE_DIR']
    rel_path = env.Dir('#').rel_path(source)
    json_file = env.File(os.path.join(cache_dir, 'ast_json', f'{rel_path}.bin'))
    ast_json = env.AstJson(target=json_file, source=source, **kwargs)
    ast_jinja = env.Jinja(
        target=target,
        source=template,
        JINJA_CONTEXT = {
            # Ast loads the dump lazily, i.e. only when the template renders.
            'ast': Ast(json_file)
        },
        **kwargs
    )
    # Rendering must wait for the AST dump to exist.
    env.Depends(ast_jinja, ast_json)
    # env.AlwaysBuild(ast_jinja)
    # env.Requires(ast_jinja, ast_json)
    # env.Requires(source, ast_jinja)
    # Break the dependency cycle: regenerating the rendered header must not by
    # itself force a re-dump of the AST.
    env.Ignore(ast_json, ast_jinja)
    return ast_jinja

13
addons/compat_v1_0.py Normal file
View File

@ -0,0 +1,13 @@
from spp import get_spp
spp = get_spp()
def available(**kwargs) -> bool:
    """Enable this compatibility addon only for projects targeting S++ 1.0.x."""
    version = spp.globals['config']['SPP_TARGET_VERSION']
    # Normalize to a tuple so a list-valued config entry (e.g. from user
    # config files) compares correctly against the tuple literal.
    return tuple(version[0:2]) == (1, 0)
def pre_environment(**kwargs) -> None:
    """Restore S++ 1.0 behavior before the Environment is created."""
    spp.globals['tools'].append('unity_build') # S++ 1.0.0 had the unity_build enabled by default
def post_environment(**kwargs) -> None:
    """Restore S++ 1.0 behavior after the Environment is created."""
    # S++ 1.0 shipped an implicit default recipe repository; newer versions
    # require an explicit env.RecipeRepo() call and only fall back to this.
    spp.globals['env']['_SPP_FALLBACK_RECIPE_REPO'] = {'repo_name': 'mewin', 'remote_url': 'https://git.mewin.de/mewin/spp_recipes.git', 'git_ref': 'stable'}

View File

@ -1,11 +1,178 @@
# based on https://github.com/hgomersall/scons-jinja
from SCons.Script import *
import os import os
import pathlib import pathlib
Import('env') from spp import get_spp
if not hasattr(env, 'Jinja'): try:
Return('env') import jinja2
from jinja2.utils import open_if_exists
except ImportError:
jinja2 = None
print('No Jinja :(')
spp = get_spp()
def available(**kwargs) -> bool:
    """The Jinja addon is usable only when the jinja2 import succeeded."""
    return jinja2 is not None
def post_environment(**kwargs) -> None:
    """Install the Jinja builder, its dependency scanner and helper filters/globals."""
    env: Environment = spp.globals['env']
    # SetDefault leaves any values the project configured earlier untouched.
    env.SetDefault(JINJA_CONTEXT={})
    env.SetDefault(JINJA_ENVIRONMENT_VARS={})
    env.SetDefault(JINJA_FILTERS={'load_config': _jinja_load_config})
    env.SetDefault(JINJA_GLOBALS={
        'file_size': lambda *args: _file_size(env, *args),
        'file_content_hex': lambda *args: _file_content_hex(env, *args)
    })
    env.SetDefault(JINJA_TEMPLATE_SEARCHPATH=['data/jinja'])
    env.SetDefault(JINJA_CONFIG_SEARCHPATH=[env.Dir('#data/config')])
    env.SetDefault(JINJA_FILE_SEARCHPATH=[env.Dir('#')])
    env['BUILDERS']['Jinja'] = Builder(
        action=render_jinja_template
    )
    # Scan .jinja sources for templates they include/extend so SCons rebuilds
    # the target when any transitively-loaded template changes.
    scanner = env.Scanner(function=jinja_scanner,
                          skeys=['.jinja'])
    env.Append(SCANNERS=scanner)
    env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
class FileSystemLoaderRecorder(jinja2.FileSystemLoader):
    """ A wrapper around FileSystemLoader that records files as they are
    loaded. These are contained within loaded_filenames set attribute.
    """

    def __init__(self, searchpath, encoding='utf-8'):
        # Absolute/relative paths of every template file this loader opened.
        self.loaded_filenames = set()
        super(FileSystemLoaderRecorder, self).__init__(searchpath, encoding)

    def get_source(self, environment, template):
        """Overwritten FileSystemLoader.get_source method that extracts the
        filename that is used to load each filename and adds it to
        self.loaded_filenames.
        """
        for searchpath in self.searchpath:
            filename = os.path.join(searchpath, template)
            f = open_if_exists(filename)
            if f is None:
                continue
            try:
                # NOTE(review): contents is read only to mirror the base
                # loader's decode behavior; the value itself is unused.
                contents = f.read().decode(self.encoding)
            finally:
                f.close()
            # Record the first matching file, then defer to the base class
            # for the actual (source, filename, uptodate) result.
            self.loaded_filenames.add(filename)
            return super(FileSystemLoaderRecorder, self).get_source(
                environment, template)
        # If the template isn't found, then we have to drop out.
        raise jinja2.TemplateNotFound(template)
def jinja_scanner(node, env, path):
    """SCons scanner: return the template files *node* transitively loads.

    Renders the template with a recording loader and reports every file the
    loader opened as an implicit dependency.
    """
    # Instantiate the file as necessary
    node.get_text_contents()
    template_dir, filename = os.path.split(str(node))
    template_search_path = ([template_dir] +
                            env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
    template_loader = FileSystemLoaderRecorder(template_search_path)
    jinja_env = jinja2.Environment(loader=template_loader,
                                   extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
    jinja_env.filters.update(env['JINJA_FILTERS'])
    jinja_env.globals.update(env['JINJA_GLOBALS'])
    try:
        template = jinja_env.get_template(filename)
    except jinja2.TemplateNotFound as e:
        env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
    # We need to render the template to do all the necessary loading.
    #
    # It's necessary to respond to missing templates by grabbing
    # the content as the exception is raised. This makes sure of the
    # existence of the file upon which the current scanned node depends.
    #
    # I suspect that this is pretty inefficient, but it does
    # work reliably.
    context = env['JINJA_CONTEXT']
    last_missing_file = ''
    while True:
        try:
            template.render(**context)
        except jinja2.TemplateNotFound as e:
            if last_missing_file == str(e):
                # We've already been round once for this file,
                # so need to raise
                env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
            last_missing_file = str(e)
            # Find where the template came from (using the same ordering
            # as Jinja uses).
            for searchpath in template_search_path:
                filename = os.path.join(searchpath, last_missing_file)
                if os.path.exists(filename):
                    continue
                else:
                    # Touch the SCons node so the missing file is tracked.
                    env.File(filename).get_text_contents()
                    continue
            # Retry the render now that the missing file has been surfaced.
            continue
        break
    # Get all the files that were loaded. The set includes the current node,
    # so we remove that.
    found_nodes_names = list(template_loader.loaded_filenames)
    try:
        found_nodes_names.remove(str(node))
    except ValueError as e:
        env.Error(f'Missing template node: {str(node)}')
    return [env.File(f) for f in found_nodes_names]
def render_jinja_template(target, source, env):
    """SCons action: render every source template and concatenate into target[0]."""
    output_str = ''
    if not source:
        # Fall back to '<first target>.jinja'. The original built
        # f'{target}.jinja', which stringified the whole target *list*
        # and produced a bogus path.
        source = [f'{target[0]}.jinja']
    for template_file in source:
        template_dir, filename = os.path.split(str(template_file))
        template_search_path = ([template_dir] +
                                env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
        template_loader = FileSystemLoaderRecorder(template_search_path)
        jinja_env = jinja2.Environment(loader=template_loader,
                                       extensions=['jinja2.ext.do'],
                                       **env['JINJA_ENVIRONMENT_VARS'])
        jinja_env.filters.update(env['JINJA_FILTERS'])
        jinja_env.globals.update(env['JINJA_GLOBALS'])
        template = jinja_env.get_template(filename)
        context = env['JINJA_CONTEXT']
        # Render exactly once. The original rendered twice and discarded the
        # first result -- wasteful, and side effects via jinja2.ext.do ran twice.
        output_str += template.render(**context)
    with open(str(target[0]), 'w') as target_file:
        target_file.write(output_str)
    return None
def _jinja_load_config(env, config_name): def _jinja_load_config(env, config_name):
searched_paths = [] searched_paths = []
@ -24,11 +191,10 @@ def _wrap_jinja(orig_jinja):
def _wrapped(env, target, **kwargs): def _wrapped(env, target, **kwargs):
if 'source' not in kwargs: if 'source' not in kwargs:
kwargs['source'] = f'{target}.jinja' kwargs['source'] = f'{target}.jinja'
target = orig_jinja(**kwargs) target = orig_jinja(target=target, **kwargs)
if 'depends' in kwargs: if 'depends' in kwargs:
for dependency in kwargs['depends']: for dependency in kwargs['depends']:
env.Depends(target, dependency) env.Depends(target, dependency)
# env.Depends(alias_prepare, target)
return target return target
return _wrapped return _wrapped
@ -51,14 +217,3 @@ def _file_content_hex(env, fname: str) -> str:
env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}') env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
bytes = file.get_contents() bytes = file.get_contents()
return ','.join([hex(byte) for byte in bytes]) return ','.join([hex(byte) for byte in bytes])
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
env.Append(JINJA_GLOBALS = {
'file_size': lambda *args: _file_size(env, *args),
'file_content_hex': lambda *args: _file_content_hex(env, *args)
})
env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
env['JINJA_FILE_SEARCHPATH'] = [env.Dir('#')]
Return('env')

View File

@ -1,4 +1,6 @@
GitPython GitPython~=3.1.45
psutil psutil~=7.0.0
Jinja2 Jinja2
requests requests
SCons~=4.9.1
cxxheaderparser~=1.5.4

Binary file not shown.

View File

@ -1,6 +0,0 @@
config = {
'PROJECT_NAME': 'DUMMY'
}
env = SConscript('../SConscript', exports = ['config'])

86
test/codegen/.gitignore vendored Normal file
View File

@ -0,0 +1,86 @@
# Generated Files
*.refl.hpp
*.generated.*
private/**/*.json
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: http://http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

15
test/codegen/SConstruct Normal file
View File

@ -0,0 +1,15 @@
# Test project exercising the S++ 1.1 AST-based code generation addon.
config = {
    'PROJECT_NAME': 'S++ Codegen Test',
    # Opt into the S++ 1.1 feature set (no 1.0 compatibility shims).
    'SPP_TARGET_VERSION': (1, 1, 0)
}

env = SConscript('../../SConscript', exports = ['config'])

# recipe repo
env.RecipeRepo('mewin', 'https://git.mewin.de/mewin/spp_recipes.git', 'stable')

# app
env = env.Module('private/test/SModule')

env.Finalize()

View File

@ -0,0 +1,50 @@
Import('env')

# Register this directory as an S++ module named "Test" living in C++ namespace tst.
env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

# NOTE(review): test.generated.cpp appears to be code-generator output
# (cf. the commented-out CodeGen call below) — confirm before relying on it.
src_files = Split("""
main.cpp
test.cpp
test.generated.cpp
""")

# Sketches of alternative codegen APIs, kept for reference:
# env.IncludeGen(src_files,
#     template=env.File('#templates/header.jinja'),
#     include_filter=r'.*\.refl.hpp'
# )
# env.CodeGen('GenSource', inputs = [], template=env.File('#templates/source.jinja'), )
# env.CodeGen(
#     target = 'test.generated.cpp',
#     template = env.File('#templates/source.jinja'),
#     inputs = {'source': 'test.cpp'}
# )

# Dump the parsed AST of test.hpp to JSON.
ast_json = env.AstJson(
    target = env.File('test.json'),
    source = 'test.hpp'
)
env.Default(ast_json)

# Render the reflection header from the parsed AST through a Jinja template.
ast_hpp = env.AstJinja(
    target = env.File('test.refl.hpp'),
    source = env.File('test.hpp'),
    template = env.File('#templates/header.jinja')
)

prog_app = env.Program(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
    }
)
# Make sure the generated reflection header is produced before the program compiles.
env.Requires(prog_app.target, ast_hpp)
env.Default(prog_app)

Return('env')

View File

@ -0,0 +1,8 @@
#include "./test.hpp"

// Entry point: exercise the test module with a fixed parameter.
int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}

View File

@ -0,0 +1,12 @@
#include "./test.hpp"
#include <print>

namespace tst
{

// Prints a greeting containing *param* to stdout (uses C++23 std::println).
void printHelloWorld(int param) noexcept
{
    std::println("Hello World! Param is {}.", param);
}

} // namespace tst

View File

@ -0,0 +1,33 @@
#pragma once

#include <vector>

// Pull in the generated reflection header once the code generator has
// produced it (the first build may run before it exists).
#if __has_include("test.refl.hpp")
#include "test.refl.hpp"
#endif

namespace tst
{

// Constant passed as an argument to the clang::annotate attribute below.
static constexpr int kAnnotVal = 17;

// Sample class used to exercise the AST/reflection pipeline.
class MyClass
{
private:
    std::vector<int> mInts;
public:
    MyClass();

    // The annotation is only visible to clang-based tooling; other
    // compilers skip the attribute entirely.
#if defined(__clang__)
    [[clang::annotate("reflect", "yes, please", kAnnotVal)]]
#endif
    int getVal();
    void setVal(int val);

    static constexpr int kVal = 1;
};

} // namespace tst

namespace tst
{
void printHelloWorld(int param) noexcept;
}

View File

@ -0,0 +1,11 @@
{# Emits a commented summary of each class in namespace 'tst' from the parsed
   AST. Guarded by SPP_AST_GEN so the generator itself does not re-parse the
   output it produced. #}
#if !defined(SPP_AST_GEN)
{% for class in ast.get_namespace('tst').classes %}
/*
{{ class.name }}
{% for method in class.methods %}
{{ method.return_type }} {{ method.name }} ({% for param in method.params %} {{ param.type }} {{ param.name }} {% endfor %})
{% endfor %}
{% endfor %}
*/
#endif

View File

@ -0,0 +1,3 @@
{# One comment line per class found in the parsed source namespace. #}
{% for cls in source.namespace.classes %}
// {{ cls.class_decl.typename.format() }}
{% endfor %}

81
test/v1_0_0/.gitignore vendored Normal file
View File

@ -0,0 +1,81 @@
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: https://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

10
test/v1_0_0/SConstruct Normal file
View File

@ -0,0 +1,10 @@
# SCons entry point for the S++ 1.0.0 compatibility test project.
config = {
    'PROJECT_NAME': 'S++ 1.0.0 Test'
}
# Bootstrap the shared S++ build environment from the repository root.
env = SConscript('../../SConscript', exports = ['config'])
env = env.Module('private/test/SModule')
env.Finalize()

View File

@ -0,0 +1,25 @@
Import('env')

# Register this directory as an S++ module named "Test" living in C++ namespace tst.
env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

src_files = Split("""
main.cpp
test.cpp
""")

# Unity build of the test program; pulls in the 'mijin' dependency
# (used by test.cpp for MIJIN_ASSERT).
prog_app = env.UnityProgram(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
        'mijin': {}
    }
)
env.Default(prog_app)

Return('env')

View File

@ -0,0 +1,8 @@
#include "./test.hpp"

// Entry point: exercise the test module with a fixed parameter.
int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}

View File

@ -0,0 +1,15 @@
#include "./test.hpp"
#include <mijin/debug/assert.hpp>
#include <print>

namespace tst
{

// Prints a greeting containing *param*; asserts (via the mijin library)
// that the parameter is positive.
void printHelloWorld(int param) noexcept
{
    MIJIN_ASSERT(param > 0, "param is not >0 :(");
    std::println("Hello World! Param is {}.", param);
}

} // namespace tst

View File

@ -0,0 +1,7 @@
#pragma once

namespace tst
{
// Prints a greeting containing *param*; implemented in test.cpp.
void printHelloWorld(int param) noexcept;
}

View File

@ -0,0 +1,133 @@
import os
import math
from SCons.Script import *
from SCons.Node.FS import File
from SCons import Action
"""
Scons Unity Build Generator
Provides several generators for SCons to combine multiple source files into a bigger
one to reduce compilation time, so called "unity builds". This is achieved by generating
unity source files which in term include the actual source files and compile them using
one of the existing SCons builders.
Usage
-----
In order to use this, just place it inside your `site_scons/site_tools` folder, enable it by
adding "unity_build" to the tools when constructing your Environment and replace invocations
of the Program/Library/SharedLibrary/StaticLibrary builders with their Unity... counterpart:
env = Environment(tools = ['default', 'unity_build'])
source_files = ...
env.UnityProgram(
target = 'my_program',
source = source_files,
...
)
The tool will generate an amount of unity source files and invoke the Program builder on these,
forwarding any other arguments you passed.
Other Options
------------
You can control the behaviour of the builder using several Environment options:
env['UNITY_CACHE_DIR'] = '.unity' # Directory where the unity sources are stored.
# can be either a string or a Dir() node.
env['UNITY_MAX_SOURCES'] = 15 # Maximum number of source files per unity file.
env['UNITY_MIN_FILES'] = env.GetOption('num_jobs')
# Minimum number of unity files to generate (if possible).
# Defaults to the number of jobs passed to SCons.
env['UNITY_DISABLE'] = False # Set to True to completely disable unity builds. The commands
# will simply pass through their options to the regular builders.
Additionally any generator can be passed a `cache_dir` to overwrite the value from the Environment.
"""
def exists(env : Environment):
    """SCons tool hook: report that this tool can always be used."""
    return True
def generate(env : Environment):
    """SCons tool hook: attach the Unity* builder wrappers, the UnitySource
    builder and the tool's default settings to *env*."""
    wrappers = {
        'UnityProgram': env.Program,
        'UnityLibrary': env.Library,
        'UnityStaticLibrary': env.StaticLibrary,
        'UnitySharedLibrary': env.SharedLibrary,
    }
    for alias, base in wrappers.items():
        env.AddMethod(_make_generator(base), alias)
    # Builder that writes the generated unity source files themselves.
    env.Append(BUILDERS = {'UnitySource': env.Builder(
        action = Action.Action(_generate_unity_file, _generate_unity_file_msg)
    )})
    env.SetDefault(
        UNITY_CACHE_DIR = '.unity',
        UNITY_MAX_SOURCES = 15,
        UNITY_MIN_FILES = env.GetOption('num_jobs'),
        UNITY_DISABLE = False,
    )
def _make_generator(base_generator):
def generator(env, source, target, cache_dir = None, *args, **kwargs):
if env['UNITY_DISABLE']:
return base_generator(target = target, source = source, *args, **kwargs)
unity_source_files = []
source_files, other_nodes = _flatten_source(source)
max_sources_per_file = max(1, math.ceil(len(source_files) / env['UNITY_MIN_FILES']))
sources_per_file = min(max_sources_per_file, env['UNITY_MAX_SOURCES'])
num_unity_files = math.ceil(len(source_files) / sources_per_file)
if not cache_dir:
cache_dir = env['UNITY_CACHE_DIR']
if not isinstance(cache_dir, str):
cache_dir = cache_dir.abspath
os.makedirs(cache_dir, exist_ok=True)
target_base_name = os.path.basename(target)
for idx in range(num_unity_files):
unity_filename = f'{cache_dir}/{target_base_name}_{idx}.cpp'
unity_source_files.append(unity_filename)
begin = sources_per_file*idx
end = sources_per_file*(idx+1)
env.UnitySource(
target = unity_filename,
source = source_files[begin:end]
)
if len(other_nodes) > 0:
print(f'Exluded {len(other_nodes)} node(s) from Unity build.')
return [base_generator(target = target, source = unity_source_files + other_nodes, *args, **kwargs)]
return generator
def _flatten_source(source : list):
source_files = []
other_nodes = []
for ele in source:
if isinstance(ele, list):
more_sources, more_other = _flatten_source(ele)
source_files.extend(more_sources)
other_nodes.extend(more_other)
elif isinstance(ele, File):
source_files.append(ele.abspath)
elif isinstance(ele, str):
source_files.append(ele)
else:
other_nodes.append(ele)
return source_files, other_nodes
def _generate_unity_file_msg(target, source, env : Environment):
assert(len(target) == 1)
return f'Generating {str(target[0])} from {len(source)} source files.'
def _generate_unity_file(target, source, env : Environment):
assert(len(target) == 1)
unity_filename = target[0].abspath
with open(unity_filename, 'w') as f:
for source_file in source:
fpath = source_file.abspath.replace("\\", "\\\\")
f.write(f'#include "{fpath}"\n')

View File

@ -2,6 +2,5 @@
<project version="4"> <project version="4">
<component name="VcsDirectoryMappings"> <component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" /> <mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$/external/scons-plus-plus" vcs="Git" />
</component> </component>
</project> </project>

View File

@ -80,7 +80,7 @@
"vue.rearranger.settings.migration": "true" "vue.rearranger.settings.migration": "true"
} }
}]]></component> }]]></component>
<component name="RunManager" selected="Custom Build Application.{{ project.executables[0].name }} {{ project.build_types[0] }}"> <component name="RunManager" selected="Custom Build Application.{% if project.executables|length > 0 %}{{ project.executables[0].name }}{% else %}{{ project.libraries[0].name }}{% endif %} {{ project.build_types[0] }}">
{% for executable in project.executables -%} {% for executable in project.executables -%}
{% for build_type in project.build_types -%} {% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%} {% set build_type_name = build_type | capitalize -%}