Compare commits

...

29 Commits

Author SHA1 Message Date
Patrick Wuttke
7fa3855498 Made private/public module folders adjustable via config and fixed Jinja get_sources() function for targets with nested nodes in sources. 2026-01-08 14:35:13 +01:00
b9335a6247 Added command line tool and removed lib folder from gitignore. 2025-09-20 14:01:43 +02:00
79366c9098 Fixed CLion project generation for projects without executable targets. 2025-09-20 12:17:51 +02:00
5c17999cdf (WIP) Restructuring of the project, rework of addons. 2025-09-20 12:17:51 +02:00
7b2e5c7432 Removed S++ subfolder from CLion VCS configuration. 2025-09-20 12:17:51 +02:00
Patrick Wuttke
07c2496342 Fixed .sln generation (a little) and changed config to create one .pdb per target. 2025-09-12 09:56:26 +02:00
e19f6115be Disabled array-bounds warnings for GCC due to (probably?) false positives. 2025-07-25 00:50:28 +02:00
Patrick Wuttke
7fc8518db4 Merge branch 'master' of https://git.mewin.de/mewin/scons-plus-plus 2025-07-14 18:51:46 +02:00
Patrick Wuttke
8b5d66dbec Forward CCFLAGS to Visual Studio project for IntelliSense. 2025-07-14 18:51:41 +02:00
75c626c235 Added option to pass CPU features (e.g. vector extensions) to the compiler (GCC/Clang only for now). 2025-07-12 12:51:14 +02:00
202331ba60 Added TARGET_PLATFORM variable and fixed (hopefully) debug symbols on
Windows.
2025-07-11 18:01:08 +02:00
9b82fb87c0 Don't create module configuration for dependencies. 2025-07-11 14:45:34 +02:00
45b4d164d0 Removed targets from module again to fix recursive references when trying to serialize them. 2025-07-09 00:58:32 +02:00
43503dfec6 Changed folder names for git worktrees to the name of the ref that is checked out for better readability. 2025-07-08 18:40:31 +02:00
7916566d47 Adjusted dynamic library extension for Windows (which is also .lib). 2025-07-08 18:40:31 +02:00
b47ceb81dc Added build_dir to cmake result. 2025-07-08 18:40:31 +02:00
Patrick Wuttke
6326454729 Fixed how C++ standard is passed to VS/IntelliSense. 2025-07-08 16:50:59 +02:00
Patrick Wuttke
18293fdcf7 Fixed target info dumping. 2025-07-08 16:50:06 +02:00
Patrick Wuttke
8371f96d4a Merge branch 'master' of https://git.mewin.de/mewin/scons-plus-plus 2025-07-08 14:34:23 +02:00
Patrick Wuttke
af53bf6084 Added Visual Studio project generation. 2025-07-08 14:34:20 +02:00
4bae8d67a0 Just warn instead of failing when tags cannot be fetched. 2025-07-07 00:13:07 +02:00
0ac1621494 Added option for projects to add own variables. 2025-07-06 10:40:33 +02:00
8770bd97dc Made preprocessor with MSVC behave correctly. 2025-06-24 15:21:49 +02:00
68f20bcf2d Disabled parentheses-equality warning on Clang because it was producing false positives. 2025-06-23 00:25:10 +02:00
e583c5ef6c Added module type and module configuration. 2025-06-19 16:43:53 +02:00
c3b5244eac Replaced --dump_env with --dump and --dump_format that allow dumping environment and config, both as text or json. And added _info() and env.Info() function for printing that reacts to SCons' -s flag. 2025-06-19 13:33:21 +02:00
88844ee5da Added small script for starting scons via IDE for debugging. 2025-06-19 13:32:18 +02:00
161f2e52d8 Give dependencies the option to add to CMAKE_PREFIX_PATH for compiling CMake dependencies against each other. 2025-04-03 16:29:36 +02:00
9436d2c48d Added JINJA_FILE_SEARCHPATH for Jinja file functions to make it easier for library projects to find their files. 2025-03-28 14:52:14 +01:00
44 changed files with 2182 additions and 166 deletions

220
.gitignore vendored
View File

@@ -1 +1,219 @@
__pycache__
# Project files
/.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
# lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
# Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
# poetry.lock
# poetry.toml
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
# pdm.lock
# pdm.toml
.pdm-python
.pdm-build/
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
# pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# Redis
*.rdb
*.aof
*.pid
# RabbitMQ
mnesia/
rabbitmq/
rabbitmq-data/
# ActiveMQ
activemq-data/
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
# .idea/
# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
# Streamlit
.streamlit/secrets.toml

View File

@@ -1,23 +1,50 @@
from collections.abc import Callable
import copy
from dataclasses import dataclass
import enum
import glob
import importlib.util
import inspect
import json
import multiprocessing
import os
import pathlib
import platform
import psutil
import shutil
import sys
import time
from typing import Any
import uuid
from SCons.Node import Node
from SCons.Script import *
class TargetType(enum.Enum):
PROGRAM = 0
STATIC_LIBRARY = 1
SHARED_LIBRARY = 2
sys.path.append(os.path.join(Dir('.').abspath, 'lib'))
from spp import _init_interface, Module, Target, TargetType
_init_interface(globals=globals())
_SPP_VERSION = (1, 1, 0)
_DEBUG = {
'addons': False
}
_GCC_CPU_FEATURES_MAP = {
'mmx': '-mmmx',
'sse': '-msse',
'sse2': '-msse2',
'sse3': '-msse3',
'ssse3': '-mssse3',
'sse4': '-msse4',
'sse4a': '-msse4a',
'sse4.1': '-msse4.1',
'sse4.2': '-msse4.2',
'avx': '-mavx',
'avx2': '-mavx2'
}
class _VersionSpec:
minimum_version = None
@@ -40,23 +67,20 @@ class _Dependency:
depdeps: list = []
cook_result: dict = {}
class _Target:
name: str
target_type: TargetType
builder = None
args: list = []
kwargs: dict = {}
dependencies: list = []
target = None
def _find_recipe(env: Environment, recipe_name: str):
if recipe_name in env['SPP_RECIPES']:
return env['SPP_RECIPES'][recipe_name]
import importlib.util
source_file = None
if not env['SPP_RECIPES_FOLDERS']:
env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
# for compatibility
if '_SPP_FALLBACK_RECIPE_REPO' in env:
repo_args: dict = env['_SPP_FALLBACK_RECIPE_REPO']
env.Warn('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
env.Warn(f'Falling back to default recipe repository ({repo_args["repo_name"]} at {repo_args["remote_url"]} ref={repo_args.get("git_ref", "master")}).')
env.RecipeRepo(**repo_args)
else:
env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
for folder in env['SPP_RECIPES_FOLDERS']:
from SCons import Node
if folder is Node:
@@ -89,9 +113,36 @@ def _cook(env: Environment, recipe_name: str):
_run_cook(dependency)
return dependency.cook_result
def _normalize_module_path(env: Environment, path: str) -> str|None:
module_root = env.Dir('#').abspath
try:
relative = os.path.relpath(path, module_root)
if relative[:2] == '..':
return None
return os.path.join(*os.path.split(relative)[1:])
except ValueError: # may be thrown on Windows if the module is on a different drive than the project
return None
# Load a module's SConscript and, when the file lives inside the project
# source tree, register a default Module record for its folder (name and
# C++ namespace default to the folder's basename).
def _module(env: Environment, file: str):
    folder = _normalize_module_path(env, env.File(file).dir.abspath)
    if folder is not None: # only include modules inside the source tree
        dirname = os.path.basename(folder)
        # NOTE(review): description/cxx_namespace are placeholders here —
        # presumably overridden later via env.ModuleConfig(); confirm.
        env.Append(SPP_MODULES = {folder: Module(
            name=dirname,
            folder=folder,
            description='',
            cxx_namespace=dirname
        )})
    # Always run the SConscript, even for files outside the source tree.
    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
# Implements env.ModuleConfig(**kwargs): override fields of the Module
# record registered for the calling SConscript's folder.  Warns (does not
# fail) when no module was registered for that folder.
def _module_config(env: Environment, **kwargs) -> None:
    module_folder = _normalize_module_path(env, env.Dir('.').abspath)
    module = env['SPP_MODULES'].get(module_folder)
    if module is None:
        env.Warn(f'No module config found for module at {module_folder}')
        return
    # Directly overwrite the record's attributes; key names are not validated.
    module.__dict__.update(kwargs)
def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
CPPDEFINES = lib_conf.get('CPPDEFINES', []),
@@ -128,7 +179,7 @@ def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> No
_inject_list(kwargs, dependency.cook_result, 'LINKFLAGS')
for depdep in dependency.depdeps:
_inject_dependency(depdep, kwargs)
elif isinstance(dependency, _Target):
elif isinstance(dependency, Target):
_inject_list(kwargs, dependency.kwargs, 'CPPPATH')
_inject_list(kwargs, dependency.kwargs, 'CPPDEFINES')
_inject_list(kwargs, dependency.kwargs, 'LIBPATH')
@@ -220,11 +271,7 @@ def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
}[type]
return f'lib{name}.{ext}'
elif os.name == 'nt':
ext = {
'static': 'lib',
'shared': 'dll'
}[type]
return f'{name}.{ext}'
return f'{name}.lib'
else:
raise Exception('What OS is this?')
@@ -244,8 +291,19 @@ def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'sta
return None
raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
def _error(env: Environment, message: str):
print(message, file=sys.stderr)
def _debug(cond: str, msg: str) -> None:
    # Print a debug line only when the channel `cond` is enabled in _DEBUG.
    enabled = _DEBUG.get(cond)
    if enabled:
        print(f'[DEBUG] [{cond}] {msg}')
def _info(env: Environment|None, message: str) -> None:
    # Honour SCons' -s/--silent flag: stay quiet when it is set.
    if GetOption('silent'):
        return
    print(f'[INFO] {message}')
def _warn(env: Environment|None, message: str) -> None:
    # Emit a warning on stderr; `env` is accepted only for interface
    # symmetry with _info/_error and is not used.
    sys.stderr.write(f'[WARN] {message}\n')
def _error(env: Environment|None, message: str) -> None:
    # Report a fatal error on stderr, then abort the SCons run.
    formatted = f'[ERROR] {message}'
    print(formatted, file=sys.stderr)
    Exit(1)
def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
@@ -264,8 +322,7 @@ def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
result[key] = mergedValue
elif valueA != valueB:
return None
else:
result[key] = valueA
result[key] = valueA
for key, valueB in dictB.items():
if key not in result:
result[key] = valueB
@@ -394,19 +451,22 @@ def _wrap_builder(builder, target_type: TargetType):
kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
if 'LIBS' not in kwargs and 'LIBS' in env:
kwargs['LIBS'] = copy.copy(env['LIBS'])
if 'source' in kwargs:
source = kwargs['source']
if not isinstance(source, list):
source = [source]
new_source = []
for src in source:
if isinstance(src, str):
new_source.append(env.Entry(src))
else:
new_source.append(src)
kwargs['source'] = new_source
target = _Target()
def _fix_filearg(filearg: str|Entry|list[str|Entry]) -> list[Entry]:
if not isinstance(filearg, list):
filearg = (filearg,)
result = []
for ele in filearg:
if isinstance(ele, str):
result.append(env.Entry(ele))
else:
result.append(ele)
return result
if 'source' in kwargs:
kwargs['source'] = _fix_filearg(kwargs['source'])
target = Target()
if 'name' in kwargs:
target.name = kwargs['name']
else:
@@ -420,6 +480,13 @@ def _wrap_builder(builder, target_type: TargetType):
target.args = args
target.kwargs = kwargs
target.dependencies = target_dependencies
module_folder = _normalize_module_path(env, env.Dir('.').abspath)
if module_folder is not None:
module = env['SPP_MODULES'].get(module_folder)
if module is None:
env.Warn(f'No module config found for target {target.name} at {module_folder}')
else:
target.module = module
env.Append(SPP_TARGETS = [target])
if not target.dependencies:
_build_target(target)
@@ -428,7 +495,7 @@ def _wrap_builder(builder, target_type: TargetType):
def _wrap_default(default):
def _wrapped(env, arg):
if isinstance(arg, _Target):
if isinstance(arg, Target):
env.Append(SPP_DEFAULT_TARGETS = [arg])
elif isinstance(arg, dict) and '_target' in arg:
default(arg['_target'])
@@ -438,8 +505,8 @@ def _wrap_default(default):
def _wrap_depends(depends):
def _wrapped(env, dependant, dependency):
if isinstance(dependant, _Target) or isinstance(dependency, _Target):
env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
if isinstance(dependant, Target) or isinstance(dependency, Target):
env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency, depends)])
return
elif isinstance(dependant, dict) and '_target' in dependant:
dependant = dependant['_target']
@@ -448,7 +515,7 @@ def _wrap_depends(depends):
depends(dependant, dependency)
return _wrapped
def _build_target(target: _Target):
def _build_target(target: Target):
for dependency in target.dependencies:
_inject_dependency(dependency, target.kwargs)
if 'LIBS' in target.kwargs:
@@ -458,14 +525,16 @@ def _build_target(target: _Target):
target.kwargs['LIBS'].remove(lib)
target.kwargs['LIBS'].append(env.File(lib))
pass
elif isinstance(lib, _Target):
elif isinstance(lib, Target):
if not lib.target:
_build_target(lib)
target.kwargs['LIBS'].remove(lib)
target.kwargs['LIBS'].append(lib.target)
new_kwargs = target.kwargs.copy()
if 'target' in new_kwargs: # there should always be a target, right?
if 'target' in new_kwargs and target.target_type != TargetType.MISC: # there should always be a target, right?
new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
if os.name == 'nt' and 'PDB' not in new_kwargs:
new_kwargs['PDB'] = f'{new_kwargs["target"]}.pdb'
target.target = target.builder(*target.args, **new_kwargs)
def _version_to_string(version) -> str:
@@ -476,6 +545,7 @@ def _finalize(env: Environment):
_generate_project(generate_project)
Exit(0)
_hook_pre_finalize.invoke()
version_requirements = {dep.name: {
'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
@@ -494,19 +564,23 @@ def _finalize(env: Environment):
}
}, f)
if dump is not None:
_dump()
for target in env['SPP_TARGETS']:
_build_target(target)
for target in env['SPP_DEFAULT_TARGETS']:
env.Default(target.target)
for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']:
if isinstance(dependant, _Target):
for dependant, dependency, depends in env['SPP_TARGET_DEPENDENCIES']:
if isinstance(dependant, Target):
dependant = dependant.target
if isinstance(dependency, _Target):
if isinstance(dependency, Target):
dependency = dependency.target
env.Depends(dependant, dependency)
depends(dependant, dependency)
def _find_target(env: Environment, target_name: str) -> '_Target|None':
_hook_post_finalize.invoke()
def _find_target(env: Environment, target_name: str) -> 'Target|None':
for target in env['SPP_TARGETS']:
if target.name == target_name:
return target
@@ -546,7 +620,8 @@ def _generate_project(project_type: str) -> None:
source_folder, target_folder = {
'clion': (os.path.join(_spp_dir.abspath, 'util', 'clion_project_template'), Dir('#.idea').abspath),
'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath)
'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath),
'vs': (os.path.join(_spp_dir.abspath, 'util', 'vs_project_template'), Dir('#').abspath)
}.get(project_type, (None, None))
if not source_folder:
_error(None, 'Invalid project type option.')
@@ -561,86 +636,195 @@ def _generate_project(project_type: str) -> None:
except Exception as e:
print(f'Error loading UUID cache: {e}')
def _generate_uuid(name: str = '') -> str:
def _generate_uuid(name: str = '', ms_style: bool = False) -> str:
nonlocal save_uuid_cache
if name and name in uuid_cache:
return uuid_cache[name]
new_uuid = str(uuid.uuid4())
if name:
uuid_cache[name] = new_uuid
save_uuid_cache = True
return new_uuid
result = uuid_cache[name]
else:
result = str(uuid.uuid4())
if name:
uuid_cache[name] = result
save_uuid_cache = True
if ms_style:
return f'{{{result.upper()}}}'
return result
root_path = pathlib.Path(env.Dir('#').abspath)
def _make_entry(target, type, prefix, suffix) -> str:
def _full_path(build_type) -> str:
trgt = _target_entry(target.kwargs['target'])
full_path = pathlib.Path(trgt.abspath).relative_to(root_path)
full_path = full_path.parent / f'{env.subst(prefix)}{full_path.name}-{build_type}{env.subst(suffix)}'
return str(full_path)
return {
'name': target.name,
'filename': _full_path,
'target': target,
'type': type,
'module': target.module
}
def _get_executables() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.PROGRAM:
trgt = _target_entry(target.kwargs['target'])
def _exe_path(build_type) -> str:
exe_path = pathlib.Path(trgt.abspath).relative_to(root_path)
exe_path = exe_path.parent / f'{env.subst("$PROGPREFIX")}{exe_path.name}-{build_type}{env.subst("$PROGSUFFIX")}'
return str(exe_path)
result.append({
'name': target.name,
'filename': _exe_path
})
result.append(_make_entry(target, 'executable', '$PROGPREFIX', '$PROGSUFFIX'))
return result
def _get_libraries() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.STATIC_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
def _lib_path(build_type) -> str:
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$LIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$LIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path
})
result.append(_make_entry(target, 'executable', '$LIBPREFIX', '$LIBSUFFIX'))
elif target.target_type == TargetType.SHARED_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
def _lib_path(build_type) -> str:
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$SHLIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$SHLIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path
})
result.append(_make_entry(target, 'executable', '$SHLIBPREFIX', '$SHLIBSUFFIX'))
return result
def _get_modules() -> list:
result = []
for folder, module in env['SPP_MODULES'].items():
result.append({
'name': module.name,
'private_folder': os.path.join(config['PRIVATE_FOLDER'], folder),
'public_folder': os.path.join(config['PUBLIC_FOLDER'], folder),
'description': module.description,
'cxx_namespace': module.cxx_namespace
})
return result
def _escape_path(input: str) -> str:
return input.replace('\\', '\\\\')
def _strip_path_prefix(path: str, skip_eles: int) -> str:
for _ in range(skip_eles):
pos = path.find(os.sep)
if pos < 0:
return ''
path = path[pos+1:]
return path
def _folder_list(file_list: list[str], skip_eles: int = 0) -> list[str]:
    # Collect every folder that contains one of the given files — plus all
    # of its ancestor folders — after stripping `skip_eles` leading path
    # components.  A dict doubles as an insertion-ordered set.
    seen: dict = {}
    for file in file_list:
        folder = _strip_path_prefix(os.path.dirname(file), skip_eles)
        if folder == '':
            continue
        while True:
            seen[folder] = True
            # Walk up one level; stop once no separator remains.
            sep_pos = folder.rfind(os.sep)
            if sep_pos < 0:
                break
            folder = folder[0:sep_pos]
    return list(seen.keys())
# Jinja helper: return a target's source files as paths relative to the
# project root.  Flatten() copes with nested node lists in 'source'.
def _get_sources(target_dict: dict) -> list[str]:
    target : Target = target_dict['target']
    sources = target.kwargs.get('source')
    # NOTE(review): assumes every source node has .abspath under the
    # project root — confirm for generated files outside the tree.
    return [str(pathlib.Path(source.abspath).relative_to(root_path)) for source in Flatten(sources)]
def _get_headers(folder: str) -> list[str]:
result = []
for root, _, files in os.walk(folder):
for file in files:
_, ext = os.path.splitext(file)
if ext in ('.h', '.hpp', '.inl', '.hxx'):
result.append(os.path.join(root, file))
return result
def _get_target_property(build_type: str, target: str, path: str) -> Any:
import subprocess
cmd = (shutil.which('scons'), '--silent', f'--build_type={build_type}', '--dump=targets', '--dump_format=json', f'--dump_path={target}/{path}')
output = subprocess.check_output(cmd, text=True).strip()
try:
return json.loads(output)
except json.JSONDecodeError as e:
env.Warn(f'Command: {cmd}')
env.Warn(f'Output: {output}')
raise e
executables = _get_executables()
libraries = _get_libraries()
modules = _get_modules()
jinja_env = jinja2.Environment()
jinja_env.globals['generate_uuid'] = _generate_uuid
jinja_env.globals['get_sources'] = _get_sources
jinja_env.globals['get_headers'] = _get_headers
jinja_env.globals['get_target_property'] = _get_target_property
jinja_env.globals['project'] = {
'name': env.Dir('#').name,
'executables': _get_executables(),
'libraries': _get_libraries(),
'build_types': ['debug', 'release_debug', 'release', 'profile']
'executables': executables,
'libraries': libraries,
'modules': modules,
'build_types': ['debug', 'release_debug', 'release', 'profile'],
'cxx_standard': env['CXX_STANDARD']
}
jinja_env.globals['scons_exe'] = shutil.which('scons')
jinja_env.globals['nproc'] = multiprocessing.cpu_count()
jinja_env.filters['escape_path'] = _escape_path
jinja_env.filters['strip_path_prefix'] = _strip_path_prefix
jinja_env.filters['folder_list'] = _folder_list
jinja_env.filters['basename'] = os.path.basename
jinja_env.filters['dirname'] = os.path.dirname
source_path = pathlib.Path(source_folder)
target_path = pathlib.Path(target_folder)
tmpl_config = {}
tmpl_config_file = source_path / 'template.json'
if tmpl_config_file.exists():
with tmpl_config_file.open('r') as f:
tmpl_config = json.load(f)
files_config = tmpl_config.get('files', {})
for source_file in source_path.rglob('*'):
if source_file.is_file():
target_file = target_path / (source_file.relative_to(source_path))
if source_file == tmpl_config_file:
continue
if not source_file.is_file():
continue
source_file_relative = source_file.relative_to(source_path)
file_config = files_config.get(str(source_file_relative).replace('\\', '/'), {})
one_per = file_config.get('one_per', 'project')
def generate_file_once() -> None:
is_jinja = (source_file.suffix == '.jinja')
if 'rename_to' in file_config:
new_filename = jinja_env.from_string(file_config['rename_to']).render()
target_file = target_path / new_filename
else:
target_file = target_path / source_file_relative
if is_jinja:
target_file = target_file.with_suffix('')
target_file.parent.mkdir(parents=True, exist_ok=True)
if source_file.suffix != '.jinja':
if not is_jinja:
shutil.copyfile(source_file, target_file)
continue
return
with source_file.open('r') as f:
templ = jinja_env.from_string(f.read())
target_file = target_file.with_suffix('')
try:
templ = jinja_env.from_string(f.read())
except jinja2.TemplateSyntaxError as e:
e.filename = str(source_file)
raise e
with target_file.open('w') as f:
f.write(templ.render())
try:
if one_per == 'project':
generate_file_once()
elif one_per == 'target':
for executable in executables:
jinja_env.globals['target'] = executable
generate_file_once()
for library in libraries:
jinja_env.globals['target'] = library
generate_file_once()
else:
raise ValueError(f'invalid value for "one_per": {one_per}')
except jinja2.TemplateSyntaxError as e:
env.Error(f'Jinja syntax error at {e.filename}:{e.lineno}: {e.message}')
Exit(1)
if save_uuid_cache:
try:
@@ -649,8 +833,150 @@ def _generate_project(project_type: str) -> None:
except Exception as e:
print(f'Error writing uuid cache: {e}')
def _dump() -> None:
# Render `data` for the --dump option as pretty-printed text, framed by
# banner lines naming the dump kind.  `dump` is the enclosing closure's
# --dump choice (env/config/modules/targets).
def _dump_as_text(data: Any) -> str:
    from pprint import pformat
    dump_name = {
        'env': 'Environment',
        'config': 'Configuration',
        'modules': 'Modules',
        'targets': 'Targets'
    }[dump]
    return '\n'.join((
        f'==== Begin {dump_name} Dump ====',
        pformat(data),
        f'==== End {dump_name} Dump ===='
    ))
# Render `data` for the --dump option as JSON, coercing SCons nodes and
# arbitrary objects that the default encoder cannot serialize.
def _dump_as_json(data: Any) -> str:
    class _Encoder(json.JSONEncoder):
        def default(self, o) -> dict:
            # NOTE(review): isinstance(o, object) is always True, so the
            # super().default(o) fallback below is unreachable.
            if isinstance(o, object):
                # NOTE(review): an iterable Node would be turned into a
                # list before the Node branch is reached — confirm intent.
                if hasattr(o, '__iter__'):
                    return list(o)
                elif isinstance(o, Node):
                    return o.abspath
                return o.__dict__
            return super().default(o)
    return json.dumps(data, cls=_Encoder)
def _apply_path(data: Any, path: str) -> Any:
for part in path.split('/'):
if isinstance(data, dict):
if part not in data:
_error(f'Invalid path specified. No key {part} in dict {data}.')
Exit(1)
data = data[part]
elif isinstance(data, list):
try:
part = int(part)
except ValueError:
_error(f'Invalid path specified. {part} is not a valid list index.')
Exit(1)
if part < 0 or part >= len(data):
_error(f'Invalid path specified. {part} is out of list range.')
Exit(1)
data = data[part]
elif isinstance(data, object):
data = data.__dict__
if part not in data:
_error(f'Invalid path specified. No attribute {part} in object {data}.')
Exit(1)
data = data[part]
else:
_error(f'Invalid path specified. {data} has no properties.')
Exit(1)
return data
# Build a JSON-friendly summary of all registered targets with their
# effective compile settings (dependency-provided flags injected first,
# falling back to the environment's global values).
def _targets() -> dict:
    result = {}
    for target in env['SPP_TARGETS']:
        kwargs = target.kwargs.copy()
        for dependency in target.dependencies:
            _inject_dependency(dependency, kwargs)
        result[target.name] = {
            'target_type': target.target_type.name,
            'args': target.args,
            # 'kwargs': kwargs, <- circular dependency here and the json encoder doesn't like that
            'CPPDEFINES': kwargs.get('CPPDEFINES', env['CPPDEFINES']),
            'CPPPATH': kwargs.get('CPPPATH', env['CPPPATH']),
            'CFLAGS': kwargs.get('CFLAGS', env['CFLAGS']),
            'CCFLAGS': kwargs.get('CCFLAGS', env['CCFLAGS']),
            'CXXFLAGS': kwargs.get('CXXFLAGS', env['CXXFLAGS'])
        }
    return result
data = {
'env': env.Dictionary,
'config': lambda: config,
'modules': lambda: env['SPP_MODULES'],
'targets': _targets
}[dump]()
global dump_path
dump_path = dump_path.strip()
if dump_path != '':
data = _apply_path(data, dump_path)
dump_fn = {
'text': _dump_as_text,
'json': _dump_as_json
}[dump_format]
print(dump_fn(data))
Exit(0)
class _Hook:
def __init__(self) -> None:
self._funcs: list[Callable] = []
def add_func(self, func: Callable) -> None:
self._funcs.append(func)
def invoke(self, **kwargs) -> None:
for func in self._funcs:
func(**kwargs)
_hook_pre_environment = _Hook()
_hook_post_environment = _Hook()
_hook_config_complete = _Hook()
_hook_pre_finalize = _Hook()
_hook_post_finalize = _Hook()
# Import the addon file at `modpath` as module `modname`, skip it when its
# optional available() gate returns False, and register any lifecycle hook
# functions (pre_environment, post_environment, ...) that it defines.
def _load_addon(modname: str, modpath: pathlib.Path) -> None:
    _debug('addons', f'Loading addon {modname} from {modpath}.')
    spec = importlib.util.spec_from_file_location(modname, modpath)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    # Addons may opt out at load time (e.g. a required external tool is missing).
    if hasattr(module, 'available') and not module.available():
        _debug('addons', f'Addon {modname} is not available and will not be loaded.')
        return
    def _add_hook(func_name: str, hook: _Hook) -> None:
        # Register module.<func_name> with `hook` when the addon defines it.
        if hasattr(module, func_name):
            hook.add_func(getattr(module, func_name))
            _debug('addons', f'Addon {modname} registered a {func_name} hook.')
    _add_hook('pre_environment', _hook_pre_environment)
    _add_hook('post_environment', _hook_post_environment)
    _add_hook('config_complete', _hook_config_complete)
    _add_hook('pre_finalize', _hook_pre_finalize)
    _add_hook('post_finalize', _hook_post_finalize)
def _load_addons(folder: pathlib.Path) -> None:
    # Load every *.py file in `folder` as an addon module; the module name
    # is the filename without its extension.
    _debug('addons', f'Loading addons from {folder}.')
    for script_file in folder.glob('*.py'):
        _load_addon(script_file.stem, script_file)
_ALLOWED_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
def _sanitize_identifier(name: str) -> str:
chrs = []
for chr in name:
if chr in _ALLOWED_CHARS:
chrs.append(chr)
else:
chrs.append('_')
return ''.join(chrs)
config: dict
Import('config')
if not config.get('PROJECT_NAME'):
@@ -659,9 +985,14 @@ if not config.get('CXX_STANDARD'):
config['CXX_STANDARD'] = 'c++23'
if not config.get('CXX_NO_EXCEPTIONS'):
config['CXX_NO_EXCEPTIONS'] = False
if not config.get('PREPROCESSOR_PREFIX'):
config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
config['PREPROCESSOR_PREFIX'] = _sanitize_identifier(config['PROJECT_NAME']).upper() # TODO: may be nicer?
if not config.get('SPP_TARGET_VERSION'):
config['SPP_TARGET_VERSION'] = (1, 0, 0)
if not config.get('PRIVATE_FOLDER'):
config['PRIVATE_FOLDER'] = 'private'
if not config.get('PUBLIC_FOLDER'):
config['PUBLIC_FOLDER'] = 'public'
if 'COMPILATIONDB_FILTER_FILES' not in config:
config['COMPILATIONDB_FILTER_FILES'] = True
@@ -727,16 +1058,43 @@ AddOption(
)
AddOption(
'--dump_env',
dest = 'dump_env',
'--disable_auto_update',
dest = 'disable_auto_update',
action = 'store_true'
)
AddOption(
'--dump',
dest = 'dump',
type = 'choice',
choices = ('env', 'config', 'modules', 'targets'),
nargs = 1,
action = 'store'
)
AddOption(
'--dump_format',
dest = 'dump_format',
type = 'choice',
choices = ('text', 'json'),
nargs = 1,
action = 'store',
default = 'text'
)
AddOption(
'--dump_path',
dest = 'dump_path',
nargs = 1,
action = 'store',
default = ''
)
AddOption(
'--generate_project',
dest = 'generate_project',
type = 'choice',
choices = ('clion', 'vscode'),
choices = ('clion', 'vscode', 'vs'),
nargs = 1,
action = 'store'
)
@@ -750,7 +1108,10 @@ enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
dump_env = GetOption('dump_env')
disable_auto_update = GetOption('disable_auto_update')
dump = GetOption('dump')
dump_format = GetOption('dump_format')
dump_path = GetOption('dump_path')
generate_project = GetOption('generate_project')
default_CC = {
@@ -780,10 +1141,27 @@ vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilatio
' project.', config['COMPILATIONDB_FILTER_FILES'])
vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())
tools = ['default', 'compilation_db', 'unity_build']
if 'VARIABLES' in config:
for vardef in config['VARIABLES']:
vars.Add(*vardef)
tools = ['default', 'compilation_db']
if 'TOOLS' in config:
assert isinstance(config['TOOLS'], list)
tools.extend(config['TOOLS'])
addon_dirs = [pathlib.Path(Dir('.').abspath) / 'addons']
if 'ADDON_DIRS' in config:
assert isinstance(config['ADDON_DIRS'], list)
addon_dirs.extend(config['ADDON_DIRS'])
for addon_dir in addon_dirs:
if not isinstance(addon_dir, pathlib.Path):
addon_dir = pathlib.Path(addon_dir)
_load_addons(addon_dir)
_hook_pre_environment.invoke()
env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['SPP_RECIPES_FOLDERS'] = []
@@ -796,12 +1174,12 @@ env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []
print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
_info(None, f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except:
env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
_info(None, f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
@@ -841,6 +1219,7 @@ env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])
env.Append(LIBS = [])
# init SPP environment variables
env['SPP_DIR'] = _spp_dir.abspath
@@ -849,11 +1228,15 @@ env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {}
env['SPP_MODULES'] = {} # maps from folder to Module
env['SPP_CPU_FEATURES'] = config.get('USE_CPU_FEATURES', [])
env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
if variant:
env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
_hook_post_environment.invoke()
# create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
@@ -878,8 +1261,8 @@ if os.path.exists(update_stamp_file):
except:
pass
boot_time = psutil.boot_time()
if boot_time > update_time:
print('Didn\'t update repositories since last boot, doing it now...')
if not disable_auto_update and boot_time > update_time:
_info(None, 'Didn\'t update repositories since last boot, doing it now...')
env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
with open(update_stamp_file, 'w') as f:
@@ -920,15 +1303,17 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
# also GCC complains about some (compiler generated) fields in coroutines not having any linkage
# also -Wdangling-reference seems to produce a lot of false positives
# also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?))
# also -Warray-bounds because GCC kept complaining when I was constructing a string from a string_view
# -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
# -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized', '-Wno-array-bounds'])
env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
else: # clang only
# no-gnu-anonymous-struct - we don't care
# no-missing-field-initializers - useful in some cases, annoying in most
# no-ambiguous-reversed-operator - should be quite useful, but we get a false positive, apparently?
env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator'])
# no-parentheses-equality - false positive for fold expressions and doesn't seem to useful anyway
env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator', '-Wno-parentheses-equality'])
env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
if build_type == 'debug':
env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
@@ -950,6 +1335,13 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
for feature in env['SPP_CPU_FEATURES']:
flag = _GCC_CPU_FEATURES_MAP.get(feature)
if flag is None:
_warn(None, f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
else:
env.Append(CCFLAGS = [flag])
elif env['COMPILER_FAMILY'] == 'cl':
cxx_version_name = {
'c++14': 'c++14',
@@ -964,7 +1356,7 @@ elif env['COMPILER_FAMILY'] == 'cl':
# C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
# C4275: same as above
env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8'])
f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8', '/Zc:preprocessor'])
env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
env.Append(DEPS_CXXFLAGS = ['/Zc:char8_t', '/utf-8', '/vmg'])
if env['CXX_NO_EXCEPTIONS']:
@@ -975,12 +1367,14 @@ elif env['COMPILER_FAMILY'] == 'cl':
if env['SHOW_INCLUDES']:
env.Append(CCFLAGS = ['/showIncludes'])
if build_type == 'debug':
env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
#env['PDB'] = env.File('#bin/full.pdb')
env.Append(CCFLAGS = ['/Od', '/MDd'], LINKFLAGS = ' /DEBUG')
env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile':
env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ' /DEBUG')
#env['PDB'] = env.File('#bin/full.pdb')
env.Append(CCFLAGS = ['/O2', '/MD'], LINKFLAGS = ' /DEBUG')
env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
else:
@@ -1008,32 +1402,37 @@ env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_executable, 'FindExecutable')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_info, 'Info')
env.AddMethod(_warn, 'Warn')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
env.AddMethod(_wrap_builder(env.AstJson, TargetType.MISC), 'AstJson')
if 'unity_build' in tools:
env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')
env.AddMethod(_wrap_depends(env.Ignore), 'Ignore')
env.AddMethod(_wrap_depends(env.Requires), 'Requires')
env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')
env.AddMethod(_module_config, 'ModuleConfig')
env.AddMethod(_finalize, 'Finalize')
env.AddMethod(_find_target, 'FindTarget')
if hasattr(env, 'Gch'):
env.AddMethod(_wrap_builder(env.Gch), 'Gch')
env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch')
for addon_file in env.Glob('addons/*.py'):
_hook_config_complete.invoke()
for addon_file in env.Glob('addons/old/*.py'):
env = SConscript(addon_file, exports = 'env')
if dump_env:
print('==== Begin Environment Dump =====')
print(env.Dump())
print('==== End Environment Dump =====')
Exit(0)
Return('env')

209
addons/astgen.py Normal file
View File

@@ -0,0 +1,209 @@
import gzip
import json
import os.path
import pickle
import subprocess
from abc import ABC, abstractmethod
from typing import Callable, Any, Iterable, Self, Generator
from SCons.Script import *
from SCons.Node.FS import File
from spp import get_spp
spp = get_spp()
def post_environment(**kwargs) -> None:
    """Hook: register the AstJson builder and the AstJinja helper method
    on the freshly created SCons environment."""
    env: Environment = spp.globals['env']
    env.Append(BUILDERS = {'AstJson': Builder(action=_gen_ast_json)})
    env.AddMethod(_ast_jinja, 'AstJinja')
def _gen_ast_json(target: list[File], source: list[File], env: Environment):
    """SCons builder action: dump the Clang AST of source[0] as JSON into target[0].

    The output format is selected by the target suffix:
    '.bin' -> gzip-compressed pickle, '.gz' -> gzip-compressed JSON text,
    anything else -> plain JSON text.
    """
    clang_exe = env.WhereIs('clang++')
    if not clang_exe:
        env.Error('clang++ was not found; it is required to generate AST dumps.')
        return
    cmd = [clang_exe, '-Xclang', '-ast-dump=json', '-fsyntax-only', '-Wno-unknown-warning-option',
           '-DSPP_AST_GEN', f'-std={env["CXX_STANDARD"]}']
    cmd.extend(f'-D{define}' for define in env['CPPDEFINES'])
    cmd.extend(f'-I{path}' for path in env['CPPPATH'])
    cmd.append(source[0].abspath)
    # Stream clang's stdout straight into the JSON parser; AST dumps can be huge.
    proc = subprocess.Popen(cmd, text=True, stdout=subprocess.PIPE)
    try:
        parsed = json.load(proc.stdout)
    except json.JSONDecodeError:
        # Clang most likely failed and produced no/partial output; report below.
        parsed = None
    # BUGFIX: the previous version caught subprocess.CalledProcessError around
    # Popen(), which Popen never raises - failing clang runs went unnoticed.
    # Wait for the process and check its exit status explicitly instead.
    if proc.wait() != 0 or parsed is None:
        env.Error(f'Clang exited with code {proc.returncode}.')
        return
    if target[0].suffix == '.bin':
        with gzip.open(target[0].abspath, 'wb') as f:
            pickle.dump(parsed, f)
    elif target[0].suffix == '.gz':
        with gzip.open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
    else:
        with open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
class ASTNode(ABC):
    """Base wrapper around raw Clang JSON-AST declaration dicts."""

    @abstractmethod
    def _get_decls(self) -> Iterable[dict]:
        """Yield the raw declaration dicts aggregated by this node."""
        ...

    def inner(self) -> Iterable[dict]:
        """Iterate over the child declarations of all wrapped decls.

        BUGFIX: the previous implementation used itertools.chain, but this
        module never imports itertools, and it indexed decl['inner'] directly,
        raising KeyError for leaf declarations without children.
        """
        for decl in self._get_decls():
            yield from decl.get('inner', ())

    def inner_filtered(self, **kwargs) -> Iterable[dict]:
        """Child declarations whose entries match every given key/value pair."""
        def _matches(decl: dict) -> bool:
            return all(decl.get(name) == val for name, val in kwargs.items())
        return (decl for decl in self.inner() if _matches(decl))
class SimpleASTNode(ASTNode):
    """ASTNode backed by exactly one raw declaration dict."""

    def __init__(self, decl: dict) -> None:
        self._decl = decl

    def _get_decls(self) -> Iterable[dict]:
        # Single-element collection: this node wraps one declaration only.
        return [self._decl]
# A single value node inside an annotation (child of an AnnotateAttr).
class Value(SimpleASTNode): ...
class Annotation(SimpleASTNode):
    """An AnnotateAttr declaration; its children are the annotation's values."""

    @property
    def values(self) -> Iterable[Value]:
        for decl in self.inner():
            yield Value(decl)
class Param(SimpleASTNode):
    """A ParmVarDecl: one parameter of a function or method."""

    @property
    def name(self) -> str:
        # Unnamed parameters carry no 'name' entry in the AST dump.
        return self._decl['name'] if 'name' in self._decl else ''

    @property
    def type(self) -> str:
        return self._decl['type']['qualType']
class Method(SimpleASTNode):
    """A CXXMethodDecl together with the access level it was declared under."""

    def __init__(self, decl: dict, access: str) -> None:
        super().__init__(decl)
        self._access = access

    @property
    def access(self) -> str:
        """Access specifier in effect for this method ('public'/'protected'/'private')."""
        return self._access

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def mangled_name(self) -> str:
        return self._decl['mangledName']

    @property
    def type(self) -> str:
        """Full function type as spelled by clang, e.g. 'int (float, bool)'."""
        return self._decl['type']['qualType']

    @property
    def return_type(self) -> str:
        # Everything before the parameter list of the function type.
        head, _sep, _rest = self.type.partition('(')
        return head.strip()

    @property
    def params(self) -> Iterable[Param]:
        for decl in self.inner_filtered(kind='ParmVarDecl'):
            yield Param(decl)

    @property
    def annotations(self) -> Iterable[Annotation]:
        for decl in self.inner_filtered(kind='AnnotateAttr'):
            yield Annotation(decl)
class Class(SimpleASTNode):
    """A complete CXXRecordDecl (class definition)."""

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def tagUsed(self) -> str:
        """Which keyword declared the record: 'class' or 'struct'."""
        return self._decl['tagUsed']

    @property
    def methods(self) -> Generator[Method]:
        """Yield explicit (non-implicit) methods with their access level.

        Walks the record's children in order, updating the current access
        level whenever an AccessSpecDecl is encountered.
        """
        # Default member access: 'class' starts private, 'struct' public.
        current_access = 'public' if self.tagUsed != 'class' else 'private'
        for decl in self.inner():
            kind = decl['kind']
            if kind == 'AccessSpecDecl':
                current_access = decl['access']
            elif kind == 'CXXMethodDecl' and not decl.get('isImplicit', False):
                yield Method(decl, current_access)
class Namespace(ASTNode, ABC):
    """An ASTNode that can be drilled into by C++ namespace or class."""

    def get_namespace(self, ns_name: str) -> Self:
        # A namespace may be reopened multiple times in one translation unit,
        # so collect every NamespaceDecl with the requested name.
        matches = [decl for decl in self.inner_filtered(kind='NamespaceDecl', name=ns_name)]
        return InnerNamespace(matches)

    @property
    def classes(self) -> Iterable[Class]:
        """All fully-defined classes (tag 'class') directly in this namespace."""
        for decl in self.inner_filtered(kind='CXXRecordDecl', tagUsed='class', completeDefinition=True):
            yield Class(decl)
class InnerNamespace(Namespace):
    """Namespace assembled from one or more NamespaceDecl dicts.

    Holds a list because a C++ namespace can be reopened several times
    within the same translation unit.
    """
    def __init__(self, decls: list[dict]) -> None:
        self._decls = decls
    def _get_decls(self) -> Iterable[dict]:
        return self._decls
class Ast(Namespace):
    """Lazily-loaded Clang AST dump for one source file.

    The dump file is read on first access and cached; the on-disk format is
    derived from the file suffix ('.bin' -> gzipped pickle, '.gz' -> gzipped
    JSON, otherwise plain JSON).
    """

    def __init__(self, file: File) -> None:
        self._file = file
        self._data_dict: dict|None = None

    def _get_decls(self) -> tuple[dict]:
        if self._data_dict is None:
            self._data_dict = self._load()
        return (self._data_dict,)

    def _load(self) -> dict:
        # A missing dump behaves like an empty translation unit.
        if not self._file.exists():
            return {'inner': []}
        path = self._file.abspath
        suffix = self._file.suffix
        if suffix == '.bin':
            with gzip.open(path, 'rb') as f:
                return pickle.load(f)
        if suffix == '.gz':
            with gzip.open(path) as f:
                return json.load(f)
        with open(path, 'r') as f:
            return json.load(f)
def _ast_jinja(env: Environment, source: File, target: File, template: File, **kwargs):
    """Render a Jinja template against the (cached) Clang AST of `source`.

    Builds an AST dump of `source` into the cache directory, then renders
    `template` into `target` with an `ast` object exposed in the Jinja context.
    """
    rel_path = env.Dir('#').rel_path(source)
    json_file = env.File(os.path.join(env['CACHE_DIR'], 'ast_json', f'{rel_path}.bin'))
    ast_json = env.AstJson(target=json_file, source=source, **kwargs)
    rendered = env.Jinja(
        target=target,
        source=template,
        JINJA_CONTEXT = {
            'ast': Ast(json_file)
        },
        **kwargs
    )
    # The rendered file must rebuild when the AST dump changes, but the dump
    # must not depend on the rendered output - that would create a cycle.
    env.Depends(rendered, ast_json)
    env.Ignore(ast_json, rendered)
    return rendered

13
addons/compat_v1_0.py Normal file
View File

@@ -0,0 +1,13 @@
from spp import get_spp
spp = get_spp()
def available(**kwargs) -> bool:
    """This compatibility addon applies only to projects targeting S++ 1.0.x."""
    target_version = spp.globals['config']['SPP_TARGET_VERSION']
    return target_version[0:2] == (1, 0)
def pre_environment(**kwargs) -> None:
    """Hook: restore S++ 1.0 tool defaults before the Environment is created."""
    spp.globals['tools'].append('unity_build') # S++ 1.0.0 had the unity_build enabled by default
def post_environment(**kwargs) -> None:
    """Hook: point S++ 1.0 projects at the legacy default recipe repository."""
    spp.globals['env']['_SPP_FALLBACK_RECIPE_REPO'] = {'repo_name': 'mewin', 'remote_url': 'https://git.mewin.de/mewin/spp_recipes.git', 'git_ref': 'stable'}

35
addons/config_cache.py Normal file
View File

@@ -0,0 +1,35 @@
import json
from pathlib import Path
from spp import get_spp, TargetType
spp = get_spp()
def _should_generate() -> bool:
    """Write the config cache only when a program or library target exists."""
    built_types = (TargetType.PROGRAM, TargetType.STATIC_LIBRARY, TargetType.SHARED_LIBRARY)
    return any(target.target_type in built_types for target in spp.targets)
def post_finalize(**kwargs) -> None:
    """Hook: persist selected build settings to cache/config_cache.json.

    Load/save failures are reported as warnings; they never abort the build.
    """
    if not _should_generate():
        return
    cache_file = Path(spp.env['CACHE_DIR']) / 'config_cache.json'
    cache: dict = {}
    if cache_file.exists():
        try:
            cache = json.loads(cache_file.read_text())
        except Exception as e:
            spp.env.Warn(f'Error while loading config cache: {e}.')
    cache['build_type'] = spp.env['BUILD_TYPE']
    try:
        cache_file.write_text(json.dumps(cache))
    except Exception as e:
        spp.env.Warn(f'Error while saving config cache: {e}.')

View File

@@ -1,10 +1,178 @@
# based on https://github.com/hgomersall/scons-jinja
from SCons.Script import *
import os
import pathlib
Import('env')
from spp import get_spp
if not hasattr(env, 'Jinja'):
Return('env')
try:
import jinja2
from jinja2.utils import open_if_exists
except ImportError:
jinja2 = None
print('No Jinja :(')
spp = get_spp()
def available(**kwargs) -> bool:
return jinja2 is not None
def post_environment(**kwargs) -> None:
env: Environment = spp.globals['env']
env.SetDefault(JINJA_CONTEXT={})
env.SetDefault(JINJA_ENVIRONMENT_VARS={})
env.SetDefault(JINJA_FILTERS={'load_config': _jinja_load_config})
env.SetDefault(JINJA_GLOBALS={
'file_size': lambda *args: _file_size(env, *args),
'file_content_hex': lambda *args: _file_content_hex(env, *args)
})
env.SetDefault(JINJA_TEMPLATE_SEARCHPATH=['data/jinja'])
env.SetDefault(JINJA_CONFIG_SEARCHPATH=[env.Dir('#data/config')])
env.SetDefault(JINJA_FILE_SEARCHPATH=[env.Dir('#')])
env['BUILDERS']['Jinja'] = Builder(
action=render_jinja_template
)
scanner = env.Scanner(function=jinja_scanner,
skeys=['.jinja'])
env.Append(SCANNERS=scanner)
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
class FileSystemLoaderRecorder(jinja2.FileSystemLoader):
""" A wrapper around FileSystemLoader that records files as they are
loaded. These are contained within loaded_filenames set attribute.
"""
def __init__(self, searchpath, encoding='utf-8'):
self.loaded_filenames = set()
super(FileSystemLoaderRecorder, self).__init__(searchpath, encoding)
def get_source(self, environment, template):
"""Overwritten FileSystemLoader.get_source method that extracts the
filename that is used to load each filename and adds it to
self.loaded_filenames.
"""
for searchpath in self.searchpath:
filename = os.path.join(searchpath, template)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
self.loaded_filenames.add(filename)
return super(FileSystemLoaderRecorder, self).get_source(
environment, template)
# If the template isn't found, then we have to drop out.
raise jinja2.TemplateNotFound(template)
def jinja_scanner(node, env, path):
# Instantiate the file as necessary
node.get_text_contents()
template_dir, filename = os.path.split(str(node))
template_search_path = ([template_dir] +
env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
template_loader = FileSystemLoaderRecorder(template_search_path)
jinja_env = jinja2.Environment(loader=template_loader,
extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
jinja_env.globals.update(env['JINJA_GLOBALS'])
try:
template = jinja_env.get_template(filename)
except jinja2.TemplateNotFound as e:
env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
# We need to render the template to do all the necessary loading.
#
# It's necessary to respond to missing templates by grabbing
# the content as the exception is raised. This makes sure of the
# existence of the file upon which the current scanned node depends.
#
# I suspect that this is pretty inefficient, but it does
# work reliably.
context = env['JINJA_CONTEXT']
last_missing_file = ''
while True:
try:
template.render(**context)
except jinja2.TemplateNotFound as e:
if last_missing_file == str(e):
# We've already been round once for this file,
# so need to raise
env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
last_missing_file = str(e)
# Find where the template came from (using the same ordering
# as Jinja uses).
for searchpath in template_search_path:
filename = os.path.join(searchpath, last_missing_file)
if os.path.exists(filename):
continue
else:
env.File(filename).get_text_contents()
continue
break
# Get all the files that were loaded. The set includes the current node,
# so we remove that.
found_nodes_names = list(template_loader.loaded_filenames)
try:
found_nodes_names.remove(str(node))
except ValueError as e:
env.Error(f'Missing template node: {str(node)}')
return [env.File(f) for f in found_nodes_names]
def render_jinja_template(target, source, env):
output_str = ''
if not source:
source = [f'{target}.jinja']
for template_file in source:
template_dir, filename = os.path.split(str(template_file))
template_search_path = ([template_dir] +
env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
template_loader = FileSystemLoaderRecorder(template_search_path)
jinja_env = jinja2.Environment(loader=template_loader,
extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
jinja_env.globals.update(env['JINJA_GLOBALS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
template = jinja_env.get_template(filename)
context = env['JINJA_CONTEXT']
template.render(**context)
output_str += template.render(**context)
with open(str(target[0]), 'w') as target_file:
target_file.write(output_str)
return None
def _jinja_load_config(env, config_name):
searched_paths = []
@@ -23,27 +191,29 @@ def _wrap_jinja(orig_jinja):
def _wrapped(env, target, **kwargs):
if 'source' not in kwargs:
kwargs['source'] = f'{target}.jinja'
target = orig_jinja(**kwargs)
target = orig_jinja(target=target, **kwargs)
if 'depends' in kwargs:
for dependency in kwargs['depends']:
env.Depends(target, dependency)
# env.Depends(alias_prepare, target)
return target
return _wrapped
def _find_file(env, fname):
for path in env['JINJA_FILE_SEARCHPATH']:
fullpath = os.path.join(path.abspath, fname)
if os.path.exists(fullpath):
return env.File(fullpath)
return None
def _file_size(env, fname: str) -> int:
return env.File(fname).get_size()
file = _find_file(env, fname)
if not file:
env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
return file.get_size()
def _file_content_hex(env, fname: str) -> str:
bytes = env.File(fname).get_contents()
file = _find_file(env, fname)
if not file:
env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
bytes = file.get_contents()
return ','.join([hex(byte) for byte in bytes])
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
env.Append(JINJA_GLOBALS = {
'file_size': lambda *args: _file_size(env, *args),
'file_content_hex': lambda *args: _file_content_hex(env, *args)
})
env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
Return('env')

View File

@@ -31,10 +31,18 @@ def _generate_cmake_cxx_flags(env, dependencies: 'list[dict]') -> str:
def _get_cmake_cxx_standard(env: Environment) -> str:
return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX"
def _get_cmake_prefix_path(dependencies: 'list[dict]') -> str:
parts = []
for dependency in dependencies:
for path in dependency.get('CMAKE_PREFIX_PATH', []):
parts.append(path)
return cmd_quote(';'.join(parts))
def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]':
args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}',
f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}',
f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}']
f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}',
f'-DCMAKE_PREFIX_PATH={_get_cmake_prefix_path(dependencies)}']
for dependency in dependencies:
for name, value in dependency.get('CMAKE_VARS', {}).items():
args.append(f'-D{name}={cmd_quote(value)}')
@@ -97,6 +105,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
libpath.append(full_path)
return {
'build_dir': build_dir,
'install_dir': install_dir,
'BINPATH': [os.path.join(install_dir, 'bin')],
'LIBPATH': libpath,

View File

@@ -37,19 +37,22 @@ def _download_file(url: str, path: pathlib.Path) -> None:
urllib.request.urlretrieve(url, dl_path)
dl_path.rename(path)
def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int) -> None:
def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int = 0) -> None:
if archive_type == ArchiveType.TAR_GZ:
file = tarfile.open(str(path))
filter = tarfile.data_filter
if skip_folders != 0:
def skip_filer(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
name_parts = member.name.split('/')
def skip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
name_parts = member.name.split('/', skip_folders)
if len(name_parts) <= skip_folders:
return None
return member.replace(name = '/'.join(name_parts[skip_folders:]))
file.extraction_filter = skip_filer
file.extractall(output_dir)
filter = skip_filter
file.extractall(output_dir, filter=filter)
file.close()
elif archive_type == ArchiveType.ZIP:
if skip_folders != 0:
raise Exception('skip_folders option is not yet supported for zip-archives :()')
file = zipfile.open(str(path))
file.extractall(output_dir)
file.close()

View File

@@ -3,6 +3,8 @@ from git import Repo
from git.exc import GitError
import hashlib
import inspect
import os
import shutil
from SCons.Script import *
Import('env')
@@ -20,7 +22,41 @@ def _clone(env: Environment, repo_name: str, remote_url: str):
def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
repo, origin = _clone(env, repo_name, remote_url)
worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
old_worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, git_ref.replace('/', '_'))
if os.path.exists(old_worktree_dir) and not os.path.islink(old_worktree_dir):
if not os.path.exists(worktree_dir):
print(f'Found old Git worktree at {old_worktree_dir}, moving it to {worktree_dir}.')
try:
repo.git.worktree('move', old_worktree_dir, worktree_dir)
except GitError:
print('Error while moving worktree, manually moving and repairing it instead.')
shutil.move(old_worktree_dir, worktree_dir)
try:
repo.git.worktree('repair', worktree_dir)
except GitError:
print('Also didn\'t work, removing and redownloading it.')
try:
repo.git.worktree('remove', '-f', worktree_dir)
except GitError: ...
try:
repo.git.worktree('remove', '-f', old_worktree_dir)
except GitError: ...
if os.path.exists(worktree_dir):
shutil.rmtree(worktree_dir, ignore_errors=True)
# this is all we can do, I guess
else:
print(f'Found old Git worktree at {old_worktree_dir}, but the new one at {worktree_dir} already exists. Removing the old one.')
repo.git.worktree('remove', '-f', old_worktree_dir)
print('Attempting to create a symlink for older S++ versions.')
try:
os.symlink(worktree_dir, old_worktree_dir, target_is_directory=True)
except Exception as e:
print(f'Failed: {e}')
update_submodules = False
if not os.path.exists(worktree_dir):
print(f'Checking out into {worktree_dir}.')
@@ -38,9 +74,17 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
update_submodules = True
else:
print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
else:
worktree_repo = Repo(worktree_dir)
if update_submodules:
for submodule in worktree_repo.submodules:
submodule.update(init=True)
for submodule in worktree_repo.submodules:
if os.listdir(submodule.abspath) == ['.git']:
print(f'Submodule {submodule.name} seems borked, attempting to fix it.')
worktree_repo.git.submodule('deinit', '-f', submodule.path)
worktree_repo.git.submodule('init', submodule.path)
worktree_repo.git.submodule('update', submodule.path)
return {
'checkout_root': worktree_dir,
'repo': repo,
@@ -50,7 +94,10 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
repo, origin = _clone(env, repo_name, remote_url)
if force_fetch or env['UPDATE_REPOSITORIES']:
origin.fetch(tags=True)
try:
origin.fetch(tags=True)
except GitError:
env.Warn(f'Error fetching tags from {repo_name} ({remote_url})')
return [t.name for t in repo.tags]
def _make_callable(val):

53
contrib/vs/spp.targets Normal file
View File

@@ -0,0 +1,53 @@
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <!-- Shared MSBuild glue that lets Visual Studio drive S++/SCons builds
       for generated .vcxproj files. Build/Clean/Rebuild shell out to scons. -->
  <PropertyGroup>
    <SolutionExt>.sln</SolutionExt>
    <Language>C++</Language>
    <DefaultLanguageSourceExtension>.cpp</DefaultLanguageSourceExtension>
  </PropertyGroup>
  <!-- Defaults; each property is only set when the project did not provide it. -->
  <PropertyGroup>
    <TargetFileName Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetFileName('$(TargetPath)'))</TargetFileName>
    <TargetDir Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetDirectoryName('$(TargetPath)'))</TargetDir>
    <OutputPath>$(TargetDir)</OutputPath>
    <LocalDebuggerCommand Condition="'$(LocalDebuggerCommand)' == ''">$(TargetPath)</LocalDebuggerCommand>
    <SConsCommandLine Condition="'$(SConsCommandLine)' == ''">scons</SConsCommandLine>
    <SPPNumProcs Condition="'$(SPPNumProcs)' == ''">$([System.Environment]::ProcessorCount)</SPPNumProcs>
    <SPPBuildType Condition="'$(SPPBuildType)' == ''">debug</SPPBuildType>
    <SPPTargetType Condition="'$(SPPTargetType)' == ''">executable</SPPTargetType>
    <OutDir>$(OutputPath)\</OutDir>
    <IntDir>$(SolutionDir)cache\msbuild\</IntDir>
  </PropertyGroup>
  <Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />
  <!-- 'meta' targets have nothing to build; scons is only invoked for real targets. -->
  <Target Name="Build" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Build" Condition="'$(SPPTargetType)' == 'meta'">
  <Message Importance="low" Text="Skipping build for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Clean" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -c -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Clean" Condition="'$(SPPTargetType)' == 'meta'">
  <Message Importance="low" Text="Skipping clean for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Rebuild" Condition="'$(SPPTargetType)' != 'meta'" DependsOnTargets="Clean;Build" />
  <!--<Target Name="Rebuild" Condition="'$(SPPTargetType)' == 'meta'">
  <Message Importance="low" Text="Skipping rebuild for meta target $(ProjectName)" />
  </Target>-->
  <!-- This target is needed just to suppress "warning NU1503: Skipping restore for project '...'. The project file may be invalid or missing targets
  required for restore." -->
  <Target Name="_IsProjectRestoreSupported" Returns="@(_ValidProjectsForRestore)">
    <ItemGroup>
      <_ValidProjectsForRestore Include="$(MSBuildProjectFullPath)" />
    </ItemGroup>
  </Target>
  <Import Condition="'$(_ImportMicrosoftCppDesignTime)' != 'false'" Project="$(VCTargetsPathActual)\Microsoft.Cpp.DesignTime.targets" />
</Project>

55
lib/spp.py Normal file
View File

@@ -0,0 +1,55 @@
from dataclasses import dataclass
import enum
from typing import TYPE_CHECKING
from SCons.Script import *
if TYPE_CHECKING:
    # Static-analysis-only subclass advertising the helper methods that the
    # S++ SConscript attaches at runtime via env.AddMethod().
    class SPPEnvironment(Environment):
        def Info(self, message: str): ...
        def Warn(self, message: str): ...
        def Error(self, message: str): ...
else:
    # At runtime the plain SCons Environment is used directly.
    SPPEnvironment = Environment
@dataclass
class Module:
    """Description of one S++ source module."""
    name: str
    folder: str  # module folder, relative to the project root
    description: str
    cxx_namespace: str  # C++ namespace the module's code lives in
class TargetType(enum.Enum):
    """Kind of artifact a Target produces; MISC covers everything else (e.g. generated files)."""
    PROGRAM = 0
    STATIC_LIBRARY = 1
    SHARED_LIBRARY = 2
    MISC = 3
class Target:
    """Bookkeeping record for one build target registered with S++."""
    name: str
    target_type: 'TargetType'
    builder = None
    target = None  # the underlying SCons target node, set once built
    module: 'Module' = None

    def __init__(self) -> None:
        # BUGFIX: these used to be mutable class-level defaults ([] / {}),
        # which are shared between ALL Target instances - appending to one
        # target's args mutated every target. Give each instance its own
        # containers instead.
        self.args: list = []
        self.kwargs: dict = {}
        self.dependencies: list = []
@dataclass(frozen=True)
class SPPInterface:
    """Immutable facade over the SConscript's global state, handed to addons."""
    globals: dict

    @property
    def env(self) -> SPPEnvironment:
        """The active SCons environment."""
        return self.globals['env']

    @property
    def targets(self) -> list[Target]:
        """All targets registered so far (stored on the environment)."""
        return self.globals['env']['SPP_TARGETS']
# Process-wide singleton; installed by the S++ SConscript during startup.
_spp: SPPInterface
def _init_interface(**kwargs) -> None:
    """Install the process-wide SPPInterface singleton (called by the S++ SConscript)."""
    global _spp
    _spp = SPPInterface(**kwargs)
def get_spp() -> SPPInterface:
    """Return the singleton previously installed via _init_interface()."""
    return _spp

View File

@@ -1,4 +1,6 @@
GitPython
psutil
GitPython~=3.1.45
psutil~=7.0.0
Jinja2
requests
SCons~=4.9.1
cxxheaderparser~=1.5.4

Binary file not shown.

View File

@@ -1,6 +0,0 @@
config = {
'PROJECT_NAME': 'DUMMY'
}
env = SConscript('../SConscript', exports = ['config'])

86
test/codegen/.gitignore vendored Normal file
View File

@@ -0,0 +1,86 @@
# Generated Files
*.refl.hpp
*.generated.*
private/**/*.json
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

15
test/codegen/SConstruct Normal file
View File

@@ -0,0 +1,15 @@
# Project configuration consumed by the shared S++ SConscript.
config = {
    'PROJECT_NAME': 'S++ Codegen Test',
    'SPP_TARGET_VERSION': (1, 1, 0)
}
env = SConscript('../../SConscript', exports = ['config'])
# recipe repo
env.RecipeRepo('mewin', 'https://git.mewin.de/mewin/spp_recipes.git', 'stable')
# app
env = env.Module('private/test/SModule')
env.Finalize()

View File

@@ -0,0 +1,50 @@
# S++ module definition for the codegen test application.
Import('env')

# Register this directory as the 'Test' module.
env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

src_files = Split("""
main.cpp
test.cpp
test.generated.cpp
""")

# Earlier codegen API experiments, kept for reference:
# env.IncludeGen(src_files,
#     template=env.File('#templates/header.jinja'),
#     include_filter=r'.*\.refl.hpp'
# )
# env.CodeGen('GenSource', inputs = [], template=env.File('#templates/source.jinja'), )
# env.CodeGen(
#     target = 'test.generated.cpp',
#     template = env.File('#templates/source.jinja'),
#     inputs = {'source': 'test.cpp'}
# )

# Dump the parsed AST of test.hpp as JSON.
ast_json = env.AstJson(
    target = env.File('test.json'),
    source = 'test.hpp'
)
env.Default(ast_json)

# Render the reflection header from the AST of test.hpp.
ast_hpp = env.AstJinja(
    target = env.File('test.refl.hpp'),
    source = env.File('test.hpp'),
    template = env.File('#templates/header.jinja')
)

prog_app = env.Program(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
    }
)
# Ensure the generated reflection header exists before compiling.
env.Requires(prog_app.target, ast_hpp)
env.Default(prog_app)
Return('env')

View File

@@ -0,0 +1,8 @@
#include "./test.hpp"

// Entry point of the codegen test app: prints a greeting with a fixed value.
int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}

View File

@@ -0,0 +1,12 @@
#include "./test.hpp"

#include <print>

namespace tst
{

// Prints a greeting that embeds the given parameter (uses C++23 std::println).
void printHelloWorld(int param) noexcept
{
    std::println("Hello World! Param is {}.", param);
}

} // namespace tst

View File

@@ -0,0 +1,33 @@
#pragma once

#include <vector>

// Pull in the generated reflection header once the generator has produced it.
#if __has_include("test.refl.hpp")
#include "test.refl.hpp"
#endif

namespace tst
{

static constexpr int kAnnotVal = 17;

// Sample class used to exercise the AST / reflection code generator.
class MyClass
{
private:
    std::vector<int> mInts;
public:
    MyClass();

    // The annotate attribute is only compiled under clang; presumably the
    // code generator reads it from the clang AST dump -- TODO confirm.
#if defined(__clang__)
    [[clang::annotate("reflect", "yes, please", kAnnotVal)]]
#endif
    int getVal();
    void setVal(int val);

    static constexpr int kVal = 1;
};

} // namespace tst

namespace tst
{

// Prints a greeting that embeds the given parameter value.
void printHelloWorld(int param) noexcept;

} // namespace tst

View File

@@ -0,0 +1,11 @@
{#- Test template: renders the classes/methods of namespace 'tst' from the
    parsed AST into a C comment. The SPP_AST_GEN guard hides the generated
    text from the AST-generation pass itself. -#}
#if !defined(SPP_AST_GEN)
{% for class in ast.get_namespace('tst').classes %}
/*
{{ class.name }}
{% for method in class.methods %}
{{ method.return_type }} {{ method.name }} ({% for param in method.params %} {{ param.type }} {{ param.name }} {% endfor %})
{% endfor %}
{% endfor %}
*/
#endif

View File

@@ -0,0 +1,3 @@
{#- Test template: emits one comment line per class found in the parsed
    source file's namespace (cxxheaderparser AST). -#}
{% for cls in source.namespace.classes %}
// {{ cls.class_decl.typename.format() }}
{% endfor %}

81
test/v1_0_0/.gitignore vendored Normal file
View File

@@ -0,0 +1,81 @@
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

10
test/v1_0_0/SConstruct Normal file
View File

@@ -0,0 +1,10 @@
# Minimal S++ project configuration consumed by the shared SConscript.
config = {
    'PROJECT_NAME': 'S++ 1.0.0 Test'
}
env = SConscript('../../SConscript', exports = ['config'])
# Load the single test module and finish build setup.
env = env.Module('private/test/SModule')
env.Finalize()

View File

@@ -0,0 +1,25 @@
# S++ module definition for the v1.0.0 compatibility test application.
Import('env')

env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

src_files = Split("""
main.cpp
test.cpp
""")

# Unity build of the test program; pulls in the 'mijin' recipe with
# default options.
prog_app = env.UnityProgram(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
        'mijin': {}
    }
)
env.Default(prog_app)
Return('env')

View File

@@ -0,0 +1,8 @@
#include "./test.hpp"

// Entry point of the v1.0.0 test app: prints a greeting with a fixed value.
int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}

View File

@@ -0,0 +1,15 @@
#include "./test.hpp"

#include <mijin/debug/assert.hpp>
#include <print>

namespace tst
{

// Prints a greeting; asserts (via the mijin dependency) that param is positive.
void printHelloWorld(int param) noexcept
{
    MIJIN_ASSERT(param > 0, "param is not >0 :(");
    std::println("Hello World! Param is {}.", param);
}

} // namespace tst

View File

@@ -0,0 +1,7 @@
#pragma once

namespace tst
{

// Prints a greeting that embeds the given parameter value.
void printHelloWorld(int param) noexcept;

} // namespace tst

View File

@@ -0,0 +1,133 @@
import os
import math
from SCons.Script import *
from SCons.Node.FS import File
from SCons import Action
"""
Scons Unity Build Generator
Provides several generators for SCons to combine multiple source files into a bigger
one to reduce compilation time, so called "unity builds". This is achieved by generating
unity source files which in term include the actual source files and compile them using
one of the existing SCons builders.
Usage
-----
In order to use this, just place it inside your `site_scons/site_tools` folder, enable it by
adding "unity_build" to the tools when constructing your Environment and replace invocations
of the Program/Library/SharedLibrary/StaticLibrary builders with their Unity... counterpart:
env = Environment(tools = ['default', 'unity_build'])
source_files = ...
env.UnityProgram(
target = 'my_program',
source = source_files,
...
)
The tool will generate an amount of unity source files and invoke the Program builder on these,
forwarding any other arguments you passed.
Other Options
------------
You can control the behaviour of the builder using several Environment options:
env['UNITY_CACHE_DIR'] = '.unity' # Directory where the unity sources are stored.
# can be either a string or a Dir() node.
env['UNITY_MAX_SOURCES'] = 15 # Maximum number of source files per unity file.
env['UNITY_MIN_FILES'] = env.GetOption('num_jobs')
# Minimum number of unity files to generate (if possible).
# Defaults to the number of jobs passed to SCons.
env['UNITY_DISABLE'] = False # Set to True to completely disable unity builds. The commands
# will simply pass through their options to the regular builders.
Additionally any generator can be passed a `cache_dir` to overwrite the value from the Environment.
"""
def exists(env : Environment):
    """SCons tool hook: this tool has no external requirements, always usable."""
    return True
def generate(env : Environment):
    """SCons tool hook: install the Unity* builder wrappers on `env`."""
    env.AddMethod(_make_generator(env.Program), 'UnityProgram')
    env.AddMethod(_make_generator(env.Library), 'UnityLibrary')
    env.AddMethod(_make_generator(env.StaticLibrary), 'UnityStaticLibrary')
    env.AddMethod(_make_generator(env.SharedLibrary), 'UnitySharedLibrary')
    # Builder that writes the generated unity source files themselves.
    unity_source_builder = env.Builder(
        action = Action.Action(_generate_unity_file, _generate_unity_file_msg)
    )
    env.Append(BUILDERS = {'UnitySource': unity_source_builder})
    # Option defaults; see the module docstring for what each one controls.
    env.SetDefault(UNITY_CACHE_DIR = '.unity')
    env.SetDefault(UNITY_MAX_SOURCES = 15)
    env.SetDefault(UNITY_MIN_FILES = env.GetOption('num_jobs'))
    env.SetDefault(UNITY_DISABLE = False)
def _make_generator(base_generator):
def generator(env, source, target, cache_dir = None, *args, **kwargs):
if env['UNITY_DISABLE']:
return base_generator(target = target, source = source, *args, **kwargs)
unity_source_files = []
source_files, other_nodes = _flatten_source(source)
max_sources_per_file = max(1, math.ceil(len(source_files) / env['UNITY_MIN_FILES']))
sources_per_file = min(max_sources_per_file, env['UNITY_MAX_SOURCES'])
num_unity_files = math.ceil(len(source_files) / sources_per_file)
if not cache_dir:
cache_dir = env['UNITY_CACHE_DIR']
if not isinstance(cache_dir, str):
cache_dir = cache_dir.abspath
os.makedirs(cache_dir, exist_ok=True)
target_base_name = os.path.basename(target)
for idx in range(num_unity_files):
unity_filename = f'{cache_dir}/{target_base_name}_{idx}.cpp'
unity_source_files.append(unity_filename)
begin = sources_per_file*idx
end = sources_per_file*(idx+1)
env.UnitySource(
target = unity_filename,
source = source_files[begin:end]
)
if len(other_nodes) > 0:
print(f'Exluded {len(other_nodes)} node(s) from Unity build.')
return [base_generator(target = target, source = unity_source_files + other_nodes, *args, **kwargs)]
return generator
def _flatten_source(source : list):
source_files = []
other_nodes = []
for ele in source:
if isinstance(ele, list):
more_sources, more_other = _flatten_source(ele)
source_files.extend(more_sources)
other_nodes.extend(more_other)
elif isinstance(ele, File):
source_files.append(ele.abspath)
elif isinstance(ele, str):
source_files.append(ele)
else:
other_nodes.append(ele)
return source_files, other_nodes
def _generate_unity_file_msg(target, source, env : Environment):
    """Return the console message shown while a unity file is generated."""
    assert len(target) == 1
    return f'Generating {str(target[0])} from {len(source)} source files.'
def _generate_unity_file(target, source, env : Environment):
    """Write the unity source file: one #include line per original source."""
    assert len(target) == 1
    with open(target[0].abspath, 'w') as out:
        for node in source:
            # Escape backslashes so Windows paths survive inside the literal.
            escaped = node.abspath.replace("\\", "\\\\")
            out.write(f'#include "{escaped}"\n')

0
util/__init__.py Normal file
View File

View File

@@ -2,6 +2,5 @@
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$/external/scons-plus-plus" vcs="Git" />
</component>
</project>
</project>

View File

@@ -80,7 +80,7 @@
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="RunManager" selected="Custom Build Application.{{ project.executables[0].name }} {{ project.build_types[0] }}">
<component name="RunManager" selected="Custom Build Application.{% if project.executables|length > 0 %}{{ project.executables[0].name }}{% else %}{{ project.libraries[0].name }}{% endif %} {{ project.build_types[0] }}">
{% for executable in project.executables -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}

View File

@@ -0,0 +1,24 @@
"""
Scons++ Command Line Interface
"""
import argparse
import logging
from .ccjson import make_ccjson_parser
_STDOUT_LOG_FORMAT = '%(message)s'
def run_spp_cmd() -> int:
    """Entry point of the `spp` command line tool.

    Parses global options, dispatches to the selected subcommand handler
    and returns the process exit code (0 unless a handler exits/raises).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action='store_true')
    subcommands = parser.add_subparsers(required=True)
    make_ccjson_parser(subcommands)
    parsed_args = parser.parse_args()
    # --verbose switches the root logger to debug; output is bare messages.
    level = logging.DEBUG if parsed_args.verbose else logging.INFO
    logging.basicConfig(format=_STDOUT_LOG_FORMAT, level=level)
    parsed_args.handler(parsed_args)
    return 0

View File

@@ -0,0 +1,18 @@
import argparse
from .common import exec_spp, get_config_cache, require_project_file
def _cmd(args: argparse.Namespace) -> None:
    """Handler for the `ccjson` subcommand: regenerate compile_commands.json."""
    require_project_file()
    build_type = args.build_type
    if build_type == 'auto':
        # Reuse the build type from the cached configuration, default to debug.
        build_type = get_config_cache().get('build_type', 'debug')
    exec_spp((f'--build_type={build_type}', '--unity=disable', 'compile_commands.json'))
def make_ccjson_parser(subparsers) -> None:
    """Register the `ccjson` subcommand and its options on `subparsers`."""
    ccjson_parser: argparse.ArgumentParser = subparsers.add_parser('ccjson', help='Generate compile_commands.json')
    ccjson_parser.add_argument('--build_type', choices=('auto', 'debug', 'release_debug', 'release', 'profile'), default='auto')
    ccjson_parser.set_defaults(handler=_cmd)

View File

@@ -0,0 +1,51 @@
import json
import logging
from pathlib import Path
import shlex
import subprocess
import sys
from typing import Sequence
# Module-wide project root; defaults to the current working directory.
_project_root = Path('.').absolute()

def get_project_root() -> Path:
    """Return the directory currently treated as the S++ project root."""
    return _project_root

def set_project_root(path: Path) -> None:
    """Replace the module-wide project root."""
    global _project_root
    _project_root = path
def get_config_cache() -> dict:
    """Load cache/config_cache.json from the project root.

    Best effort: returns an empty dict when the file is missing, unreadable
    or does not contain a JSON object. Never raises.
    """
    cache_path = get_project_root() / 'cache' / 'config_cache.json'
    if not cache_path.exists():
        return {}
    try:
        with cache_path.open('r') as fp:
            data = json.load(fp)
        if isinstance(data, dict):
            return data
        logging.warning('Config cache is not a dictionary, ignoring it.')
        return {}
    except Exception as e:
        logging.error(f'Error while reading config cache: {e}.')
        return {}
def require_project_file() -> None:
    """Exit with code 1 unless the project root contains an SConstruct file."""
    sconstruct = get_project_root() / 'SConstruct'
    if sconstruct.exists():
        return
    logging.error('This command has to be run inside an existing S++ project folder. Exiting.')
    sys.exit(1)
def exec_checked(args: Sequence[str], **kwargs) -> None:
    """Run a command, streaming its output; raise CalledProcessError on failure."""
    logging.debug('exec_checked: "%s"', shlex.join(args))
    subprocess.run(args, check=True, stdout=sys.stdout, stderr=sys.stderr, **kwargs)
def exec_get_output(args: Sequence[str], **kwargs) -> str:
    """Run a command and return its captured stdout; raises on failure."""
    logging.debug('exec_get_output: "%s"', shlex.join(args))
    completed = subprocess.run(args, check=True, capture_output=True, text=True, **kwargs)
    return completed.stdout
def exec_spp(args: Sequence[str], **kwargs):
    """Invoke scons silently with auto-update disabled, plus the given args."""
    base_cmd = ('scons', '-s', '--disable_auto_update')
    exec_checked(base_cmd + tuple(args), **kwargs)

6
util/run_scons.py Normal file
View File

@@ -0,0 +1,6 @@
# use this to start SCons from the IDE for debugging
import sys

from SCons.Script.Main import main

if __name__ == '__main__':
    # Delegate straight to SCons' own CLI entry point, forwarding its exit code.
    sys.exit(main())

10
util/spp_cmd.py Executable file
View File

@@ -0,0 +1,10 @@
#!/usr/bin/env python3
# Launcher script for the S++ command line tool.
import os
import sys

if __name__ == '__main__':
    # Make the bundled 'python_module' folder next to this script importable
    # before importing the sppcmd package from it.
    sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'python_module'))
    from sppcmd import run_spp_cmd
    sys.exit(run_spp_cmd())

View File

@@ -0,0 +1,48 @@
{#- Visual Studio .sln template. Fixes: every Project(...) line must be closed
    by its own EndProject (previously a single EndProject followed all target
    projects), and the target GUID at the end of each Project line was
    emitted with doubled quotes (""{GUID}"") instead of "{GUID}". -#}
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.35122.118
MinimumVisualStudioVersion = 10.0.40219.1
{%- for executable in project.executables %}
Project("{{ generate_uuid(project.name, True) }}") = "{{ executable.name }}", "vs_project_files\{{ executable.name }}.vcxproj", "{{ generate_uuid('target_' + executable.name, True) }}"
EndProject
{%- endfor %}
{%- for library in project.libraries %}
Project("{{ generate_uuid(project.name, True) }}") = "{{ library.name }}", "vs_project_files\{{ library.name }}.vcxproj", "{{ generate_uuid('target_' + library.name, True) }}"
EndProject
{%- endfor %}
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{{ generate_uuid('solution_items', True) }}"
	ProjectSection(SolutionItems) = preProject
		SConstruct = SConstruct
	EndProjectSection
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ build_type_name }}|x64 = {{ build_type_name }}|x64
{%- endfor %}
	EndGlobalSection
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
{%- for executable in project.executables %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
{%- for library in project.libraries %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {{ generate_uuid("solution", True) }}
	EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,15 @@
{
"files": {
"solution.sln.jinja": {
"rename_to": "{{ project.name }}.sln"
},
"vs_project_files/target.vcxproj.jinja": {
"one_per": "target",
"rename_to": "vs_project_files/{{ target.name }}.vcxproj"
},
"vs_project_files/target.vcxproj.filters.jinja": {
"one_per": "target",
"rename_to": "vs_project_files/{{ target.name }}.vcxproj.filters"
}
}
}

View File

@@ -0,0 +1,73 @@
{#- .vcxproj.filters template: assigns every source/header file of a target
    to a Visual Studio solution-explorer filter (Source Files and Public/
    Private Header Files, with one sub-filter per second-level folder). -#}
{%- set source_files = get_sources(target) -%}
{%- set private_headers = get_headers('private\\' + target.module.folder) -%}
{%- set public_headers = get_headers('public\\' + target.module.folder) -%}
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  {#- Filter declarations (tree structure shown in the solution explorer). #}
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in source_files | folder_list(2) | sort %}
    <Filter Include="Source Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- if public_headers | length > 0 %}
    <Filter Include="Public Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in public_headers | folder_list(2) | sort %}
    <Filter Include="Public Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
{%- if private_headers | length > 0 %}
    <Filter Include="Private Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in private_headers | folder_list(2) | sort %}
    <Filter Include="Private Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
  </ItemGroup>
  {#- Map each compile unit to its filter; the first two path components
      (e.g. private\module) are stripped to derive the sub-filter name. #}
  <ItemGroup>
{%- for source_file in source_files %}
    <ClCompile Include="$(SolutionDir){{ source_file }}">
{%- set path = source_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Source Files\{{ path }}</Filter>
{%- else %}
      <Filter>Source Files</Filter>
{%- endif %}
    </ClCompile>
{%- endfor %}
  </ItemGroup>
  {#- Same mapping for public and private headers. #}
  <ItemGroup>
{%- for header_file in public_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Public Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Public Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
{%- for header_file in private_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Private Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Private Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
  </ItemGroup>
  {#- Show the module's SModule script in the project as well. #}
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,67 @@
{#- Makefile-style .vcxproj template: builds via the SCons command line while
    feeding IntelliSense the per-configuration defines/includes/flags.
    NOTE(review): ms_cxx_standard is computed here but never referenced in
    this template -- verify the /std flag is injected elsewhere. #}
{%- set ms_cxx_standard = {
    'c++14': 'c++14',
    'c++17': 'c++17',
    'c++20': 'c++20',
    'c++23': 'c++latest',
    'c++26': 'c++latest'}[project.cxx_standard] | default('c++14')
-%}
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
    <ProjectConfiguration Include="{{ build_type_name }}|x64">
      <Configuration>{{ build_type_name }}</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
{%- endfor %}
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <ProjectGuid>{{ generate_uuid('target_' + target.name, True) }}</ProjectGuid>
    <ProjectName>{{ target.name }}</ProjectName>
    <SConsCommandLine>{{ scons_exe }}</SConsCommandLine>
  </PropertyGroup>
  {#- Per-configuration output path and S++ build parameters used by the
      Build/Clean/Rebuild targets in spp.targets. #}
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <PropertyGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <TargetPath>$(SolutionDir){{ target.filename(build_type) }}</TargetPath>
    <SPPBuildType>{{ build_type }}</SPPBuildType>
    <SPPTargetType>{{ target.type }}</SPPTargetType>
  </PropertyGroup>
{%- endfor %}
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Label="Configuration">
    <ConfigurationType>Makefile</ConfigurationType>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ItemGroup>
{%- for source_file in get_sources(target) %}
    <ClCompile Include="$(SolutionDir){{ source_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
{%- for header_file in get_headers('private\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
{%- for header_file in get_headers('public\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
  {#- IntelliSense settings mirror what SCons passes to the real compiler. #}
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <ItemDefinitionGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <ClCompile>
      <PreprocessorDefinitions>{{ get_target_property(build_type, target.name, 'CPPDEFINES') | join(';') }};%(PreprocessorDefinitions);</PreprocessorDefinitions>
      <GenerateDebugInformation>{{ build_type != 'release' and 'true' or 'false' }}</GenerateDebugInformation>
      <AdditionalIncludeDirectories>{{ get_target_property(build_type, target.name, 'CPPPATH') | join(';') }};%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
      <MsExtensions>false</MsExtensions>
      <AdditionalOptions>{{ get_target_property(build_type, target.name, 'CCFLAGS') | join(' ') }}</AdditionalOptions> {# + get_target_property(build_type, target.name, 'CXXFLAGS')) #}
    </ClCompile>
  </ItemDefinitionGroup>
{%- endfor %}
  <Import Project="$(SolutionDir)external\scons-plus-plus\contrib\vs\spp.targets" />
</Project>

View File

@@ -1,8 +1,8 @@
{
"configurations": [
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
{%- for executable in project.executables -%}
{%- for build_type in project.build_types -%}
{%- set build_type_name = build_type | capitalize %}
{
"name": "{{ executable.name }} ({{ build_type | capitalize }})",
"type": "cppvsdbg",
@@ -12,9 +12,10 @@
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"console": "integratedTerminal"
}
{% endfor %}
{% endfor %}
"console": "integratedTerminal",
"preLaunchTask": "{{ executable.name }} {{ build_type_name }}"
},
{%- endfor %}
{%- endfor %}
]
}