Compare commits: `2769fd801f...master` (29 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 7fa3855498 | |
| | b9335a6247 | |
| | 79366c9098 | |
| | 5c17999cdf | |
| | 7b2e5c7432 | |
| | 07c2496342 | |
| | e19f6115be | |
| | 7fc8518db4 | |
| | 8b5d66dbec | |
| | 75c626c235 | |
| | 202331ba60 | |
| | 9b82fb87c0 | |
| | 45b4d164d0 | |
| | 43503dfec6 | |
| | 7916566d47 | |
| | b47ceb81dc | |
| | 6326454729 | |
| | 18293fdcf7 | |
| | 8371f96d4a | |
| | af53bf6084 | |
| | 4bae8d67a0 | |
| | 0ac1621494 | |
| | 8770bd97dc | |
| | 68f20bcf2d | |
| | e583c5ef6c | |
| | c3b5244eac | |
| | 88844ee5da | |
| | 161f2e52d8 | |
| | 9436d2c48d | |
.gitignore (vendored) — 220 changed lines

```diff
@@ -1 +1,219 @@
-__pycache__
+# Project files
+/.idea/
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[codz]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+# lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+# Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+# poetry.lock
+# poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+# pdm.lock
+# pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+# pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# Redis
+*.rdb
+*.aof
+*.pid
+
+# RabbitMQ
+mnesia/
+rabbitmq/
+rabbitmq-data/
+
+# ActiveMQ
+activemq-data/
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+# .idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
+
+# Streamlit
+.streamlit/secrets.toml
```
SConscript — 643 changed lines

```diff
@@ -1,23 +1,50 @@
 
+from collections.abc import Callable
 import copy
+from dataclasses import dataclass
 import enum
 import glob
+import importlib.util
 import inspect
 import json
 import multiprocessing
 import os
 import pathlib
+import platform
 import psutil
 import shutil
 import sys
 import time
+from typing import Any
 import uuid
 
+from SCons.Node import Node
+from SCons.Script import *
+
-class TargetType(enum.Enum):
-    PROGRAM = 0
-    STATIC_LIBRARY = 1
-    SHARED_LIBRARY = 2
+sys.path.append(os.path.join(Dir('.').abspath, 'lib'))
+
+from spp import _init_interface, Module, Target, TargetType
+
+_init_interface(globals=globals())
+
+_SPP_VERSION = (1, 1, 0)
+_DEBUG = {
+    'addons': False
+}
+
+_GCC_CPU_FEATURES_MAP = {
+    'mmx': '-mmmx',
+    'sse': '-msse',
+    'sse2': '-msse2',
+    'sse3': '-msse3',
+    'ssse3': '-mssse3',
+    'sse4': '-msse4',
+    'sse4a': '-msse4a',
+    'sse4.1': '-msse4.1',
+    'sse4.2': '-msse4.2',
+    'avx': '-mavx',
+    'avx2': '-mavx2'
+}
+
 class _VersionSpec:
     minimum_version = None
```
```diff
@@ -40,23 +67,20 @@ class _Dependency:
     depdeps: list = []
     cook_result: dict = {}
 
-class _Target:
-    name: str
-    target_type: TargetType
-    builder = None
-    args: list = []
-    kwargs: dict = {}
-    dependencies: list = []
-    target = None
 
 def _find_recipe(env: Environment, recipe_name: str):
     if recipe_name in env['SPP_RECIPES']:
         return env['SPP_RECIPES'][recipe_name]
-    import importlib.util
     source_file = None
 
     if not env['SPP_RECIPES_FOLDERS']:
-        env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
+        # for compatibility
+        if '_SPP_FALLBACK_RECIPE_REPO' in env:
+            repo_args: dict = env['_SPP_FALLBACK_RECIPE_REPO']
+            env.Warn('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
+            env.Warn(f'Falling back to default recipe repository ({repo_args["repo_name"]} at {repo_args["remote_url"]} ref={repo_args.get("git_ref", "master")}).')
+            env.RecipeRepo(**repo_args)
+        else:
+            env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
     for folder in env['SPP_RECIPES_FOLDERS']:
         from SCons import Node
         if folder is Node:
```
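The fallback path above forwards stored keyword arguments (`repo_name`, `remote_url`, `git_ref`) to `env.RecipeRepo`. A minimal sketch of registering a recipe repository explicitly from a project build script — the name and URL are hypothetical placeholders:

```python
# Sketch only: repository name and URL are placeholders, not a real repo.
env.RecipeRepo(
    repo_name = 'recipes',
    remote_url = 'https://example.com/spp-recipes.git',
    git_ref = 'master'   # the fallback path above also defaults to 'master'
)
```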
```diff
@@ -89,9 +113,36 @@ def _cook(env: Environment, recipe_name: str):
         _run_cook(dependency)
     return dependency.cook_result
 
+def _normalize_module_path(env: Environment, path: str) -> str|None:
+    module_root = env.Dir('#').abspath
+    try:
+        relative = os.path.relpath(path, module_root)
+        if relative[:2] == '..':
+            return None
+        return os.path.join(*os.path.split(relative)[1:])
+    except ValueError: # may be thrown on Windows if the module is on a different drive than the project
+        return None
+
 def _module(env: Environment, file: str):
+    folder = _normalize_module_path(env, env.File(file).dir.abspath)
+    if folder is not None: # only include modules inside the source tree
+        dirname = os.path.basename(folder)
+        env.Append(SPP_MODULES = {folder: Module(
+            name=dirname,
+            folder=folder,
+            description='',
+            cxx_namespace=dirname
+        )})
     return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
 
+def _module_config(env: Environment, **kwargs) -> None:
+    module_folder = _normalize_module_path(env, env.Dir('.').abspath)
+    module = env['SPP_MODULES'].get(module_folder)
+    if module is None:
+        env.Warn(f'No module config found for module at {module_folder}')
+        return
+    module.__dict__.update(kwargs)
+
 def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
     env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
                CPPDEFINES = lib_conf.get('CPPDEFINES', []),
```
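`_module_config` is exposed as `env.ModuleConfig` in the AddMethod block near the end of the file, so a module's own SConscript can override the defaults that `_module` fills in. A minimal sketch with hypothetical values:

```python
# Inside a module's SConscript; the values are illustrative, not from the repo.
env.ModuleConfig(
    description = 'audio playback and mixing',
    cxx_namespace = 'audio'
)
```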
```diff
@@ -128,7 +179,7 @@ def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> No
         _inject_list(kwargs, dependency.cook_result, 'LINKFLAGS')
         for depdep in dependency.depdeps:
             _inject_dependency(depdep, kwargs)
-    elif isinstance(dependency, _Target):
+    elif isinstance(dependency, Target):
         _inject_list(kwargs, dependency.kwargs, 'CPPPATH')
         _inject_list(kwargs, dependency.kwargs, 'CPPDEFINES')
         _inject_list(kwargs, dependency.kwargs, 'LIBPATH')
```
```diff
@@ -220,11 +271,7 @@ def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
         }[type]
         return f'lib{name}.{ext}'
     elif os.name == 'nt':
-        ext = {
-            'static': 'lib',
-            'shared': 'dll'
-        }[type]
-        return f'{name}.{ext}'
+        return f'{name}.lib'
     else:
         raise Exception('What OS is this?')
```
```diff
@@ -244,8 +291,19 @@ def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'sta
         return None
     raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
 
-def _error(env: Environment, message: str):
-    print(message, file=sys.stderr)
+def _debug(cond: str, msg: str) -> None:
+    if _DEBUG.get(cond):
+        print(f'[DEBUG] [{cond}] {msg}')
+
+def _info(env: Environment|None, message: str) -> None:
+    if not GetOption('silent'):
+        print(f'[INFO] {message}')
+
+def _warn(env: Environment|None, message: str) -> None:
+    print(f'[WARN] {message}', file=sys.stderr)
+
+def _error(env: Environment|None, message: str) -> None:
+    print(f'[ERROR] {message}', file=sys.stderr)
     Exit(1)
 
 def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
```
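These helpers are attached to the environment as `Info`, `Warn`, and `Error` in the AddMethod block further down, giving build scripts uniformly prefixed output. Expected behavior, as a sketch:

```python
env.Info('configuring targets')    # prints '[INFO] ...' unless scons runs with --silent
env.Warn('optional tool missing')  # prints '[WARN] ...' to stderr
env.Error('cannot continue')       # prints '[ERROR] ...' to stderr, then Exit(1)
```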
```diff
@@ -264,8 +322,7 @@ def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
             result[key] = mergedValue
         elif valueA != valueB:
             return None
-        else:
-            result[key] = valueA
+        result[key] = valueA
     for key, valueB in dictB.items():
         if key not in result:
             result[key] = valueB
```
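From the fragment above, equal values are kept, nested dicts merge recursively, and a genuine conflict aborts the whole merge. A sketch of the presumable behavior:

```python
_try_merge_dicts({'a': 1}, {'a': 1, 'b': 2})   # -> {'a': 1, 'b': 2}
_try_merge_dicts({'a': 1}, {'a': 2})           # -> None (conflicting values)
```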
```diff
@@ -394,19 +451,22 @@ def _wrap_builder(builder, target_type: TargetType):
             kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
         if 'LIBS' not in kwargs and 'LIBS' in env:
             kwargs['LIBS'] = copy.copy(env['LIBS'])
-        if 'source' in kwargs:
-            source = kwargs['source']
-            if not isinstance(source, list):
-                source = [source]
-            new_source = []
-            for src in source:
-                if isinstance(src, str):
-                    new_source.append(env.Entry(src))
-                else:
-                    new_source.append(src)
-            kwargs['source'] = new_source
 
-        target = _Target()
+        def _fix_filearg(filearg: str|Entry|list[str|Entry]) -> list[Entry]:
+            if not isinstance(filearg, list):
+                filearg = (filearg,)
+            result = []
+            for ele in filearg:
+                if isinstance(ele, str):
+                    result.append(env.Entry(ele))
+                else:
+                    result.append(ele)
+            return result
+
+        if 'source' in kwargs:
+            kwargs['source'] = _fix_filearg(kwargs['source'])
+
+        target = Target()
         if 'name' in kwargs:
             target.name = kwargs['name']
         else:
```
```diff
@@ -420,6 +480,13 @@ def _wrap_builder(builder, target_type: TargetType):
         target.args = args
         target.kwargs = kwargs
         target.dependencies = target_dependencies
+        module_folder = _normalize_module_path(env, env.Dir('.').abspath)
+        if module_folder is not None:
+            module = env['SPP_MODULES'].get(module_folder)
+            if module is None:
+                env.Warn(f'No module config found for target {target.name} at {module_folder}')
+            else:
+                target.module = module
         env.Append(SPP_TARGETS = [target])
         if not target.dependencies:
             _build_target(target)
```
```diff
@@ -428,7 +495,7 @@ def _wrap_builder(builder, target_type: TargetType):
 
 def _wrap_default(default):
     def _wrapped(env, arg):
-        if isinstance(arg, _Target):
+        if isinstance(arg, Target):
             env.Append(SPP_DEFAULT_TARGETS = [arg])
         elif isinstance(arg, dict) and '_target' in arg:
             default(arg['_target'])
```
```diff
@@ -438,8 +505,8 @@ def _wrap_default(default):
 
 def _wrap_depends(depends):
     def _wrapped(env, dependant, dependency):
-        if isinstance(dependant, _Target) or isinstance(dependency, _Target):
-            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
+        if isinstance(dependant, Target) or isinstance(dependency, Target):
+            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency, depends)])
             return
         elif isinstance(dependant, dict) and '_target' in dependant:
             dependant = dependant['_target']
```
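Storing the wrapped `depends` callable in the tuple lets `_finalize` replay the correct SCons relation later — the same wrapper is reused for `Ignore` and `Requires` in the AddMethod block at the end of the file. A usage sketch with hypothetical targets:

```python
# 'app' and 'assets' are hypothetical targets created via the wrapped builders.
env.Depends(app, assets)   # queued in SPP_TARGET_DEPENDENCIES, applied in Finalize
env.Requires(app, assets)  # same mechanism, replayed with SCons' Requires
```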
```diff
@@ -448,7 +515,7 @@ def _wrap_depends(depends):
         depends(dependant, dependency)
     return _wrapped
 
-def _build_target(target: _Target):
+def _build_target(target: Target):
     for dependency in target.dependencies:
         _inject_dependency(dependency, target.kwargs)
     if 'LIBS' in target.kwargs:
```
```diff
@@ -458,14 +525,16 @@ def _build_target(target: _Target):
                 target.kwargs['LIBS'].remove(lib)
                 target.kwargs['LIBS'].append(env.File(lib))
                 pass
-            elif isinstance(lib, _Target):
+            elif isinstance(lib, Target):
                 if not lib.target:
                     _build_target(lib)
                 target.kwargs['LIBS'].remove(lib)
                 target.kwargs['LIBS'].append(lib.target)
     new_kwargs = target.kwargs.copy()
-    if 'target' in new_kwargs: # there should always be a target, right?
+    if 'target' in new_kwargs and target.target_type != TargetType.MISC: # there should always be a target, right?
         new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
+        if os.name == 'nt' and 'PDB' not in new_kwargs:
+            new_kwargs['PDB'] = f'{new_kwargs["target"]}.pdb'
     target.target = target.builder(*target.args, **new_kwargs)
 
 def _version_to_string(version) -> str:
```
```diff
@@ -476,6 +545,7 @@ def _finalize(env: Environment):
         _generate_project(generate_project)
         Exit(0)
 
+    _hook_pre_finalize.invoke()
    version_requirements = {dep.name: {
         'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
         'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
```
```diff
@@ -494,19 +564,23 @@ def _finalize(env: Environment):
             }
         }, f)
 
+    if dump is not None:
+        _dump()
+
     for target in env['SPP_TARGETS']:
         _build_target(target)
     for target in env['SPP_DEFAULT_TARGETS']:
         env.Default(target.target)
-    for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']:
-        if isinstance(dependant, _Target):
+    for dependant, dependency, depends in env['SPP_TARGET_DEPENDENCIES']:
+        if isinstance(dependant, Target):
             dependant = dependant.target
-        if isinstance(dependency, _Target):
+        if isinstance(dependency, Target):
             dependency = dependency.target
-        env.Depends(dependant, dependency)
+        depends(dependant, dependency)
 
-def _find_target(env: Environment, target_name: str) -> '_Target|None':
+    _hook_post_finalize.invoke()
+
+def _find_target(env: Environment, target_name: str) -> 'Target|None':
     for target in env['SPP_TARGETS']:
         if target.name == target_name:
             return target
```
```diff
@@ -546,7 +620,8 @@ def _generate_project(project_type: str) -> None:
 
     source_folder, target_folder = {
         'clion': (os.path.join(_spp_dir.abspath, 'util', 'clion_project_template'), Dir('#.idea').abspath),
-        'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath)
+        'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath),
+        'vs': (os.path.join(_spp_dir.abspath, 'util', 'vs_project_template'), Dir('#').abspath)
     }.get(project_type, (None, None))
     if not source_folder:
         _error(None, 'Invalid project type option.')
```
```diff
@@ -561,86 +636,195 @@ def _generate_project(project_type: str) -> None:
     except Exception as e:
         print(f'Error loading UUID cache: {e}')
 
-    def _generate_uuid(name: str = '') -> str:
+    def _generate_uuid(name: str = '', ms_style: bool = False) -> str:
         nonlocal save_uuid_cache
         if name and name in uuid_cache:
-            return uuid_cache[name]
-        new_uuid = str(uuid.uuid4())
-        if name:
-            uuid_cache[name] = new_uuid
-            save_uuid_cache = True
-        return new_uuid
+            result = uuid_cache[name]
+        else:
+            result = str(uuid.uuid4())
+            if name:
+                uuid_cache[name] = result
+                save_uuid_cache = True
+        if ms_style:
+            return f'{{{result.upper()}}}'
+        return result
 
     root_path = pathlib.Path(env.Dir('#').abspath)
+
+    def _make_entry(target, type, prefix, suffix) -> str:
+        def _full_path(build_type) -> str:
+            trgt = _target_entry(target.kwargs['target'])
+            full_path = pathlib.Path(trgt.abspath).relative_to(root_path)
+            full_path = full_path.parent / f'{env.subst(prefix)}{full_path.name}-{build_type}{env.subst(suffix)}'
+            return str(full_path)
+        return {
+            'name': target.name,
+            'filename': _full_path,
+            'target': target,
+            'type': type,
+            'module': target.module
+        }
     def _get_executables() -> list:
         result = []
         for target in env['SPP_TARGETS']:
             if target.target_type == TargetType.PROGRAM:
-                trgt = _target_entry(target.kwargs['target'])
-                def _exe_path(build_type) -> str:
-                    exe_path = pathlib.Path(trgt.abspath).relative_to(root_path)
-                    exe_path = exe_path.parent / f'{env.subst("$PROGPREFIX")}{exe_path.name}-{build_type}{env.subst("$PROGSUFFIX")}'
-                    return str(exe_path)
-                result.append({
-                    'name': target.name,
-                    'filename': _exe_path
-                })
+                result.append(_make_entry(target, 'executable', '$PROGPREFIX', '$PROGSUFFIX'))
         return result
     def _get_libraries() -> list:
         result = []
         for target in env['SPP_TARGETS']:
             if target.target_type == TargetType.STATIC_LIBRARY:
-                trgt = _target_entry(target.kwargs['target'])
-                def _lib_path(build_type) -> str:
-                    lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
-                    lib_path = lib_path.parent / f'{env.subst("$LIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$LIBSUFFIX")}'
-                    return str(lib_path)
-                result.append({
-                    'name': target.name,
-                    'filename': _lib_path
-                })
+                result.append(_make_entry(target, 'executable', '$LIBPREFIX', '$LIBSUFFIX'))
             elif target.target_type == TargetType.SHARED_LIBRARY:
-                trgt = _target_entry(target.kwargs['target'])
-                def _lib_path(build_type) -> str:
-                    lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
-                    lib_path = lib_path.parent / f'{env.subst("$SHLIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$SHLIBSUFFIX")}'
-                    return str(lib_path)
-                result.append({
-                    'name': target.name,
-                    'filename': _lib_path
-                })
+                result.append(_make_entry(target, 'executable', '$SHLIBPREFIX', '$SHLIBSUFFIX'))
         return result
+    def _get_modules() -> list:
+        result = []
+        for folder, module in env['SPP_MODULES'].items():
+            result.append({
+                'name': module.name,
+                'private_folder': os.path.join(config['PRIVATE_FOLDER'], folder),
+                'public_folder': os.path.join(config['PUBLIC_FOLDER'], folder),
+                'description': module.description,
+                'cxx_namespace': module.cxx_namespace
+            })
+        return result
     def _escape_path(input: str) -> str:
         return input.replace('\\', '\\\\')
 
+    def _strip_path_prefix(path: str, skip_eles: int) -> str:
+        for _ in range(skip_eles):
+            pos = path.find(os.sep)
+            if pos < 0:
+                return ''
+            path = path[pos+1:]
+        return path
+
+    def _folder_list(file_list: list[str], skip_eles: int = 0) -> list[str]:
+        result = {}
+        for file in file_list:
+            folder = os.path.dirname(file)
+            folder = _strip_path_prefix(folder, skip_eles)
+            if folder == '':
+                continue
+            while True:
+                result[folder] = True
+                # also add all parents
+                sep_pos = folder.rfind(os.sep)
+                if sep_pos < 0:
+                    break
+                folder = folder[0:sep_pos]
+        return list(result.keys())
+
+    def _get_sources(target_dict: dict) -> list[str]:
+        target : Target = target_dict['target']
+        sources = target.kwargs.get('source')
+        return [str(pathlib.Path(source.abspath).relative_to(root_path)) for source in Flatten(sources)]
+
+    def _get_headers(folder: str) -> list[str]:
+        result = []
+        for root, _, files in os.walk(folder):
+            for file in files:
+                _, ext = os.path.splitext(file)
+                if ext in ('.h', '.hpp', '.inl', '.hxx'):
+                    result.append(os.path.join(root, file))
+        return result
+
+    def _get_target_property(build_type: str, target: str, path: str) -> Any:
+        import subprocess
+        cmd = (shutil.which('scons'), '--silent', f'--build_type={build_type}', '--dump=targets', '--dump_format=json', f'--dump_path={target}/{path}')
+        output = subprocess.check_output(cmd, text=True).strip()
+        try:
+            return json.loads(output)
+        except json.JSONDecodeError as e:
+            env.Warn(f'Command: {cmd}')
+            env.Warn(f'Output: {output}')
+            raise e
+
+    executables = _get_executables()
+    libraries = _get_libraries()
+    modules = _get_modules()
+
     jinja_env = jinja2.Environment()
     jinja_env.globals['generate_uuid'] = _generate_uuid
+    jinja_env.globals['get_sources'] = _get_sources
+    jinja_env.globals['get_headers'] = _get_headers
+    jinja_env.globals['get_target_property'] = _get_target_property
     jinja_env.globals['project'] = {
         'name': env.Dir('#').name,
-        'executables': _get_executables(),
-        'libraries': _get_libraries(),
-        'build_types': ['debug', 'release_debug', 'release', 'profile']
+        'executables': executables,
+        'libraries': libraries,
+        'modules': modules,
+        'build_types': ['debug', 'release_debug', 'release', 'profile'],
+        'cxx_standard': env['CXX_STANDARD']
     }
     jinja_env.globals['scons_exe'] = shutil.which('scons')
     jinja_env.globals['nproc'] = multiprocessing.cpu_count()
 
     jinja_env.filters['escape_path'] = _escape_path
+    jinja_env.filters['strip_path_prefix'] = _strip_path_prefix
+    jinja_env.filters['folder_list'] = _folder_list
+    jinja_env.filters['basename'] = os.path.basename
+    jinja_env.filters['dirname'] = os.path.dirname
+
     source_path = pathlib.Path(source_folder)
     target_path = pathlib.Path(target_folder)
+
+    tmpl_config = {}
+    tmpl_config_file = source_path / 'template.json'
+    if tmpl_config_file.exists():
+        with tmpl_config_file.open('r') as f:
+            tmpl_config = json.load(f)
+    files_config = tmpl_config.get('files', {})
+
     for source_file in source_path.rglob('*'):
-        if source_file.is_file():
-            target_file = target_path / (source_file.relative_to(source_path))
+        if source_file == tmpl_config_file:
+            continue
+        if not source_file.is_file():
+            continue
+        source_file_relative = source_file.relative_to(source_path)
+        file_config = files_config.get(str(source_file_relative).replace('\\', '/'), {})
+        one_per = file_config.get('one_per', 'project')
+
+        def generate_file_once() -> None:
+            is_jinja = (source_file.suffix == '.jinja')
+            if 'rename_to' in file_config:
+                new_filename = jinja_env.from_string(file_config['rename_to']).render()
+                target_file = target_path / new_filename
+            else:
+                target_file = target_path / source_file_relative
+                if is_jinja:
+                    target_file = target_file.with_suffix('')
             target_file.parent.mkdir(parents=True, exist_ok=True)
-            if source_file.suffix != '.jinja':
+            if not is_jinja:
                 shutil.copyfile(source_file, target_file)
-                continue
+                return
             with source_file.open('r') as f:
-                templ = jinja_env.from_string(f.read())
-            target_file = target_file.with_suffix('')
+                try:
+                    templ = jinja_env.from_string(f.read())
+                except jinja2.TemplateSyntaxError as e:
+                    e.filename = str(source_file)
+                    raise e
             with target_file.open('w') as f:
                 f.write(templ.render())
+
+        try:
+            if one_per == 'project':
+                generate_file_once()
+            elif one_per == 'target':
+                for executable in executables:
+                    jinja_env.globals['target'] = executable
+                    generate_file_once()
+                for library in libraries:
+                    jinja_env.globals['target'] = library
+                    generate_file_once()
+            else:
+                raise ValueError(f'invalid value for "one_per": {one_per}')
+        except jinja2.TemplateSyntaxError as e:
+            env.Error(f'Jinja syntax error at {e.filename}:{e.lineno}: {e.message}')
+            Exit(1)
+
     if save_uuid_cache:
         try:
```
```diff
@@ -649,8 +833,150 @@ def _generate_project(project_type: str) -> None:
         except Exception as e:
             print(f'Error writing uuid cache: {e}')
 
+def _dump() -> None:
+    def _dump_as_text(data: Any) -> str:
+        from pprint import pformat
+        dump_name = {
+            'env': 'Environment',
+            'config': 'Configuration',
+            'modules': 'Modules',
+            'targets': 'Targets'
+        }[dump]
+
+        return '\n'.join((
+            f'==== Begin {dump_name} Dump ====',
+            pformat(data),
+            f'==== End {dump_name} Dump ===='
+        ))
+    def _dump_as_json(data: Any) -> str:
+        class _Encoder(json.JSONEncoder):
+            def default(self, o) -> dict:
+                if isinstance(o, object):
+                    if hasattr(o, '__iter__'):
+                        return list(o)
+                    elif isinstance(o, Node):
+                        return o.abspath
+                    return o.__dict__
+                return super().default(o)
+        return json.dumps(data, cls=_Encoder)
+    def _apply_path(data: Any, path: str) -> Any:
+        for part in path.split('/'):
+            if isinstance(data, dict):
+                if part not in data:
+                    _error(f'Invalid path specified. No key {part} in dict {data}.')
+                    Exit(1)
+                data = data[part]
+            elif isinstance(data, list):
+                try:
+                    part = int(part)
+                except ValueError:
+                    _error(f'Invalid path specified. {part} is not a valid list index.')
+                    Exit(1)
+                if part < 0 or part >= len(data):
+                    _error(f'Invalid path specified. {part} is out of list range.')
+                    Exit(1)
+                data = data[part]
+            elif isinstance(data, object):
+                data = data.__dict__
+                if part not in data:
+                    _error(f'Invalid path specified. No attribute {part} in object {data}.')
+                    Exit(1)
+                data = data[part]
+            else:
+                _error(f'Invalid path specified. {data} has no properties.')
+                Exit(1)
+        return data
+    def _targets() -> dict:
+        result = {}
+        for target in env['SPP_TARGETS']:
+            kwargs = target.kwargs.copy()
+            for dependency in target.dependencies:
+                _inject_dependency(dependency, kwargs)
+            result[target.name] = {
+                'target_type': target.target_type.name,
+                'args': target.args,
+                # 'kwargs': kwargs, <- circular dependency here and the json encoder doesn't like that
+                'CPPDEFINES': kwargs.get('CPPDEFINES', env['CPPDEFINES']),
+                'CPPPATH': kwargs.get('CPPPATH', env['CPPPATH']),
+                'CFLAGS': kwargs.get('CFLAGS', env['CFLAGS']),
+                'CCFLAGS': kwargs.get('CCFLAGS', env['CCFLAGS']),
+                'CXXFLAGS': kwargs.get('CXXFLAGS', env['CXXFLAGS'])
+            }
+        return result
+
+    data = {
+        'env': env.Dictionary,
+        'config': lambda: config,
+        'modules': lambda: env['SPP_MODULES'],
+        'targets': _targets
+    }[dump]()
+
+    global dump_path
+    dump_path = dump_path.strip()
+    if dump_path != '':
+        data = _apply_path(data, dump_path)
+    dump_fn = {
+        'text': _dump_as_text,
+        'json': _dump_as_json
+    }[dump_format]
+    print(dump_fn(data))
+    Exit(0)
+
+
+class _Hook:
+    def __init__(self) -> None:
+        self._funcs: list[Callable] = []
+
+    def add_func(self, func: Callable) -> None:
+        self._funcs.append(func)
+
+    def invoke(self, **kwargs) -> None:
+        for func in self._funcs:
+            func(**kwargs)
+
+
+_hook_pre_environment = _Hook()
+_hook_post_environment = _Hook()
+_hook_config_complete = _Hook()
+_hook_pre_finalize = _Hook()
+_hook_post_finalize = _Hook()
+
+
+def _load_addon(modname: str, modpath: pathlib.Path) -> None:
+    _debug('addons', f'Loading addon {modname} from {modpath}.')
+
+    spec = importlib.util.spec_from_file_location(modname, modpath)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+
+    if hasattr(module, 'available') and not module.available():
+        _debug('addons', f'Addon {modname} is not available and will not be loaded.')
+        return
+
+    def _add_hook(func_name: str, hook: _Hook) -> None:
+        if hasattr(module, func_name):
+            hook.add_func(getattr(module, func_name))
+            _debug('addons', f'Addon {modname} registered a {func_name} hook.')
+
+    _add_hook('pre_environment', _hook_pre_environment)
+    _add_hook('post_environment', _hook_post_environment)
+    _add_hook('config_complete', _hook_config_complete)
+    _add_hook('pre_finalize', _hook_pre_finalize)
+    _add_hook('post_finalize', _hook_post_finalize)
+
+
+def _load_addons(folder: pathlib.Path) -> None:
+    _debug('addons', f'Loading addons from {folder}.')
+    for script_file in folder.glob('*.py'):
+        _load_addon(script_file.name[:-3], script_file)
+
+
+_ALLOWED_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
+def _sanitize_identifier(name: str) -> str:
+    chrs = []
+    for chr in name:
+        if chr in _ALLOWED_CHARS:
+            chrs.append(chr)
+        else:
+            chrs.append('_')
+    return ''.join(chrs)
+
+config: dict
 Import('config')
 
 if not config.get('PROJECT_NAME'):
```
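An addon is a plain Python file in an addon directory: `_load_addon` imports it, optionally consults `available()`, and registers any functions whose names match the five hook names. A minimal hypothetical addon:

```python
# addons/example.py — hypothetical addon; the file name and behavior are illustrative.
def available() -> bool:
    # returning False makes _load_addon skip this addon entirely
    return True

def post_environment() -> None:
    # invoked by _hook_post_environment.invoke() once the Environment exists
    print('example addon active')
```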
```diff
@@ -659,9 +985,14 @@ if not config.get('CXX_STANDARD'):
     config['CXX_STANDARD'] = 'c++23'
 if not config.get('CXX_NO_EXCEPTIONS'):
     config['CXX_NO_EXCEPTIONS'] = False
 if not config.get('PREPROCESSOR_PREFIX'):
-    config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
+    config['PREPROCESSOR_PREFIX'] = _sanitize_identifier(config['PROJECT_NAME']).upper() # TODO: may be nicer?
+if not config.get('SPP_TARGET_VERSION'):
+    config['SPP_TARGET_VERSION'] = (1, 0, 0)
+if not config.get('PRIVATE_FOLDER'):
+    config['PRIVATE_FOLDER'] = 'private'
+if not config.get('PUBLIC_FOLDER'):
+    config['PUBLIC_FOLDER'] = 'public'
 
 if 'COMPILATIONDB_FILTER_FILES' not in config:
     config['COMPILATIONDB_FILTER_FILES'] = True
```
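`_sanitize_identifier` guarantees the preprocessor prefix is a valid identifier even when the project name contains spaces or punctuation. For a hypothetical name:

```python
_sanitize_identifier('my-project 2').upper()   # -> 'MY_PROJECT_2'
```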
```diff
@@ -727,16 +1058,43 @@ AddOption(
 )
 
 AddOption(
-    '--dump_env',
-    dest = 'dump_env',
+    '--disable_auto_update',
+    dest = 'disable_auto_update',
     action = 'store_true'
 )
+
+AddOption(
+    '--dump',
+    dest = 'dump',
+    type = 'choice',
+    choices = ('env', 'config', 'modules', 'targets'),
+    nargs = 1,
+    action = 'store'
+)
+
+AddOption(
+    '--dump_format',
+    dest = 'dump_format',
+    type = 'choice',
+    choices = ('text', 'json'),
+    nargs = 1,
+    action = 'store',
+    default = 'text'
+)
+
+AddOption(
+    '--dump_path',
+    dest = 'dump_path',
+    nargs = 1,
+    action = 'store',
+    default = ''
+)
+
 AddOption(
     '--generate_project',
     dest = 'generate_project',
     type = 'choice',
-    choices = ('clion', 'vscode'),
+    choices = ('clion', 'vscode', 'vs'),
     nargs = 1,
     action = 'store'
 )
```
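The three `--dump*` options work together: `--dump` picks the data set, `--dump_format` the encoding, and `--dump_path` drills into it with `/`-separated keys and list indices (see `_apply_path` above). `_get_target_property` drives exactly this pipeline from a project template; a sketch with a hypothetical target name:

```python
# Equivalent to: scons --silent --build_type=debug --dump=targets \
#                --dump_format=json --dump_path=core/CPPDEFINES
_get_target_property('debug', 'core', 'CPPDEFINES')   # 'core' is hypothetical
```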
```diff
@@ -750,7 +1108,10 @@ enable_asan = GetOption('enable_asan')
 config_file = GetOption('config_file')
 compiler = GetOption('compiler')
 update_repositories = GetOption('update_repositories')
-dump_env = GetOption('dump_env')
+disable_auto_update = GetOption('disable_auto_update')
+dump = GetOption('dump')
+dump_format = GetOption('dump_format')
+dump_path = GetOption('dump_path')
 generate_project = GetOption('generate_project')
 
 default_CC = {
```
```diff
@@ -780,10 +1141,27 @@ vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilatio
          ' project.', config['COMPILATIONDB_FILTER_FILES'])
 vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
 vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
+vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())
+
+if 'VARIABLES' in config:
+    for vardef in config['VARIABLES']:
+        vars.Add(*vardef)
 
-tools = ['default', 'compilation_db', 'unity_build']
+tools = ['default', 'compilation_db']
 if 'TOOLS' in config:
+    assert isinstance(config['TOOLS'], list)
     tools.extend(config['TOOLS'])
+addon_dirs = [pathlib.Path(Dir('.').abspath) / 'addons']
+if 'ADDON_DIRS' in config:
+    assert isinstance(config['ADDON_DIRS'], list)
+    addon_dirs.extend(config['ADDON_DIRS'])
+
+for addon_dir in addon_dirs:
+    if not isinstance(addon_dir, pathlib.Path):
+        addon_dir = pathlib.Path(addon_dir)
+    _load_addons(addon_dir)
+
+_hook_pre_environment.invoke()
+
 env = Environment(tools = tools, variables = vars, ENV = os.environ)
 env['SPP_RECIPES_FOLDERS'] = []
```
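Each entry of `config['VARIABLES']` is unpacked into `vars.Add`, so project configs can declare their own SCons build variables. A sketch with a hypothetical variable:

```python
# (name, help text, default) — forwarded as vars.Add(*vardef)
config['VARIABLES'] = [
    ('ENABLE_TOOLS', 'Build the auxiliary tools.', False),
]
```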
```diff
@@ -796,12 +1174,12 @@ env['DEPS_CFLAGS'] = []
 env['DEPS_CXXFLAGS'] = []
 env['DEPS_LINKFLAGS'] = []
 
-print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
+_info(None, f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
 try:
     os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
 except:
     env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
-    print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
+    _info(None, f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
     os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
 env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
 env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
```
```diff
@@ -841,6 +1219,7 @@ env.Append(CXXFLAGS = [])
 env.Append(CPPPATH = [])
 env.Append(CPPDEFINES = [])
 env.Append(LINKFLAGS = [])
+env.Append(LIBS = [])
 
 # init SPP environment variables
 env['SPP_DIR'] = _spp_dir.abspath
```
```diff
@@ -849,11 +1228,15 @@ env['SPP_DEFAULT_TARGETS'] = []
 env['SPP_TARGET_DEPENDENCIES'] = []
 env['SPP_DEPENDENCIES'] = {}
 env['SPP_RECIPES'] = {}
+env['SPP_MODULES'] = {} # maps from folder to Module
+env['SPP_CPU_FEATURES'] = config.get('USE_CPU_FEATURES', [])
 
 env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
 if variant:
     env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
 
+_hook_post_environment.invoke()
+
 # create the cache dir
 os.makedirs(env['CACHE_DIR'], exist_ok=True)
 cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
```
```diff
@@ -878,8 +1261,8 @@ if os.path.exists(update_stamp_file):
     except:
         pass
     boot_time = psutil.boot_time()
-    if boot_time > update_time:
-        print('Didn\'t update repositories since last boot, doing it now...')
+    if not disable_auto_update and boot_time > update_time:
+        _info(None, 'Didn\'t update repositories since last boot, doing it now...')
         env['UPDATE_REPOSITORIES'] = True
 if env['UPDATE_REPOSITORIES']:
     with open(update_stamp_file, 'w') as f:
```
```diff
@@ -920,15 +1303,17 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
         # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
         # also -Wdangling-reference seems to produce a lot of false positives
         # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?))
+        # also -Warray-bounds because GCC kept complaining when I was constructing a string from a string_view
         # -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
         # -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
-        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
+        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized', '-Wno-array-bounds'])
         env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
     else: # clang only
         # no-gnu-anonymous-struct - we don't care
         # no-missing-field-initializers - useful in some cases, annoying in most
         # no-ambiguous-reversed-operator - should be quite useful, but we get a false positive, apparently?
-        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator'])
+        # no-parentheses-equality - false positive for fold expressions and doesn't seem to useful anyway
+        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator', '-Wno-parentheses-equality'])
         env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
     if build_type == 'debug':
         env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
```
```diff
@@ -950,6 +1335,13 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
         env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
         env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
 
+    for feature in env['SPP_CPU_FEATURES']:
+        flag = _GCC_CPU_FEATURES_MAP.get(feature)
+        if flag is None:
+            _warn(None, f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
+        else:
+            env.Append(CCFLAGS = [flag])
+
 elif env['COMPILER_FAMILY'] == 'cl':
     cxx_version_name = {
         'c++14': 'c++14',
```
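With `USE_CPU_FEATURES` set in the project config, the loop above translates feature names through `_GCC_CPU_FEATURES_MAP` into compiler flags for GCC/Clang; unrecognized names only warn. A sketch:

```python
config['USE_CPU_FEATURES'] = ['sse4.2', 'avx2']
# becomes env['SPP_CPU_FEATURES'], and the loop appends '-msse4.2' and '-mavx2'
# to CCFLAGS; an unknown feature name would only produce a [WARN] message.
```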
```diff
@@ -964,7 +1356,7 @@ elif env['COMPILER_FAMILY'] == 'cl':
     # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
     # C4275: same as above
     env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
-                          f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8'])
+                          f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8', '/Zc:preprocessor'])
     env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
     env.Append(DEPS_CXXFLAGS = ['/Zc:char8_t', '/utf-8', '/vmg'])
     if env['CXX_NO_EXCEPTIONS']:
```
```diff
@@ -975,12 +1367,14 @@ elif env['COMPILER_FAMILY'] == 'cl':
     if env['SHOW_INCLUDES']:
         env.Append(CCFLAGS = ['/showIncludes'])
     if build_type == 'debug':
-        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
+        #env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/Od', '/MDd'], LINKFLAGS = ' /DEBUG')
         env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     elif build_type == 'release_debug' or build_type == 'profile':
-        env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ' /DEBUG')
+        #env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/O2', '/MD'], LINKFLAGS = ' /DEBUG')
         env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     else:
```
```diff
@@ -1008,32 +1402,37 @@ env.AddMethod(_make_interface, 'MakeInterface')
 env.AddMethod(_lib_filename, 'LibFilename')
 env.AddMethod(_find_executable, 'FindExecutable')
 env.AddMethod(_find_lib, 'FindLib')
+env.AddMethod(_info, 'Info')
+env.AddMethod(_warn, 'Warn')
 env.AddMethod(_error, 'Error')
+env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
 env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library')
 env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary')
 env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary')
-env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
+env.AddMethod(_wrap_builder(env.AstJson, TargetType.MISC), 'AstJson')
+
+if 'unity_build' in tools:
+    env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
+    env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
+    env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
+    env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
+
 env.AddMethod(_wrap_default(env.Default), 'Default')
 env.AddMethod(_wrap_depends(env.Depends), 'Depends')
+env.AddMethod(_wrap_depends(env.Ignore), 'Ignore')
+env.AddMethod(_wrap_depends(env.Requires), 'Requires')
 
-env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
-env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
-env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
-env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
 env.AddMethod(_module, 'Module')
+env.AddMethod(_module_config, 'ModuleConfig')
 env.AddMethod(_finalize, 'Finalize')
 env.AddMethod(_find_target, 'FindTarget')
 
 if hasattr(env, 'Gch'):
-    env.AddMethod(_wrap_builder(env.Gch), 'Gch')
+    env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch')
 
-for addon_file in env.Glob('addons/*.py'):
+_hook_config_complete.invoke()
+
+for addon_file in env.Glob('addons/old/*.py'):
     env = SConscript(addon_file, exports = 'env')
 
-if dump_env:
-    print('==== Begin Environment Dump =====')
-    print(env.Dump())
-    print('==== End Environment Dump =====')
-    Exit(0)
-
 Return('env')
```
209 addons/astgen.py Normal file
@@ -0,0 +1,209 @@

import gzip
import itertools  # used by ASTNode.inner(); missing in the original listing
import json
import os.path
import pickle
import subprocess
from abc import ABC, abstractmethod
from typing import Callable, Any, Iterable, Self, Generator

from SCons.Script import *
from SCons.Node.FS import File
from spp import get_spp

spp = get_spp()

def post_environment(**kwargs) -> None:
    env: Environment = spp.globals['env']

    ast_json_builder = Builder(
        action=_gen_ast_json
    )
    env.Append(BUILDERS = {'AstJson': ast_json_builder})
    # env.SetDefault(ASTJSONCOM = '$ASTJSON -Xclang -ast-dump=json -fsyntax-only -Wno-unknown-warning-option -DSPP_AST_GEN $CXXFLAGS $SOURCES > $TARGET')

    env.AddMethod(_ast_jinja, 'AstJinja')

def _gen_ast_json(target: list[File], source: list[File], env: Environment):
    clang_exe = env.WhereIs('clang++')
    cmd = [clang_exe, '-Xclang', '-ast-dump=json', '-fsyntax-only', '-Wno-unknown-warning-option',
           '-DSPP_AST_GEN', f'-std={env["CXX_STANDARD"]}']
    for define in env['CPPDEFINES']:
        cmd.append(f'-D{define}')
    for path in env['CPPPATH']:
        cmd.append(f'-I{path}')
    cmd.append(source[0].abspath)
    # print(*cmd)
    try:
        proc = subprocess.Popen(cmd, text=True, stdout=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        env.Error(f'Clang exited with code {e.returncode}.')
        return
    parsed = json.load(proc.stdout)
    inner: list = parsed["inner"]

    # pos = 0
    # last_file = None

    #while pos < len(inner):
    #    last_file = inner[pos]["loc"].get("file", last_file)
    #    if last_file is None: # or os.path.isabs(last_file):
    #        del inner[pos]
    #    else:
    #        pos += 1

    if target[0].suffix == '.bin':
        with gzip.open(target[0].abspath, 'wb') as f:
            pickle.dump(parsed, f)
    elif target[0].suffix == '.gz':
        with gzip.open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
    else:
        with open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)

class ASTNode(ABC):
    @abstractmethod
    def _get_decls(self) -> Iterable[dict]: ...

    def inner(self) -> Iterable[dict]:
        return itertools.chain(*(decl['inner'] for decl in self._get_decls()))

    def inner_filtered(self, **kwargs) -> Iterable[dict]:
        def _applies(decl: dict) -> bool:
            for name, val in kwargs.items():
                if decl.get(name) != val:
                    return False
            return True
        return (decl for decl in self.inner() if _applies(decl))

class SimpleASTNode(ASTNode):
    def __init__(self, decl: dict) -> None:
        self._decl = decl

    def _get_decls(self) -> Iterable[dict]:
        return (self._decl,)

class Value(SimpleASTNode): ...

class Annotation(SimpleASTNode):
    @property
    def values(self) -> Iterable[Value]:
        return (Value(decl) for decl in self.inner())

class Param(SimpleASTNode):
    @property
    def name(self) -> str:
        return self._decl.get('name', '')

    @property
    def type(self) -> str:
        return self._decl['type']['qualType']

class Method(SimpleASTNode):
    def __init__(self, decl: dict, access: str) -> None:
        super().__init__(decl)
        self._access = access

    @property
    def access(self) -> str:
        return self._access

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def mangled_name(self) -> str:
        return self._decl['mangledName']

    @property
    def type(self) -> str:
        return self._decl['type']['qualType']

    @property
    def return_type(self) -> str:
        return self.type.split('(', 1)[0].strip()

    @property
    def params(self) -> Iterable[Param]:
        return (Param(decl) for decl in self.inner_filtered(kind='ParmVarDecl'))

    @property
    def annotations(self) -> Iterable[Annotation]:
        return (Annotation(decl) for decl in self.inner_filtered(kind='AnnotateAttr'))

class Class(SimpleASTNode):
    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def tagUsed(self) -> str:
        return self._decl['tagUsed']

    @property
    def methods(self) -> Generator[Method]:
        access = 'private' if self.tagUsed == 'class' else 'public'
        for decl in self.inner():
            if decl['kind'] == 'AccessSpecDecl':
                access = decl['access']
            elif decl['kind'] == 'CXXMethodDecl' and not decl.get('isImplicit', False):
                yield Method(decl, access)

class Namespace(ASTNode, ABC):
    def get_namespace(self, ns_name: str) -> Self:
        return InnerNamespace(list(self.inner_filtered(kind='NamespaceDecl', name=ns_name)))

    @property
    def classes(self) -> Iterable[Class]:
        return (Class(decl) for decl in self.inner_filtered(kind='CXXRecordDecl', tagUsed='class', completeDefinition=True))

class InnerNamespace(Namespace):
    def __init__(self, decls: list[dict]) -> None:
        self._decls = decls

    def _get_decls(self) -> Iterable[dict]:
        return self._decls

class Ast(Namespace):
    def __init__(self, file: File) -> None:
        self._file = file
        self._data_dict: dict|None = None

    def _get_decls(self) -> tuple[dict]:
        if self._data_dict is None:
            if not self._file.exists():
                self._data_dict = {
                    'inner': []
                }
            elif self._file.suffix == '.bin':
                with gzip.open(self._file.abspath, 'rb') as f:
                    self._data_dict = pickle.load(f)
            elif self._file.suffix == '.gz':
                with gzip.open(self._file.abspath) as f:
                    self._data_dict = json.load(f)
            else:
                with open(self._file.abspath, 'r') as f:
                    self._data_dict = json.load(f)
        return (self._data_dict,)

def _ast_jinja(env: Environment, source: File, target: File, template: File, **kwargs):
    cache_dir = env['CACHE_DIR']
    rel_path = env.Dir('#').rel_path(source)
    json_file = env.File(os.path.join(cache_dir, 'ast_json', f'{rel_path}.bin'))
    ast_json = env.AstJson(target=json_file, source=source, **kwargs)

    ast_jinja = env.Jinja(
        target=target,
        source=template,
        JINJA_CONTEXT = {
            'ast': Ast(json_file)
        },
        **kwargs
    )
    env.Depends(ast_jinja, ast_json)
    # env.AlwaysBuild(ast_jinja)
    # env.Requires(ast_jinja, ast_json)
    # env.Requires(source, ast_jinja)
    env.Ignore(ast_json, ast_jinja)
    return ast_jinja
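Taken together, AstJson dumps a translation unit's Clang AST and AstJinja renders a template against it. A minimal usage sketch, assuming an SModule with a test.hpp and a header template (the same wiring appears in the codegen test project later in this diff):

    Import('env')

    # Dump the Clang AST of test.hpp into the cache and render a reflection
    # header from it; AstJinja sets up the AstJson -> Jinja dependency itself.
    ast_hpp = env.AstJinja(
        target = env.File('test.refl.hpp'),
        source = env.File('test.hpp'),
        template = env.File('#templates/header.jinja')
    )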
13 addons/compat_v1_0.py Normal file
@@ -0,0 +1,13 @@

from spp import get_spp

spp = get_spp()

def available(**kwargs) -> bool:
    return spp.globals['config']['SPP_TARGET_VERSION'][0:2] == (1, 0)

def pre_environment(**kwargs) -> None:
    spp.globals['tools'].append('unity_build') # S++ 1.0.0 had unity_build enabled by default

def post_environment(**kwargs) -> None:
    spp.globals['env']['_SPP_FALLBACK_RECIPE_REPO'] = {'repo_name': 'mewin', 'remote_url': 'https://git.mewin.de/mewin/spp_recipes.git', 'git_ref': 'stable'}
35 addons/config_cache.py Normal file
@@ -0,0 +1,35 @@

import json
from pathlib import Path
from spp import get_spp, TargetType

spp = get_spp()

def _should_generate() -> bool:
    # check if any program or library target has been built
    for target in spp.targets:
        if target.target_type in (TargetType.PROGRAM, TargetType.STATIC_LIBRARY, TargetType.SHARED_LIBRARY):
            return True
    return False

def post_finalize(**kwargs) -> None:
    if not _should_generate():
        return

    cache_file = Path(spp.env['CACHE_DIR']) / 'config_cache.json'

    cache = {}
    if cache_file.exists():
        try:
            with cache_file.open('r') as f:
                cache = json.load(f)
        except Exception as e:
            spp.env.Warn(f'Error while loading config cache: {e}.')

    cache['build_type'] = spp.env['BUILD_TYPE']

    try:
        with cache_file.open('w') as f:
            json.dump(cache, f)
    except Exception as e:
        spp.env.Warn(f'Error while saving config cache: {e}.')
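What lands on disk is a single JSON object in the cache directory. A sketch of reading it back, assuming the working directory is the project root and the 'cache' folder name from get_config_cache() in util/python_module/sppcmd/common.py further down:

    import json
    from pathlib import Path

    # After a debug build this prints something like {'build_type': 'debug'}.
    cache_file = Path('cache') / 'config_cache.json'
    print(json.loads(cache_file.read_text()))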
204 addons/jinja.py
@@ -1,10 +1,178 @@
+# based on https://github.com/hgomersall/scons-jinja
+
+from SCons.Script import *
+
+import os
 import pathlib
 
-Import('env')
+from spp import get_spp
 
-if not hasattr(env, 'Jinja'):
-    Return('env')
+try:
+    import jinja2
+    from jinja2.utils import open_if_exists
+except ImportError:
+    jinja2 = None
+    print('No Jinja :(')
+
+spp = get_spp()
+
+def available(**kwargs) -> bool:
+    return jinja2 is not None
+
+def post_environment(**kwargs) -> None:
+    env: Environment = spp.globals['env']
+
+    env.SetDefault(JINJA_CONTEXT={})
+    env.SetDefault(JINJA_ENVIRONMENT_VARS={})
+    env.SetDefault(JINJA_FILTERS={'load_config': _jinja_load_config})
+    env.SetDefault(JINJA_GLOBALS={
+        'file_size': lambda *args: _file_size(env, *args),
+        'file_content_hex': lambda *args: _file_content_hex(env, *args)
+    })
+    env.SetDefault(JINJA_TEMPLATE_SEARCHPATH=['data/jinja'])
+    env.SetDefault(JINJA_CONFIG_SEARCHPATH=[env.Dir('#data/config')])
+    env.SetDefault(JINJA_FILE_SEARCHPATH=[env.Dir('#')])
+
+    env['BUILDERS']['Jinja'] = Builder(
+        action=render_jinja_template
+    )
+
+    scanner = env.Scanner(function=jinja_scanner,
+                          skeys=['.jinja'])
+    env.Append(SCANNERS=scanner)
+
+    env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
+
+class FileSystemLoaderRecorder(jinja2.FileSystemLoader):
+    """ A wrapper around FileSystemLoader that records files as they are
+    loaded. These are contained within the loaded_filenames set attribute.
+    """
+
+    def __init__(self, searchpath, encoding='utf-8'):
+        self.loaded_filenames = set()
+        super(FileSystemLoaderRecorder, self).__init__(searchpath, encoding)
+
+    def get_source(self, environment, template):
+        """Overwritten FileSystemLoader.get_source method that extracts the
+        filename that is used to load each template and adds it to
+        self.loaded_filenames.
+        """
+        for searchpath in self.searchpath:
+            filename = os.path.join(searchpath, template)
+            f = open_if_exists(filename)
+            if f is None:
+                continue
+            try:
+                contents = f.read().decode(self.encoding)
+            finally:
+                f.close()
+
+            self.loaded_filenames.add(filename)
+
+            return super(FileSystemLoaderRecorder, self).get_source(
+                environment, template)
+
+        # If the template isn't found, then we have to drop out.
+        raise jinja2.TemplateNotFound(template)
+
+
+def jinja_scanner(node, env, path):
+    # Instantiate the file as necessary
+    node.get_text_contents()
+
+    template_dir, filename = os.path.split(str(node))
+
+    template_search_path = ([template_dir] +
+                            env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
+    template_loader = FileSystemLoaderRecorder(template_search_path)
+
+    jinja_env = jinja2.Environment(loader=template_loader,
+                                   extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
+    jinja_env.filters.update(env['JINJA_FILTERS'])
+    jinja_env.globals.update(env['JINJA_GLOBALS'])
+
+    try:
+        template = jinja_env.get_template(filename)
+    except jinja2.TemplateNotFound as e:
+        env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
+
+    # We need to render the template to do all the necessary loading.
+    #
+    # It's necessary to respond to missing templates by grabbing
+    # the content as the exception is raised. This makes sure of the
+    # existence of the file upon which the current scanned node depends.
+    #
+    # I suspect that this is pretty inefficient, but it does
+    # work reliably.
+    context = env['JINJA_CONTEXT']
+
+    last_missing_file = ''
+    while True:
+        try:
+            template.render(**context)
+        except jinja2.TemplateNotFound as e:
+            if last_missing_file == str(e):
+                # We've already been round once for this file,
+                # so need to raise
+                env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
+
+            last_missing_file = str(e)
+            # Find where the template came from (using the same ordering
+            # as Jinja uses).
+            for searchpath in template_search_path:
+                filename = os.path.join(searchpath, last_missing_file)
+                if os.path.exists(filename):
+                    continue
+                else:
+                    env.File(filename).get_text_contents()
+            continue
+
+        break
+
+    # Get all the files that were loaded. The set includes the current node,
+    # so we remove that.
+    found_nodes_names = list(template_loader.loaded_filenames)
+    try:
+        found_nodes_names.remove(str(node))
+    except ValueError as e:
+        env.Error(f'Missing template node: {str(node)}')
+
+    return [env.File(f) for f in found_nodes_names]
+
+
+def render_jinja_template(target, source, env):
+    output_str = ''
+
+    if not source:
+        source = [f'{target}.jinja']
+
+    for template_file in source:
+        template_dir, filename = os.path.split(str(template_file))
+
+        template_search_path = ([template_dir] +
+                                env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
+        template_loader = FileSystemLoaderRecorder(template_search_path)
+
+        jinja_env = jinja2.Environment(loader=template_loader,
+                                       extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
+        jinja_env.filters.update(env['JINJA_FILTERS'])
+        jinja_env.globals.update(env['JINJA_GLOBALS'])
+
+        template = jinja_env.get_template(filename)
+
+        context = env['JINJA_CONTEXT']
+        output_str += template.render(**context)
+
+    with open(str(target[0]), 'w') as target_file:
+        target_file.write(output_str)
+
+    return None
+
 def _jinja_load_config(env, config_name):
     searched_paths = []
@@ -23,27 +191,29 @@ def _wrap_jinja(orig_jinja):
     def _wrapped(env, target, **kwargs):
         if 'source' not in kwargs:
             kwargs['source'] = f'{target}.jinja'
-        target = orig_jinja(**kwargs)
+        target = orig_jinja(target=target, **kwargs)
         if 'depends' in kwargs:
             for dependency in kwargs['depends']:
                 env.Depends(target, dependency)
-        # env.Depends(alias_prepare, target)
         return target
     return _wrapped
 
+def _find_file(env, fname):
+    for path in env['JINJA_FILE_SEARCHPATH']:
+        fullpath = os.path.join(path.abspath, fname)
+        if os.path.exists(fullpath):
+            return env.File(fullpath)
+    return None
+
 def _file_size(env, fname: str) -> int:
-    return env.File(fname).get_size()
+    file = _find_file(env, fname)
+    if not file:
+        env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
+    return file.get_size()
 
 def _file_content_hex(env, fname: str) -> str:
-    bytes = env.File(fname).get_contents()
+    file = _find_file(env, fname)
+    if not file:
+        env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
+    bytes = file.get_contents()
     return ','.join([hex(byte) for byte in bytes])
-
-env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
-
-env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
-env.Append(JINJA_GLOBALS = {
-    'file_size': lambda *args: _file_size(env, *args),
-    'file_content_hex': lambda *args: _file_content_hex(env, *args)
-})
-env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
-env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
-
-Return('env')
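A minimal sketch of the builder in use (target name and context key are illustrative): when no explicit source is given, the wrapper above assumes '<target>.jinja' next to the target.

    # Render include/version.hpp from include/version.hpp.jinja.
    env.Jinja(
        target = 'include/version.hpp',
        JINJA_CONTEXT = {'version': '1.1.0'}
    )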
@@ -31,10 +31,18 @@ def _generate_cmake_cxx_flags(env, dependencies: 'list[dict]') -> str:
 def _get_cmake_cxx_standard(env: Environment) -> str:
     return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX"
 
+def _get_cmake_prefix_path(dependencies: 'list[dict]') -> str:
+    parts = []
+    for dependency in dependencies:
+        for path in dependency.get('CMAKE_PREFIX_PATH', []):
+            parts.append(path)
+    return cmd_quote(';'.join(parts))
+
 def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]':
     args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}',
             f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}',
-            f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}']
+            f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}',
+            f'-DCMAKE_PREFIX_PATH={_get_cmake_prefix_path(dependencies)}']
     for dependency in dependencies:
         for name, value in dependency.get('CMAKE_VARS', {}).items():
             args.append(f'-D{name}={cmd_quote(value)}')
@@ -97,6 +105,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
             libpath.append(full_path)
 
     return {
+        'build_dir': build_dir,
         'install_dir': install_dir,
         'BINPATH': [os.path.join(install_dir, 'bin')],
         'LIBPATH': libpath,
@@ -37,19 +37,22 @@ def _download_file(url: str, path: pathlib.Path) -> None:
     urllib.request.urlretrieve(url, dl_path)
     dl_path.rename(path)
 
-def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int) -> None:
+def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int = 0) -> None:
     if archive_type == ArchiveType.TAR_GZ:
         file = tarfile.open(str(path))
+        filter = tarfile.data_filter
         if skip_folders != 0:
-            def skip_filer(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
-                name_parts = member.name.split('/')
+            def skip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
+                name_parts = member.name.split('/', skip_folders)
                 if len(name_parts) <= skip_folders:
                     return None
                 return member.replace(name = '/'.join(name_parts[skip_folders:]))
-            file.extraction_filter = skip_filer
+            filter = skip_filter
-        file.extractall(output_dir)
+        file.extractall(output_dir, filter=filter)
         file.close()
     elif archive_type == ArchiveType.ZIP:
+        if skip_folders != 0:
+            raise Exception('skip_folders option is not yet supported for zip-archives :(')
         file = zipfile.open(str(path))
         file.extractall(output_dir)
         file.close()
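To illustrate the skip_folders filter, here is a standalone sketch of what it does to a single archive member name (the member name is made up):

    import tarfile

    skip_folders = 1
    member = tarfile.TarInfo('mylib-1.2.3/src/main.cpp')

    name_parts = member.name.split('/', skip_folders)
    if len(name_parts) > skip_folders:
        # The leading 'mylib-1.2.3/' folder is stripped on extraction.
        print('/'.join(name_parts[skip_folders:]))  # -> src/main.cpp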
@@ -3,6 +3,8 @@ from git import Repo
 from git.exc import GitError
 import hashlib
 import inspect
+import os
+import shutil
 from SCons.Script import *
 
 Import('env')
@@ -20,7 +22,41 @@ def _clone(env: Environment, repo_name: str, remote_url: str):
 
 def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
     repo, origin = _clone(env, repo_name, remote_url)
-    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
+    old_worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
+    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, git_ref.replace('/', '_'))
+    if os.path.exists(old_worktree_dir) and not os.path.islink(old_worktree_dir):
+        if not os.path.exists(worktree_dir):
+            print(f'Found old Git worktree at {old_worktree_dir}, moving it to {worktree_dir}.')
+            try:
+                repo.git.worktree('move', old_worktree_dir, worktree_dir)
+            except GitError:
+                print('Error while moving worktree, manually moving and repairing it instead.')
+                shutil.move(old_worktree_dir, worktree_dir)
+                try:
+                    repo.git.worktree('repair', worktree_dir)
+                except GitError:
+                    print('Also didn\'t work, removing and redownloading it.')
+                    try:
+                        repo.git.worktree('remove', '-f', worktree_dir)
+                    except GitError: ...
+
+                    try:
+                        repo.git.worktree('remove', '-f', old_worktree_dir)
+                    except GitError: ...
+
+                    if os.path.exists(worktree_dir):
+                        shutil.rmtree(worktree_dir, ignore_errors=True)
+                    # this is all we can do, I guess
+        else:
+            print(f'Found old Git worktree at {old_worktree_dir}, but the new one at {worktree_dir} already exists. Removing the old one.')
+            repo.git.worktree('remove', '-f', old_worktree_dir)
+
+        print('Attempting to create a symlink for older S++ versions.')
+        try:
+            os.symlink(worktree_dir, old_worktree_dir, target_is_directory=True)
+        except Exception as e:
+            print(f'Failed: {e}')
+
     update_submodules = False
     if not os.path.exists(worktree_dir):
         print(f'Checking out into {worktree_dir}.')
@@ -38,9 +74,17 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
             update_submodules = True
         else:
             print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
+    else:
+        worktree_repo = Repo(worktree_dir)
     if update_submodules:
         for submodule in worktree_repo.submodules:
             submodule.update(init=True)
+    for submodule in worktree_repo.submodules:
+        if os.listdir(submodule.abspath) == ['.git']:
+            print(f'Submodule {submodule.name} seems borked, attempting to fix it.')
+            worktree_repo.git.submodule('deinit', '-f', submodule.path)
+            worktree_repo.git.submodule('init', submodule.path)
+            worktree_repo.git.submodule('update', submodule.path)
     return {
         'checkout_root': worktree_dir,
         'repo': repo,
@@ -50,7 +94,10 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
 def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
     repo, origin = _clone(env, repo_name, remote_url)
     if force_fetch or env['UPDATE_REPOSITORIES']:
-        origin.fetch(tags=True)
+        try:
+            origin.fetch(tags=True)
+        except GitError:
+            env.Warn(f'Error fetching tags from {repo_name} ({remote_url})')
     return [t.name for t in repo.tags]
 
 def _make_callable(val):
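The migration above exists because worktree directories used to be named after a hash of the ref and are now named after the ref itself. A quick sketch of the difference (the ref is illustrative):

    import hashlib

    git_ref = 'feature/foo'
    print(hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))  # old: opaque 12-char hex digest
    print(git_ref.replace('/', '_'))                                # new: 'feature_foo', human-readable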
53 contrib/vs/spp.targets Normal file
@@ -0,0 +1,53 @@
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <SolutionExt>.sln</SolutionExt>
    <Language>C++</Language>
    <DefaultLanguageSourceExtension>.cpp</DefaultLanguageSourceExtension>
  </PropertyGroup>

  <PropertyGroup>
    <TargetFileName Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetFileName('$(TargetPath)'))</TargetFileName>
    <TargetDir Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetDirectoryName('$(TargetPath)'))</TargetDir>
    <OutputPath>$(TargetDir)</OutputPath>
    <LocalDebuggerCommand Condition="'$(LocalDebuggerCommand)' == ''">$(TargetPath)</LocalDebuggerCommand>

    <SConsCommandLine Condition="'$(SConsCommandLine)' == ''">scons</SConsCommandLine>
    <SPPNumProcs Condition="'$(SPPNumProcs)' == ''">$([System.Environment]::ProcessorCount)</SPPNumProcs>
    <SPPBuildType Condition="'$(SPPBuildType)' == ''">debug</SPPBuildType>
    <SPPTargetType Condition="'$(SPPTargetType)' == ''">executable</SPPTargetType>

    <OutDir>$(OutputPath)\</OutDir>
    <IntDir>$(SolutionDir)cache\msbuild\</IntDir>
  </PropertyGroup>

  <Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />

  <Target Name="Build" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Build" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping build for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Clean" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -c -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Clean" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping clean for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Rebuild" Condition="'$(SPPTargetType)' != 'meta'" DependsOnTargets="Clean;Build" />
  <!--<Target Name="Rebuild" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping rebuild for meta target $(ProjectName)" />
  </Target>-->

  <!-- This target is needed just to suppress "warning NU1503: Skipping restore for project '...'. The project file may be invalid or missing targets
       required for restore." -->
  <Target Name="_IsProjectRestoreSupported" Returns="@(_ValidProjectsForRestore)">
    <ItemGroup>
      <_ValidProjectsForRestore Include="$(MSBuildProjectFullPath)" />
    </ItemGroup>
  </Target>

  <Import Condition="'$(_ImportMicrosoftCppDesignTime)' != 'false'" Project="$(VCTargetsPathActual)\Microsoft.Cpp.DesignTime.targets" />
</Project>
55 lib/spp.py Normal file
@@ -0,0 +1,55 @@

from dataclasses import dataclass
import enum
from typing import TYPE_CHECKING
from SCons.Script import *

if TYPE_CHECKING:
    class SPPEnvironment(Environment):
        def Info(self, message: str): ...
        def Warn(self, message: str): ...
        def Error(self, message: str): ...
else:
    SPPEnvironment = Environment

@dataclass
class Module:
    name: str
    folder: str
    description: str
    cxx_namespace: str

class TargetType(enum.Enum):
    PROGRAM = 0
    STATIC_LIBRARY = 1
    SHARED_LIBRARY = 2
    MISC = 3

class Target:
    name: str
    target_type: TargetType
    builder = None
    args: list = []
    kwargs: dict = {}
    dependencies: list = []
    target = None
    module: Module = None

@dataclass(frozen=True)
class SPPInterface:
    globals: dict

    @property
    def env(self) -> SPPEnvironment:
        return self.globals['env']

    @property
    def targets(self) -> list[Target]:
        return self.env['SPP_TARGETS']

_spp: SPPInterface

def _init_interface(**kwargs) -> None:
    global _spp
    _spp = SPPInterface(**kwargs)

def get_spp() -> SPPInterface:
    return _spp
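This interface is what the addons above import. A minimal sketch of an addon built on it, using hook names that appear elsewhere in this diff (available, post_finalize); how S++ discovers and dispatches these hooks is assumed here:

    from spp import get_spp, TargetType

    spp = get_spp()

    def available(**kwargs) -> bool:
        return True  # gate the addon on optional dependencies here

    def post_finalize(**kwargs) -> None:
        # List every program target that was registered.
        for target in spp.targets:
            if target.target_type == TargetType.PROGRAM:
                spp.env.Info(f'program target: {target.name}')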
@@ -1,4 +1,6 @@
-GitPython
-psutil
+GitPython~=3.1.45
+psutil~=7.0.0
 Jinja2
 requests
+SCons~=4.9.1
+cxxheaderparser~=1.5.4
Binary file not shown.
@@ -1,6 +0,0 @@
-
-config = {
-    'PROJECT_NAME': 'DUMMY'
-}
-
-env = SConscript('../SConscript', exports = ['config'])
86 test/codegen/.gitignore vendored Normal file
@@ -0,0 +1,86 @@
# Generated Files
*.refl.hpp
*.generated.*
private/**/*.json

# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln

# Executables
/bin
/bin_*

# Libraries
/lib
/lib_*

# Vulkan API dumps
/api_dump*

# Compile commands
compile_commands.json

# whatever this is
.cache

# ImGui config
/imgui.ini

# Environment setup
/.env

# Build Configuration
/config.py
/config_*.py

# Prerequisites
*.d

# Compiled Object files
*.slo
*.lo
*.o
*.obj

# Precompiled Headers
*.gch
*.pch

# Compiled Dynamic libraries
*.so
*.dylib
*.dll

# Fortran module files
*.mod
*.smod

# Compiled Static libraries
*.lai
*.la
*.a
*.lib

# Executables
*.exe
*.out
*.app

# Debug Info
*.pdb

# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log

# Byte-compiled / optimized python files
__pycache__/
*.py[cod]

# Backup files
*.bak
15 test/codegen/SConstruct Normal file
@@ -0,0 +1,15 @@

config = {
    'PROJECT_NAME': 'S++ Codegen Test',
    'SPP_TARGET_VERSION': (1, 1, 0)
}

env = SConscript('../../SConscript', exports = ['config'])

# recipe repo
env.RecipeRepo('mewin', 'https://git.mewin.de/mewin/spp_recipes.git', 'stable')

# app
env = env.Module('private/test/SModule')

env.Finalize()
50 test/codegen/private/test/SModule Normal file
@@ -0,0 +1,50 @@

Import('env')

env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

src_files = Split("""
    main.cpp
    test.cpp
    test.generated.cpp
""")

# env.IncludeGen(src_files,
#     template=env.File('#templates/header.jinja'),
#     include_filter=r'.*\.refl.hpp'
# )

# env.CodeGen('GenSource', inputs = [], template=env.File('#templates/source.jinja'), )
# env.CodeGen(
#     target = 'test.generated.cpp',
#     template = env.File('#templates/source.jinja'),
#     inputs = {'source': 'test.cpp'}
# )

ast_json = env.AstJson(
    target = env.File('test.json'),
    source = 'test.hpp'
)
env.Default(ast_json)

ast_hpp = env.AstJinja(
    target = env.File('test.refl.hpp'),
    source = env.File('test.hpp'),
    template = env.File('#templates/header.jinja')
)

prog_app = env.Program(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
    }
)
env.Requires(prog_app.target, ast_hpp)
env.Default(prog_app)

Return('env')
8 test/codegen/private/test/main.cpp Normal file
@@ -0,0 +1,8 @@

#include "./test.hpp"

int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}
12 test/codegen/private/test/test.cpp Normal file
@@ -0,0 +1,12 @@

#include "./test.hpp"

#include <print>

namespace tst
{
void printHelloWorld(int param) noexcept
{
    std::println("Hello World! Param is {}.", param);
}
}
33 test/codegen/private/test/test.hpp Normal file
@@ -0,0 +1,33 @@

#pragma once

#include <vector>

#if __has_include("test.refl.hpp")
#include "test.refl.hpp"
#endif

namespace tst
{
static constexpr int kAnnotVal = 17;
class MyClass
{
private:
    std::vector<int> mInts;
public:
    MyClass();

#if defined(__clang__)
    [[clang::annotate("reflect", "yes, please", kAnnotVal)]]
#endif
    int getVal();
    void setVal(int val);

    static constexpr int kVal = 1;
};
}

namespace tst
{
void printHelloWorld(int param) noexcept;
}
11 test/codegen/templates/header.jinja Normal file
@@ -0,0 +1,11 @@

#if !defined(SPP_AST_GEN)
{% for class in ast.get_namespace('tst').classes %}
/*
{{ class.name }}
{% for method in class.methods %}
{{ method.return_type }} {{ method.name }} ({% for param in method.params %} {{ param.type }} {{ param.name }} {% endfor %})
{% endfor %}
{% endfor %}
*/
#endif
3 test/codegen/templates/source.jinja Normal file
@@ -0,0 +1,3 @@
{% for cls in source.namespace.classes %}
// {{ cls.class_decl.typename.format() }}
{% endfor %}
81 test/v1_0_0/.gitignore vendored Normal file
@@ -0,0 +1,81 @@
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln

# Executables
/bin
/bin_*

# Libraries
/lib
/lib_*

# Vulkan API dumps
/api_dump*

# Compile commands
compile_commands.json

# whatever this is
.cache

# ImGui config
/imgui.ini

# Environment setup
/.env

# Build Configuration
/config.py
/config_*.py

# Prerequisites
*.d

# Compiled Object files
*.slo
*.lo
*.o
*.obj

# Precompiled Headers
*.gch
*.pch

# Compiled Dynamic libraries
*.so
*.dylib
*.dll

# Fortran module files
*.mod
*.smod

# Compiled Static libraries
*.lai
*.la
*.a
*.lib

# Executables
*.exe
*.out
*.app

# Debug Info
*.pdb

# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log

# Byte-compiled / optimized python files
__pycache__/
*.py[cod]

# Backup files
*.bak
10 test/v1_0_0/SConstruct Normal file
@@ -0,0 +1,10 @@

config = {
    'PROJECT_NAME': 'S++ 1.0.0 Test'
}

env = SConscript('../../SConscript', exports = ['config'])

env = env.Module('private/test/SModule')

env.Finalize()
25 test/v1_0_0/private/test/SModule Normal file
@@ -0,0 +1,25 @@

Import('env')

env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)

src_files = Split("""
    main.cpp
    test.cpp
""")

prog_app = env.UnityProgram(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
        'mijin': {}
    }
)
env.Default(prog_app)

Return('env')
8 test/v1_0_0/private/test/main.cpp Normal file
@@ -0,0 +1,8 @@

#include "./test.hpp"

int main(int, char**)
{
    tst::printHelloWorld(100);
    return 0;
}
15 test/v1_0_0/private/test/test.cpp Normal file
@@ -0,0 +1,15 @@

#include "./test.hpp"

#include <mijin/debug/assert.hpp>

#include <print>

namespace tst
{
void printHelloWorld(int param) noexcept
{
    MIJIN_ASSERT(param > 0, "param is not >0 :(");
    std::println("Hello World! Param is {}.", param);
}
}
7 test/v1_0_0/private/test/test.hpp Normal file
@@ -0,0 +1,7 @@

#pragma once

namespace tst
{
void printHelloWorld(int param) noexcept;
}
||||||
133
test/v1_0_0/site_scons/site_tools/unity_build.py
Normal file
133
test/v1_0_0/site_scons/site_tools/unity_build.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
|
||||||
|
import os
|
||||||
|
import math
|
||||||
|
from SCons.Script import *
|
||||||
|
from SCons.Node.FS import File
|
||||||
|
from SCons import Action
|
||||||
|
|
||||||
|
"""
|
||||||
|
Scons Unity Build Generator
|
||||||
|
|
||||||
|
Provides several generators for SCons to combine multiple source files into a bigger
|
||||||
|
one to reduce compilation time, so called "unity builds". This is achieved by generating
|
||||||
|
unity source files which in term include the actual source files and compile them using
|
||||||
|
one of the existing SCons builders.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
In order to use this, just place it inside your `site_scons/site_tools` folder, enable it by
|
||||||
|
adding "unity_build" to the tools when constructing your Environment and replace invocations
|
||||||
|
of the Program/Library/SharedLibrary/StaticLibrary builders with their Unity... counterpart:
|
||||||
|
|
||||||
|
env = Environment(tools = ['default', 'unity_build'])
|
||||||
|
|
||||||
|
source_files = ...
|
||||||
|
|
||||||
|
env.UnityProgram(
|
||||||
|
target = 'my_program',
|
||||||
|
source = source_files,
|
||||||
|
...
|
||||||
|
)
|
||||||
|
|
||||||
|
The tool will generate an amount of unity source files and invoke the Program builder on these,
|
||||||
|
forwarding any other arguments you passed.
|
||||||
|
|
||||||
|
Other Options
|
||||||
|
------------
|
||||||
|
You can control the behaviour of the builder using several Environment options:
|
||||||
|
env['UNITY_CACHE_DIR'] = '.unity' # Directory where the unity sources are stored.
|
||||||
|
# can be either a string or a Dir() node.
|
||||||
|
env['UNITY_MAX_SOURCES'] = 15 # Maximum number of source files per unity file.
|
||||||
|
env['UNITY_MIN_FILES'] = env.GetOption('num_jobs')
|
||||||
|
# Minimum number of unity files to generate (if possible).
|
||||||
|
# Defaults to the number of jobs passed to SCons.
|
||||||
|
env['UNITY_DISABLE'] = False # Set to True to completely disable unity builds. The commands
|
||||||
|
# will simply pass through their options to the regular builders.
|
||||||
|
|
||||||
|
Additionally any generator can be passed a `cache_dir` to overwrite the value from the Environment.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def exists(env : Environment):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def generate(env : Environment):
|
||||||
|
env.AddMethod(_make_generator(env.Program), 'UnityProgram')
|
||||||
|
env.AddMethod(_make_generator(env.Library), 'UnityLibrary')
|
||||||
|
env.AddMethod(_make_generator(env.StaticLibrary), 'UnityStaticLibrary')
|
||||||
|
env.AddMethod(_make_generator(env.SharedLibrary), 'UnitySharedLibrary')
|
||||||
|
|
||||||
|
# build for generating the unity source files
|
||||||
|
unity_source_builder = env.Builder(
|
||||||
|
action = Action.Action(_generate_unity_file, _generate_unity_file_msg)
|
||||||
|
)
|
||||||
|
env.Append(BUILDERS = {'UnitySource': unity_source_builder})
|
||||||
|
|
||||||
|
env.SetDefault(UNITY_CACHE_DIR = '.unity')
|
||||||
|
env.SetDefault(UNITY_MAX_SOURCES = 15)
|
||||||
|
env.SetDefault(UNITY_MIN_FILES = env.GetOption('num_jobs'))
|
||||||
|
env.SetDefault(UNITY_DISABLE = False)
|
||||||
|
|
||||||
|
def _make_generator(base_generator):
|
||||||
|
def generator(env, source, target, cache_dir = None, *args, **kwargs):
|
||||||
|
if env['UNITY_DISABLE']:
|
||||||
|
return base_generator(target = target, source = source, *args, **kwargs)
|
||||||
|
unity_source_files = []
|
||||||
|
source_files, other_nodes = _flatten_source(source)
|
||||||
|
|
||||||
|
max_sources_per_file = max(1, math.ceil(len(source_files) / env['UNITY_MIN_FILES']))
|
||||||
|
sources_per_file = min(max_sources_per_file, env['UNITY_MAX_SOURCES'])
|
||||||
|
|
||||||
|
num_unity_files = math.ceil(len(source_files) / sources_per_file)
|
||||||
|
|
||||||
|
if not cache_dir:
|
||||||
|
cache_dir = env['UNITY_CACHE_DIR']
|
||||||
|
if not isinstance(cache_dir, str):
|
||||||
|
cache_dir = cache_dir.abspath
|
||||||
|
|
||||||
|
os.makedirs(cache_dir, exist_ok=True)
|
||||||
|
target_base_name = os.path.basename(target)
|
||||||
|
|
||||||
|
for idx in range(num_unity_files):
|
||||||
|
unity_filename = f'{cache_dir}/{target_base_name}_{idx}.cpp'
|
||||||
|
unity_source_files.append(unity_filename)
|
||||||
|
begin = sources_per_file*idx
|
||||||
|
end = sources_per_file*(idx+1)
|
||||||
|
env.UnitySource(
|
||||||
|
target = unity_filename,
|
||||||
|
source = source_files[begin:end]
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(other_nodes) > 0:
|
||||||
|
print(f'Exluded {len(other_nodes)} node(s) from Unity build.')
|
||||||
|
return [base_generator(target = target, source = unity_source_files + other_nodes, *args, **kwargs)]
|
||||||
|
return generator
|
||||||
|
|
||||||
|
def _flatten_source(source : list):
|
||||||
|
source_files = []
|
||||||
|
other_nodes = []
|
||||||
|
for ele in source:
|
||||||
|
if isinstance(ele, list):
|
||||||
|
more_sources, more_other = _flatten_source(ele)
|
||||||
|
source_files.extend(more_sources)
|
||||||
|
other_nodes.extend(more_other)
|
||||||
|
elif isinstance(ele, File):
|
||||||
|
source_files.append(ele.abspath)
|
||||||
|
elif isinstance(ele, str):
|
||||||
|
source_files.append(ele)
|
||||||
|
else:
|
||||||
|
other_nodes.append(ele)
|
||||||
|
|
||||||
|
return source_files, other_nodes
|
||||||
|
|
||||||
|
def _generate_unity_file_msg(target, source, env : Environment):
|
||||||
|
assert(len(target) == 1)
|
||||||
|
return f'Generating {str(target[0])} from {len(source)} source files.'
|
||||||
|
|
||||||
|
def _generate_unity_file(target, source, env : Environment):
|
||||||
|
assert(len(target) == 1)
|
||||||
|
|
||||||
|
unity_filename = target[0].abspath
|
||||||
|
with open(unity_filename, 'w') as f:
|
||||||
|
for source_file in source:
|
||||||
|
fpath = source_file.abspath.replace("\\", "\\\\")
|
||||||
|
f.write(f'#include "{fpath}"\n')
|
||||||
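The sizing logic balances UNITY_MIN_FILES against UNITY_MAX_SOURCES; a worked sketch of the arithmetic with made-up numbers:

    import math

    num_sources = 60
    unity_min_files = 8    # e.g. scons -j8
    unity_max_sources = 15

    max_sources_per_file = max(1, math.ceil(num_sources / unity_min_files))  # 8
    sources_per_file = min(max_sources_per_file, unity_max_sources)          # 8
    num_unity_files = math.ceil(num_sources / sources_per_file)              # 8 unity files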
0 util/__init__.py Normal file
@@ -2,6 +2,5 @@
 <project version="4">
   <component name="VcsDirectoryMappings">
     <mapping directory="" vcs="Git" />
-    <mapping directory="$PROJECT_DIR$/external/scons-plus-plus" vcs="Git" />
   </component>
 </project>
@@ -80,7 +80,7 @@
     "vue.rearranger.settings.migration": "true"
   }
 }]]></component>
-  <component name="RunManager" selected="Custom Build Application.{{ project.executables[0].name }} {{ project.build_types[0] }}">
+  <component name="RunManager" selected="Custom Build Application.{% if project.executables|length > 0 %}{{ project.executables[0].name }}{% else %}{{ project.libraries[0].name }}{% endif %} {{ project.build_types[0] }}">
 {% for executable in project.executables -%}
 {% for build_type in project.build_types -%}
 {% set build_type_name = build_type | capitalize -%}
24 util/python_module/sppcmd/__init__.py Normal file
@@ -0,0 +1,24 @@
"""
Scons++ Command Line Interface
"""

import argparse
import logging

from .ccjson import make_ccjson_parser

_STDOUT_LOG_FORMAT = '%(message)s'

def run_spp_cmd() -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action='store_true')
    subparsers = parser.add_subparsers(required=True)

    make_ccjson_parser(subparsers)

    args = parser.parse_args()

    logging.basicConfig(format=_STDOUT_LOG_FORMAT, level=logging.DEBUG if args.verbose else logging.INFO)
    args.handler(args)

    return 0
18
util/python_module/sppcmd/ccjson.py
Normal file
18
util/python_module/sppcmd/ccjson.py
Normal file
@@ -0,0 +1,18 @@
import argparse

from .common import exec_spp, get_config_cache, require_project_file


def _cmd(args: argparse.Namespace) -> None:
    require_project_file()

    build_type = args.build_type
    if build_type == 'auto':
        cache = get_config_cache()
        build_type = cache.get('build_type', 'debug')

    exec_spp((f'--build_type={build_type}', '--unity=disable', 'compile_commands.json'))


def make_ccjson_parser(subparsers) -> None:
    parser: argparse.ArgumentParser = subparsers.add_parser('ccjson', help='Generate compile_commands.json')
    parser.set_defaults(handler=_cmd)
    parser.add_argument('--build_type', choices=('auto', 'debug', 'release_debug', 'release', 'profile'), default='auto')
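Given exec_spp in common.py below, running the ccjson subcommand when the config cache holds {"build_type": "release"} should expand to roughly the following invocation; this is an illustration of the resolved command line, with the cache contents assumed.

# Illustration only: the subprocess call _cmd ultimately triggers via exec_spp.
import subprocess

subprocess.run(('scons', '-s', '--disable_auto_update',
                '--build_type=release', '--unity=disable',
                'compile_commands.json'), check=True)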
51 util/python_module/sppcmd/common.py Normal file
@@ -0,0 +1,51 @@
import json
import logging
from pathlib import Path
import shlex
import subprocess
import sys
from typing import Sequence

_project_root = Path('.').absolute()


def get_project_root() -> Path:
    return _project_root


def set_project_root(path: Path) -> None:
    global _project_root
    _project_root = path


def get_config_cache() -> dict:
    cache_file = get_project_root() / 'cache' / 'config_cache.json'
    if not cache_file.exists():
        return {}

    try:
        with cache_file.open('r') as f:
            cache = json.load(f)
            if not isinstance(cache, dict):
                logging.warning('Config cache is not a dictionary, ignoring it.')
                return {}
            return cache
    except Exception as e:
        logging.error(f'Error while reading config cache: {e}.')
        return {}


def require_project_file() -> None:
    if not (get_project_root() / 'SConstruct').exists():
        logging.error('This command has to be run inside an existing S++ project folder. Exiting.')
        sys.exit(1)


def exec_checked(args: Sequence[str], **kwargs) -> None:
    logging.debug('exec_checked: "%s"', shlex.join(args))
    subprocess.run(args, stdout=sys.stdout, stderr=sys.stderr, check=True, **kwargs)


def exec_get_output(args: Sequence[str], **kwargs) -> str:
    logging.debug('exec_get_output: "%s"', shlex.join(args))
    return subprocess.run(args, text=True, check=True, capture_output=True, **kwargs).stdout


def exec_spp(args: Sequence[str], **kwargs):
    full_cmd = ('scons', '-s', '--disable_auto_update', *args)
    exec_checked(full_cmd, **kwargs)
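A short usage sketch of these helpers as another sppcmd module might call them; the git invocation is purely illustrative and not something this diff runs, and the import assumes util/python_module is on sys.path as spp_cmd.py arranges.

# Illustrative usage of the common helpers.
from sppcmd.common import exec_get_output, get_config_cache

build_type = get_config_cache().get('build_type', 'debug')  # {} on a fresh checkout
branch = exec_get_output(('git', 'rev-parse', '--abbrev-ref', 'HEAD')).strip()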
6 util/run_scons.py Normal file
@@ -0,0 +1,6 @@
# use this to start SCons from the IDE for debugging
import sys
from SCons.Script.Main import main

if __name__ == '__main__':
    sys.exit(main())
10 util/spp_cmd.py Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env python3

import os
import sys

if __name__ == '__main__':
    sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'python_module'))

    from sppcmd import run_spp_cmd
    sys.exit(run_spp_cmd())
48 util/vs_project_template/solution.sln.jinja Normal file
@@ -0,0 +1,48 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.35122.118
MinimumVisualStudioVersion = 10.0.40219.1
{%- for executable in project.executables %}
Project("{{ generate_uuid(project.name, True) }}") = "{{ executable.name }}", "vs_project_files\{{ executable.name }}.vcxproj", "{{ generate_uuid('target_' + executable.name, True) }}"
EndProject
{%- endfor %}
{%- for library in project.libraries %}
Project("{{ generate_uuid(project.name, True) }}") = "{{ library.name }}", "vs_project_files\{{ library.name }}.vcxproj", "{{ generate_uuid('target_' + library.name, True) }}"
EndProject
{%- endfor %}
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{{ generate_uuid('solution_items', True) }}"
	ProjectSection(SolutionItems) = preProject
		SConstruct = SConstruct
	EndProjectSection
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ build_type_name }}|x64 = {{ build_type_name }}|x64
{%- endfor %}
	EndGlobalSection
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
{%- for executable in project.executables %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
{%- for library in project.libraries %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {{ generate_uuid("solution", True) }}
	EndGlobalSection
EndGlobal
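This template only works if generate_uuid is deterministic: the target_... GUIDs emitted into the .sln must match the ProjectGuid each .vcxproj template emits for the same name. The helper's implementation is not part of this diff; a minimal sketch of such a name-based helper, with an arbitrary placeholder namespace, could look like this.

# Hedged sketch of a deterministic generate_uuid(name, braces) helper;
# the real implementation is not shown here and the namespace UUID is a
# placeholder, not the project's actual constant.
import uuid

_NAMESPACE = uuid.UUID('00000000-0000-0000-0000-000000000000')

def generate_uuid(name: str, braces: bool) -> str:
    guid = str(uuid.uuid5(_NAMESPACE, name)).upper()
    return '{' + guid + '}' if braces else guid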
15 util/vs_project_template/template.json Normal file
@@ -0,0 +1,15 @@
{
    "files": {
        "solution.sln.jinja": {
            "rename_to": "{{ project.name }}.sln"
        },
        "vs_project_files/target.vcxproj.jinja": {
            "one_per": "target",
            "rename_to": "vs_project_files/{{ target.name }}.vcxproj"
        },
        "vs_project_files/target.vcxproj.filters.jinja": {
            "one_per": "target",
            "rename_to": "vs_project_files/{{ target.name }}.vcxproj.filters"
        }
    }
}
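template.json evidently drives the project generator: each entry maps a Jinja template to its rendered output name, and "one_per": "target" asks for one rendered file per target. A hedged sketch of a consumer loop follows; the function name, context shape, and targets list are assumptions, not the project's actual generator.

# Hypothetical consumer of template.json; render context and target list are
# stand-ins for whatever the real generator passes in.
import json
from pathlib import Path
from jinja2 import Environment, FileSystemLoader

def render_templates(template_dir: Path, out_dir: Path, context: dict, targets: list) -> None:
    env = Environment(loader=FileSystemLoader(str(template_dir)))
    spec = json.loads((template_dir / 'template.json').read_text())
    for tpl_name, opts in spec['files'].items():
        per_target = opts.get('one_per') == 'target'
        for target in (targets if per_target else [None]):
            ctx = dict(context, target=target) if per_target else context
            out_name = env.from_string(opts['rename_to']).render(**ctx)
            out_path = out_dir / out_name
            out_path.parent.mkdir(parents=True, exist_ok=True)
            out_path.write_text(env.get_template(tpl_name).render(**ctx))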
73 util/vs_project_template/vs_project_files/target.vcxproj.filters.jinja Normal file
@@ -0,0 +1,73 @@
{%- set source_files = get_sources(target) -%}
{%- set private_headers = get_headers('private\\' + target.module.folder) -%}
{%- set public_headers = get_headers('public\\' + target.module.folder) -%}
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in source_files | folder_list(2) | sort %}
    <Filter Include="Source Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- if public_headers | length > 0 %}
    <Filter Include="Public Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in public_headers | folder_list(2) | sort %}
    <Filter Include="Public Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
{%- if private_headers | length > 0 %}
    <Filter Include="Private Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in private_headers | folder_list(2) | sort %}
    <Filter Include="Private Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
  </ItemGroup>
  <ItemGroup>
{%- for source_file in source_files %}
    <ClCompile Include="$(SolutionDir){{ source_file }}">
{%- set path = source_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Source Files\{{ path }}</Filter>
{%- else %}
      <Filter>Source Files</Filter>
{%- endif %}
    </ClCompile>
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
{%- for header_file in public_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Public Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Public Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
{%- for header_file in private_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Private Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Private Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
</Project>
67 util/vs_project_template/vs_project_files/target.vcxproj.jinja Normal file
@@ -0,0 +1,67 @@
{%- set ms_cxx_standard = {
    'c++14': 'c++14',
    'c++17': 'c++17',
    'c++20': 'c++20',
    'c++23': 'c++latest',
    'c++26': 'c++latest'}[project.cxx_standard] | default('c++14')
-%}
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
    <ProjectConfiguration Include="{{ build_type_name }}|x64">
      <Configuration>{{ build_type_name }}</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
{%- endfor %}
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <ProjectGuid>{{ generate_uuid('target_' + target.name, True) }}</ProjectGuid>
    <ProjectName>{{ target.name }}</ProjectName>
    <SConsCommandLine>{{ scons_exe }}</SConsCommandLine>
  </PropertyGroup>
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <PropertyGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <TargetPath>$(SolutionDir){{ target.filename(build_type) }}</TargetPath>
    <SPPBuildType>{{ build_type }}</SPPBuildType>
    <SPPTargetType>{{ target.type }}</SPPTargetType>
  </PropertyGroup>
{%- endfor %}
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Label="Configuration">
    <ConfigurationType>Makefile</ConfigurationType>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ItemGroup>
{%- for source_file in get_sources(target) %}
    <ClCompile Include="$(SolutionDir){{ source_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
{%- for header_file in get_headers('private\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
{%- for header_file in get_headers('public\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <ItemDefinitionGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <ClCompile>
      <PreprocessorDefinitions>{{ get_target_property(build_type, target.name, 'CPPDEFINES') | join(';') }};%(PreprocessorDefinitions);</PreprocessorDefinitions>
      <GenerateDebugInformation>{{ build_type != 'release' and 'true' or 'false' }}</GenerateDebugInformation>
      <AdditionalIncludeDirectories>{{ get_target_property(build_type, target.name, 'CPPPATH') | join(';') }};%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
      <MsExtensions>false</MsExtensions>
      <AdditionalOptions>{{ get_target_property(build_type, target.name, 'CCFLAGS') | join(' ') }}</AdditionalOptions> {# + get_target_property(build_type, target.name, 'CXXFLAGS')) #}
    </ClCompile>
  </ItemDefinitionGroup>
{%- endfor %}
  <Import Project="$(SolutionDir)external\scons-plus-plus\contrib\vs\spp.targets" />
</Project>
@@ -1,8 +1,8 @@
 {
     "configurations": [
-{% for executable in project.executables %}
-{% for build_type in project.build_types %}
-{% set build_type_name = build_type | capitalize -%}
+{%- for executable in project.executables -%}
+{%- for build_type in project.build_types -%}
+{%- set build_type_name = build_type | capitalize %}
         {
             "name": "{{ executable.name }} ({{ build_type | capitalize }})",
             "type": "cppvsdbg",
@@ -12,9 +12,10 @@
             "stopAtEntry": false,
             "cwd": "${workspaceFolder}",
             "environment": [],
-            "console": "integratedTerminal"
-        }
-{% endfor %}
-{% endfor %}
+            "console": "integratedTerminal",
+            "preLaunchTask": "{{ executable.name }} {{ build_type_name }}"
+        },
+{%- endfor %}
+{%- endfor %}
     ]
 }
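Beyond adding the preLaunchTask entry, these hunks mostly rebalance Jinja whitespace control. A tiny standalone demonstration of the difference between {% %} and {%- %}, using jinja2 directly and outside this project:

# Bare block tags leave their surrounding newlines in the output;
# the '-' variants strip the whitespace before the tag.
from jinja2 import Template

noisy = Template('[\n{% for x in [1] %}\n{{ x }}\n{% endfor %}\n]').render()
clean = Template('[\n{%- for x in [1] %}\n{{ x }}\n{%- endfor %}\n]').render()
print(repr(noisy))  # '[\n\n1\n\n]'
print(repr(clean))  # '[\n1\n]'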