Compare commits

..

16 Commits

Author SHA1 Message Date
1e4bb17251 Added recipe for json and updated yaml-cpp recipe. 2024-10-25 08:29:25 +02:00
c461b5da39 Added recipes for curl, libidn2, libpsl and libunistring. 2024-10-23 23:37:37 +02:00
b7cb5f7c48 Added openssl recipe. 2024-08-21 09:36:17 +02:00
9c64f982fd Added recipe for winsock2 and target_os to dependency conditions. 2024-08-19 18:36:37 +02:00
378c6ba341 Fixed Catch2 recipe. 2024-08-18 17:28:26 +02:00
96fc1984cd Fixed compilation with MSVC. 2024-08-18 17:28:25 +02:00
396350b295 Allow settings COMPILATIONDB_FILTER_FILES via config. 2024-08-18 17:26:32 +02:00
5de1ac4444 Enable experimental library features (jthread) for clang. 2024-08-18 17:26:30 +02:00
d5712120df Moved check of SYSTEM_CACHE_DIR accessibility to before it is used. 2024-08-18 17:25:59 +02:00
267d06a997 Added CXXFLAGS and CFLAGS to config variables. 2024-08-18 17:24:41 +02:00
089ea25c10 Adjusted error description to make more sense. 2024-08-17 18:11:36 +02:00
e1404fee58 Fixed zlib recipe on linux. 2024-08-17 18:11:13 +02:00
c4200393fb Fixed compilation with MSVC. 2024-08-15 15:27:39 +02:00
0c82036300 Update to new recipe system (S++ 2.0). 2024-08-14 23:33:04 +02:00
35b38b8b6e Some more work on the new dependency resolution system. 2024-08-08 14:32:28 +02:00
8bea4a6db5 Some tests. 2024-08-06 09:33:42 +02:00
84 changed files with 1272 additions and 2940 deletions

220
.gitignore vendored
View File

@ -1,219 +1 @@
# Project files __pycache__
/.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
# lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
# Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
# poetry.lock
# poetry.toml
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
# pdm.lock
# pdm.toml
.pdm-python
.pdm-build/
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
# pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# Redis
*.rdb
*.aof
*.pid
# RabbitMQ
mnesia/
rabbitmq/
rabbitmq-data/
# ActiveMQ
activemq-data/
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
# .idea/
# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
# Streamlit
.streamlit/secrets.toml

File diff suppressed because it is too large Load Diff

View File

@ -1,209 +0,0 @@
import gzip
import json
import os.path
import pickle
import subprocess
from abc import ABC, abstractmethod
from typing import Callable, Any, Iterable, Self, Generator
from SCons.Script import *
from SCons.Node.FS import File
from spp import get_spp
spp = get_spp()
def post_environment(**kwargs) -> None:
    """Register the AstJson builder and the AstJinja method on the S++ environment."""
    env: Environment = spp.globals['env']
    # Builder that dumps a clang JSON AST for a source file (see _gen_ast_json).
    env.Append(BUILDERS={'AstJson': Builder(action=_gen_ast_json)})
    env.AddMethod(_ast_jinja, 'AstJinja')
def _gen_ast_json(target: list[File], source: list[File], env: Environment):
    """SCons builder action: dump the clang AST of source[0] into target[0].

    The target suffix selects the on-disk format:
      '.bin' -> gzip-compressed pickle, '.gz' -> gzip-compressed JSON,
      anything else -> plain JSON.
    """
    clang_exe = env.WhereIs('clang++')
    cmd = [clang_exe, '-Xclang', '-ast-dump=json', '-fsyntax-only', '-Wno-unknown-warning-option',
           '-DSPP_AST_GEN', f'-std={env["CXX_STANDARD"]}']
    for define in env['CPPDEFINES']:
        cmd.append(f'-D{define}')
    for path in env['CPPPATH']:
        cmd.append(f'-I{path}')
    cmd.append(source[0].abspath)
    # BUG FIX: the original wrapped subprocess.Popen in an except clause for
    # CalledProcessError, but Popen never raises it — only run(check=True) does.
    # As a result a failing clang invocation was silently ignored and the
    # process was never waited on. Use run() so the exit code is checked.
    try:
        proc = subprocess.run(cmd, stdout=subprocess.PIPE, text=True, check=True)
    except subprocess.CalledProcessError as e:
        env.Error(f'Clang exited with code {e.returncode}.')
        return
    parsed = json.loads(proc.stdout)
    if target[0].suffix == '.bin':
        with gzip.open(target[0].abspath, 'wb') as f:
            pickle.dump(parsed, f)
    elif target[0].suffix == '.gz':
        with gzip.open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
    else:
        with open(target[0].abspath, 'wt') as f:
            json.dump(parsed, f)
class ASTNode(ABC):
    """Base wrapper around one or more clang JSON-AST declaration dicts."""

    @abstractmethod
    def _get_decls(self) -> Iterable[dict]:
        """Return the underlying declaration dicts wrapped by this node."""
        ...

    def inner(self) -> Iterable[dict]:
        """Yield the child declarations ('inner' entries) of every wrapped decl.

        BUG FIX: the original used itertools.chain, but itertools is never
        imported in this file, so calling inner() raised NameError. This
        builtin generator expression is an exact equivalent.
        """
        return (child for decl in self._get_decls() for child in decl['inner'])

    def inner_filtered(self, **kwargs) -> Iterable[dict]:
        """Yield child declarations whose entries match all given key/value pairs."""
        def _applies(decl: dict) -> bool:
            for name, val in kwargs.items():
                if decl.get(name) != val:
                    return False
            return True
        return (decl for decl in self.inner() if _applies(decl))
class SimpleASTNode(ASTNode):
    """ASTNode backed by exactly one declaration dict (kept in self._decl)."""
    def __init__(self, decl: dict) -> None:
        self._decl = decl
    def _get_decls(self) -> Iterable[dict]:
        # Single-element tuple: this node wraps exactly one decl.
        return (self._decl,)
# Marker subclass: a plain value node inside an annotation (no extra accessors).
class Value(SimpleASTNode): ...
class Annotation(SimpleASTNode):
    """An AnnotateAttr declaration attached to a method."""

    @property
    def values(self) -> Iterable[Value]:
        # Wrap every child decl of the annotation in a Value node.
        return map(Value, self.inner())
class Param(SimpleASTNode):
    """A ParmVarDecl: a single parameter of a method."""

    @property
    def name(self) -> str:
        # Unnamed parameters carry no 'name' key; report them as ''.
        return self._decl.get('name', '')

    @property
    def type(self) -> str:
        type_info = self._decl['type']
        return type_info['qualType']
class Method(SimpleASTNode):
    """A CXXMethodDecl together with the access level it was declared under."""

    def __init__(self, decl: dict, access: str) -> None:
        super().__init__(decl)
        self._access = access

    @property
    def access(self) -> str:
        # Access level ('public'/'protected'/'private') tracked by Class.methods.
        return self._access

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def mangled_name(self) -> str:
        return self._decl['mangledName']

    @property
    def type(self) -> str:
        # Full signature string, e.g. "int (float, bool)".
        return self._decl['type']['qualType']

    @property
    def return_type(self) -> str:
        # Everything before the first '(' of the signature string.
        head, _sep, _rest = self.type.partition('(')
        return head.strip()

    @property
    def params(self) -> Iterable[Param]:
        return map(Param, self.inner_filtered(kind='ParmVarDecl'))

    @property
    def annotations(self) -> Iterable[Annotation]:
        return map(Annotation, self.inner_filtered(kind='AnnotateAttr'))
class Class(SimpleASTNode):
    """A completely-defined CXXRecordDecl declared via 'class' or 'struct'."""

    @property
    def name(self) -> str:
        return self._decl['name']

    @property
    def tagUsed(self) -> str:
        # 'class' or 'struct', as reported by clang.
        return self._decl['tagUsed']

    @property
    def methods(self) -> Generator[Method]:
        """Yield explicit (non-implicit) methods, tracking the current access level."""
        # 'class' members default to private, 'struct' members to public.
        access = 'public' if self.tagUsed != 'class' else 'private'
        for decl in self.inner():
            kind = decl['kind']
            if kind == 'AccessSpecDecl':
                access = decl['access']
            elif kind == 'CXXMethodDecl' and not decl.get('isImplicit', False):
                yield Method(decl, access)
class Namespace(ASTNode, ABC):
    """An AST scope that can be drilled into by namespace name."""

    def get_namespace(self, ns_name: str) -> Self:
        # C++ namespaces can be reopened, so collect every matching NamespaceDecl.
        decls = list(self.inner_filtered(kind='NamespaceDecl', name=ns_name))
        return InnerNamespace(decls)

    @property
    def classes(self) -> Iterable[Class]:
        return map(Class, self.inner_filtered(kind='CXXRecordDecl', tagUsed='class', completeDefinition=True))
class InnerNamespace(Namespace):
    """Namespace backed by an explicit list of NamespaceDecl dicts."""
    def __init__(self, decls: list[dict]) -> None:
        self._decls = decls
    def _get_decls(self) -> Iterable[dict]:
        return self._decls
class Ast(Namespace):
    """Lazy-loading root namespace, deserialized from an AstJson output file.

    Format is chosen by file suffix (mirroring _gen_ast_json): '.bin' is a
    gzipped pickle, '.gz' is gzipped JSON, anything else is plain JSON.
    A missing file behaves like an empty AST instead of failing.
    """

    def __init__(self, file: File) -> None:
        self._file = file
        self._data_dict: dict | None = None  # parsed AST, loaded on first access

    def _get_decls(self) -> tuple[dict]:
        if self._data_dict is not None:
            return (self._data_dict,)
        path = self._file.abspath
        if not self._file.exists():
            # The dump may not have been generated yet: expose an empty AST.
            self._data_dict = {'inner': []}
        elif self._file.suffix == '.bin':
            with gzip.open(path, 'rb') as f:
                self._data_dict = pickle.load(f)
        elif self._file.suffix == '.gz':
            with gzip.open(path) as f:
                self._data_dict = json.load(f)
        else:
            with open(path, 'r') as f:
                self._data_dict = json.load(f)
        return (self._data_dict,)
def _ast_jinja(env: Environment, source: File, target: File, template: File, **kwargs):
    """Render `template` into `target` with the clang AST of `source` exposed
    to Jinja as the 'ast' context variable.

    The AST dump is cached under CACHE_DIR/ast_json, mirroring the source's
    path relative to the project root.
    """
    cache_dir = env['CACHE_DIR']
    rel_path = env.Dir('#').rel_path(source)
    json_file = env.File(os.path.join(cache_dir, 'ast_json', f'{rel_path}.bin'))
    ast_json = env.AstJson(target=json_file, source=source, **kwargs)
    ast_jinja = env.Jinja(
        target=target,
        source=template,
        JINJA_CONTEXT = {
            # Lazy wrapper: the dump is only read when the template touches 'ast'.
            'ast': Ast(json_file)
        },
        **kwargs
    )
    # The template output depends on the AST dump being up to date.
    env.Depends(ast_jinja, ast_json)
    # env.AlwaysBuild(ast_jinja)
    # env.Requires(ast_jinja, ast_json)
    # env.Requires(source, ast_jinja)
    # Break the cycle source -> ast_json -> ast_jinja -> source in SCons' graph.
    env.Ignore(ast_json, ast_jinja)
    return ast_jinja

View File

@ -9,7 +9,7 @@ _BUILT_STAMPFILE = '.spp_built'
Import('env') Import('env')
def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure', skip_steps = ()) -> dict: def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure') -> dict:
config = env['BUILD_TYPE'] config = env['BUILD_TYPE']
build_dir = os.path.join(project_root, f'build_{config}') build_dir = os.path.join(project_root, f'build_{config}')
install_dir = os.path.join(project_root, f'install_{config}') install_dir = os.path.join(project_root, f'install_{config}')
@ -32,15 +32,9 @@ def _autotools_project(env: Environment, project_root: str, config_args: 'list[s
if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'): if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root) subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root)
if 'configure' not in skip_steps:
subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True) subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
if 'build' not in skip_steps:
subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True) subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
if 'install' not in skip_steps:
subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True) subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
else:
# must still create the install dir for the stamp file
os.makedirs(install_dir, exist_ok=True)
pathlib.Path(install_dir, _BUILT_STAMPFILE).touch() pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
libpath = [] libpath = []
@ -50,7 +44,6 @@ def _autotools_project(env: Environment, project_root: str, config_args: 'list[s
libpath.append(full_path) libpath.append(full_path)
return { return {
'build_dir': build_dir,
'install_dir': install_dir, 'install_dir': install_dir,
'LIBPATH': libpath, 'LIBPATH': libpath,
'CPPPATH': [os.path.join(install_dir, 'include')] 'CPPPATH': [os.path.join(install_dir, 'include')]

View File

@ -31,28 +31,19 @@ def _generate_cmake_cxx_flags(env, dependencies: 'list[dict]') -> str:
def _get_cmake_cxx_standard(env: Environment) -> str: def _get_cmake_cxx_standard(env: Environment) -> str:
return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX" return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX"
def _get_cmake_prefix_path(dependencies: 'list[dict]') -> str:
parts = []
for dependency in dependencies:
for path in dependency.get('CMAKE_PREFIX_PATH', []):
parts.append(path)
return cmd_quote(';'.join(parts))
def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]': def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]':
args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}', args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}',
f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}', f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}',
f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}', f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}']
f'-DCMAKE_PREFIX_PATH={_get_cmake_prefix_path(dependencies)}']
for dependency in dependencies: for dependency in dependencies:
for name, value in dependency.get('CMAKE_VARS', {}).items(): for name, value in dependency.get('CMAKE_VARS', {}).items():
args.append(f'-D{name}={cmd_quote(value)}') args.append(f'-D{name}={cmd_quote(value)}')
return args return args
def _calc_version_hash(env, dependencies: 'list[dict]') -> str: def _calc_version_hash(dependencies: 'list[dict]') -> str:
return json.dumps({ return json.dumps({
'version': _VERSION, 'version': _VERSION,
'dependencies': dependencies, 'dependencies': dependencies
'cxxflags': env['DEPS_CXXFLAGS']
}) })
def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict: def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict:
@ -60,7 +51,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
build_dir = os.path.join(project_root, f'build_{config}') build_dir = os.path.join(project_root, f'build_{config}')
install_dir = os.path.join(project_root, f'install_{config}') install_dir = os.path.join(project_root, f'install_{config}')
version_hash = _calc_version_hash(env, dependencies) version_hash = _calc_version_hash(dependencies)
stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE) stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE)
is_built = stamp_file.exists() is_built = stamp_file.exists()
@ -105,9 +96,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
libpath.append(full_path) libpath.append(full_path)
return { return {
'build_dir': build_dir,
'install_dir': install_dir, 'install_dir': install_dir,
'BINPATH': [os.path.join(install_dir, 'bin')],
'LIBPATH': libpath, 'LIBPATH': libpath,
'CPPPATH': [os.path.join(install_dir, 'include')] 'CPPPATH': [os.path.join(install_dir, 'include')]
} }

View File

@ -1,13 +0,0 @@
from spp import get_spp
spp = get_spp()
def available(**kwargs) -> bool:
    """Only activate this compatibility addon when the project targets S++ 1.0.x."""
    target_version = spp.globals['config']['SPP_TARGET_VERSION']
    return target_version[0:2] == (1, 0)
def pre_environment(**kwargs) -> None:
    # S++ 1.0.0 had the unity_build tool enabled by default; emulate that here.
    tools = spp.globals['tools']
    tools.append('unity_build')
def post_environment(**kwargs) -> None:
    # Point legacy projects at the stable branch of the default recipe repository.
    fallback = {
        'repo_name': 'mewin',
        'remote_url': 'https://git.mewin.de/mewin/spp_recipes.git',
        'git_ref': 'stable',
    }
    spp.globals['env']['_SPP_FALLBACK_RECIPE_REPO'] = fallback

View File

@ -1,35 +0,0 @@
import json
from pathlib import Path
from spp import get_spp, TargetType
spp = get_spp()
def _should_generate() -> bool:
    """True when at least one program or library target exists."""
    buildable = (TargetType.PROGRAM, TargetType.STATIC_LIBRARY, TargetType.SHARED_LIBRARY)
    return any(target.target_type in buildable for target in spp.targets)
def post_finalize(**kwargs) -> None:
    """Persist the current build type into the config cache (best effort)."""
    if not _should_generate():
        return
    cache_file = Path(spp.env['CACHE_DIR']) / 'config_cache.json'
    cache = {}
    if cache_file.exists():
        # A corrupt or unreadable cache only warrants a warning; start fresh.
        try:
            cache = json.loads(cache_file.read_text())
        except Exception as e:
            spp.env.Warn(f'Error while loading config cache: {e}.')
    cache['build_type'] = spp.env['BUILD_TYPE']
    try:
        cache_file.write_text(json.dumps(cache))
    except Exception as e:
        spp.env.Warn(f'Error while saving config cache: {e}.')

View File

@ -37,22 +37,19 @@ def _download_file(url: str, path: pathlib.Path) -> None:
urllib.request.urlretrieve(url, dl_path) urllib.request.urlretrieve(url, dl_path)
dl_path.rename(path) dl_path.rename(path)
def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int = 0) -> None: def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int) -> None:
if archive_type == ArchiveType.TAR_GZ: if archive_type == ArchiveType.TAR_GZ:
file = tarfile.open(str(path)) file = tarfile.open(str(path))
filter = tarfile.data_filter
if skip_folders != 0: if skip_folders != 0:
def skip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo: def skip_filer(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
name_parts = member.name.split('/', skip_folders) name_parts = member.name.split('/')
if len(name_parts) <= skip_folders: if len(name_parts) <= skip_folders:
return None return None
return member.replace(name = '/'.join(name_parts[skip_folders:])) return member.replace(name = '/'.join(name_parts[skip_folders:]))
filter = skip_filter file.extraction_filter = skip_filer
file.extractall(output_dir, filter=filter) file.extractall(output_dir)
file.close() file.close()
elif archive_type == ArchiveType.ZIP: elif archive_type == ArchiveType.ZIP:
if skip_folders != 0:
raise Exception('skip_folders option is not yet supported for zip-archives :()')
file = zipfile.open(str(path)) file = zipfile.open(str(path))
file.extractall(output_dir) file.extractall(output_dir)
file.close() file.close()

95
addons/gitbranch.py Normal file
View File

@ -0,0 +1,95 @@
from git import Repo
from git.exc import GitError
import hashlib
import re
from SCons.Script import *
Import('env')
def _clone(env: Environment, repo_name: str, remote_url: str):
    """Open (or create on first use) the bare mirror repository for repo_name.

    Returns a (repo, origin_remote) pair. The bare clone lives under
    CLONE_DIR/git/<repo_name>/_bare.
    """
    repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
    try:
        repo = Repo(repo_dir)
        origin = repo.remotes['origin']
    except GitError:
        # Repo missing or has no 'origin' yet: initialize an empty bare repo.
        print(f'Initializing git repository at {repo_dir}.')
        repo = Repo.init(repo_dir, bare=True)
        origin = repo.create_remote('origin', remote_url)
    return repo, origin
def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
    """Check out git_ref of a repository into a dedicated worktree.

    Returns {'checkout_root': <worktree path>}. Worktrees are cached per ref;
    an existing worktree is only pulled when UPDATE_REPOSITORIES is set and its
    HEAD is on a branch (a detached HEAD, e.g. a tag checkout, is left alone).
    """
    repo, origin = _clone(env, repo_name, remote_url)
    # Worktree directory is keyed by a short hash of the ref string.
    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
    if not os.path.exists(worktree_dir):
        print(f'Checking out into {worktree_dir}.')
        # Fetch first so the requested ref/tag exists in the bare mirror.
        origin.fetch(tags=True)
        os.makedirs(worktree_dir)
        repo.git.worktree('add', worktree_dir, git_ref)
    elif env['UPDATE_REPOSITORIES']:
        worktree_repo = Repo(worktree_dir)
        if not worktree_repo.head.is_detached:
            print(f'Updating git repository at {worktree_dir}')
            worktree_origin = worktree_repo.remotes['origin']
            worktree_origin.pull()
        else:
            print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
    return {
        'checkout_root': worktree_dir
    }
def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
    """Return the tag names of a repository, fetching from origin when requested."""
    repo, origin = _clone(env, repo_name, remote_url)
    if force_fetch or env['UPDATE_REPOSITORIES']:
        origin.fetch(tags=True)
    return [tag.name for tag in repo.tags]
def _make_callable(val):
if callable(val):
return val
else:
return lambda env: val
def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None:
    """Populate a recipe module's globals with 'versions', 'dependencies' and
    'cook' callbacks backed by a git repository.

    repo_name/repo_url/tag_pattern may be plain values or callables taking env.
    Versions come from remote tags matching tag_pattern, from the `versions`
    callback, or fall back to [(0, 0, 0)]. The checkout ref is built either
    from tag_fn(version) or from ref_fn(env, version).
    """
    # NOTE(review): `dependencies` is a mutable default shared across calls and
    # returned as-is by _dependencies — confirm callers never mutate it.
    _repo_name = _make_callable(repo_name)
    _repo_url = _make_callable(repo_url)
    _tag_pattern = _make_callable(tag_pattern)
    versions_cb = versions and _make_callable(versions)
    def _versions(env: Environment, update: bool = False):
        # Prefer deriving versions from tags matching tag_pattern.
        pattern = _tag_pattern(env)
        if pattern:
            tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update)
            result = []
            for tag in tags:
                match = pattern.match(tag)
                if match:
                    # Numeric capture groups become the version tuple.
                    result.append(tuple(int(part) for part in match.groups() if part is not None))
            if len(result) == 0 and not update:
                # No matching local tags: retry once with a forced fetch.
                return _versions(env, update=True)
            return result
        elif versions_cb:
            return versions_cb(env)
        else:
            return [(0, 0, 0)]
    def _dependencies(env: Environment, version) -> 'dict':
        # Static dependency mapping, identical for all versions.
        return dependencies
    def _cook(env: Environment, version) -> dict:
        if tag_fn:
            git_ref = f'refs/tags/{tag_fn(version)}'
        else:
            assert ref_fn
            git_ref = ref_fn(env, version)
        repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref)
        return cook_fn(env, repo)
    globals['versions'] = _versions
    globals['dependencies'] = _dependencies
    globals['cook'] = _cook
# Expose the git helpers as methods on the construction environment.
for _fn, _name in ((_git_branch, 'GitBranch'), (_git_tags, 'GitTags'), (_git_recipe, 'GitRecipe')):
    env.AddMethod(_fn, _name)
Return('env')

View File

@ -1,178 +1,10 @@
# based on https://github.com/hgomersall/scons-jinja
from SCons.Script import *
import os
import pathlib import pathlib
from spp import get_spp Import('env')
try: if not hasattr(env, 'Jinja'):
import jinja2 Return('env')
from jinja2.utils import open_if_exists
except ImportError:
jinja2 = None
print('No Jinja :(')
spp = get_spp()
def available(**kwargs) -> bool:
return jinja2 is not None
def post_environment(**kwargs) -> None:
env: Environment = spp.globals['env']
env.SetDefault(JINJA_CONTEXT={})
env.SetDefault(JINJA_ENVIRONMENT_VARS={})
env.SetDefault(JINJA_FILTERS={'load_config': _jinja_load_config})
env.SetDefault(JINJA_GLOBALS={
'file_size': lambda *args: _file_size(env, *args),
'file_content_hex': lambda *args: _file_content_hex(env, *args)
})
env.SetDefault(JINJA_TEMPLATE_SEARCHPATH=['data/jinja'])
env.SetDefault(JINJA_CONFIG_SEARCHPATH=[env.Dir('#data/config')])
env.SetDefault(JINJA_FILE_SEARCHPATH=[env.Dir('#')])
env['BUILDERS']['Jinja'] = Builder(
action=render_jinja_template
)
scanner = env.Scanner(function=jinja_scanner,
skeys=['.jinja'])
env.Append(SCANNERS=scanner)
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
class FileSystemLoaderRecorder(jinja2.FileSystemLoader):
""" A wrapper around FileSystemLoader that records files as they are
loaded. These are contained within loaded_filenames set attribute.
"""
def __init__(self, searchpath, encoding='utf-8'):
self.loaded_filenames = set()
super(FileSystemLoaderRecorder, self).__init__(searchpath, encoding)
def get_source(self, environment, template):
"""Overwritten FileSystemLoader.get_source method that extracts the
filename that is used to load each filename and adds it to
self.loaded_filenames.
"""
for searchpath in self.searchpath:
filename = os.path.join(searchpath, template)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
self.loaded_filenames.add(filename)
return super(FileSystemLoaderRecorder, self).get_source(
environment, template)
# If the template isn't found, then we have to drop out.
raise jinja2.TemplateNotFound(template)
def jinja_scanner(node, env, path):
# Instantiate the file as necessary
node.get_text_contents()
template_dir, filename = os.path.split(str(node))
template_search_path = ([template_dir] +
env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
template_loader = FileSystemLoaderRecorder(template_search_path)
jinja_env = jinja2.Environment(loader=template_loader,
extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
jinja_env.globals.update(env['JINJA_GLOBALS'])
try:
template = jinja_env.get_template(filename)
except jinja2.TemplateNotFound as e:
env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
# We need to render the template to do all the necessary loading.
#
# It's necessary to respond to missing templates by grabbing
# the content as the exception is raised. This makes sure of the
# existence of the file upon which the current scanned node depends.
#
# I suspect that this is pretty inefficient, but it does
# work reliably.
context = env['JINJA_CONTEXT']
last_missing_file = ''
while True:
try:
template.render(**context)
except jinja2.TemplateNotFound as e:
if last_missing_file == str(e):
# We've already been round once for this file,
# so need to raise
env.Error(f'Missing template: {os.path.join(template_dir, str(e))}')
last_missing_file = str(e)
# Find where the template came from (using the same ordering
# as Jinja uses).
for searchpath in template_search_path:
filename = os.path.join(searchpath, last_missing_file)
if os.path.exists(filename):
continue
else:
env.File(filename).get_text_contents()
continue
break
# Get all the files that were loaded. The set includes the current node,
# so we remove that.
found_nodes_names = list(template_loader.loaded_filenames)
try:
found_nodes_names.remove(str(node))
except ValueError as e:
env.Error(f'Missing template node: {str(node)}')
return [env.File(f) for f in found_nodes_names]
def render_jinja_template(target, source, env):
output_str = ''
if not source:
source = [f'{target}.jinja']
for template_file in source:
template_dir, filename = os.path.split(str(template_file))
template_search_path = ([template_dir] +
env.subst(env['JINJA_TEMPLATE_SEARCHPATH']))
template_loader = FileSystemLoaderRecorder(template_search_path)
jinja_env = jinja2.Environment(loader=template_loader,
extensions=['jinja2.ext.do'], **env['JINJA_ENVIRONMENT_VARS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
jinja_env.globals.update(env['JINJA_GLOBALS'])
jinja_env.filters.update(env['JINJA_FILTERS'])
template = jinja_env.get_template(filename)
context = env['JINJA_CONTEXT']
template.render(**context)
output_str += template.render(**context)
with open(str(target[0]), 'w') as target_file:
target_file.write(output_str)
return None
def _jinja_load_config(env, config_name): def _jinja_load_config(env, config_name):
searched_paths = [] searched_paths = []
@ -191,29 +23,16 @@ def _wrap_jinja(orig_jinja):
def _wrapped(env, target, **kwargs): def _wrapped(env, target, **kwargs):
if 'source' not in kwargs: if 'source' not in kwargs:
kwargs['source'] = f'{target}.jinja' kwargs['source'] = f'{target}.jinja'
target = orig_jinja(target=target, **kwargs) target = orig_jinja(**kwargs)
if 'depends' in kwargs: if 'depends' in kwargs:
for dependency in kwargs['depends']: for dependency in kwargs['depends']:
env.Depends(target, dependency) env.Depends(target, dependency)
# env.Depends(alias_prepare, target)
return target return target
return _wrapped return _wrapped
def _find_file(env, fname): env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
for path in env['JINJA_FILE_SEARCHPATH']: env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
fullpath = os.path.join(path.abspath, fname) env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
if os.path.exists(fullpath): env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
return env.File(fullpath) Return('env')
return None
def _file_size(env, fname: str) -> int:
file = _find_file(env, fname)
if not file:
env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
return file.get_size()
def _file_content_hex(env, fname: str) -> str:
file = _find_file(env, fname)
if not file:
env.Error(f'File does not exist: {fname}. Searched in: {[d.abspath for d in env["JINJA_FILE_SEARCHPATH"]]}')
bytes = file.get_contents()
return ','.join([hex(byte) for byte in bytes])

View File

@ -1,176 +0,0 @@
from git import Repo
from git.exc import GitError
import hashlib
import inspect
import os
import shutil
from SCons.Script import *
Import('env')
def _clone(env: Environment, repo_name: str, remote_url: str):
    """Open (or create on first use) the bare mirror repository for repo_name.

    Returns a (repo, origin_remote) pair. The bare clone lives under
    CLONE_DIR/git/<repo_name>/_bare.
    """
    repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
    try:
        repo = Repo(repo_dir)
        origin = repo.remotes['origin']
    except GitError:
        # Repo missing or has no 'origin' yet: initialize an empty bare repo.
        print(f'Initializing git repository at {repo_dir}.')
        repo = Repo.init(repo_dir, bare=True)
        origin = repo.create_remote('origin', remote_url)
    return repo, origin
def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
    # Check out `git_ref` of the repository into a worktree below CLONE_DIR
    # and return {'checkout_root', 'repo', 'origin'}.
    repo, origin = _clone(env, repo_name, remote_url)
    # Older S++ versions named the worktree after a short hash of the ref;
    # newer versions use the (sanitised) ref itself.  Migrate old checkouts.
    old_worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, git_ref.replace('/', '_'))
    if os.path.exists(old_worktree_dir) and not os.path.islink(old_worktree_dir):
        if not os.path.exists(worktree_dir):
            print(f'Found old Git worktree at {old_worktree_dir}, moving it to {worktree_dir}.')
            try:
                repo.git.worktree('move', old_worktree_dir, worktree_dir)
            except GitError:
                # `git worktree move` can fail (e.g. locked worktree); fall
                # back to moving the files and repairing the metadata.
                print('Error while moving worktree, manually moving and repairing it instead.')
                shutil.move(old_worktree_dir, worktree_dir)
                try:
                    repo.git.worktree('repair', worktree_dir)
                except GitError:
                    # Last resort: drop both worktrees; it is re-created below.
                    print('Also didn\'t work, removing and redownloading it.')
                    try:
                        repo.git.worktree('remove', '-f', worktree_dir)
                    except GitError: ...
                    try:
                        repo.git.worktree('remove', '-f', old_worktree_dir)
                    except GitError: ...
                    if os.path.exists(worktree_dir):
                        shutil.rmtree(worktree_dir, ignore_errors=True)
                    # this is all we can do, I guess
        else:
            print(f'Found old Git worktree at {old_worktree_dir}, but the new one at {worktree_dir} already exists. Removing the old one.')
            repo.git.worktree('remove', '-f', old_worktree_dir)
        # Keep a symlink at the old (hashed) location so older S++ versions
        # sharing this cache still find their checkout.
        print('Attempting to create a symlink for older S++ versions.')
        try:
            os.symlink(worktree_dir, old_worktree_dir, target_is_directory=True)
        except Exception as e:
            print(f'Failed: {e}')
    update_submodules = False
    if not os.path.exists(worktree_dir):
        # First checkout of this ref.
        print(f'Checking out into {worktree_dir}.')
        origin.fetch(tags=True, force=True)
        os.makedirs(worktree_dir)
        repo.git.worktree('add', worktree_dir, git_ref)
        worktree_repo = Repo(worktree_dir)
        update_submodules = True
    elif env['UPDATE_REPOSITORIES']:
        worktree_repo = Repo(worktree_dir)
        if not worktree_repo.head.is_detached:
            print(f'Updating git repository at {worktree_dir}')
            worktree_origin = worktree_repo.remotes['origin']
            worktree_origin.pull()
            update_submodules = True
        else:
            # Tag checkouts are detached; there is nothing to pull.
            print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
    else:
        worktree_repo = Repo(worktree_dir)
    if update_submodules:
        for submodule in worktree_repo.submodules:
            submodule.update(init=True)
        for submodule in worktree_repo.submodules:
            # A submodule directory containing only '.git' indicates a broken
            # checkout; force a full re-initialisation.
            if os.listdir(submodule.abspath) == ['.git']:
                print(f'Submodule {submodule.name} seems borked, attempting to fix it.')
                worktree_repo.git.submodule('deinit', '-f', submodule.path)
                worktree_repo.git.submodule('init', submodule.path)
                worktree_repo.git.submodule('update', submodule.path)
    return {
        'checkout_root': worktree_dir,
        'repo': repo,
        'origin': origin
    }
def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
    """Return the tag names known for *repo_name*, fetching from the remote
    when forced or when UPDATE_REPOSITORIES is enabled."""
    repo, origin = _clone(env, repo_name, remote_url)
    should_fetch = force_fetch or env['UPDATE_REPOSITORIES']
    if should_fetch:
        try:
            origin.fetch(tags=True)
        except GitError:
            # A fetch failure is not fatal; fall back to locally known tags.
            env.Warn(f'Error fetching tags from {repo_name} ({remote_url})')
    tag_names = []
    for tag in repo.tags:
        tag_names.append(tag.name)
    return tag_names
def _make_callable(val):
if callable(val):
return val
else:
def _wrapped(*args, **kwargs):
return val
return _wrapped
def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None:
    # Install the standard recipe entry points ('versions', 'dependencies',
    # 'cook') into a recipe module's globals, backed by a git repository.
    # repo_name / repo_url / tag_pattern / versions / dependencies may each be
    # a plain value or a callable taking the environment.
    # NOTE(review): `dependencies: dict = {}` is a mutable default argument;
    # it is only read here, but callers must not rely on mutating it.
    _repo_name = _make_callable(repo_name)
    _repo_url = _make_callable(repo_url)
    _tag_pattern = _make_callable(tag_pattern)
    versions_cb = versions and _make_callable(versions)
    dependencies_cb = _make_callable(dependencies)
    def _versions(env: Environment, update: bool = False, options: dict = {}):
        # Version discovery: match remote tag names against the tag pattern.
        if 'ref' in options:
            return [(0, 0, 0)] # no versions if compiling from a branch
        pattern_signature = inspect.signature(_tag_pattern)
        kwargs = {}
        # Forward `options` only when the callback's signature asks for it.
        if 'options' in pattern_signature.parameters:
            kwargs['options'] = options
        pattern = _tag_pattern(env, **kwargs)
        if pattern:
            tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update)
            result = []
            for tag in tags:
                match = pattern.match(tag)
                if match:
                    result.append(tuple(int(part) for part in match.groups() if part is not None))
            if len(result) == 0 and not update:
                # Nothing matched locally -> retry once with a forced fetch.
                return _versions(env, update=True)
            return result
        elif versions_cb:
            return versions_cb(env)
        else:
            return [(0, 0, 0)]
    def _dependencies(env: Environment, version, options: dict) -> 'dict':
        # Forward to the user-supplied dependencies value/callable.
        dependencies_signature = inspect.signature(dependencies_cb)
        kwargs = {}
        if 'options' in dependencies_signature.parameters:
            kwargs['options'] = options
        return dependencies_cb(env, version, **kwargs)
    def _cook(env: Environment, version, options: dict = {}) -> dict:
        # Resolve the git ref to build: explicit 'ref' option, tag_fn-derived
        # tag, or ref_fn fallback; then check out and delegate to cook_fn.
        if 'ref' in options:
            git_ref = options['ref']
        elif tag_fn:
            tag_signature = inspect.signature(tag_fn)
            kwargs = {}
            if 'options' in tag_signature.parameters:
                kwargs['options'] = options
            git_ref = f'refs/tags/{tag_fn(version, **kwargs)}'
        else:
            assert ref_fn
            git_ref = ref_fn(env, version)
        repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref)
        cook_signature = inspect.signature(cook_fn)
        kwargs = {}
        if 'options' in cook_signature.parameters:
            kwargs['options'] = options
        return cook_fn(env, repo, **kwargs)
    globals['versions'] = _versions
    globals['dependencies'] = _dependencies
    globals['cook'] = _cook
# Expose the helpers as environment methods for recipes to use.
env.AddMethod(_git_branch, 'GitBranch')
env.AddMethod(_git_tags, 'GitTags')
env.AddMethod(_git_recipe, 'GitRecipe')
Return('env')

View File

@ -1,11 +0,0 @@
import os
Import('env')
def _recipe_repository(env, repo_name: str, remote_url: str, git_ref: str = 'master') -> None:
    """Register an external recipe repository: check it out via GitBranch and
    add its 'recipes' folder to SPP_RECIPES_FOLDERS."""
    checkout = env.GitBranch(repo_name = os.path.join('recipe_repos', repo_name), remote_url = remote_url, git_ref = git_ref)
    recipes_path = os.path.join(checkout['checkout_root'], 'recipes')
    env.Append(SPP_RECIPES_FOLDERS = [recipes_path])
env.AddMethod(_recipe_repository, 'RecipeRepo')
Return('env')

View File

@ -1,13 +0,0 @@
# Debian sid based build image for CI: clang-19, gcc-14, Python tooling and
# common build systems (ninja, cmake).
FROM debian:sid-slim
RUN apt-get -y update && \
    apt-get -y upgrade && \
    apt-get -y install build-essential clang-19 gcc-14 g++-14 python3 python3-pip \
    virtualenv python-is-python3 clang-tidy git ninja-build cmake
# Expose the versioned clang binaries under their unversioned names.
RUN ln -s /usr/bin/clang-19 /usr/local/bin/clang \
    && ln -s /usr/bin/clang++-19 /usr/local/bin/clang++ \
    && ln -s /usr/bin/clang-tidy-19 /usr/local/bin/clang-tidy \
    && ln -s /usr/bin/run-clang-tidy-19 /usr/local/bin/run-clang-tidy
# CI entry scripts (see scripts/*.sh).
COPY scripts /opt/scripts
RUN chmod a+x /opt/scripts/*.sh

View File

@ -1,9 +0,0 @@
#!/bin/bash
# CI build script: build the debug / release_debug / release clang variants
# inside a fresh virtualenv.  -x echoes commands, -e aborts on first failure.
set -xe
python -m virtualenv venv
source venv/bin/activate
pip install scons
pip install -r external/scons-plus-plus/requirements.txt
scons -j$(nproc) --build_type=debug --variant=linux_clang_debug --compiler=clang
scons -j$(nproc) --build_type=release_debug --variant=linux_clang_release_debug --compiler=clang
scons -j$(nproc) --build_type=release --variant=linux_clang_release --compiler=clang

View File

@ -1,9 +0,0 @@
#!/bin/bash
set -xe
python -m virtualenv venv
source venv/bin/activate
pip install scons
pip install -r external/scons-plus-plus/requirements.txt
scons -j$(nproc) --build_type=debug --variant=linux_gcc_debug --compiler=gcc
scons -j$(nproc) --build_type=release_debug --variant=linux_gcc_release_debug --compiler=gcc
scons -j$(nproc) --build_type=release --variant=linux_gcc_release --compiler=gcc

View File

@ -1,53 +0,0 @@
<!-- MSBuild shim: makes Visual Studio delegate Build/Clean/Rebuild of a
     project to the SCons (S++) build via $(SConsCommandLine). -->
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <SolutionExt>.sln</SolutionExt>
    <Language>C++</Language>
    <DefaultLanguageSourceExtension>.cpp</DefaultLanguageSourceExtension>
  </PropertyGroup>
  <PropertyGroup>
    <!-- Derive output locations from $(TargetPath); SPP* properties
         parameterise the scons invocation and default sensibly. -->
    <TargetFileName Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetFileName('$(TargetPath)'))</TargetFileName>
    <TargetDir Condition="'$(TargetPath)' != ''">$([System.IO.Path]::GetDirectoryName('$(TargetPath)'))</TargetDir>
    <OutputPath>$(TargetDir)</OutputPath>
    <LocalDebuggerCommand Condition="'$(LocalDebuggerCommand)' == ''">$(TargetPath)</LocalDebuggerCommand>
    <SConsCommandLine Condition="'$(SConsCommandLine)' == ''">scons</SConsCommandLine>
    <SPPNumProcs Condition="'$(SPPNumProcs)' == ''">$([System.Environment]::ProcessorCount)</SPPNumProcs>
    <SPPBuildType Condition="'$(SPPBuildType)' == ''">debug</SPPBuildType>
    <SPPTargetType Condition="'$(SPPTargetType)' == ''">executable</SPPTargetType>
    <OutDir>$(OutputPath)\</OutDir>
    <IntDir>$(SolutionDir)cache\msbuild\</IntDir>
  </PropertyGroup>
  <Import Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />
  <!-- 'meta' targets have nothing to build; everything else shells out to scons. -->
  <Target Name="Build" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Build" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping build for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Clean" Condition="'$(SPPTargetType)' != 'meta'">
    <Exec Command="$(SConsCommandLine) -c -j$(SPPNumProcs) --build_type=$(SPPBuildType) --unity=disable $(TargetPath)"
          WorkingDirectory="$(SolutionDir)" />
  </Target>
  <!--<Target Name="Clean" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping clean for meta target $(ProjectName)" />
  </Target>-->
  <Target Name="Rebuild" Condition="'$(SPPTargetType)' != 'meta'" DependsOnTargets="Clean;Build" />
  <!--<Target Name="Rebuild" Condition="'$(SPPTargetType)' == 'meta'">
    <Message Importance="low" Text="Skipping rebuild for meta target $(ProjectName)" />
  </Target>-->
  <!-- This target is needed just to suppress "warning NU1503: Skipping restore for project '...'. The project file may be invalid or missing targets
       required for restore." -->
  <Target Name="_IsProjectRestoreSupported" Returns="@(_ValidProjectsForRestore)">
    <ItemGroup>
      <_ValidProjectsForRestore Include="$(MSBuildProjectFullPath)" />
    </ItemGroup>
  </Target>
  <Import Condition="'$(_ImportMicrosoftCppDesignTime)' != 'false'" Project="$(VCTargetsPathActual)\Microsoft.Cpp.DesignTime.targets" />
</Project>

View File

@ -1,55 +0,0 @@
from dataclasses import dataclass
import enum
from typing import TYPE_CHECKING
from SCons.Script import *
if TYPE_CHECKING:
    # Static-analysis-only subclass advertising the helper methods that S++
    # adds to the SCons Environment at runtime (via AddMethod).
    class SPPEnvironment(Environment):
        def Info(self, message: str): ...
        def Warn(self, message: str): ...
        def Error(self, message: str): ...
else:
    # At runtime SPPEnvironment is just a plain SCons Environment.
    SPPEnvironment = Environment
@dataclass
class Module:
    """Metadata describing one S++ module."""
    name: str
    folder: str
    description: str
    cxx_namespace: str
class TargetType(enum.Enum):
    """Kind of build artifact a Target produces."""
    PROGRAM = 0
    STATIC_LIBRARY = 1
    SHARED_LIBRARY = 2
    MISC = 3
class Target:
    """A single build target plus the builder invocation that produces it."""
    name: str
    target_type: TargetType
    # Scalar defaults may safely live on the class (rebinding creates an
    # instance attribute).
    builder = None
    target = None
    module: Module = None

    def __init__(self):
        # BUG FIX: these used to be class-level defaults (args: list = [],
        # kwargs: dict = {}, dependencies: list = []), so every Target shared
        # the same list/dict objects — appending to one target's args silently
        # modified all others.  Create fresh containers per instance.
        self.args: list = []
        self.kwargs: dict = {}
        self.dependencies: list = []
@dataclass(frozen=True)
class SPPInterface:
    """Read-only facade over the SConscript globals of the running build."""
    globals: dict

    @property
    def env(self) -> SPPEnvironment:
        # The shared build environment lives in the SConscript globals.
        environment = self.globals['env']
        return environment

    @property
    def targets(self) -> list[Target]:
        # All targets registered so far, stored on the environment.
        return self.env['SPP_TARGETS']


# Module-level singleton, populated exactly once by _init_interface().
_spp: SPPInterface


def _init_interface(**kwargs) -> None:
    """Create the global SPPInterface singleton."""
    global _spp
    _spp = SPPInterface(**kwargs)


def get_spp() -> SPPInterface:
    """Return the previously initialised SPPInterface singleton."""
    return _spp

31
recipes/Catch2/recipe.py Normal file
View File

@ -0,0 +1,31 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
    # Build Catch2 with CMake and hand back include paths plus libraries.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    # Debug builds use a 'd'-suffixed library name.
    lib_name = {
        'debug': 'Catch2d'
    }.get(env['BUILD_TYPE'], 'Catch2')
    libs = []
    if not env.get('CATCH2_OWN_MAIN'):
        # Unless the consumer supplies its own main(), also link Catch2's
        # default main() library.  Order matters: Catch2Main before Catch2.
        libs.append({
            'debug': 'Catch2Maind'
        }.get(env['BUILD_TYPE'], 'Catch2Main'))
    libs.append(lib_name)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib, paths=build_result['LIBPATH']) for lib in libs]
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'Catch2',
    repo_url = 'https://github.com/catchorg/Catch2.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@ -0,0 +1,31 @@
import re
from SCons.Script import *
_REPO_NAME = 'ImageMagick'
_REPO_URL = 'https://github.com/ImageMagick/ImageMagick.git'
# Release tags look like '7.1.1-29' -> 4-component version tuples.
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)-([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return available ImageMagick versions as (major, minor, patch, build)."""
    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
    result = []
    for tag in tags:
        match = _TAG_PATTERN.match(tag)
        if match:
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2]), int(match.groups()[3])))
    return result
def dependencies(env: Environment, version) -> 'dict':
    # No dependencies resolved yet (recipe incomplete, see cook()).
    return {}
def cook(env: Environment, version) -> dict:
    # Building ImageMagick is not implemented yet; fail loudly instead of
    # producing a broken result.  (Fixed error-message typo: 'property' ->
    # 'properly'.)
    raise Exception('this still needs to be implemented properly :/')
    # git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}-{version[3]}'
    # repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
    # checkout_root = repo['checkout_root']
    # build_result = env.AutotoolsProject(checkout_root)
    # return {
    #     'LIBPATH': build_result['LIBPATH'],
    #     'CPPPATH': build_result['CPPPATH'],
    #     'LIBS': ['backtrace']
    # }

37
recipes/SDL/recipe.py Normal file
View File

@ -0,0 +1,37 @@
import platform
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # Build SDL2 as a static library via CMake and return link information.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root, generate_args = ['-DSDL_STATIC=ON', '-DSDL_SHARED=OFF'])
    libs = []
    if platform.system() == 'Windows':
        # Windows builds use 'd'-suffixed debug libraries and need the Win32
        # system libraries SDL itself links against.
        if env['BUILD_TYPE'] == 'debug':
            libs.append('SDL2-staticd')
        else:
            libs.append('SDL2-static')
        libs.extend(('kernel32', 'user32', 'gdi32', 'winmm', 'imm32', 'ole32', 'oleaut32', 'version', 'uuid', 'advapi32', 'setupapi', 'shell32', 'dinput8'))
    else:
        if env['BUILD_TYPE'] == 'debug':
            libs.append('SDL2d')
        else:
            libs.append('SDL2')
    # NOTE(review): `os` is not imported in this file; presumably provided via
    # `from SCons.Script import *` — verify.
    return {
        'LIBPATH': build_result['LIBPATH'],
        'CPPPATH': [os.path.join(build_result['install_dir'], 'include/SDL2')], # SDL is really weird about include paths ...
        'LIBS': libs
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'SDL',
    repo_url = 'https://github.com/libsdl-org/SDL.git',
    tag_pattern = re.compile(r'^release-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'release-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@ -0,0 +1,45 @@
import re
from SCons.Script import *
# VULKANHEADERS_REMOTE selects between upstream Khronos and the mewin fork.
_REPO_NAMES = {
    'default': 'VulkanHeaders',
    'mewin': 'VulkanHeaders_mewin'
}
_REPO_URLS = {
    'default': 'https://github.com/KhronosGroup/Vulkan-Headers.git',
    'mewin': 'https://git.mewin.de/mewin/vulkan-headers.git'
}
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
    return _REPO_NAMES[env.get('VULKANHEADERS_REMOTE', 'default')]
def _get_repo_url(env: Environment) -> str:
    return _REPO_URLS[env.get('VULKANHEADERS_REMOTE', 'default')]
def versions(env: Environment, update: bool = False):
    # The mewin fork is tracked by branch only -> single pseudo-version.
    if env.get('VULKANHEADERS_REMOTE') == 'mewin':
        return [(0, 0, 0)]
    tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
    result = []
    for tag in tags:
        match = _TAG_PATTERN.match(tag)
        if match:
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
    return result
def dependencies(env: Environment, version) -> 'dict':
    # Header-only package: no dependencies.
    return {}
def cook(env: Environment, version) -> dict:
    # Headers only: check out the matching ref and expose its include dir.
    if env.get('VULKANHEADERS_REMOTE') == 'mewin':
        git_ref = 'main'
    else:
        git_ref = f'refs/tags/v{version[0]}.{version[1]}.{version[2]}'
    repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [os.path.join(checkout_root, 'include')]
    }

View File

@ -0,0 +1,18 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # argparse is header-only: just expose its include directory.
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [os.path.join(checkout_root, 'include')]
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'argparse',
    repo_url = 'https://github.com/p-ranav/argparse.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
    cook_fn = _git_cook
)

67
recipes/boost/recipe.py Normal file
View File

@ -0,0 +1,67 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://api.github.com/repos/boostorg/boost/releases'
_VERSION_PATTERN = re.compile(r'^boost-([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return released boost versions as (major, minor, patch) tuples,
    caching the GitHub release list in DOWNLOAD_DIR/boost_versions.json."""
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'boost_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except (OSError, ValueError):
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; only an unreadable or malformed
            # cache (json.JSONDecodeError is a ValueError) triggers a
            # re-download.
            print('boost_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    # Boost is built self-contained here: no recipe dependencies.
    return {}
def cook(env: Environment, version) -> dict:
    # Download the release archive, build it with CMake and link the libraries
    # selected via BOOST_LIBS ('*' links everything found in the lib dir).
    if env.get('BOOST_LIBS') is None:
        raise Exception('BOOST_LIBS not set. Set to a list of boost libs to link or "*" to link everything.')
    # From 1.85.0 on, upstream ships a dedicated CMake-enabled archive.
    if version >= (1, 85, 0):
        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}-cmake.tar.gz'
    else:
        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'boost_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.CMakeProject(checkout_root)
    libs = []
    if '*' in env['BOOST_LIBS']:
        # Link every file found in the first library directory.
        lib_dir = build_result['LIBPATH'][0]
        for lib_file in os.listdir(lib_dir):
            fname = os.path.join(lib_dir, lib_file)
            if not os.path.isfile(fname):
                continue
            libs.append(fname)
    else:
        for lib in set(env['BOOST_LIBS']):
            # Library naming differs per platform (libboost_*-... on Windows).
            if os.name == 'posix':
                libs.append(env.FindLib(f'boost_{lib}', paths=build_result['LIBPATH']))
            elif os.name == 'nt':
                libs.append(env.FindLib(f'libboost_{lib}-*', paths=build_result['LIBPATH'], use_glob=True))
            else:
                raise Exception('Boost not supported on this platform.')
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': libs
    }

19
recipes/cgltf/recipe.py Normal file
View File

@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
    # cgltf is a single-header library: expose the checkout root itself.
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [checkout_root]
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'cgltf',
    repo_url = 'https://github.com/jkuhlmann/cgltf.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
    cook_fn = _git_cook
)

39
recipes/curl/recipe.py Normal file
View File

@ -0,0 +1,39 @@
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
    # Library base name differs per OS and build type ('-d' suffix for debug).
    if os.name == 'posix':
        return {
            'debug': 'curl-d'
        }.get(env['BUILD_TYPE'], 'curl')
    elif os.name == 'nt':
        raise Exception('TODO')
    else:
        raise Exception('curl is not supported yet on this OS')
def _git_cook(env: Environment, repo: dict) -> dict:
    # Build a static, HTTP-only libcurl via CMake (no curl executable, no
    # libssh2); TLS/compression come from the openssl/zlib/psl dependencies.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root, generate_args=['-DBUILD_CURL_EXE=OFF','-DBUILD_SHARED_LIBS=OFF',
                                                                  '-DBUILD_STATIC_LIBS=ON', '-DHTTP_ONLY=ON',
                                                                  '-DCURL_USE_LIBSSH2=OFF'])
    lib_name = _build_lib_name(env)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])],
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'curl',
    repo_url = 'https://github.com/curl/curl.git',
    tag_pattern = re.compile(r'^curl-([0-9]+)_([0-9]+)_([0-9]+)$'),
    tag_fn = lambda version: f'curl-{version[0]}_{version[1]}_{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'openssl': {},
        'zlib': {},
        'psl': {}
    }
)

27
recipes/fmt/recipe.py Normal file
View File

@ -0,0 +1,27 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # Build fmt with CMake; debug builds produce the 'd'-suffixed library.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root)
    lib_name = {
        'debug': 'fmtd'
    }.get(env['BUILD_TYPE'], 'fmt')
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'fmt',
    repo_url = 'https://github.com/fmtlib/fmt.git',
    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

52
recipes/glm/recipe.py Normal file
View File

@ -0,0 +1,52 @@
import re
from SCons.Script import *
# GLM_REMOTE selects between the upstream repo and the mewin fork.
_REPO_NAMES = {
    'default': 'glm',
    'mewin': 'glm_mewin'
}
_REPO_URLS = {
    'default': 'https://github.com/g-truc/glm.git',
    'mewin': 'https://git.mewin.de/mewin/glm.git'
}
# Release tags are either 'X.Y.Z' or the legacy 4-component '0.X.Y.Z'.
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
_TAG_PATTERN_ALT = re.compile(r'^0\.([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
    return _REPO_NAMES[env.get('GLM_REMOTE', 'default')]
def _get_repo_url(env: Environment) -> str:
    return _REPO_URLS[env.get('GLM_REMOTE', 'default')]
def versions(env: Environment, update: bool = False):
    # The mewin fork is tracked by branch only -> single pseudo-version.
    if env.get('GLM_REMOTE') == 'mewin':
        return [(0, 0, 0)]
    tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
    result = []
    for tag in tags:
        match = _TAG_PATTERN.match(tag)
        if match:
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        else:
            match = _TAG_PATTERN_ALT.match(tag)
            if match:
                # Fold legacy 0.X.Y.Z into (0, X, Y*10+Z) so 3- and
                # 4-component versions compare consistently.
                result.append((0, int(match.groups()[0]), int(match.groups()[1]) * 10 + int(match.groups()[2])))
    return result
def dependencies(env: Environment, version) -> 'dict':
    # Header-only library: no dependencies.
    return {}
def cook(env: Environment, version) -> dict:
    # Reconstruct the git ref from the (possibly folded) version tuple; see
    # versions() for the 0.X.Y.Z encoding that is unfolded here.
    if env.get('GLM_REMOTE') == 'mewin':
        git_ref = 'master'
    elif version[0] == 0:
        git_ref = f'refs/tags/0.{version[1]}.{int(version[2]/10)}.{version[2]%10}'
    else:
        git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
    repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [checkout_root],
    }

111
recipes/glslang/recipe.py Normal file
View File

@ -0,0 +1,111 @@
import glob
import pathlib
import platform
import re
import shutil
from SCons.Script import *
# Stamp file marking that upstream's dependency-download script already ran.
_SCRIPT_STAMPFILE = '.spp_script_run'
def _git_cook(env: Environment, repo) -> dict:
    # Build glslang from source into a single static library 'glslang_full'
    # and assemble a matching include directory.
    checkout_root = repo['checkout_root']
    # TODO: windows?
    # Run update_glslang_sources.py once (or again when updating repos).
    # NOTE(review): `os` and `sys` are not imported explicitly in this file;
    # presumably they arrive via `from SCons.Script import *` — verify.
    did_run_script = os.path.exists(os.path.join(repo['checkout_root'], _SCRIPT_STAMPFILE))
    if not did_run_script or env['UPDATE_REPOSITORIES']:
        python_exe = os.path.realpath(sys.executable)
        script_file = os.path.join(repo['checkout_root'], 'update_glslang_sources.py')
        prev_cwd = os.getcwd()
        os.chdir(repo['checkout_root'])
        if env.Execute(f'"{python_exe}" {script_file}'):
            env.Exit(1)
        os.chdir(prev_cwd)
        pathlib.Path(repo['checkout_root'], _SCRIPT_STAMPFILE).touch()
    # generate the build_info.h
    generator_script = os.path.join(repo['checkout_root'], 'build_info.py')
    generator_script_input = os.path.join(repo['checkout_root'], 'build_info.h.tmpl')
    generator_script_output = os.path.join(repo['checkout_root'], 'glslang/build_info.h')
    env.Command(
        target = generator_script_output,
        source = [generator_script, generator_script_input, os.path.join(repo['checkout_root'], 'CHANGES.md')],
        action = f'"$PYTHON" "{generator_script}" "{repo["checkout_root"]}" -i "{generator_script_input}" -o "$TARGET"'
    )
    # Pick the OS-dependent source directory (non-Windows uses 'Unix').
    platform_source_dir = {
        'Linux': 'Unix',
        'Windows': 'Windows',
        'Darwin': 'Unix'
    }.get(platform.system(), 'Unix')
    glslang_source_files = env.RGlob(os.path.join(repo['checkout_root'], 'glslang/GenericCodeGen/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/MachineIndependent/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/OGLCompilersDLL/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/ResourceLimits/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'SPIRV/'), '*.cpp') \
        + [os.path.join(repo['checkout_root'], f'glslang/OSDependent/{platform_source_dir}/ossource.cpp')]
    # disable warnings
    additional_cxx_flags = {
        'clang': ['-w'],
        'gcc': ['-w'],
        'cl': ['/w']
    }.get(env['COMPILER_FAMILY'], [])
    env.StaticLibrary(
        CCFLAGS = env['CCFLAGS'] + additional_cxx_flags,
        CPPPATH = repo['checkout_root'],
        target = env['LIB_DIR'] + '/glslang_full',
        source = glslang_source_files
    )
    # build the include folder
    include_dir = os.path.join(checkout_root, 'include')
    if not os.path.exists(include_dir) or env['UPDATE_REPOSITORIES']:
        def copy_headers(dst, src):
            # Copy only the top-level *.h files of src into dst.
            os.makedirs(dst, exist_ok=True)
            for file in glob.glob(os.path.join(src, '*.h')):
                shutil.copy(file, dst)
        copy_headers(os.path.join(include_dir, 'glslang/HLSL'), os.path.join(checkout_root, 'glslang/HLSL'))
        copy_headers(os.path.join(include_dir, 'glslang/Include'), os.path.join(checkout_root, 'glslang/Include'))
        copy_headers(os.path.join(include_dir, 'glslang/MachineIndependent'), os.path.join(checkout_root, 'glslang/MachineIndependent'))
        copy_headers(os.path.join(include_dir, 'glslang/Public'), os.path.join(checkout_root, 'glslang/Public'))
        copy_headers(os.path.join(include_dir, 'glslang/SPIRV'), os.path.join(checkout_root, 'SPIRV'))
    return {
        'CPPPATH': [include_dir],
        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('glslang_full'))]
    }
# GLSLANG_REMOTE selects between upstream Khronos and the mewin fork (which
# has no release tags and is built from 'master').
_REPO_NAMES = {
    'default': 'glslang',
    'mewin': 'glslang_mewin'
}
_REPO_URLS = {
    'default': 'https://github.com/KhronosGroup/glslang.git',
    'mewin': 'https://git.mewin.de/mewin/glslang.git'
}
_TAG_PATTERNS = {
    'default': re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    'mewin': None
}
def _ref_fn(env: Environment, version) -> str:
    # Map a version tuple to the git ref to build for the selected remote.
    remote = env.get('GLSLANG_REMOTE', 'default')
    if remote == 'default':
        return f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
    elif remote == 'mewin':
        return 'master'
    else:
        raise Exception('invalid glslang remote')
env.GitRecipe(
    globals = globals(),
    repo_name = lambda env: _REPO_NAMES[env.get('GLSLANG_REMOTE', 'default')],
    repo_url = lambda env: _REPO_URLS[env.get('GLSLANG_REMOTE', 'default')],
    tag_pattern = lambda env: _TAG_PATTERNS[env.get('GLSLANG_REMOTE', 'default')],
    cook_fn = _git_cook,
    ref_fn = _ref_fn
)

48
recipes/idn2/recipe.py Normal file
View File

@ -0,0 +1,48 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://gitlab.com/api/v4/projects/2882658/releases'
_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return released libidn2 versions as (major, minor, patch) tuples,
    caching the GitLab release list in DOWNLOAD_DIR/libidn2_versions.json."""
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libidn2_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except (OSError, ValueError):
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; only an unreadable or malformed
            # cache (json.JSONDecodeError is a ValueError) triggers a
            # re-download.
            print('libidn2_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    # libidn2 links against libunistring.
    return {
        'unistring': {}
    }
def cook(env: Environment, version) -> dict:
    """Download the release tarball, build it with autotools and return the
    include path plus the library to link."""
    url = f'https://ftp.gnu.org/gnu/libidn/libidn2-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libidn2_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('idn2', paths=build_result['LIBPATH'])]
    }

38
recipes/imgui/recipe.py Normal file
View File

@ -0,0 +1,38 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # Compile Dear ImGui core sources (plus the backends requested via
    # IMGUI_BACKENDS) into a static 'imgui' library.
    imgui_source_files = [
        os.path.join(repo['checkout_root'], 'imgui.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_draw.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_tables.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_widgets.cpp')
    ]
    imgui_add_sources = []
    for backend in env.get('IMGUI_BACKENDS', []):
        imgui_add_sources.append(f'backends/imgui_impl_{backend}.cpp')
    env.StaticLibrary(
        CPPPATH = [repo['checkout_root']],
        CPPDEFINES = ['IMGUI_IMPL_VULKAN_NO_PROTOTYPES=1'],
        target = env['LIB_DIR'] + '/imgui',
        source = imgui_source_files,
        add_source = imgui_add_sources
    )
    return {
        'CPPPATH': [repo['checkout_root']],
        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('imgui'))]
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'imgui',
    repo_url = 'https://github.com/ocornut/imgui.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

20
recipes/iwa/recipe.py Normal file
View File

@ -0,0 +1,20 @@
import json
from SCons.Script import *
_REPO_NAME = 'iwa'
_REPO_URL = 'https://git.mewin.de/mewin/iwa.git'
def versions(env: Environment, update: bool = False):
    # Built from the master branch only -> single pseudo-version.
    return [(0, 0, 0)]
def dependencies(env: Environment, version) -> 'dict':
    # Dependencies are declared in the repository's own dependencies.json.
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    # iwa ships its own SModule build description; delegate to env.Module.
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return env.Module(os.path.join(checkout_root, 'SModule'))

22
recipes/json/recipe.py Normal file
View File

@ -0,0 +1,22 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # nlohmann/json is header-only; the CMake step is only used to obtain the
    # configured include paths.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH']
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'json',
    repo_url = 'https://github.com/nlohmann/json.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@ -0,0 +1,21 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    # Built from the master branch only -> single pseudo-version.
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    return {}
def cook(env: Environment, version) -> dict:
    # libbacktrace builds with autotools and only supports gcc/clang.
    # NOTE(review): execution continues after env.Error(); presumably Error()
    # aborts the build — verify.
    if env['COMPILER_FAMILY'] not in ('gcc', 'clang'):
        env.Error('libbacktrace requires gcc or clang.')
    repo = env.GitBranch(repo_name = 'libbacktrace', remote_url = 'https://github.com/ianlancetaylor/libbacktrace.git', git_ref = 'master')
    checkout_root = repo['checkout_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'LIBPATH': build_result['LIBPATH'],
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': ['backtrace']
    }

View File

@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    # Build libjpeg-turbo via CMake and link the 'jpeg' library.
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('jpeg', paths=build_result['LIBPATH'])],
    }
env.GitRecipe(
    globals = globals(),
    repo_name = 'libjpeg-turbo',
    repo_url = 'https://github.com/libjpeg-turbo/libjpeg-turbo.git',
    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

39
recipes/libpng/recipe.py Normal file
View File

@ -0,0 +1,39 @@
import os
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
if os.name == 'posix':
return {
'debug': 'png16d'
}.get(env['BUILD_TYPE'], 'png16')
elif os.name == 'nt':
return {
'debug': 'libpng16_staticd'
}.get(env['BUILD_TYPE'], 'libpng16_static')
else:
raise Exception('libpng is not supported yet on this OS')
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build libpng via CMake against the separately cooked zlib recipe."""
    lib_zlib = env.Cook('zlib')
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root, dependencies = [lib_zlib])
    lib_name = _build_lib_name(env)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
    }
# Register the libpng recipe: versions come from v<maj>.<min>.<patch> tags,
# and cooking requires the zlib recipe to be cooked first.
env.GitRecipe(
    globals = globals(),
    repo_name = 'libpng',
    repo_url = 'https://git.code.sf.net/p/libpng/code.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'zlib': {}
    }
)

View File

@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Expose magic_enum (header-only) by adding its include dir to CPPPATH."""
    # This module does not `import os` at the top; the name only resolves via
    # `from SCons.Script import *` re-export leakage. Import it explicitly so
    # the recipe keeps working if SCons stops exposing it.
    import os
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [os.path.join(checkout_root, 'include')]
    }
# Register the magic_enum recipe; versions come from v<maj>.<min>.<patch> tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'magic_enum',
    repo_url = 'https://github.com/Neargye/magic_enum.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

21
recipes/mecab/recipe.py Normal file
View File

@ -0,0 +1,21 @@
from SCons.Script import *
import os
def versions(env: Environment, update: bool = False):
    """mecab tracks its master branch; report one fixed pseudo-version."""
    available = [(1, 0)]
    return available
def dependencies(env: Environment, version) -> 'dict':
    """mecab depends on no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build mecab from master via autotools."""
    repo = env.GitBranch(repo_name = 'mecab', remote_url = 'https://github.com/taku910/mecab.git', git_ref = 'master')
    checkout_root = repo['checkout_root']
    # the repository keeps the library sources in a 'mecab' subdirectory
    build_result = env.AutotoolsProject(os.path.join(checkout_root, 'mecab'))
    return {
        'LIBPATH': build_result['LIBPATH'],
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': ['mecab']
    }

20
recipes/mijin/recipe.py Normal file
View File

@ -0,0 +1,20 @@
import json
from SCons.Script import *
_REPO_NAME = 'mijin'
_REPO_URL = 'https://git.mewin.de/mewin/mijin2.git'
def versions(env: Environment, update: bool = False):
    """mijin tracks its master branch; report a single placeholder version."""
    placeholder = (0, 0, 0)
    return [placeholder]
def dependencies(env: Environment, version) -> 'dict':
    """Read mijin's dependency set from dependencies.json in its checkout."""
    # `os` is not imported at module level here; it only resolves through
    # `from SCons.Script import *` re-export leakage. Import it explicitly.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    """Build mijin as an S++ module from its master checkout."""
    # `os` only resolves via `from SCons.Script import *` leakage in this
    # module; import it explicitly to avoid a latent NameError.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return env.Module(os.path.join(checkout_root, 'SModule'))

View File

@ -0,0 +1,27 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    """MikkTSpace tracks its master branch; report one fixed pseudo-version."""
    supported = [(1, 0)]
    return supported
def dependencies(env: Environment, version) -> 'dict':
    """MikkTSpace depends on no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Compile MikkTSpace's single C source file into a static library."""
    repo = env.GitBranch(repo_name = 'mikktspace', remote_url = 'https://github.com/mmikk/MikkTSpace.git', git_ref = 'master')
    checkout_root = repo['checkout_root']
    ccflags = env['CCFLAGS'].copy()  # copy so the shared env flags stay untouched
    if env['COMPILER_FAMILY'] == 'cl':
        # MSVC: disable warning 4456 triggered by the upstream sources
        # (presumably local-declaration shadowing -- TODO confirm)
        ccflags.append('/wd4456')
    lib_mikktspace = env.StaticLibrary(
        CCFLAGS = ccflags,
        CPPPATH = [checkout_root],
        target = env['LIB_DIR'] + '/mikktspace',
        source = [os.path.join(repo['checkout_root'], 'mikktspace.c')]
    )
    return {
        'CPPPATH': [checkout_root],
        'LIBS': [lib_mikktspace]
    }

21
recipes/openssl/recipe.py Normal file
View File

@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build a static OpenSSL (no shared libs, tests or docs)."""
    checkout_root = repo['checkout_root']
    # OpenSSL ships a 'Configure' script instead of the usual './configure'
    build_result = env.AutotoolsProject(checkout_root, config_args = ['no-shared', 'no-tests', 'no-docs'], configure_script_path='Configure')
    return {
        'CPPPATH': build_result['CPPPATH'],
        # link both libssl and libcrypto
        'LIBS': [env.FindLib(libname, paths=build_result['LIBPATH']) for libname in ('ssl', 'crypto')]
    }
# Register the OpenSSL recipe; versions come from openssl-x.y.z git tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'openssl',
    repo_url = 'https://github.com/openssl/openssl.git',
    tag_pattern = re.compile(r'^openssl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'openssl-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

70
recipes/psl/recipe.py Normal file
View File

@ -0,0 +1,70 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://api.github.com/repos/rockdaboot/libpsl/releases'
_VERSION_PATTERN = re.compile(r'^Release v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return known libpsl versions as (major, minor, patch) tuples.

    Queries the GitHub releases API and caches the result on disk; pass
    update=True to force a re-download of the version list.
    """
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libpsl_versions.json')
    if update or not os.path.exists(versions_file):
        # a timeout keeps the build from hanging forever on a stuck connection
        req = requests.get(_VERSIONS_URL, timeout=30)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except Exception:
            # was a bare `except:`, which also swallows KeyboardInterrupt and
            # SystemExit; `Exception` keeps the best-effort cache behaviour
            print('libpsl_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libpsl requires the idn2 and unistring recipes."""
    deps = {}
    for name in ('idn2', 'unistring'):
        deps[name] = {}
    return deps
def cook(env: Environment, version) -> dict:
    """Download the libpsl release tarball and build it via autotools."""
    url = f'https://github.com/rockdaboot/libpsl/releases/download/{version[0]}.{version[1]}.{version[2]}/libpsl-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    # skip_folders=1 drops the top-level libpsl-x.y.z/ dir from the archive
    repo = env.DownloadAndExtract(f'libpsl_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
    }
#def _git_cook(env: Environment, repo: dict) -> dict:
# checkout_root = repo['checkout_root']
# subprocess.run((os.path.join(checkout_root, 'autogen.sh'),), cwd=checkout_root)
# build_result = env.AutotoolsProject(checkout_root)
# return {
# 'CPPPATH': build_result['CPPPATH'],
# 'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
# }
#
#env.GitRecipe(
# globals = globals(),
# repo_name = 'psl',
# repo_url = 'https://github.com/rockdaboot/libpsl.git',
# tag_pattern = re.compile(r'^libpsl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
# tag_fn = lambda version: f'libpsl-{version[0]}.{version[1]}.{version[2]}',
# cook_fn = _git_cook,
# dependencies = {
# 'idn2': {},
# 'unistring': {}
# }
#)

34
recipes/spdlog/recipe.py Normal file
View File

@ -0,0 +1,34 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build spdlog via CMake against the separately cooked fmt recipe."""
    lib_fmt = env.Cook('fmt')
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root, dependencies=[lib_fmt])
    # debug builds produce a 'd'-suffixed library name
    lib_name = {
        'debug': 'spdlogd'
    }.get(env['BUILD_TYPE'], 'spdlog')
    # consumers must compile against the compiled-lib + external-fmt setup
    cppdefines = ['SPDLOG_COMPILE_LIB=1', 'SPDLOG_FMT_EXTERNAL=1']
    return {
        'CPPPATH': build_result['CPPPATH'],
        'CPPDEFINES': cppdefines,
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
    }
# Register the spdlog recipe: versions come from v<maj>.<min>.<patch> tags,
# and cooking requires the fmt recipe to be cooked first.
env.GitRecipe(
    globals = globals(),
    repo_name = 'spdlog',
    repo_url = 'https://github.com/gabime/spdlog.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'fmt': {}
    }
)

19
recipes/stb/recipe.py Normal file
View File

@ -0,0 +1,19 @@
from SCons.Script import *
_REPO_NAME = 'stb'
_REPO_URL = 'https://github.com/nothings/stb.git'
def versions(env: Environment, update: bool = False):
    """stb tracks its master branch; report a single placeholder version."""
    placeholder = (0, 0, 0)
    return [placeholder]
def dependencies(env: Environment, version) -> 'dict':
    """stb is self-contained; no recipe dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """stb is header-only: expose the checkout root on CPPPATH."""
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [checkout_root]
    }

View File

@ -0,0 +1,42 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://ftp.gnu.org/gnu/libunistring/?F=0'
_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')
def versions(env: Environment, update: bool = False):
    """Return known libunistring versions as (major, minor, patch) tuples.

    Scrapes the GNU FTP directory listing and caches the result on disk;
    pass update=True to force a refresh.
    """
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libunistring_versions.json')
    if update or not os.path.exists(versions_file):
        # a timeout keeps the build from hanging forever on a stuck connection
        req = requests.get(_VERSIONS_URL, timeout=30)
        result = []
        for match in _VERSION_PATTERN.finditer(req.text):
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except Exception:
            # was a bare `except:`, which also swallows KeyboardInterrupt and
            # SystemExit; `Exception` keeps the best-effort cache behaviour
            print('libunistring_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libunistring depends on no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Download the libunistring release tarball and build it via autotools."""
    url = f'https://ftp.gnu.org/gnu/libunistring/libunistring-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    # skip_folders=1 drops the top-level libunistring-x.y.z/ dir
    repo = env.DownloadAndExtract(f'libunistring_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('unistring', paths=build_result['LIBPATH'])]
    }

View File

@ -0,0 +1,23 @@
import os
from SCons.Script import *
def available(env: Environment):
    """Return an error string when not on Windows, None when usable."""
    if os.name == 'nt':
        return None
    return 'Winsock2 is only available on Windows.'
def versions(env: Environment, update: bool = False):
    """One pseudo-version on Windows, no versions anywhere else."""
    return [(0, 0, 0)] if os.name == 'nt' else []
def dependencies(env: Environment, version) -> 'dict':
    """Winsock2 depends on no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Winsock2 ships with Windows; just link against Ws2_32."""
    result = {'LIBS': ['Ws2_32']}
    return result

View File

@ -0,0 +1,26 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build yaml-cpp from the checked-out tag via CMake."""
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    # debug builds produce a 'd'-suffixed library name
    lib_name = {
        'debug': 'yaml-cppd'
    }.get(env['BUILD_TYPE'], 'yaml-cpp')
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
    }
# Register the yaml-cpp recipe; versions come from yaml-cpp-x.y.z git tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'yaml-cpp',
    repo_url = 'https://github.com/jbeder/yaml-cpp.git',
    tag_pattern = re.compile(r'^yaml-cpp-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'yaml-cpp-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

49
recipes/zlib/recipe.py Normal file
View File

@ -0,0 +1,49 @@
import os
import re
from SCons.Script import *
_REPO_NAME = 'zlib'
_REPO_URL = 'https://github.com/madler/zlib.git'
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)(?:\.([0-9]+))?$')
def _build_lib_name(env: Environment) -> str:
    """Return the platform- and build-type-specific zlib library name."""
    if os.name == 'posix':
        return 'z'
    elif os.name == 'nt':
        # only debug builds get the 'd'-suffixed static library
        return {
            'debug': 'zlibstaticd'
        }.get(env['BUILD_TYPE'], 'zlibstatic')
    else:
        # message previously said 'libpng' -- copy-paste from the libpng recipe
        raise Exception('zlib is not supported yet on this OS')
def versions(env: Environment, update: bool = False):
    """List zlib versions from git tags as (major, minor, patch) tuples."""
    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
    result = []
    for tag in tags:
        match = _TAG_PATTERN.match(tag)
        if match:
            # the patch group is optional (e.g. v1.2 -> (1, 2, 0))
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2] or 0)))
    return result
def dependencies(env: Environment, version) -> 'dict':
    """zlib depends on no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build zlib from the matching git tag via CMake.

    Besides CPPPATH/LIBS this also returns CMAKE_VARS so dependent CMake
    builds (e.g. libpng) locate this zlib instead of a system one.
    """
    # zlib tags omit the patch component when it is zero (e.g. v1.2)
    git_ref = f'refs/tags/v{version[0]}.{version[1]}'
    if version[2] != 0:
        git_ref = git_ref + f'.{version[2]}'
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    include_dir = os.path.join(build_result['install_dir'], 'include')
    lib_name = _build_lib_name(env)
    lib_file = env.FindLib(lib_name, paths=build_result['LIBPATH'])
    return {
        'CPPPATH': [include_dir],
        'LIBS': [lib_file],
        'CMAKE_VARS': {
            'ZLIB_LIBRARY': lib_file,
            'ZLIB_INCLUDE_DIR': include_dir
        }
    }

View File

@ -1,6 +1,2 @@
GitPython~=3.1.45 GitPython
psutil~=7.0.0 psutil
Jinja2
requests
SCons~=4.9.1
cxxheaderparser~=1.5.4

BIN
test/.sconsign.dblite Normal file

Binary file not shown.

6
test/SConstruct Normal file
View File

@ -0,0 +1,6 @@
# Minimal dummy project used to exercise the S++ build scripts.
config = {
    'PROJECT_NAME': 'DUMMY'
}
# Hand the configuration to the top-level S++ SConscript, which returns the
# configured construction environment.
env = SConscript('../SConscript', exports = ['config'])

View File

@ -1,86 +0,0 @@
# Generated Files
*.refl.hpp
*.generated.*
private/**/*.json
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

View File

@ -1,15 +0,0 @@
# Build configuration for the S++ codegen test project.
config = {
    'PROJECT_NAME': 'S++ Codegen Test',
    'SPP_TARGET_VERSION': (1, 1, 0)
}
env = SConscript('../../SConscript', exports = ['config'])
# recipe repo
env.RecipeRepo('mewin', 'https://git.mewin.de/mewin/spp_recipes.git', 'stable')
# app
env = env.Module('private/test/SModule')
env.Finalize()

View File

@ -1,50 +0,0 @@
Import('env')
# Module metadata used by S++ for naming and the C++ namespace.
env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)
src_files = Split("""
main.cpp
test.cpp
test.generated.cpp
""")
# env.IncludeGen(src_files,
#    template=env.File('#templates/header.jinja'),
#    include_filter=r'.*\.refl.hpp'
# )
# env.CodeGen('GenSource', inputs = [], template=env.File('#templates/source.jinja'), )
# env.CodeGen(
#     target = 'test.generated.cpp',
#     template = env.File('#templates/source.jinja'),
#     inputs = {'source': 'test.cpp'}
# )
# Dump the parsed AST of test.hpp to JSON.
ast_json = env.AstJson(
    target = env.File('test.json'),
    source = 'test.hpp'
)
env.Default(ast_json)
# Generate the reflection header from test.hpp via the Jinja template.
ast_hpp = env.AstJinja(
    target = env.File('test.refl.hpp'),
    source = env.File('test.hpp'),
    template = env.File('#templates/header.jinja')
)
prog_app = env.Program(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
    }
)
# Make sure the reflection header is generated before compiling the program.
env.Requires(prog_app.target, ast_hpp)
env.Default(prog_app)
Return('env')

View File

@ -1,8 +0,0 @@
#include "./test.hpp"
// Entry point: call into the test module once and exit successfully.
int main(int, char**)
{
tst::printHelloWorld(100);
return 0;
}

View File

@ -1,12 +0,0 @@
#include "./test.hpp"
#include <print>
namespace tst
{
// Prints a greeting including the given parameter via std::println.
void printHelloWorld(int param) noexcept
{
std::println("Hello World! Param is {}.", param);
}
}

View File

@ -1,33 +0,0 @@
#pragma once
#include <vector>
// Pull in the generated reflection header once codegen has produced it.
#if __has_include("test.refl.hpp")
#include "test.refl.hpp"
#endif
namespace tst
{
// Constant referenced from the reflection annotation below.
static constexpr int kAnnotVal = 17;
// Sample class used to exercise the AST/reflection code generator.
class MyClass
{
private:
std::vector<int> mInts;
public:
MyClass();
#if defined(__clang__)
[[clang::annotate("reflect", "yes, please", kAnnotVal)]]
#endif
int getVal();
void setVal(int val);
static constexpr int kVal = 1;
};
}
namespace tst
{
void printHelloWorld(int param) noexcept;
}

View File

@ -1,11 +0,0 @@
#if !defined(SPP_AST_GEN)
{% for class in ast.get_namespace('tst').classes %}
/*
{{ class.name }}
{% for method in class.methods %}
{{ method.return_type }} {{ method.name }} ({% for param in method.params %} {{ param.type }} {{ param.name }} {% endfor %})
{% endfor %}
{% endfor %}
*/
#endif

View File

@ -1,3 +0,0 @@
{% for cls in source.namespace.classes %}
// {{ cls.class_decl.typename.format() }}
{% endfor %}

View File

@ -1,81 +0,0 @@
# Project
/.idea/
/.vs/
/.vscode/
/vs_project_files/
*.sln
# Executables
/bin
/bin_*
# Libraries
/lib
/lib_*
# Vulkan API dumps
/api_dump*
# Compile commands
compile_commands.json
# whatever this is
.cache
# ImGui config
/imgui.ini
# Environment setup
/.env
# Build Configuration
/config.py
/config_*.py
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
*.lib
# Executables
*.exe
*.out
*.app
# Debug Info
*.pdb
# for projects that use SCons for building: http://www.scons.org/
.sconsign.dblite
/.sconf_temp
/config.log
# Byte-compiled / optimized python files
__pycache__/
*.py[cod]
# Backup files
*.bak

View File

@ -1,10 +0,0 @@
# Build configuration for the S++ 1.0.0 compatibility test project.
config = {
    'PROJECT_NAME': 'S++ 1.0.0 Test'
}
env = SConscript('../../SConscript', exports = ['config'])
env = env.Module('private/test/SModule')
env.Finalize()

View File

@ -1,25 +0,0 @@
Import('env')
# Module metadata used by S++ for naming and the C++ namespace.
env.ModuleConfig(
    name = 'Test',
    description = 'Test Module',
    cxx_namespace = 'tst'
)
src_files = Split("""
main.cpp
test.cpp
""")
# Build the test program as a unity build, pulling in the mijin dependency.
prog_app = env.UnityProgram(
    name = 'Test',
    target = env['BIN_DIR'] + '/test',
    source = src_files,
    dependencies = {
        'mijin': {}
    }
)
env.Default(prog_app)
Return('env')

View File

@ -1,8 +0,0 @@
#include "./test.hpp"
// Entry point: call into the test module once and exit successfully.
int main(int, char**)
{
tst::printHelloWorld(100);
return 0;
}

View File

@ -1,15 +0,0 @@
#include "./test.hpp"
#include <mijin/debug/assert.hpp>
#include <print>
namespace tst
{
// Prints a greeting; asserts via mijin that param is positive first.
void printHelloWorld(int param) noexcept
{
MIJIN_ASSERT(param > 0, "param is not >0 :(");
std::println("Hello World! Param is {}.", param);
}
}

View File

@ -1,7 +0,0 @@
#pragma once
// Public interface of the test module.
namespace tst
{
void printHelloWorld(int param) noexcept;
}

View File

@ -1,133 +0,0 @@
import os
import math
from SCons.Script import *
from SCons.Node.FS import File
from SCons import Action
"""
Scons Unity Build Generator
Provides several generators for SCons to combine multiple source files into a bigger
one to reduce compilation time, so called "unity builds". This is achieved by generating
unity source files which in term include the actual source files and compile them using
one of the existing SCons builders.
Usage
-----
In order to use this, just place it inside your `site_scons/site_tools` folder, enable it by
adding "unity_build" to the tools when constructing your Environment and replace invocations
of the Program/Library/SharedLibrary/StaticLibrary builders with their Unity... counterpart:
env = Environment(tools = ['default', 'unity_build'])
source_files = ...
env.UnityProgram(
target = 'my_program',
source = source_files,
...
)
The tool will generate an amount of unity source files and invoke the Program builder on these,
forwarding any other arguments you passed.
Other Options
------------
You can control the behaviour of the builder using several Environment options:
env['UNITY_CACHE_DIR'] = '.unity' # Directory where the unity sources are stored.
# can be either a string or a Dir() node.
env['UNITY_MAX_SOURCES'] = 15 # Maximum number of source files per unity file.
env['UNITY_MIN_FILES'] = env.GetOption('num_jobs')
# Minimum number of unity files to generate (if possible).
# Defaults to the number of jobs passed to SCons.
env['UNITY_DISABLE'] = False # Set to True to completely disable unity builds. The commands
# will simply pass through their options to the regular builders.
Additionally any generator can be passed a `cache_dir` to overwrite the value from the Environment.
"""
def exists(env : Environment):
    """SCons tool hook: this tool can always be loaded."""
    usable = True
    return usable
def generate(env : Environment):
    """SCons tool entry point: install the Unity* builder wrappers on env."""
    env.AddMethod(_make_generator(env.Program), 'UnityProgram')
    env.AddMethod(_make_generator(env.Library), 'UnityLibrary')
    env.AddMethod(_make_generator(env.StaticLibrary), 'UnityStaticLibrary')
    env.AddMethod(_make_generator(env.SharedLibrary), 'UnitySharedLibrary')
    # build for generating the unity source files
    unity_source_builder = env.Builder(
        action = Action.Action(_generate_unity_file, _generate_unity_file_msg)
    )
    env.Append(BUILDERS = {'UnitySource': unity_source_builder})
    # defaults; a project can override these after loading the tool
    env.SetDefault(UNITY_CACHE_DIR = '.unity')
    env.SetDefault(UNITY_MAX_SOURCES = 15)
    env.SetDefault(UNITY_MIN_FILES = env.GetOption('num_jobs'))
    env.SetDefault(UNITY_DISABLE = False)
def _make_generator(base_generator):
def generator(env, source, target, cache_dir = None, *args, **kwargs):
if env['UNITY_DISABLE']:
return base_generator(target = target, source = source, *args, **kwargs)
unity_source_files = []
source_files, other_nodes = _flatten_source(source)
max_sources_per_file = max(1, math.ceil(len(source_files) / env['UNITY_MIN_FILES']))
sources_per_file = min(max_sources_per_file, env['UNITY_MAX_SOURCES'])
num_unity_files = math.ceil(len(source_files) / sources_per_file)
if not cache_dir:
cache_dir = env['UNITY_CACHE_DIR']
if not isinstance(cache_dir, str):
cache_dir = cache_dir.abspath
os.makedirs(cache_dir, exist_ok=True)
target_base_name = os.path.basename(target)
for idx in range(num_unity_files):
unity_filename = f'{cache_dir}/{target_base_name}_{idx}.cpp'
unity_source_files.append(unity_filename)
begin = sources_per_file*idx
end = sources_per_file*(idx+1)
env.UnitySource(
target = unity_filename,
source = source_files[begin:end]
)
if len(other_nodes) > 0:
print(f'Exluded {len(other_nodes)} node(s) from Unity build.')
return [base_generator(target = target, source = unity_source_files + other_nodes, *args, **kwargs)]
return generator
def _flatten_source(source : list):
    """Split a (possibly nested) source list into file paths and other nodes.

    Returns (source_files, other_nodes): File nodes become absolute path
    strings, plain strings are kept verbatim, everything else is passed
    through untouched in other_nodes.
    """
    source_files, other_nodes = [], []
    for entry in source:
        if isinstance(entry, list):
            nested_files, nested_other = _flatten_source(entry)
            source_files += nested_files
            other_nodes += nested_other
        elif isinstance(entry, File):
            source_files.append(entry.abspath)
        elif isinstance(entry, str):
            source_files.append(entry)
        else:
            other_nodes.append(entry)
    return source_files, other_nodes
def _generate_unity_file_msg(target, source, env : Environment):
    """Console message shown while a unity source file is generated."""
    assert len(target) == 1
    return f'Generating {str(target[0])} from {len(source)} source files.'
def _generate_unity_file(target, source, env : Environment):
    """SCons action: write a unity source that #includes each real source."""
    assert(len(target) == 1)
    unity_filename = target[0].abspath
    with open(unity_filename, 'w') as f:
        for source_file in source:
            # escape backslashes so Windows paths survive inside the literal
            fpath = source_file.abspath.replace("\\", "\\\\")
            f.write(f'#include "{fpath}"\n')

View File

View File

@ -1,8 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -1,35 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CLionExternalBuildManager">
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<target id="{{ generate_uuid('target_' + executable.name + '_' + build_type) }}" name="{{ executable.name }} {{ build_type_name }}" defaultType="TOOL">
<configuration id="{{ generate_uuid('configuration_' + executable.name + '_' + build_type) }}" name="{{ executable.name }} {{ build_type_name }}">
<build type="TOOL">
<tool actionId="Tool_External Tools_{{ executable.name }} {{ build_type_name }}" />
</build>
<clean type="TOOL">
<tool actionId="Tool_External Tools_{{ executable.name }} {{ build_type_name }} Clean" />
</clean>
</configuration>
</target>
{% endfor %}
{% endfor %}
{% for library in project.libraries %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<target id="{{ generate_uuid('target_' + library.name + '_' + build_type) }}" name="{{ library.name }} {{ build_type_name }}" defaultType="TOOL">
<configuration id="{{ generate_uuid('configuration_' + library.name + '_' + build_type) }}" name="{{ library.name }} {{ build_type_name }}">
<build type="TOOL">
<tool actionId="Tool_External Tools_{{ library.name }} {{ build_type_name }}" />
</build>
<clean type="TOOL">
<tool actionId="Tool_External Tools_{{ library.name }} {{ build_type_name }} Clean" />
</clean>
</configuration>
</target>
{% endfor %}
{% endfor %}
</component>
</project>

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompDBSettings">
<option name="linkedExternalProjectsSettings">
<CompDBProjectSettings>
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
</CompDBProjectSettings>
</option>
</component>
<component name="CompDBWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
<component name="ExternalStorageConfigurationManager" enabled="true" />
</project>

View File

@ -1,40 +0,0 @@
<toolSet name="External Tools">
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<tool name="{{ executable.name }} {{ build_type_name }}" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="-j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) }} compile_commands.json" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
<tool name="{{ executable.name }} {{ build_type_name }} Clean" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="--build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) }} -c" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
{% endfor %}
{% endfor %}
{% for library in project.libraries %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<tool name="{{ library.name }} {{ build_type_name }}" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="-j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) }} compile_commands.json" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
<tool name="{{ library.name }} {{ build_type_name }} Clean" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="--build_type={{ build_type }} --unity=disable {{ library.filename(build_type) }} -c" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
{% endfor %}
{% endfor %}
</toolSet>

View File

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

View File

@ -1,109 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AutoImportSettings">
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="CMakeRunConfigurationManager">
<generated />
</component>
<component name="CMakeSettings">
<configurations>
<configuration PROFILE_NAME="Debug" ENABLED="true" CONFIG_NAME="Debug" />
</configurations>
</component>
<component name="ClangdSettings">
<option name="formatViaClangd" value="false" />
</component>
<component name="CompDBLocalSettings">
<option name="availableProjects">
<map>
<entry>
<key>
<ExternalProjectPojo>
<option name="name" value="{{ project.name }}" />
<option name="path" value="$PROJECT_DIR$" />
</ExternalProjectPojo>
</key>
<value>
<list>
<ExternalProjectPojo>
<option name="name" value="{{ project.name }}" />
<option name="path" value="$PROJECT_DIR$" />
</ExternalProjectPojo>
</list>
</value>
</entry>
</map>
</option>
<option name="projectSyncType">
<map>
<entry key="$PROJECT_DIR$" value="RE_IMPORT" />
</map>
</option>
</component>
<component name="ExternalProjectsData">
<projectState path="$PROJECT_DIR$">
<ProjectState />
</projectState>
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="ProjectColorInfo">{
&quot;associatedIndex&quot;: 5
}</component>
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
{% for executable in project.executables -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
"Custom Build Application.{{ executable.name }} {{ build_type_name }}.executor": "Debug",
{% endfor -%}
{% endfor -%}
"RunOnceActivity.RadMigrateCodeStyle": "true",
"RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.cidr.known.project.marker": "true",
"RunOnceActivity.readMode.enableVisualFormatting": "true",
"cf.first.check.clang-format": "false",
"cidr.known.project.marker": "true",
"git-widget-placeholder": "master",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"settings.editor.selected.configurable": "CLionExternalConfigurable",
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="RunManager" selected="Custom Build Application.{% if project.executables|length > 0 %}{{ project.executables[0].name }}{% else %}{{ project.libraries[0].name }}{% endif %} {{ project.build_types[0] }}">
{% for executable in project.executables -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
<configuration name="{{ executable.name }} {{ build_type_name }}" type="CLionExternalRunConfiguration" factoryName="Application" REDIRECT_INPUT="false" ELEVATE="false" USE_EXTERNAL_CONSOLE="false" EMULATE_TERMINAL="false" WORKING_DIR="file://$ProjectFileDir$" PASS_PARENT_ENVS_2="true" PROJECT_NAME="{{ project.name }}" TARGET_NAME="{{ executable.name }} {{ build_type_name }}" CONFIG_NAME="{{ executable.name }} {{ build_type_name }}" RUN_PATH="$PROJECT_DIR$/{{ executable.filename(build_type) }}">
<method v="2">
<option name="CLION.EXTERNAL.BUILD" enabled="true" />
</method>
</configuration>
{% endfor -%}
{% endfor -%}
{% for library in project.libraries -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
<configuration name="{{ library.name }} {{ build_type_name }}" type="CLionExternalRunConfiguration" factoryName="Application" REDIRECT_INPUT="false" ELEVATE="false" USE_EXTERNAL_CONSOLE="false" EMULATE_TERMINAL="false" PASS_PARENT_ENVS_2="true" PROJECT_NAME="{{ project.name }}" TARGET_NAME="{{ library.name }} {{ build_type_name }}" CONFIG_NAME="{{ library.name }} {{ build_type_name }}">
<method v="2">
<option name="CLION.EXTERNAL.BUILD" enabled="true" />
</method>
</configuration>
{% endfor -%}
{% endfor -%}
</component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
</project>

View File

@ -1,24 +0,0 @@
"""
Scons++ Command Line Interface
"""
import argparse
import logging
from .ccjson import make_ccjson_parser
_STDOUT_LOG_FORMAT = '%(message)s'
def run_spp_cmd() -> int:
    """Entry point of the `spp` CLI; returns the process exit code."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action='store_true')
    subparsers = parser.add_subparsers(required=True)
    make_ccjson_parser(subparsers)
    args = parser.parse_args()
    # -v switches the root logger from INFO to DEBUG
    logging.basicConfig(format=_STDOUT_LOG_FORMAT, level=logging.DEBUG if args.verbose else logging.INFO)
    args.handler(args)
    return 0

View File

@ -1,18 +0,0 @@
import argparse
from .common import exec_spp, get_config_cache, require_project_file
def _cmd(args: argparse.Namespace) -> None:
    """Handler for the ``ccjson`` sub-command.

    Resolves the effective build type ('auto' falls back to the cached
    build type, defaulting to 'debug') and asks SCons to regenerate
    compile_commands.json for it.
    """
    require_project_file()
    if args.build_type == 'auto':
        # 'auto' means: reuse whatever build type the project was last configured with.
        selected = get_config_cache().get('build_type', 'debug')
    else:
        selected = args.build_type
    exec_spp((f'--build_type={selected}', '--unity=disable', 'compile_commands.json'))
def make_ccjson_parser(subparsers) -> None:
    """Register the ``ccjson`` sub-command on the given subparsers object."""
    ccjson: argparse.ArgumentParser = subparsers.add_parser('ccjson', help='Generate compile_commands.json')
    ccjson.add_argument('--build_type', choices=('auto', 'debug', 'release_debug', 'release', 'profile'), default='auto')
    ccjson.set_defaults(handler=_cmd)

View File

@ -1,51 +0,0 @@
import json
import logging
from pathlib import Path
import shlex
import subprocess
import sys
from typing import Sequence
# Process-wide project root; defaults to the directory the tool was started in.
_project_root = Path('.').absolute()


def get_project_root() -> Path:
    """Return the currently configured project root directory."""
    return _project_root


def set_project_root(path: Path) -> None:
    """Override the project root used by all helpers in this module."""
    global _project_root
    _project_root = path
def get_config_cache() -> dict:
    """Load the persisted configuration cache from cache/config_cache.json.

    Returns:
        The cached configuration as a dict, or an empty dict when the file
        is missing, unreadable, or does not contain a JSON object.
    """
    cache_path = get_project_root() / 'cache' / 'config_cache.json'
    if not cache_path.exists():
        return {}
    try:
        with cache_path.open('r') as fp:
            data = json.load(fp)
    except Exception as e:
        # Best effort: a corrupt cache must not break the command.
        logging.error(f'Error while reading config cache: {e}.')
        return {}
    if isinstance(data, dict):
        return data
    logging.warning('Config cache is not a dictionary, ignoring it.')
    return {}
def require_project_file() -> None:
    """Exit the process (code 1) unless an SConstruct file exists at the project root."""
    sconstruct = get_project_root() / 'SConstruct'
    if sconstruct.exists():
        return
    logging.error('This command has to be run inside an existing S++ project folder. Exiting.')
    sys.exit(1)
def exec_checked(args: Sequence[str], **kwargs) -> None:
    """Run *args* as a subprocess with output forwarded to our stdout/stderr.

    Raises:
        subprocess.CalledProcessError: if the command exits non-zero.
    """
    # Log the exact command line in shell-quoted form for debugging.
    logging.debug('exec_checked: "%s"', shlex.join(args))
    subprocess.run(args, check=True, stdout=sys.stdout, stderr=sys.stderr, **kwargs)
def exec_get_output(args: Sequence[str], **kwargs) -> str:
    """Run *args* as a subprocess and return its captured stdout as text.

    Raises:
        subprocess.CalledProcessError: if the command exits non-zero.
    """
    logging.debug('exec_get_output: "%s"', shlex.join(args))
    completed = subprocess.run(args, text=True, check=True, capture_output=True, **kwargs)
    return completed.stdout
def exec_spp(args: Sequence[str], **kwargs):
    """Invoke SCons in silent mode with auto-update disabled, forwarding *args*."""
    exec_checked(('scons', '-s', '--disable_auto_update', *args), **kwargs)

View File

@ -1,6 +0,0 @@
# use this to start SCons from the IDE for debugging
import sys
from SCons.Script.Main import main  # SCons' command-line entry point
if __name__ == '__main__':
    # Forward SCons' return value as the process exit code.
    sys.exit(main())

View File

@ -1,10 +0,0 @@
#!/usr/bin/env python3
# Thin launcher for the S++ command line interface.
import os
import sys

if __name__ == '__main__':
    # Make the bundled 'python_module' directory (next to this script) importable.
    script_dir = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(os.path.join(script_dir, 'python_module'))
    from sppcmd import run_spp_cmd
    sys.exit(run_spp_cmd())

View File

@ -1,48 +0,0 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.35122.118
MinimumVisualStudioVersion = 10.0.40219.1
{#- Each Project entry must be closed by its own EndProject line. The first
    GUID on a Project line is the project-TYPE GUID: Visual C++ projects must
    use the well-known {8BC9CEB8-...} value so VS loads them with the C++
    project system; the second GUID identifies the individual project and is
    wrapped in a single pair of double quotes. -#}
{%- for executable in project.executables %}
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "{{ executable.name }}", "vs_project_files\{{ executable.name }}.vcxproj", "{{ generate_uuid('target_' + executable.name, True) }}"
EndProject
{%- endfor %}
{%- for library in project.libraries %}
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "{{ library.name }}", "vs_project_files\{{ library.name }}.vcxproj", "{{ generate_uuid('target_' + library.name, True) }}"
EndProject
{%- endfor %}
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{{ generate_uuid('solution_items', True) }}"
	ProjectSection(SolutionItems) = preProject
		SConstruct = SConstruct
	EndProjectSection
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ build_type_name }}|x64 = {{ build_type_name }}|x64
{%- endfor %}
	EndGlobalSection
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
{%- for executable in project.executables %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + executable.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
{%- for library in project.libraries %}
{%- for build_type in project.build_types %}
{%- set build_type_name = build_type | capitalize %}
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.ActiveCfg = {{ build_type_name }}|x64
		{{ generate_uuid('target_' + library.name, True) }}.{{ build_type_name }}|x64.Build.0 = {{ build_type_name }}|x64
{%- endfor %}
{%- endfor %}
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {{ generate_uuid("solution", True) }}
	EndGlobalSection
EndGlobal

View File

@ -1,15 +0,0 @@
{
"files": {
"solution.sln.jinja": {
"rename_to": "{{ project.name }}.sln"
},
"vs_project_files/target.vcxproj.jinja": {
"one_per": "target",
"rename_to": "vs_project_files/{{ target.name }}.vcxproj"
},
"vs_project_files/target.vcxproj.filters.jinja": {
"one_per": "target",
"rename_to": "vs_project_files/{{ target.name }}.vcxproj.filters"
}
}
}

View File

@ -1,73 +0,0 @@
{#- Generates the Visual Studio .vcxproj.filters file for one target.
    Sources appear under "Source Files", headers under "Public Header Files"
    and "Private Header Files", each with one sub-filter per folder produced
    by the project-supplied folder_list(2) / strip_path_prefix(2) filters —
    presumably the file path with its first two components (e.g.
    "private\<module>") removed; TODO confirm against the filter helpers. -#}
{%- set source_files = get_sources(target) -%}
{%- set private_headers = get_headers('private\\' + target.module.folder) -%}
{%- set public_headers = get_headers('public\\' + target.module.folder) -%}
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in source_files | folder_list(2) | sort %}
    <Filter Include="Source Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_sources_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- if public_headers | length > 0 %}
    <Filter Include="Public Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in public_headers | folder_list(2) | sort %}
    <Filter Include="Public Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_public_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
{%- if private_headers | length > 0 %}
    <Filter Include="Private Header Files">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name, True) }}</UniqueIdentifier>
    </Filter>
{%- for folder in private_headers | folder_list(2) | sort %}
    <Filter Include="Private Header Files\{{ folder }}">
      <UniqueIdentifier>{{ generate_uuid('filter_private_headers_' + target.name + '_' + folder, True) }}</UniqueIdentifier>
    </Filter>
{%- endfor %}
{%- endif %}
  </ItemGroup>
  <ItemGroup>
{%- for source_file in source_files %}
    <ClCompile Include="$(SolutionDir){{ source_file }}">
{%- set path = source_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Source Files\{{ path }}</Filter>
{%- else %}
      <Filter>Source Files</Filter>
{%- endif %}
    </ClCompile>
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
{%- for header_file in public_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Public Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Public Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
{%- for header_file in private_headers %}
    <ClInclude Include="$(SolutionDir){{ header_file }}">
{%- set path = header_file | strip_path_prefix(2) | dirname -%}
{%- if path %}
      <Filter>Private Header Files\{{ path }}</Filter>
{%- else %}
      <Filter>Private Header Files</Filter>
{%- endif %}
    </ClInclude>
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
</Project>

View File

@ -1,67 +0,0 @@
{#- Visual Studio "Makefile"-type project for one S++ target: the real build
    is delegated to SCons (SConsCommandLine / spp.targets import below), so
    the ClCompile settings only feed IntelliSense.
    ms_cxx_standard maps the project's C++ standard onto the MSVC /std:
    switch; standards newer than C++20 map to c++latest and unknown values
    fall back to c++14 via the default filter. -#}
{%- set ms_cxx_standard = {
    'c++14': 'c++14',
    'c++17': 'c++17',
    'c++20': 'c++20',
    'c++23': 'c++latest',
    'c++26': 'c++latest'}[project.cxx_standard] | default('c++14')
-%}
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="17.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
    <ProjectConfiguration Include="{{ build_type_name }}|x64">
      <Configuration>{{ build_type_name }}</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
{%- endfor %}
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <ProjectGuid>{{ generate_uuid('target_' + target.name, True) }}</ProjectGuid>
    <ProjectName>{{ target.name }}</ProjectName>
    <SConsCommandLine>{{ scons_exe }}</SConsCommandLine>
  </PropertyGroup>
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <PropertyGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <TargetPath>$(SolutionDir){{ target.filename(build_type) }}</TargetPath>
    <SPPBuildType>{{ build_type }}</SPPBuildType>
    <SPPTargetType>{{ target.type }}</SPPTargetType>
  </PropertyGroup>
{%- endfor %}
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Label="Configuration">
    <ConfigurationType>Makefile</ConfigurationType>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ItemGroup>
{%- for source_file in get_sources(target) %}
    <ClCompile Include="$(SolutionDir){{ source_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
{%- for header_file in get_headers('private\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
{%- for header_file in get_headers('public\\' + target.module.folder) %}
    <ClInclude Include="$(SolutionDir){{ header_file }}" />
{%- endfor %}
  </ItemGroup>
  <ItemGroup>
    <Content Include="$(SolutionDir)private\{{ target.module.folder }}\SModule" />
  </ItemGroup>
{%- for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
  <ItemDefinitionGroup Condition="'$(Configuration)'=='{{ build_type_name }}'">
    <ClCompile>
      <PreprocessorDefinitions>{{ get_target_property(build_type, target.name, 'CPPDEFINES') | join(';') }};%(PreprocessorDefinitions);</PreprocessorDefinitions>
      <GenerateDebugInformation>{{ build_type != 'release' and 'true' or 'false' }}</GenerateDebugInformation>
      <AdditionalIncludeDirectories>{{ get_target_property(build_type, target.name, 'CPPPATH') | join(';') }};%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
      <MsExtensions>false</MsExtensions>
      {# Prepend the /std: switch so IntelliSense uses the configured C++ standard. -#}
      <AdditionalOptions>/std:{{ ms_cxx_standard }} {{ get_target_property(build_type, target.name, 'CCFLAGS') | join(' ') }}</AdditionalOptions> {# + get_target_property(build_type, target.name, 'CXXFLAGS')) #}
    </ClCompile>
  </ItemDefinitionGroup>
{%- endfor %}
  <Import Project="$(SolutionDir)external\scons-plus-plus\contrib\vs\spp.targets" />
</Project>

View File

@ -1,6 +0,0 @@
{
"recommendations": [
"ms-vscode.cpptools",
"llvm-vs-code-extensions.vscode-clangd"
]
}

View File

@ -1,21 +0,0 @@
{#- Generates .vscode/launch.json: one cppvsdbg debug configuration per
    (executable, build_type) pair, wired to the matching build task via
    preLaunchTask. A comma is emitted after every entry except the very
    last, so the output is strictly valid JSON (the original template left
    a trailing comma). -#}
{
    "configurations": [
    {%- for executable in project.executables -%}
    {%- set outer_loop = loop -%}
    {%- for build_type in project.build_types -%}
    {%- set build_type_name = build_type | capitalize %}
        {
            "name": "{{ executable.name }} ({{ build_type_name }})",
            "type": "cppvsdbg",
            "request": "launch",
            "program": "{{ executable.filename(build_type) | escape_path }}",
            "args": [],
            "stopAtEntry": false,
            "cwd": "${workspaceFolder}",
            "environment": [],
            "console": "integratedTerminal",
            "preLaunchTask": "{{ executable.name }} {{ build_type_name }}"
        }{{ "" if outer_loop.last and loop.last else "," }}
    {%- endfor %}
    {%- endfor %}
    ]
}

View File

@ -1,69 +0,0 @@
{#- Generates .vscode/tasks.json: for every (target, build_type) pair, one
    build task (runs SCons with -j{{ nproc }} and regenerates
    compile_commands.json) and one clean task (same SCons invocation with -c).
    Task labels must match the preLaunchTask names used in launch.json.
    NOTE(review): the loops leave a trailing comma after the last task entry;
    VS Code reads tasks.json as JSONC and tolerates this, but strict JSON
    parsers will not — confirm before consuming this output elsewhere. -#}
{
    // See https://go.microsoft.com/fwlink/?LinkId=733558
    // for the documentation about the tasks.json format
    "version": "2.0.0",
    "tasks": [
        {% for executable in project.executables %}
        {% for build_type in project.build_types %}
        {% set build_type_name = build_type | capitalize -%}
        {
            "label": "{{ executable.name }} {{ build_type_name }}",
            "type": "shell",
            "command": "{{ scons_exe | escape_path }} -j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) | escape_path }} compile_commands.json",
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": [],
            "group": {
                "kind": "build",
                "isDefault": false
            }
        },
        {
            "label": "{{ executable.name }} {{ build_type_name }} Clean",
            "type": "shell",
            "command": "{{ scons_exe | escape_path }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) | escape_path }} -c",
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": [],
            "group": {
                "kind": "build",
                "isDefault": false
            }
        },
        {% endfor %}
        {% endfor %}
        {% for library in project.libraries %}
        {% for build_type in project.build_types %}
        {% set build_type_name = build_type | capitalize -%}
        {
            "label": "{{ library.name }} {{ build_type_name }}",
            "type": "shell",
            "command": "{{ scons_exe | escape_path }} -j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) | escape_path }} compile_commands.json",
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": [],
            "group": {
                "kind": "build",
                "isDefault": false
            }
        },
        {
            "label": "{{ library.name }} {{ build_type_name }} Clean",
            "type": "shell",
            "command": "{{ scons_exe | escape_path }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) | escape_path }} -c",
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": [],
            "group": {
                "kind": "build",
                "isDefault": false
            }
        },
        {% endfor %}
        {% endfor %}
    ]
}