Compare commits

..

16 Commits

Author SHA1 Message Date
1e4bb17251 Added recipe for json and updated yaml-cpp recipe. 2024-10-25 08:29:25 +02:00
c461b5da39 Added recipes for curl, libidn2, libpsl and libunistring. 2024-10-23 23:37:37 +02:00
b7cb5f7c48 Added openssl recipe. 2024-08-21 09:36:17 +02:00
9c64f982fd Added recipe for winsock2 and target_os to dependency conditions. 2024-08-19 18:36:37 +02:00
378c6ba341 Fixed Catch2 recipe. 2024-08-18 17:28:26 +02:00
96fc1984cd Fixed compilation with MSVC. 2024-08-18 17:28:25 +02:00
396350b295 Allow settings COMPILATIONDB_FILTER_FILES via config. 2024-08-18 17:26:32 +02:00
5de1ac4444 Enable experimental library features (jthread) for clang. 2024-08-18 17:26:30 +02:00
d5712120df Moved check of SYSTEM_CACHE_DIR accessibility to before it is used. 2024-08-18 17:25:59 +02:00
267d06a997 Added CXXFLAGS and CFLAGS to config variables. 2024-08-18 17:24:41 +02:00
089ea25c10 Adjusted error description to make more sense. 2024-08-17 18:11:36 +02:00
e1404fee58 Fixed zlib recipe on linux. 2024-08-17 18:11:13 +02:00
c4200393fb Fixed compilation with MSVC. 2024-08-15 15:27:39 +02:00
0c82036300 Update to new recipe system (S++ 2.0). 2024-08-14 23:33:04 +02:00
35b38b8b6e Some more work on the new dependency resolution system. 2024-08-08 14:32:28 +02:00
8bea4a6db5 Some tests. 2024-08-06 09:33:42 +02:00
49 changed files with 1114 additions and 771 deletions

View File

@@ -1,36 +1,23 @@
import copy import copy
import enum
import glob import glob
import inspect
import json import json
import multiprocessing
import os import os
import pathlib
import psutil import psutil
import shutil
import sys import sys
import time import time
import uuid
class TargetType(enum.Enum):
PROGRAM = 0
STATIC_LIBRARY = 1
SHARED_LIBRARY = 2
class _VersionSpec: class _VersionSpec:
minimum_version = None minimum_version = None
maximum_version = None maximum_version = None
options = {}
def __init__(self, minimum_version = None, maximum_version = None, options = {}): def __init__(self, minimum_version = None, maximum_version = None):
self.minimum_version = minimum_version self.minimum_version = minimum_version
self.maximum_version = maximum_version self.maximum_version = maximum_version
self.options = options
def __str__(self): def __str__(self):
return f'Min: {self.minimum_version}, Max: {self.maximum_version}, Options: {self.options}' return f'Min: {self.minimum_version}, Max: {self.maximum_version}'
class _Dependency: class _Dependency:
name: str = '' name: str = ''
@@ -41,8 +28,6 @@ class _Dependency:
cook_result: dict = {} cook_result: dict = {}
class _Target: class _Target:
name: str
target_type: TargetType
builder = None builder = None
args: list = [] args: list = []
kwargs: dict = {} kwargs: dict = {}
@@ -54,19 +39,13 @@ def _find_recipe(env: Environment, recipe_name: str):
return env['SPP_RECIPES'][recipe_name] return env['SPP_RECIPES'][recipe_name]
import importlib.util import importlib.util
source_file = None source_file = None
for folder in env['RECIPES_FOLDERS']:
if not env['SPP_RECIPES_FOLDERS']: try_source_file = f'{folder.abspath}/{recipe_name}/recipe.py'
env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
for folder in env['SPP_RECIPES_FOLDERS']:
from SCons import Node
if folder is Node:
folder = folder.abspath
try_source_file = f'{folder}/{recipe_name}/recipe.py'
if os.path.exists(try_source_file): if os.path.exists(try_source_file):
source_file = try_source_file source_file = try_source_file
break break
if not source_file: if not source_file:
env.Error(f'Could not find recipe for {recipe_name}.') raise Exception(f'Could not find recipe {recipe_name}.')
spec = importlib.util.spec_from_file_location(recipe_name, source_file) spec = importlib.util.spec_from_file_location(recipe_name, source_file)
recipe = importlib.util.module_from_spec(spec) recipe = importlib.util.module_from_spec(spec)
recipe.env = env recipe.env = env
@@ -74,19 +53,12 @@ def _find_recipe(env: Environment, recipe_name: str):
env['SPP_RECIPES'][recipe_name] = recipe env['SPP_RECIPES'][recipe_name] = recipe
return recipe return recipe
def _run_cook(dependency: _Dependency):
if not dependency.cook_result:
cook_signature = inspect.signature(dependency.recipe.cook)
kwargs = {}
if 'options' in cook_signature.parameters:
kwargs['options'] = dependency.version_spec.options
dependency.cook_result = dependency.recipe.cook(env, dependency.version, **kwargs)
def _cook(env: Environment, recipe_name: str): def _cook(env: Environment, recipe_name: str):
dependency = env['SPP_DEPENDENCIES'].get(recipe_name) dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
if not dependency: if not dependency:
raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.') raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
_run_cook(dependency) if not dependency.cook_result:
dependency.cook_result = dependency.recipe.cook(env, dependency.version)
return dependency.cook_result return dependency.cook_result
def _module(env: Environment, file: str): def _module(env: Environment, file: str):
@@ -97,7 +69,6 @@ def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
CPPDEFINES = lib_conf.get('CPPDEFINES', []), CPPDEFINES = lib_conf.get('CPPDEFINES', []),
LIBPATH = lib_conf.get('LIBPATH', []), LIBPATH = lib_conf.get('LIBPATH', []),
LIBS = lib_conf.get('LIBS', []), LIBS = lib_conf.get('LIBS', []),
LINKFLAGS = lib_conf.get('LINKFLAGS', []),
JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', [])) JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', []))
def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None: def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
@@ -113,30 +84,20 @@ def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> No
_inject_list(kwargs, dependency, 'CPPDEFINES') _inject_list(kwargs, dependency, 'CPPDEFINES')
_inject_list(kwargs, dependency, 'LIBPATH') _inject_list(kwargs, dependency, 'LIBPATH')
_inject_list(kwargs, dependency, 'LIBS') _inject_list(kwargs, dependency, 'LIBS')
_inject_list(kwargs, dependency, 'LINKFLAGS')
if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'): if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
kwargs['source'].extend(dependency['ADDITIONAL_SOURCES']) kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
if 'DEPENDENCIES' in dependency: if 'DEPENDENCIES' in dependency:
for inner_dependency in dependency['DEPENDENCIES']: for inner_dependency in dependency['DEPENDENCIES']:
_inject_dependency(inner_dependency, kwargs, False) _inject_dependency(inner_dependency, kwargs, False)
elif isinstance(dependency, _Dependency): elif isinstance(dependency, _Dependency):
_run_cook(dependency) if not dependency.cook_result:
dependency.cook_result = dependency.recipe.cook(env, dependency.version)
_inject_list(kwargs, dependency.cook_result, 'CPPPATH') _inject_list(kwargs, dependency.cook_result, 'CPPPATH')
_inject_list(kwargs, dependency.cook_result, 'CPPDEFINES') _inject_list(kwargs, dependency.cook_result, 'CPPDEFINES')
_inject_list(kwargs, dependency.cook_result, 'LIBPATH') _inject_list(kwargs, dependency.cook_result, 'LIBPATH')
_inject_list(kwargs, dependency.cook_result, 'LIBS') _inject_list(kwargs, dependency.cook_result, 'LIBS')
_inject_list(kwargs, dependency.cook_result, 'LINKFLAGS')
for depdep in dependency.depdeps: for depdep in dependency.depdeps:
_inject_dependency(depdep, kwargs) _inject_dependency(depdep, kwargs)
elif isinstance(dependency, _Target):
_inject_list(kwargs, dependency.kwargs, 'CPPPATH')
_inject_list(kwargs, dependency.kwargs, 'CPPDEFINES')
_inject_list(kwargs, dependency.kwargs, 'LIBPATH')
_inject_list(kwargs, dependency.kwargs, 'LIBS')
_inject_list(kwargs, {'LIBS': [dependency]}, 'LIBS')
_inject_list(kwargs, dependency.kwargs, 'LINKFLAGS')
for depdep in dependency.dependencies:
_inject_dependency(depdep, kwargs)
def _rglob(env: Environment, root_path: str, pattern: str, **kwargs): def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
result_nodes = [] result_nodes = []
@@ -188,30 +149,6 @@ def _make_interface(env: Environment, dependencies: list = []):
'CPPDEFINES': kwargs.get('CPPDEFINES', []) 'CPPDEFINES': kwargs.get('CPPDEFINES', [])
} }
def _exe_filename(env: Environment, name: str, type: str = 'static') -> str:
if os.name == 'posix':
return name
elif os.name == 'nt':
return f'{name}.exe'
else:
raise Exception('What OS is this?')
def _find_executable(env: Environment, name: str, paths: 'list[str]', type : str = 'static', allow_fail: bool = False, use_glob: bool = False):
fname = _exe_filename(env, name, type)
for path in paths:
lib_path = os.path.join(path, fname)
if use_glob:
files = glob.glob(lib_path)
if len(files) == 1:
return files[0]
elif len(files) > 1:
raise Exception(f'Multiple candidates found for executable with name {name} in paths: "{", ".join(paths)}" with name: "{", ".join(files)}".')
elif os.path.exists(lib_path):
return lib_path
if allow_fail:
return None
raise Exception(f'Could not find executable with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
def _lib_filename(env: Environment, name: str, type: str = 'static') -> str: def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
if os.name == 'posix': if os.name == 'posix':
ext = { ext = {
@@ -246,37 +183,10 @@ def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'sta
def _error(env: Environment, message: str): def _error(env: Environment, message: str):
print(message, file=sys.stderr) print(message, file=sys.stderr)
Exit(1) env.Exit(1)
def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None': def _find_common_depenency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
result = {} result_version = _VersionSpec()
for key, valueA in dictA.items():
if key in dictB:
valueB = dictB[key]
if type(valueA) != type(valueB):
return None
elif type(valueA) == list:
result[key] = valueA + valueB
elif type(valueA) == dict:
mergedValue = _try_merge_dicts(valueA, valueB)
if mergedValue is None:
return None
result[key] = mergedValue
elif valueA != valueB:
return None
else:
result[key] = valueA
for key, valueB in dictB.items():
if key not in result:
result[key] = valueB
return result
def _find_common_dependency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
options = _try_merge_dicts(versionA.options, versionB.options)
if options is None:
return None
result_version = _VersionSpec(options=options)
if versionA.minimum_version is not None: if versionA.minimum_version is not None:
if versionB.minimum_version is not None: if versionB.minimum_version is not None:
result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version) result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version)
@@ -299,19 +209,19 @@ def _find_common_dependency_version(name: str, versionA: _VersionSpec, versionB:
return result_version return result_version
def _parse_version_spec(version_spec: dict) -> _VersionSpec: def _parse_version_spec(version_spec: dict) -> _VersionSpec:
return _VersionSpec(version_spec.get('min'), version_spec.get('max'), version_spec.get('options', {})) return _VersionSpec(version_spec.get('min'), version_spec.get('max'))
def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool: def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
if name not in env['SPP_DEPENDENCIES']: if name not in env['SPP_DEPENDENCIES']:
return True return True
dependency = env['SPP_DEPENDENCIES'][name] dependency = env['SPP_DEPENDENCIES'][name]
common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec) common_version_spec = _find_common_depenency_version(name, dependency.version_spec, version_spec)
return common_version_spec is not None return common_version_spec is not None
def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency: def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
if name in env['SPP_DEPENDENCIES']: if name in env['SPP_DEPENDENCIES']:
dependency = env['SPP_DEPENDENCIES'][name] dependency = env['SPP_DEPENDENCIES'][name]
common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec) common_version_spec = _find_common_depenency_version(name, dependency.version_spec, version_spec)
if common_version_spec is None: if common_version_spec is None:
raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}') raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}')
if dependency.version_spec != common_version_spec: if dependency.version_spec != common_version_spec:
@@ -346,26 +256,18 @@ def _version_matches(version, version_spec: _VersionSpec) -> bool:
def _find_version(env: Environment, dependency: _Dependency): def _find_version(env: Environment, dependency: _Dependency):
for update in (False, True): for update in (False, True):
versions_signature = inspect.signature(dependency.recipe.versions) versions = dependency.recipe.versions(env, update=update)
kwargs = {}
if 'options' in versions_signature.parameters:
kwargs['options'] = dependency.version_spec.options
versions = dependency.recipe.versions(env, update=update, **kwargs)
_sort_versions(versions) _sort_versions(versions)
for version in versions: for version in versions:
kwargs = {}
dependencies_signature = inspect.signature(dependency.recipe.dependencies)
if 'options' in dependencies_signature.parameters:
kwargs['options'] = dependency.version_spec.options
if _version_matches(version, dependency.version_spec): if _version_matches(version, dependency.version_spec):
canadd = True canadd = True
for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items(): for depname, depspec in dependency.recipe.dependencies(env, version).items():
if not _can_add_dependency(env, depname, _parse_version_spec(depspec)): if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
canadd = False canadd = False
break break
if canadd: if canadd:
depdeps = [] depdeps = []
for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items(): for depname, depspec in dependency.recipe.dependencies(env, version).items():
depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec))) depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
dependency.version = version dependency.version = version
dependency.depdeps = depdeps dependency.depdeps = depdeps
@@ -374,16 +276,10 @@ def _find_version(env: Environment, dependency: _Dependency):
print(f'Required version: {dependency.version_spec}') print(f'Required version: {dependency.version_spec}')
raise Exception(f'Could not find a suitable version for dependency {dependency.name}.') raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')
def _wrap_builder(builder, target_type: TargetType): def _wrap_builder(builder, is_lib: bool = False):
def _wrapped(env, dependencies = {}, *args, **kwargs): def _wrapped(env, dependencies = {}, *args, **kwargs):
target_dependencies = [] target_dependencies = []
for name, version_spec in dependencies.items(): for name, version_spec in dependencies.items():
if version_spec == {} and name not in env['SPP_DEPENDENCIES']: # this is basically a shortcut to adding targets from other modules without having to save them in the env
dep_target = _find_target(env, name)
if dep_target is not None and dep_target.target_type != TargetType.PROGRAM:
target_dependencies.append(dep_target)
# TODO: there might be an issue here with dependencies not being injected this way :/
continue
target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec))) target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec)))
if 'CPPPATH' not in kwargs: if 'CPPPATH' not in kwargs:
@@ -407,15 +303,6 @@ def _wrap_builder(builder, target_type: TargetType):
kwargs['source'] = new_source kwargs['source'] = new_source
target = _Target() target = _Target()
if 'name' in kwargs:
target.name = kwargs['name']
else:
trgt = _target_entry(kwargs.get('target'))
if trgt is not None:
target.name = str(trgt.name)
else:
target.name = 'Unknown target'
target.target_type = target_type
target.builder = builder target.builder = builder
target.args = args target.args = args
target.kwargs = kwargs target.kwargs = kwargs
@@ -440,7 +327,6 @@ def _wrap_depends(depends):
def _wrapped(env, dependant, dependency): def _wrapped(env, dependant, dependency):
if isinstance(dependant, _Target) or isinstance(dependency, _Target): if isinstance(dependant, _Target) or isinstance(dependency, _Target):
env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)]) env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
return
elif isinstance(dependant, dict) and '_target' in dependant: elif isinstance(dependant, dict) and '_target' in dependant:
dependant = dependant['_target'] dependant = dependant['_target']
elif isinstance(dependency, dict) and '_target' in dependency: elif isinstance(dependency, dict) and '_target' in dependency:
@@ -463,19 +349,12 @@ def _build_target(target: _Target):
_build_target(lib) _build_target(lib)
target.kwargs['LIBS'].remove(lib) target.kwargs['LIBS'].remove(lib)
target.kwargs['LIBS'].append(lib.target) target.kwargs['LIBS'].append(lib.target)
new_kwargs = target.kwargs.copy() target.target = target.builder(*target.args, **target.kwargs)
if 'target' in new_kwargs: # there should always be a target, right?
new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
target.target = target.builder(*target.args, **new_kwargs)
def _version_to_string(version) -> str: def _version_to_string(version) -> str:
return '.'.join([str(v) for v in version]) return '.'.join([str(v) for v in version])
def _finalize(env: Environment): def _finalize(env: Environment):
if generate_project:
_generate_project(generate_project)
Exit(0)
version_requirements = {dep.name: { version_requirements = {dep.name: {
'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version), 'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version), 'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
@@ -499,18 +378,6 @@ def _finalize(env: Environment):
_build_target(target) _build_target(target)
for target in env['SPP_DEFAULT_TARGETS']: for target in env['SPP_DEFAULT_TARGETS']:
env.Default(target.target) env.Default(target.target)
for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']:
if isinstance(dependant, _Target):
dependant = dependant.target
if isinstance(dependency, _Target):
dependency = dependency.target
env.Depends(dependant, dependency)
def _find_target(env: Environment, target_name: str) -> '_Target|None':
for target in env['SPP_TARGETS']:
if target.name == target_name:
return target
return None
def _get_fallback_cache_dir() -> str: def _get_fallback_cache_dir() -> str:
return Dir('#cache').abspath return Dir('#cache').abspath
@@ -527,138 +394,12 @@ def _find_system_cache_dir() -> str:
# fallback # fallback
return _get_fallback_cache_dir() return _get_fallback_cache_dir()
def _target_entry(target_value):
if target_value is None:
return None
if not isinstance(target_value, list):
target_value = [target_value]
if len(target_value) < 1:
return None
if isinstance(target_value[0], str):
target_value[0] = env.Entry(target_value[0])
return target_value[0]
def _generate_project(project_type: str) -> None:
try:
import jinja2
except ImportError:
_error(None, 'Project generation requires the jinja2 to be installed.')
source_folder, target_folder = {
'clion': (os.path.join(_spp_dir.abspath, 'util', 'clion_project_template'), Dir('#.idea').abspath),
'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath)
}.get(project_type, (None, None))
if not source_folder:
_error(None, 'Invalid project type option.')
uuid_cache_file = pathlib.Path(env['SHARED_CACHE_DIR'], 'uuids.json')
uuid_cache = {}
save_uuid_cache = False
if uuid_cache_file.exists():
try:
with uuid_cache_file.open('r') as f:
uuid_cache = json.load(f)
except Exception as e:
print(f'Error loading UUID cache: {e}')
def _generate_uuid(name: str = '') -> str:
nonlocal save_uuid_cache
if name and name in uuid_cache:
return uuid_cache[name]
new_uuid = str(uuid.uuid4())
if name:
uuid_cache[name] = new_uuid
save_uuid_cache = True
return new_uuid
root_path = pathlib.Path(env.Dir('#').abspath)
def _get_executables() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.PROGRAM:
trgt = _target_entry(target.kwargs['target'])
def _exe_path(build_type) -> str:
exe_path = pathlib.Path(trgt.abspath).relative_to(root_path)
exe_path = exe_path.parent / f'{env.subst("$PROGPREFIX")}{exe_path.name}-{build_type}{env.subst("$PROGSUFFIX")}'
return str(exe_path)
result.append({
'name': target.name,
'filename': _exe_path
})
return result
def _get_libraries() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.STATIC_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
def _lib_path(build_type) -> str:
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$LIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$LIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path
})
elif target.target_type == TargetType.SHARED_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
def _lib_path(build_type) -> str:
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$SHLIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$SHLIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path
})
return result
def _escape_path(input: str) -> str:
return input.replace('\\', '\\\\')
jinja_env = jinja2.Environment()
jinja_env.globals['generate_uuid'] = _generate_uuid
jinja_env.globals['project'] = {
'name': env.Dir('#').name,
'executables': _get_executables(),
'libraries': _get_libraries(),
'build_types': ['debug', 'release_debug', 'release', 'profile']
}
jinja_env.globals['scons_exe'] = shutil.which('scons')
jinja_env.globals['nproc'] = multiprocessing.cpu_count()
jinja_env.filters['escape_path'] = _escape_path
source_path = pathlib.Path(source_folder)
target_path = pathlib.Path(target_folder)
for source_file in source_path.rglob('*'):
if source_file.is_file():
target_file = target_path / (source_file.relative_to(source_path))
target_file.parent.mkdir(parents=True, exist_ok=True)
if source_file.suffix != '.jinja':
shutil.copyfile(source_file, target_file)
continue
with source_file.open('r') as f:
templ = jinja_env.from_string(f.read())
target_file = target_file.with_suffix('')
with target_file.open('w') as f:
f.write(templ.render())
if save_uuid_cache:
try:
with uuid_cache_file.open('w') as f:
json.dump(uuid_cache, f)
except Exception as e:
print(f'Error writing uuid cache: {e}')
Import('config') Import('config')
if not config.get('PROJECT_NAME'): if not config.get('PROJECT_NAME'):
config['PROJECT_NAME'] = 'PROJECT' config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'): if not config.get('CXX_STANDARD'):
config['CXX_STANDARD'] = 'c++23' config['CXX_STANDARD'] = 'c++23'
if not config.get('CXX_NO_EXCEPTIONS'):
config['CXX_NO_EXCEPTIONS'] = False
if not config.get('PREPROCESSOR_PREFIX'): if not config.get('PREPROCESSOR_PREFIX'):
config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer? config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
@@ -732,17 +473,6 @@ AddOption(
action = 'store_true' action = 'store_true'
) )
AddOption(
'--generate_project',
dest = 'generate_project',
type = 'choice',
choices = ('clion', 'vscode'),
nargs = 1,
action = 'store'
)
_spp_dir = Dir('.')
build_type = GetOption('build_type') build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode') unity_mode = GetOption('unity_mode')
variant = GetOption('variant') variant = GetOption('variant')
@@ -751,7 +481,6 @@ config_file = GetOption('config_file')
compiler = GetOption('compiler') compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories') update_repositories = GetOption('update_repositories')
dump_env = GetOption('dump_env') dump_env = GetOption('dump_env')
generate_project = GetOption('generate_project')
default_CC = { default_CC = {
'gcc': 'gcc', 'gcc': 'gcc',
@@ -778,24 +507,14 @@ vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python') vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current' vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
' project.', config['COMPILATIONDB_FILTER_FILES']) ' project.', config['COMPILATIONDB_FILTER_FILES'])
vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
tools = ['default', 'compilation_db', 'unity_build'] tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config: if 'TOOLS' in config:
tools.extend(config['TOOLS']) tools.extend(config['TOOLS'])
env = Environment(tools = tools, variables = vars, ENV = os.environ) env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['SPP_RECIPES_FOLDERS'] = [] env['RECIPES_FOLDERS'] = [Dir('recipes')]
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache') env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories
env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone
env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []
print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}') print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try: try:
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
@@ -806,9 +525,7 @@ except:
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned') env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded') env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories env['UPDATE_REPOSITORIES'] = update_repositories
env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone
env['CXX_NO_EXCEPTIONS'] = config['CXX_NO_EXCEPTIONS']
env['DEPS_CFLAGS'] = [] env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = [] env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = [] env['DEPS_LINKFLAGS'] = []
@@ -843,17 +560,12 @@ env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = []) env.Append(LINKFLAGS = [])
# init SPP environment variables # init SPP environment variables
env['SPP_DIR'] = _spp_dir.abspath
env['SPP_TARGETS'] = [] env['SPP_TARGETS'] = []
env['SPP_DEFAULT_TARGETS'] = [] env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = [] env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {} env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {} env['SPP_RECIPES'] = {}
env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
if variant:
env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
# create the cache dir # create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True) os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore' cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
@@ -908,10 +620,8 @@ elif unity_mode == 'stress': # compile everything in one single file to stress t
# setup compiler specific options # setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang': if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic', '-fvisibility=hidden']) env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic'])
env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}']) env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
if env['CXX_NO_EXCEPTIONS']:
env.Append(CXXFLAGS = [f'-fno-exceptions'])
if build_type != 'release': if build_type != 'release':
env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}']) env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group') env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
@@ -924,11 +634,10 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
# -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway # -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized']) env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare']) env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
else: # clang only else: # clang only
# no-gnu-anonymous-struct - we don't care # no-gnu-anonymous-struct - we don't care
# no-missing-field-initializers - useful in some cases, annoying in most env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
# no-ambiguous-reversed-operator - should be quite useful, but we get a false positive, apparently?
env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator'])
env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
if build_type == 'debug': if build_type == 'debug':
env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG']) env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
@@ -944,7 +653,7 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
elif build_type == 'release': elif build_type == 'release':
env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG']) env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
if env['ENABLE_ASAN']: if enable_asan:
env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer']) env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(LINKFLAGS = ['-fsanitize=address']) env.Append(LINKFLAGS = ['-fsanitize=address'])
env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer']) env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
@@ -964,31 +673,20 @@ elif env['COMPILER_FAMILY'] == 'cl':
# C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
# C4275: same as above # C4275: same as above
env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg', env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8']) f'/std:{cxx_version_name}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t', '/utf-8'])
env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ... env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
env.Append(DEPS_CXXFLAGS = ['/Zc:char8_t', '/utf-8', '/vmg']) env.Append(DEPS_CXXFLAGS = ['/EHsc', '/Zc:char8_t', '/utf-8', '/vmg'])
if env['CXX_NO_EXCEPTIONS']:
env.Append(CPPDEFINES = ['_HAS_EXCEPTIONS=0'])
else:
env.Append(CXXFLAGS = ['/EHsc'])
env.Append(DEPS_CXXFLAGS = ['/EHsc'])
if env['SHOW_INCLUDES']:
env.Append(CCFLAGS = ['/showIncludes'])
if build_type == 'debug': if build_type == 'debug':
env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG') env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2']) env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2']) env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG']) env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile': elif build_type == 'release_debug' or build_type == 'profile':
env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ' /DEBUG') env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD']) env.Append(DEPS_CXXFLAGS = ['/Zi'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG']) env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
else: else:
env.Append(CCFLAGS = ['/O2', '/MD']) env.Append(CCFLAGS = ['/O2'])
env.Append(DEPS_CXXFLAGS = ['/MD'])
if env['ENABLE_ASAN']:
env.Append(CCFLAGS = ['/fsanitize=address'])
if env['COMPILER_FAMILY'] == 'gcc': if env['COMPILER_FAMILY'] == 'gcc':
env.Append(CXXFLAGS = ['-Wno-volatile']) env.Append(CXXFLAGS = ['-Wno-volatile'])
@@ -1006,23 +704,21 @@ env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_deps_from_json, 'DepsFromJson') env.AddMethod(_deps_from_json, 'DepsFromJson')
env.AddMethod(_make_interface, 'MakeInterface') env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename') env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_executable, 'FindExecutable')
env.AddMethod(_find_lib, 'FindLib') env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_error, 'Error') env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library') env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary') env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary') env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program') env.AddMethod(_wrap_builder(env.Program), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default') env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends') env.AddMethod(_wrap_depends(env.Depends), 'Depends')
env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram') env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary') env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary') env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary') env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module') env.AddMethod(_module, 'Module')
env.AddMethod(_finalize, 'Finalize') env.AddMethod(_finalize, 'Finalize')
env.AddMethod(_find_target, 'FindTarget')
if hasattr(env, 'Gch'): if hasattr(env, 'Gch'):
env.AddMethod(_wrap_builder(env.Gch), 'Gch') env.AddMethod(_wrap_builder(env.Gch), 'Gch')
@@ -1034,6 +730,5 @@ if dump_env:
print('==== Begin Environment Dump =====') print('==== Begin Environment Dump =====')
print(env.Dump()) print(env.Dump())
print('==== End Environment Dump =====') print('==== End Environment Dump =====')
Exit(0)
Return('env') Return('env')

View File

@@ -9,7 +9,7 @@ _BUILT_STAMPFILE = '.spp_built'
Import('env') Import('env')
def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure', skip_steps = ()) -> dict: def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure') -> dict:
config = env['BUILD_TYPE'] config = env['BUILD_TYPE']
build_dir = os.path.join(project_root, f'build_{config}') build_dir = os.path.join(project_root, f'build_{config}')
install_dir = os.path.join(project_root, f'install_{config}') install_dir = os.path.join(project_root, f'install_{config}')
@@ -32,15 +32,9 @@ def _autotools_project(env: Environment, project_root: str, config_args: 'list[s
if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'): if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root) subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root)
if 'configure' not in skip_steps: subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True) subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
if 'build' not in skip_steps: subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
if 'install' not in skip_steps:
subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
else:
# must still create the install dir for the stamp file
os.makedirs(install_dir, exist_ok=True)
pathlib.Path(install_dir, _BUILT_STAMPFILE).touch() pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
libpath = [] libpath = []
@@ -50,7 +44,6 @@ def _autotools_project(env: Environment, project_root: str, config_args: 'list[s
libpath.append(full_path) libpath.append(full_path)
return { return {
'build_dir': build_dir,
'install_dir': install_dir, 'install_dir': install_dir,
'LIBPATH': libpath, 'LIBPATH': libpath,
'CPPPATH': [os.path.join(install_dir, 'include')] 'CPPPATH': [os.path.join(install_dir, 'include')]

View File

@@ -40,11 +40,10 @@ def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[
args.append(f'-D{name}={cmd_quote(value)}') args.append(f'-D{name}={cmd_quote(value)}')
return args return args
def _calc_version_hash(env, dependencies: 'list[dict]') -> str: def _calc_version_hash(dependencies: 'list[dict]') -> str:
return json.dumps({ return json.dumps({
'version': _VERSION, 'version': _VERSION,
'dependencies': dependencies, 'dependencies': dependencies
'cxxflags': env['DEPS_CXXFLAGS']
}) })
def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict: def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict:
@@ -52,7 +51,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
build_dir = os.path.join(project_root, f'build_{config}') build_dir = os.path.join(project_root, f'build_{config}')
install_dir = os.path.join(project_root, f'install_{config}') install_dir = os.path.join(project_root, f'install_{config}')
version_hash = _calc_version_hash(env, dependencies) version_hash = _calc_version_hash(dependencies)
stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE) stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE)
is_built = stamp_file.exists() is_built = stamp_file.exists()
@@ -98,7 +97,6 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
return { return {
'install_dir': install_dir, 'install_dir': install_dir,
'BINPATH': [os.path.join(install_dir, 'bin')],
'LIBPATH': libpath, 'LIBPATH': libpath,
'CPPPATH': [os.path.join(install_dir, 'include')] 'CPPPATH': [os.path.join(install_dir, 'include')]
} }

View File

@@ -2,7 +2,7 @@
from git import Repo from git import Repo
from git.exc import GitError from git.exc import GitError
import hashlib import hashlib
import inspect import re
from SCons.Script import * from SCons.Script import *
Import('env') Import('env')
@@ -21,30 +21,21 @@ def _clone(env: Environment, repo_name: str, remote_url: str):
def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict: def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
repo, origin = _clone(env, repo_name, remote_url) repo, origin = _clone(env, repo_name, remote_url)
worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch! worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
update_submodules = False
if not os.path.exists(worktree_dir): if not os.path.exists(worktree_dir):
print(f'Checking out into {worktree_dir}.') print(f'Checking out into {worktree_dir}.')
origin.fetch(tags=True, force=True) origin.fetch(tags=True)
os.makedirs(worktree_dir) os.makedirs(worktree_dir)
repo.git.worktree('add', worktree_dir, git_ref) repo.git.worktree('add', worktree_dir, git_ref)
worktree_repo = Repo(worktree_dir)
update_submodules = True
elif env['UPDATE_REPOSITORIES']: elif env['UPDATE_REPOSITORIES']:
worktree_repo = Repo(worktree_dir) worktree_repo = Repo(worktree_dir)
if not worktree_repo.head.is_detached: if not worktree_repo.head.is_detached:
print(f'Updating git repository at {worktree_dir}') print(f'Updating git repository at {worktree_dir}')
worktree_origin = worktree_repo.remotes['origin'] worktree_origin = worktree_repo.remotes['origin']
worktree_origin.pull() worktree_origin.pull()
update_submodules = True
else: else:
print(f'Not updating git repository {worktree_dir} as it is not on a branch.') print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
if update_submodules:
for submodule in worktree_repo.submodules:
submodule.update(init=True)
return { return {
'checkout_root': worktree_dir, 'checkout_root': worktree_dir
'repo': repo,
'origin': origin
} }
def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]': def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
@@ -57,25 +48,16 @@ def _make_callable(val):
if callable(val): if callable(val):
return val return val
else: else:
def _wrapped(*args, **kwargs): return lambda env: val
return val
return _wrapped
def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None: def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None:
_repo_name = _make_callable(repo_name) _repo_name = _make_callable(repo_name)
_repo_url = _make_callable(repo_url) _repo_url = _make_callable(repo_url)
_tag_pattern = _make_callable(tag_pattern) _tag_pattern = _make_callable(tag_pattern)
versions_cb = versions and _make_callable(versions) versions_cb = versions and _make_callable(versions)
dependencies_cb = _make_callable(dependencies)
def _versions(env: Environment, update: bool = False, options: dict = {}): def _versions(env: Environment, update: bool = False):
if 'ref' in options: pattern = _tag_pattern(env)
return [(0, 0, 0)] # no versions if compiling from a branch
pattern_signature = inspect.signature(_tag_pattern)
kwargs = {}
if 'options' in pattern_signature.parameters:
kwargs['options'] = options
pattern = _tag_pattern(env, **kwargs)
if pattern: if pattern:
tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update) tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update)
result = [] result = []
@@ -91,33 +73,17 @@ def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, v
else: else:
return [(0, 0, 0)] return [(0, 0, 0)]
def _dependencies(env: Environment, version, options: dict) -> 'dict': def _dependencies(env: Environment, version) -> 'dict':
dependencies_signature = inspect.signature(dependencies_cb) return dependencies
kwargs = {}
if 'options' in dependencies_signature.parameters:
kwargs['options'] = options
return dependencies_cb(env, version, **kwargs)
def _cook(env: Environment, version, options: dict = {}) -> dict: def _cook(env: Environment, version) -> dict:
if 'ref' in options: if tag_fn:
git_ref = options['ref'] git_ref = f'refs/tags/{tag_fn(version)}'
elif tag_fn:
tag_signature = inspect.signature(tag_fn)
kwargs = {}
if 'options' in tag_signature.parameters:
kwargs['options'] = options
git_ref = f'refs/tags/{tag_fn(version, **kwargs)}'
else: else:
assert ref_fn assert ref_fn
git_ref = ref_fn(env, version) git_ref = ref_fn(env, version)
repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref) repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref)
return cook_fn(env, repo)
cook_signature = inspect.signature(cook_fn)
kwargs = {}
if 'options' in cook_signature.parameters:
kwargs['options'] = options
return cook_fn(env, repo, **kwargs)
globals['versions'] = _versions globals['versions'] = _versions
globals['dependencies'] = _dependencies globals['dependencies'] = _dependencies

View File

@@ -31,19 +31,8 @@ def _wrap_jinja(orig_jinja):
return target return target
return _wrapped return _wrapped
def _file_size(env, fname: str) -> int:
return env.File(fname).get_size()
def _file_content_hex(env, fname: str) -> str:
bytes = env.File(fname).get_contents()
return ','.join([hex(byte) for byte in bytes])
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja') env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config}) env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
env.Append(JINJA_GLOBALS = {
'file_size': lambda *args: _file_size(env, *args),
'file_content_hex': lambda *args: _file_content_hex(env, *args)
})
env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja']) env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')] env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
Return('env') Return('env')

View File

@@ -1,11 +0,0 @@
import os
Import('env')
def _recipe_repository(env, repo_name: str, remote_url: str, git_ref: str = 'master') -> None:
repo = env.GitBranch(repo_name = os.path.join('recipe_repos', repo_name), remote_url = remote_url, git_ref = git_ref)
env.Append(SPP_RECIPES_FOLDERS = [os.path.join(repo['checkout_root'], 'recipes')])
env.AddMethod(_recipe_repository, 'RecipeRepo')
Return('env')

View File

@@ -1,13 +0,0 @@
FROM debian:sid-slim
RUN apt-get -y update && \
apt-get -y upgrade && \
apt-get -y install build-essential clang-19 gcc-14 g++-14 python3 python3-pip \
virtualenv python-is-python3 clang-tidy git ninja-build cmake
RUN ln -s /usr/bin/clang-19 /usr/local/bin/clang \
&& ln -s /usr/bin/clang++-19 /usr/local/bin/clang++ \
&& ln -s /usr/bin/clang-tidy-19 /usr/local/bin/clang-tidy \
&& ln -s /usr/bin/run-clang-tidy-19 /usr/local/bin/run-clang-tidy
COPY scripts /opt/scripts
RUN chmod a+x /opt/scripts/*.sh

View File

@@ -1,9 +0,0 @@
#!/bin/bash
set -xe
python -m virtualenv venv
source venv/bin/activate
pip install scons
pip install -r external/scons-plus-plus/requirements.txt
scons -j$(nproc) --build_type=debug --variant=linux_clang_debug --compiler=clang
scons -j$(nproc) --build_type=release_debug --variant=linux_clang_release_debug --compiler=clang
scons -j$(nproc) --build_type=release --variant=linux_clang_release --compiler=clang

View File

@@ -1,9 +0,0 @@
#!/bin/bash
set -xe
python -m virtualenv venv
source venv/bin/activate
pip install scons
pip install -r external/scons-plus-plus/requirements.txt
scons -j$(nproc) --build_type=debug --variant=linux_gcc_debug --compiler=gcc
scons -j$(nproc) --build_type=release_debug --variant=linux_gcc_release_debug --compiler=gcc
scons -j$(nproc) --build_type=release --variant=linux_gcc_release --compiler=gcc

31
recipes/Catch2/recipe.py Normal file
View File

@@ -0,0 +1,31 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
checkout_root = repo['checkout_root']
build_result = env.CMakeProject(project_root=checkout_root)
lib_name = {
'debug': 'Catch2d'
}.get(env['BUILD_TYPE'], 'Catch2')
libs = []
if not env.get('CATCH2_OWN_MAIN'):
libs.append({
'debug': 'Catch2Maind'
}.get(env['BUILD_TYPE'], 'Catch2Main'))
libs.append(lib_name)
return {
'CPPPATH': build_result['CPPPATH'],
'LIBS': [env.FindLib(lib, paths=build_result['LIBPATH']) for lib in libs]
}
env.GitRecipe(
globals = globals(),
repo_name = 'Catch2',
repo_url = 'https://github.com/catchorg/Catch2.git',
tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
cook_fn = _git_cook
)

View File

@@ -0,0 +1,31 @@
import re
from SCons.Script import *
_REPO_NAME = 'ImageMagick'
_REPO_URL = 'https://github.com/ImageMagick/ImageMagick.git'
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)-([0-9]+)$')
def versions(env: Environment, update: bool = False):
tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
result = []
for tag in tags:
match = _TAG_PATTERN.match(tag)
if match:
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2]), int(match.groups()[3])))
return result
def dependencies(env: Environment, version) -> 'dict':
return {}
def cook(env: Environment, version) -> dict:
raise Exception('this still needs to be implemented property :/')
# git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}-{version[3]}'
# repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
# checkout_root = repo['checkout_root']
# build_result = env.AutotoolsProject(checkout_root)
# return {
# 'LIBPATH': build_result['LIBPATH'],
# 'CPPPATH': build_result['CPPPATH'],
# 'LIBS': ['backtrace']
# }

37
recipes/SDL/recipe.py Normal file
View File

@@ -0,0 +1,37 @@
import platform
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root']
build_result = env.CMakeProject(project_root=checkout_root, generate_args = ['-DSDL_STATIC=ON', '-DSDL_SHARED=OFF'])
libs = []
if platform.system() == 'Windows':
if env['BUILD_TYPE'] == 'debug':
libs.append('SDL2-staticd')
else:
libs.append('SDL2-static')
libs.extend(('kernel32', 'user32', 'gdi32', 'winmm', 'imm32', 'ole32', 'oleaut32', 'version', 'uuid', 'advapi32', 'setupapi', 'shell32', 'dinput8'))
else:
if env['BUILD_TYPE'] == 'debug':
libs.append('SDL2d')
else:
libs.append('SDL2')
return {
'LIBPATH': build_result['LIBPATH'],
'CPPPATH': [os.path.join(build_result['install_dir'], 'include/SDL2')], # SDL is really weird about include paths ...
'LIBS': libs
}
env.GitRecipe(
globals = globals(),
repo_name = 'SDL',
repo_url = 'https://github.com/libsdl-org/SDL.git',
tag_pattern = re.compile(r'^release-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'release-{version[0]}.{version[1]}.{version[2]}',
cook_fn = _git_cook
)

View File

@@ -0,0 +1,45 @@
import re
from SCons.Script import *
_REPO_NAMES = {
'default': 'VulkanHeaders',
'mewin': 'VulkanHeaders_mewin'
}
_REPO_URLS = {
'default': 'https://github.com/KhronosGroup/Vulkan-Headers.git',
'mewin': 'https://git.mewin.de/mewin/vulkan-headers.git'
}
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
return _REPO_NAMES[env.get('VULKANHEADERS_REMOTE', 'default')]
def _get_repo_url(env: Environment) -> str:
return _REPO_URLS[env.get('VULKANHEADERS_REMOTE', 'default')]
def versions(env: Environment, update: bool = False):
if env.get('VULKANHEADERS_REMOTE') == 'mewin':
return [(0, 0, 0)]
tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
result = []
for tag in tags:
match = _TAG_PATTERN.match(tag)
if match:
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
return result
def dependencies(env: Environment, version) -> 'dict':
return {}
def cook(env: Environment, version) -> dict:
if env.get('VULKANHEADERS_REMOTE') == 'mewin':
git_ref = 'main'
else:
git_ref = f'refs/tags/v{version[0]}.{version[1]}.{version[2]}'
repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
checkout_root = repo['checkout_root']
return {
'CPPPATH': [os.path.join(checkout_root, 'include')]
}

View File

@@ -0,0 +1,18 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root']
return {
'CPPPATH': [os.path.join(checkout_root, 'include')]
}
env.GitRecipe(
globals = globals(),
repo_name = 'argparse',
repo_url = 'https://github.com/p-ranav/argparse.git',
tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'v{version[0]}.{version[1]}',
cook_fn = _git_cook
)

67
recipes/boost/recipe.py Normal file
View File

@@ -0,0 +1,67 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://api.github.com/repos/boostorg/boost/releases'
_VERSION_PATTERN = re.compile(r'^boost-([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
versions_file = os.path.join(env['DOWNLOAD_DIR'], 'boost_versions.json')
if update or not os.path.exists(versions_file):
req = requests.get(_VERSIONS_URL)
versions_data = json.loads(req.text)
result = []
for version_data in versions_data:
match = _VERSION_PATTERN.match(version_data['name'])
if not match:
continue
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
with open(versions_file, 'w') as f:
json.dump(result, f)
return result
else:
try:
with open(versions_file, 'r') as f:
return [tuple(v) for v in json.load(f)]
except:
print('boost_versions.json is empty or broken, redownloading.')
return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
return {}
def cook(env: Environment, version) -> dict:
if env.get('BOOST_LIBS') is None:
raise Exception('BOOST_LIBS not set. Set to a list of boost libs to link or "*" to link everything.')
if version >= (1, 85, 0):
url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}-cmake.tar.gz'
else:
url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}.tar.gz'
repo = env.DownloadAndExtract(f'boost_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
checkout_root = repo['extracted_root']
build_result = env.CMakeProject(checkout_root)
libs = []
if '*' in env['BOOST_LIBS']:
lib_dir = build_result['LIBPATH'][0]
for lib_file in os.listdir(lib_dir):
fname = os.path.join(lib_dir, lib_file)
if not os.path.isfile(fname):
continue
libs.append(fname)
else:
for lib in set(env['BOOST_LIBS']):
if os.name == 'posix':
libs.append(env.FindLib(f'boost_{lib}', paths=build_result['LIBPATH']))
elif os.name == 'nt':
libs.append(env.FindLib(f'libboost_{lib}-*', paths=build_result['LIBPATH'], use_glob=True))
else:
raise Exception('Boost not supported on this platform.')
return {
'CPPPATH': build_result['CPPPATH'],
'LIBS': libs
}

19
recipes/cgltf/recipe.py Normal file
View File

@@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
checkout_root = repo['checkout_root']
return {
'CPPPATH': [checkout_root]
}
env.GitRecipe(
globals = globals(),
repo_name = 'cgltf',
repo_url = 'https://github.com/jkuhlmann/cgltf.git',
tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'v{version[0]}.{version[1]}',
cook_fn = _git_cook
)

39
recipes/curl/recipe.py Normal file
View File

@@ -0,0 +1,39 @@
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
if os.name == 'posix':
return {
'debug': 'curl-d'
}.get(env['BUILD_TYPE'], 'curl')
elif os.name == 'nt':
raise Exception('TODO')
else:
raise Exception('curl is not supported yet on this OS')
def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root']
build_result = env.CMakeProject(checkout_root, generate_args=['-DBUILD_CURL_EXE=OFF','-DBUILD_SHARED_LIBS=OFF',
'-DBUILD_STATIC_LIBS=ON', '-DHTTP_ONLY=ON',
'-DCURL_USE_LIBSSH2=OFF'])
lib_name = _build_lib_name(env)
return {
'CPPPATH': build_result['CPPPATH'],
'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])],
}
env.GitRecipe(
globals = globals(),
repo_name = 'curl',
repo_url = 'https://github.com/curl/curl.git',
tag_pattern = re.compile(r'^curl-([0-9]+)_([0-9]+)_([0-9]+)$'),
tag_fn = lambda version: f'curl-{version[0]}_{version[1]}_{version[2]}',
cook_fn = _git_cook,
dependencies = {
'openssl': {},
'zlib': {},
'psl': {}
}
)

27
recipes/fmt/recipe.py Normal file
View File

@@ -0,0 +1,27 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root']
build_result = env.CMakeProject(checkout_root)
lib_name = {
'debug': 'fmtd'
}.get(env['BUILD_TYPE'], 'fmt')
return {
'CPPPATH': build_result['CPPPATH'],
'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
}
env.GitRecipe(
globals = globals(),
repo_name = 'fmt',
repo_url = 'https://github.com/fmtlib/fmt.git',
tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
cook_fn = _git_cook
)

52
recipes/glm/recipe.py Normal file
View File

@@ -0,0 +1,52 @@
import re
from SCons.Script import *
_REPO_NAMES = {
'default': 'glm',
'mewin': 'glm_mewin'
}
_REPO_URLS = {
'default': 'https://github.com/g-truc/glm.git',
'mewin': 'https://git.mewin.de/mewin/glm.git'
}
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
_TAG_PATTERN_ALT = re.compile(r'^0\.([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
return _REPO_NAMES[env.get('GLM_REMOTE', 'default')]
def _get_repo_url(env: Environment) -> str:
return _REPO_URLS[env.get('GLM_REMOTE', 'default')]
def versions(env: Environment, update: bool = False):
if env.get('GLM_REMOTE') == 'mewin':
return [(0, 0, 0)]
tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
result = []
for tag in tags:
match = _TAG_PATTERN.match(tag)
if match:
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
else:
match = _TAG_PATTERN_ALT.match(tag)
if match:
result.append((0, int(match.groups()[0]), int(match.groups()[1]) * 10 + int(match.groups()[2])))
return result
def dependencies(env: Environment, version) -> 'dict':
return {}
def cook(env: Environment, version) -> dict:
if env.get('GLM_REMOTE') == 'mewin':
git_ref = 'master'
elif version[0] == 0:
git_ref = f'refs/tags/0.{version[1]}.{int(version[2]/10)}.{version[2]%10}'
else:
git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
checkout_root = repo['checkout_root']
return {
'CPPPATH': [checkout_root],
}

111
recipes/glslang/recipe.py Normal file
View File

@@ -0,0 +1,111 @@
import glob
import pathlib
import platform
import re
import shutil
from SCons.Script import *
_SCRIPT_STAMPFILE = '.spp_script_run'
def _git_cook(env: Environment, repo) -> dict:
checkout_root = repo['checkout_root']
# TODO: windows?
did_run_script = os.path.exists(os.path.join(repo['checkout_root'], _SCRIPT_STAMPFILE))
if not did_run_script or env['UPDATE_REPOSITORIES']:
python_exe = os.path.realpath(sys.executable)
script_file = os.path.join(repo['checkout_root'], 'update_glslang_sources.py')
prev_cwd = os.getcwd()
os.chdir(repo['checkout_root'])
if env.Execute(f'"{python_exe}" {script_file}'):
env.Exit(1)
os.chdir(prev_cwd)
pathlib.Path(repo['checkout_root'], _SCRIPT_STAMPFILE).touch()
# generate the build_info.h
generator_script = os.path.join(repo['checkout_root'], 'build_info.py')
generator_script_input = os.path.join(repo['checkout_root'], 'build_info.h.tmpl')
generator_script_output = os.path.join(repo['checkout_root'], 'glslang/build_info.h')
env.Command(
target = generator_script_output,
source = [generator_script, generator_script_input, os.path.join(repo['checkout_root'], 'CHANGES.md')],
action = f'"$PYTHON" "{generator_script}" "{repo["checkout_root"]}" -i "{generator_script_input}" -o "$TARGET"'
)
platform_source_dir = {
'Linux': 'Unix',
'Windows': 'Windows',
'Darwin': 'Unix'
}.get(platform.system(), 'Unix')
glslang_source_files = env.RGlob(os.path.join(repo['checkout_root'], 'glslang/GenericCodeGen/'), '*.cpp') \
+ env.RGlob(os.path.join(repo['checkout_root'], 'glslang/MachineIndependent/'), '*.cpp') \
+ env.RGlob(os.path.join(repo['checkout_root'], 'glslang/OGLCompilersDLL/'), '*.cpp') \
+ env.RGlob(os.path.join(repo['checkout_root'], 'glslang/ResourceLimits/'), '*.cpp') \
+ env.RGlob(os.path.join(repo['checkout_root'], 'SPIRV/'), '*.cpp') \
+ [os.path.join(repo['checkout_root'], f'glslang/OSDependent/{platform_source_dir}/ossource.cpp')]
# disable warnings
additional_cxx_flags = {
'clang': ['-w'],
'gcc': ['-w'],
'cl': ['/w']
}.get(env['COMPILER_FAMILY'], [])
env.StaticLibrary(
CCFLAGS = env['CCFLAGS'] + additional_cxx_flags,
CPPPATH = repo['checkout_root'],
target = env['LIB_DIR'] + '/glslang_full',
source = glslang_source_files
)
# build the include folder
include_dir = os.path.join(checkout_root, 'include')
if not os.path.exists(include_dir) or env['UPDATE_REPOSITORIES']:
def copy_headers(dst, src):
os.makedirs(dst, exist_ok=True)
for file in glob.glob(os.path.join(src, '*.h')):
shutil.copy(file, dst)
copy_headers(os.path.join(include_dir, 'glslang/HLSL'), os.path.join(checkout_root, 'glslang/HLSL'))
copy_headers(os.path.join(include_dir, 'glslang/Include'), os.path.join(checkout_root, 'glslang/Include'))
copy_headers(os.path.join(include_dir, 'glslang/MachineIndependent'), os.path.join(checkout_root, 'glslang/MachineIndependent'))
copy_headers(os.path.join(include_dir, 'glslang/Public'), os.path.join(checkout_root, 'glslang/Public'))
copy_headers(os.path.join(include_dir, 'glslang/SPIRV'), os.path.join(checkout_root, 'SPIRV'))
return {
'CPPPATH': [include_dir],
'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('glslang_full'))]
}
# Local repository names keyed by the GLSLANG_REMOTE config setting;
# 'mewin' is a maintained fork checked out alongside upstream.
_REPO_NAMES = {
    'default': 'glslang',
    'mewin': 'glslang_mewin'
}
# Clone URLs for each supported remote.
_REPO_URLS = {
    'default': 'https://github.com/KhronosGroup/glslang.git',
    'mewin': 'https://git.mewin.de/mewin/glslang.git'
}
# Release-tag patterns per remote; the fork is tracked by branch, so it has none.
_TAG_PATTERNS = {
    'default': re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    'mewin': None
}
def _ref_fn(env: Environment, version) -> str:
    """Resolve the git ref to check out for the configured glslang remote."""
    remote = env.get('GLSLANG_REMOTE', 'default')
    if remote == 'mewin':
        # The fork is tracked by branch, not by release tag.
        return 'master'
    if remote == 'default':
        major, minor, patch = version[0], version[1], version[2]
        return f'refs/tags/{major}.{minor}.{patch}'
    raise Exception('invalid glslang remote')
# Register the glslang recipe; repo name/URL/tag pattern are selected lazily
# per-environment so GLSLANG_REMOTE can switch between upstream and the fork.
env.GitRecipe(
    globals = globals(),
    repo_name = lambda env: _REPO_NAMES[env.get('GLSLANG_REMOTE', 'default')],
    repo_url = lambda env: _REPO_URLS[env.get('GLSLANG_REMOTE', 'default')],
    tag_pattern = lambda env: _TAG_PATTERNS[env.get('GLSLANG_REMOTE', 'default')],
    cook_fn = _git_cook,
    ref_fn = _ref_fn
)

48
recipes/idn2/recipe.py Normal file
View File

@@ -0,0 +1,48 @@
import json
import os
import re
import requests
from SCons.Script import *
# GitLab REST endpoint listing libidn2 releases (project id 2882658).
_VERSIONS_URL = 'https://gitlab.com/api/v4/projects/2882658/releases'
# Accept only plain x.y.z release names.
_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return available libidn2 versions as (major, minor, patch) tuples.

    Release metadata is fetched from the GitLab releases API and cached as
    JSON in DOWNLOAD_DIR; pass update=True to force a refresh.
    """
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libidn2_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue  # skip release names that are not plain x.y.z
            result.append(tuple(int(g) for g in match.groups()))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    try:
        with open(versions_file, 'r') as f:
            return [tuple(v) for v in json.load(f)]
    except (OSError, ValueError):
        # Narrowed from a bare `except:`: only I/O and JSON-decode failures
        # (json.JSONDecodeError subclasses ValueError) should trigger a
        # re-download; a bare except would also swallow KeyboardInterrupt.
        print('libidn2_versions.json is empty or broken, redownloading.')
        return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libidn2 links against libunistring."""
    deps = {}
    deps['unistring'] = {}
    return deps
def cook(env: Environment, version) -> dict:
    """Download the libidn2 release tarball, build it with autotools and
    return include path plus the located static library."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    url = f'https://ftp.gnu.org/gnu/libidn/libidn2-{ver_str}.tar.gz'
    repo = env.DownloadAndExtract(f'libidn2_{ver_str}', url = url, skip_folders = 1)
    built = env.AutotoolsProject(repo['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('idn2', paths=built['LIBPATH'])]
    }

38
recipes/imgui/recipe.py Normal file
View File

@@ -0,0 +1,38 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build Dear ImGui (core files plus configured backends) as a static lib.

    Backends are selected via env['IMGUI_BACKENDS'] (e.g. 'vulkan'); each one
    contributes backends/imgui_impl_<name>.cpp.
    """
    # Explicit import instead of relying on `from SCons.Script import *`
    # leaking `os` into this recipe's namespace.
    import os
    imgui_source_files = [
        os.path.join(repo['checkout_root'], 'imgui.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_draw.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_tables.cpp'),
        os.path.join(repo['checkout_root'], 'imgui_widgets.cpp')
    ]
    # NOTE(review): these are relative paths while the core sources are
    # absolute; presumably add_source resolves them against the library's
    # source root — confirm against the StaticLibrary wrapper.
    imgui_add_sources = [
        f'backends/imgui_impl_{backend}.cpp'
        for backend in env.get('IMGUI_BACKENDS', [])
    ]
    env.StaticLibrary(
        CPPPATH = [repo['checkout_root']],
        CPPDEFINES = ['IMGUI_IMPL_VULKAN_NO_PROTOTYPES=1'],
        target = env['LIB_DIR'] + '/imgui',
        source = imgui_source_files,
        add_source = imgui_add_sources
    )
    return {
        'CPPPATH': [repo['checkout_root']],
        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('imgui'))]
    }
# Register Dear ImGui; versions map to upstream 'vX.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'imgui',
    repo_url = 'https://github.com/ocornut/imgui.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

20
recipes/iwa/recipe.py Normal file
View File

@@ -0,0 +1,20 @@
import json
from SCons.Script import *
# iwa is an in-house module tracked by branch (no release tags).
_REPO_NAME = 'iwa'
_REPO_URL = 'https://git.mewin.de/mewin/iwa.git'
def versions(env: Environment, update: bool = False):
    """iwa is tracked by branch; report a single pseudo-version."""
    return [(0, 0, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """Read iwa's dependency declarations from its checked-out dependencies.json."""
    # Explicit import: this recipe never imports os itself and currently only
    # works because `from SCons.Script import *` leaks the name.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    """Check out iwa's master branch and build it as an SCons module."""
    # Explicit import: avoid relying on `from SCons.Script import *` leaking os.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return env.Module(os.path.join(checkout_root, 'SModule'))

22
recipes/json/recipe.py Normal file
View File

@@ -0,0 +1,22 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """nlohmann/json is header-only: run its CMake build and expose the
    resulting include path (no library to link)."""
    build_result = env.CMakeProject(project_root=repo['checkout_root'])
    return {'CPPPATH': build_result['CPPPATH']}
# Register nlohmann/json; versions map to upstream 'vX.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'json',
    repo_url = 'https://github.com/nlohmann/json.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@@ -0,0 +1,21 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    """libbacktrace is tracked by branch; report a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """libbacktrace has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build libbacktrace from master via autotools.

    Only gcc/clang are supported; other compiler families are rejected.
    """
    if env['COMPILER_FAMILY'] not in ('gcc', 'clang'):
        # NOTE(review): assumes env.Error aborts the build; otherwise execution
        # would fall through to the clone below — confirm Error's semantics.
        env.Error('libbacktrace requires gcc or clang.')
    repo = env.GitBranch(repo_name = 'libbacktrace', remote_url = 'https://github.com/ianlancetaylor/libbacktrace.git', git_ref = 'master')
    checkout_root = repo['checkout_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'LIBPATH': build_result['LIBPATH'],
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': ['backtrace']
    }

View File

@@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build libjpeg-turbo with CMake and locate the resulting jpeg library."""
    built = env.CMakeProject(repo['checkout_root'])
    jpeg_lib = env.FindLib('jpeg', paths=built['LIBPATH'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [jpeg_lib],
    }
# Register libjpeg-turbo; versions map to plain 'X.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'libjpeg-turbo',
    repo_url = 'https://github.com/libjpeg-turbo/libjpeg-turbo.git',
    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

39
recipes/libpng/recipe.py Normal file
View File

@@ -0,0 +1,39 @@
import os
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
    """Pick the platform- and build-type-specific base name of the libpng lib."""
    build_type = env['BUILD_TYPE']
    if os.name == 'posix':
        return 'png16d' if build_type == 'debug' else 'png16'
    if os.name == 'nt':
        # The CMake build emits a static library with a 'd' debug suffix.
        return 'libpng16_staticd' if build_type == 'debug' else 'libpng16_static'
    raise Exception('libpng is not supported yet on this OS')
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build libpng with CMake against the cooked zlib dependency."""
    lib_zlib = env.Cook('zlib')
    built = env.CMakeProject(repo['checkout_root'], dependencies = [lib_zlib])
    lib_name = _build_lib_name(env)
    png_lib = env.FindLib(lib_name, paths=built['LIBPATH'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [png_lib]
    }
# Register libpng; declares zlib as a hard dependency so it is cooked first.
env.GitRecipe(
    globals = globals(),
    repo_name = 'libpng',
    repo_url = 'https://git.code.sf.net/p/libpng/code.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'zlib': {}
    }
)

View File

@@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """magic_enum is header-only: just export its include/ directory."""
    # Explicit import: this recipe only imports re; os currently arrives via
    # the `from SCons.Script import *` star import.
    import os
    checkout_root = repo['checkout_root']
    return {
        'CPPPATH': [os.path.join(checkout_root, 'include')]
    }
# Register magic_enum; versions map to upstream 'vX.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'magic_enum',
    repo_url = 'https://github.com/Neargye/magic_enum.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

21
recipes/mecab/recipe.py Normal file
View File

@@ -0,0 +1,21 @@
from SCons.Script import *
import os
def versions(env: Environment, update: bool = False):
    """mecab is tracked by branch; report a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """mecab has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build mecab via autotools from the 'mecab' subdirectory of the repo."""
    repo = env.GitBranch(repo_name = 'mecab', remote_url = 'https://github.com/taku910/mecab.git', git_ref = 'master')
    project_dir = os.path.join(repo['checkout_root'], 'mecab')
    built = env.AutotoolsProject(project_dir)
    return {
        'LIBPATH': built['LIBPATH'],
        'CPPPATH': built['CPPPATH'],
        'LIBS': ['mecab']
    }

20
recipes/mijin/recipe.py Normal file
View File

@@ -0,0 +1,20 @@
import json
from SCons.Script import *
# mijin is an in-house module tracked by branch (no release tags).
_REPO_NAME = 'mijin'
_REPO_URL = 'https://git.mewin.de/mewin/mijin2.git'
def versions(env: Environment, update: bool = False):
    """mijin is tracked by branch; report a single pseudo-version."""
    return [(0, 0, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """Read mijin's dependency declarations from its checked-out dependencies.json."""
    # Explicit import: this recipe never imports os itself and currently only
    # works because `from SCons.Script import *` leaks the name.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    """Check out mijin's master branch and build it as an SCons module."""
    # Explicit import: avoid relying on `from SCons.Script import *` leaking os.
    import os
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return env.Module(os.path.join(checkout_root, 'SModule'))

View File

@@ -0,0 +1,27 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    """MikkTSpace is tracked by branch; report a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """MikkTSpace has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build MikkTSpace (a single C file) as a static library from master."""
    # Explicit import: this recipe has no `import os`; the name currently
    # arrives only via the `from SCons.Script import *` star import.
    import os
    repo = env.GitBranch(repo_name = 'mikktspace', remote_url = 'https://github.com/mmikk/MikkTSpace.git', git_ref = 'master')
    checkout_root = repo['checkout_root']
    ccflags = env['CCFLAGS'].copy()
    if env['COMPILER_FAMILY'] == 'cl':
        # MSVC: silence C4456 (declaration hides previous local declaration).
        ccflags.append('/wd4456')
    lib_mikktspace = env.StaticLibrary(
        CCFLAGS = ccflags,
        CPPPATH = [checkout_root],
        target = env['LIB_DIR'] + '/mikktspace',
        # Use the already-bound checkout_root (was repo['checkout_root']) for
        # consistency with the rest of the function; same value either way.
        source = [os.path.join(checkout_root, 'mikktspace.c')]
    )
    return {
        'CPPPATH': [checkout_root],
        'LIBS': [lib_mikktspace]
    }

21
recipes/openssl/recipe.py Normal file
View File

@@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build static OpenSSL (no shared libs, tests or docs) via Configure."""
    built = env.AutotoolsProject(
        repo['checkout_root'],
        config_args = ['no-shared', 'no-tests', 'no-docs'],
        configure_script_path='Configure'
    )
    libs = []
    for libname in ('ssl', 'crypto'):
        libs.append(env.FindLib(libname, paths=built['LIBPATH']))
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': libs
    }
# Register OpenSSL; versions map to upstream 'openssl-X.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'openssl',
    repo_url = 'https://github.com/openssl/openssl.git',
    tag_pattern = re.compile(r'^openssl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'openssl-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

70
recipes/psl/recipe.py Normal file
View File

@@ -0,0 +1,70 @@
import json
import os
import re
import requests
from SCons.Script import *
# GitHub REST endpoint listing libpsl releases.
_VERSIONS_URL = 'https://api.github.com/repos/rockdaboot/libpsl/releases'
# GitHub release names look like 'Release vX.Y.Z'.
_VERSION_PATTERN = re.compile(r'^Release v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
    """Return available libpsl versions as (major, minor, patch) tuples.

    Release metadata is fetched from the GitHub releases API and cached as
    JSON in DOWNLOAD_DIR; pass update=True to force a refresh.
    """
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libpsl_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue  # skip entries that are not 'Release vX.Y.Z'
            result.append(tuple(int(g) for g in match.groups()))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    try:
        with open(versions_file, 'r') as f:
            return [tuple(v) for v in json.load(f)]
    except (OSError, ValueError):
        # Narrowed from a bare `except:`: only I/O and JSON-decode failures
        # (json.JSONDecodeError subclasses ValueError) should trigger a
        # re-download; a bare except would also swallow KeyboardInterrupt.
        print('libpsl_versions.json is empty or broken, redownloading.')
        return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libpsl needs libidn2 (IDNA handling) and libunistring."""
    deps = {}
    for name in ('idn2', 'unistring'):
        deps[name] = {}
    return deps
def cook(env: Environment, version) -> dict:
    """Download the libpsl release tarball, build it with autotools and
    return include path plus the located library."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    url = f'https://github.com/rockdaboot/libpsl/releases/download/{ver_str}/libpsl-{ver_str}.tar.gz'
    repo = env.DownloadAndExtract(f'libpsl_{ver_str}', url = url, skip_folders = 1)
    built = env.AutotoolsProject(repo['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('psl', paths=built['LIBPATH'])]
    }
#def _git_cook(env: Environment, repo: dict) -> dict:
# checkout_root = repo['checkout_root']
# subprocess.run((os.path.join(checkout_root, 'autogen.sh'),), cwd=checkout_root)
# build_result = env.AutotoolsProject(checkout_root)
# return {
# 'CPPPATH': build_result['CPPPATH'],
# 'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
# }
#
#env.GitRecipe(
# globals = globals(),
# repo_name = 'psl',
# repo_url = 'https://github.com/rockdaboot/libpsl.git',
# tag_pattern = re.compile(r'^libpsl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
# tag_fn = lambda version: f'libpsl-{version[0]}.{version[1]}.{version[2]}',
# cook_fn = _git_cook,
# dependencies = {
# 'idn2': {},
# 'unistring': {}
# }
#)

34
recipes/spdlog/recipe.py Normal file
View File

@@ -0,0 +1,34 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build spdlog as a compiled library against an external (cooked) fmt."""
    lib_fmt = env.Cook('fmt')
    built = env.CMakeProject(project_root=repo['checkout_root'], dependencies=[lib_fmt])
    # Debug builds of spdlog carry a 'd' suffix.
    lib_name = 'spdlogd' if env['BUILD_TYPE'] == 'debug' else 'spdlog'
    return {
        'CPPPATH': built['CPPPATH'],
        # Consumers must compile with the same compiled-lib/external-fmt switches.
        'CPPDEFINES': ['SPDLOG_COMPILE_LIB=1', 'SPDLOG_FMT_EXTERNAL=1'],
        'LIBS': [env.FindLib(lib_name, paths=built['LIBPATH'])]
    }
# Register spdlog; depends on fmt since the build uses SPDLOG_FMT_EXTERNAL.
env.GitRecipe(
    globals = globals(),
    repo_name = 'spdlog',
    repo_url = 'https://github.com/gabime/spdlog.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'fmt': {}
    }
)

19
recipes/stb/recipe.py Normal file
View File

@@ -0,0 +1,19 @@
from SCons.Script import *
# stb single-file libraries; tracked by branch (no release tags).
_REPO_NAME = 'stb'
_REPO_URL = 'https://github.com/nothings/stb.git'
def versions(env: Environment, update: bool = False):
    """stb is tracked by branch; report a single pseudo-version."""
    return [(0, 0, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """stb has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """stb is header-only: expose the checkout root on the include path."""
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    return {'CPPPATH': [repo['checkout_root']]}

View File

@@ -0,0 +1,42 @@
import json
import os
import re
import requests
from SCons.Script import *
# GNU FTP directory listing (plain HTML) used to discover release tarballs.
_VERSIONS_URL = 'https://ftp.gnu.org/gnu/libunistring/?F=0'
# Matches hrefs of release tarballs in the listing.
_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')
def versions(env: Environment, update: bool = False):
    """Scrape available libunistring versions from the GNU FTP listing.

    Results are cached as JSON in DOWNLOAD_DIR; pass update=True to refresh.
    """
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libunistring_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        result = [
            tuple(int(g) for g in match.groups())
            for match in _VERSION_PATTERN.finditer(req.text)
        ]
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    try:
        with open(versions_file, 'r') as f:
            return [tuple(v) for v in json.load(f)]
    except (OSError, ValueError):
        # Narrowed from a bare `except:`: only I/O and JSON-decode failures
        # (json.JSONDecodeError subclasses ValueError) should trigger a
        # re-download; a bare except would also swallow KeyboardInterrupt.
        print('libunistring_versions.json is empty or broken, redownloading.')
        return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libunistring has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Download the libunistring release tarball, build it with autotools and
    return include path plus the located library."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    url = f'https://ftp.gnu.org/gnu/libunistring/libunistring-{ver_str}.tar.gz'
    repo = env.DownloadAndExtract(f'libunistring_{ver_str}', url = url, skip_folders = 1)
    built = env.AutotoolsProject(repo['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('unistring', paths=built['LIBPATH'])]
    }

View File

@@ -0,0 +1,23 @@
import os
from SCons.Script import *
def available(env: Environment):
    """Return an error string when Winsock2 cannot be used (non-Windows).

    Returns None implicitly on Windows, which signals availability.
    """
    if os.name != 'nt':
        return 'Winsock2 is only available on Windows.'
def versions(env: Environment, update: bool = False):
    """Winsock2 ships with Windows: one pseudo-version on NT, none elsewhere."""
    return [(0, 0, 0)] if os.name == 'nt' else []
def dependencies(env: Environment, version) -> 'dict':
    """Winsock2 has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Winsock2 is a system library: nothing to build, just link Ws2_32."""
    return dict(LIBS=['Ws2_32'])

View File

@@ -0,0 +1,26 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build yaml-cpp with CMake and locate the resulting library."""
    built = env.CMakeProject(project_root=repo['checkout_root'])
    # Debug builds emit 'yaml-cppd'.
    lib_name = 'yaml-cppd' if env['BUILD_TYPE'] == 'debug' else 'yaml-cpp'
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=built['LIBPATH'])]
    }
# Register yaml-cpp; versions map to upstream 'yaml-cpp-X.Y.Z' release tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'yaml-cpp',
    repo_url = 'https://github.com/jbeder/yaml-cpp.git',
    tag_pattern = re.compile(r'^yaml-cpp-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'yaml-cpp-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

49
recipes/zlib/recipe.py Normal file
View File

@@ -0,0 +1,49 @@
import os
import re
from SCons.Script import *
# Upstream zlib repository; tags look like vX.Y or vX.Y.Z (patch optional).
_REPO_NAME = 'zlib'
_REPO_URL = 'https://github.com/madler/zlib.git'
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)(?:\.([0-9]+))?$')
def _build_lib_name(env: Environment) -> str:
    """Return the zlib library base name for the current OS and build type.

    POSIX always uses 'z'; the Windows CMake build emits 'zlibstatic'
    ('zlibstaticd' for debug builds).
    """
    if os.name == 'posix':
        return 'z'
    elif os.name == 'nt':
        return {
            'debug': 'zlibstaticd'
        }.get(env['BUILD_TYPE'], 'zlibstatic')
    else:
        # Fixed copy-paste error: the message previously said "libpng".
        raise Exception('zlib is not supported yet on this OS')
def versions(env: Environment, update: bool = False):
    """List zlib versions from git tags matching v<maj>.<min>[.<patch>]."""
    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
    matches = (_TAG_PATTERN.match(tag) for tag in tags)
    # A missing patch component (e.g. v1.2) is normalized to 0.
    return [
        (int(m.groups()[0]), int(m.groups()[1]), int(m.groups()[2] or 0))
        for m in matches if m
    ]
def dependencies(env: Environment, version) -> 'dict':
    """zlib has no recipe-level dependencies."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Check out the tagged zlib release, build it with CMake and export results.

    Besides the usual CPPPATH/LIBS, CMAKE_VARS are exported so dependent CMake
    builds (e.g. libpng) can locate this zlib via ZLIB_LIBRARY/ZLIB_INCLUDE_DIR.
    """
    # zlib tags omit a trailing '.0' patch component (v1.2 rather than v1.2.0).
    git_ref = f'refs/tags/v{version[0]}.{version[1]}'
    if version[2] != 0:
        git_ref = git_ref + f'.{version[2]}'
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    include_dir = os.path.join(build_result['install_dir'], 'include')
    lib_name = _build_lib_name(env)
    lib_file = env.FindLib(lib_name, paths=build_result['LIBPATH'])
    return {
        'CPPPATH': [include_dir],
        'LIBS': [lib_file],
        'CMAKE_VARS': {
            'ZLIB_LIBRARY': lib_file,
            'ZLIB_INCLUDE_DIR': include_dir
        }
    }

View File

@@ -1,4 +1,2 @@
GitPython GitPython
psutil psutil
Jinja2
requests

View File

@@ -1,8 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@@ -1,35 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CLionExternalBuildManager">
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<target id="{{ generate_uuid('target_' + executable.name + '_' + build_type) }}" name="{{ executable.name }} {{ build_type_name }}" defaultType="TOOL">
<configuration id="{{ generate_uuid('configuration_' + executable.name + '_' + build_type) }}" name="{{ executable.name }} {{ build_type_name }}">
<build type="TOOL">
<tool actionId="Tool_External Tools_{{ executable.name }} {{ build_type_name }}" />
</build>
<clean type="TOOL">
<tool actionId="Tool_External Tools_{{ executable.name }} {{ build_type_name }} Clean" />
</clean>
</configuration>
</target>
{% endfor %}
{% endfor %}
{% for library in project.libraries %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<target id="{{ generate_uuid('target_' + library.name + '_' + build_type) }}" name="{{ library.name }} {{ build_type_name }}" defaultType="TOOL">
<configuration id="{{ generate_uuid('configuration_' + library.name + '_' + build_type) }}" name="{{ library.name }} {{ build_type_name }}">
<build type="TOOL">
<tool actionId="Tool_External Tools_{{ library.name }} {{ build_type_name }}" />
</build>
<clean type="TOOL">
<tool actionId="Tool_External Tools_{{ library.name }} {{ build_type_name }} Clean" />
</clean>
</configuration>
</target>
{% endfor %}
{% endfor %}
</component>
</project>

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompDBSettings">
<option name="linkedExternalProjectsSettings">
<CompDBProjectSettings>
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
</set>
</option>
</CompDBProjectSettings>
</option>
</component>
<component name="CompDBWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
<component name="ExternalStorageConfigurationManager" enabled="true" />
</project>

View File

@@ -1,40 +0,0 @@
<toolSet name="External Tools">
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<tool name="{{ executable.name }} {{ build_type_name }}" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="-j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) }} compile_commands.json" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
<tool name="{{ executable.name }} {{ build_type_name }} Clean" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="--build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) }} -c" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
{% endfor %}
{% endfor %}
{% for library in project.libraries %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
<tool name="{{ library.name }} {{ build_type_name }}" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="-j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) }} compile_commands.json" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
<tool name="{{ library.name }} {{ build_type_name }} Clean" showInMainMenu="false" showInEditor="false" showInProject="false" showInSearchPopup="false" disabled="false" useConsole="true" showConsoleOnStdOut="false" showConsoleOnStdErr="false" synchronizeAfterRun="true">
<exec>
<option name="COMMAND" value="{{ scons_exe }}" />
<option name="PARAMETERS" value="--build_type={{ build_type }} --unity=disable {{ library.filename(build_type) }} -c" />
<option name="WORKING_DIRECTORY" value="$ProjectFileDir$" />
</exec>
</tool>
{% endfor %}
{% endfor %}
</toolSet>

View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$/external/scons-plus-plus" vcs="Git" />
</component>
</project>

View File

@@ -1,109 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AutoImportSettings">
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="CMakeRunConfigurationManager">
<generated />
</component>
<component name="CMakeSettings">
<configurations>
<configuration PROFILE_NAME="Debug" ENABLED="true" CONFIG_NAME="Debug" />
</configurations>
</component>
<component name="ClangdSettings">
<option name="formatViaClangd" value="false" />
</component>
<component name="CompDBLocalSettings">
<option name="availableProjects">
<map>
<entry>
<key>
<ExternalProjectPojo>
<option name="name" value="{{ project.name }}" />
<option name="path" value="$PROJECT_DIR$" />
</ExternalProjectPojo>
</key>
<value>
<list>
<ExternalProjectPojo>
<option name="name" value="{{ project.name }}" />
<option name="path" value="$PROJECT_DIR$" />
</ExternalProjectPojo>
</list>
</value>
</entry>
</map>
</option>
<option name="projectSyncType">
<map>
<entry key="$PROJECT_DIR$" value="RE_IMPORT" />
</map>
</option>
</component>
<component name="ExternalProjectsData">
<projectState path="$PROJECT_DIR$">
<ProjectState />
</projectState>
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="ProjectColorInfo">{
&quot;associatedIndex&quot;: 5
}</component>
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
{% for executable in project.executables -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
"Custom Build Application.{{ executable.name }} {{ build_type_name }}.executor": "Debug",
{% endfor -%}
{% endfor -%}
"RunOnceActivity.RadMigrateCodeStyle": "true",
"RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.cidr.known.project.marker": "true",
"RunOnceActivity.readMode.enableVisualFormatting": "true",
"cf.first.check.clang-format": "false",
"cidr.known.project.marker": "true",
"git-widget-placeholder": "master",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"settings.editor.selected.configurable": "CLionExternalConfigurable",
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="RunManager" selected="Custom Build Application.{{ project.executables[0].name }} {{ project.build_types[0] }}">
{% for executable in project.executables -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
<configuration name="{{ executable.name }} {{ build_type_name }}" type="CLionExternalRunConfiguration" factoryName="Application" REDIRECT_INPUT="false" ELEVATE="false" USE_EXTERNAL_CONSOLE="false" EMULATE_TERMINAL="false" WORKING_DIR="file://$ProjectFileDir$" PASS_PARENT_ENVS_2="true" PROJECT_NAME="{{ project.name }}" TARGET_NAME="{{ executable.name }} {{ build_type_name }}" CONFIG_NAME="{{ executable.name }} {{ build_type_name }}" RUN_PATH="$PROJECT_DIR$/{{ executable.filename(build_type) }}">
<method v="2">
<option name="CLION.EXTERNAL.BUILD" enabled="true" />
</method>
</configuration>
{% endfor -%}
{% endfor -%}
{% for library in project.libraries -%}
{% for build_type in project.build_types -%}
{% set build_type_name = build_type | capitalize -%}
<configuration name="{{ library.name }} {{ build_type_name }}" type="CLionExternalRunConfiguration" factoryName="Application" REDIRECT_INPUT="false" ELEVATE="false" USE_EXTERNAL_CONSOLE="false" EMULATE_TERMINAL="false" PASS_PARENT_ENVS_2="true" PROJECT_NAME="{{ project.name }}" TARGET_NAME="{{ library.name }} {{ build_type_name }}" CONFIG_NAME="{{ library.name }} {{ build_type_name }}">
<method v="2">
<option name="CLION.EXTERNAL.BUILD" enabled="true" />
</method>
</configuration>
{% endfor -%}
{% endfor -%}
</component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
</project>

View File

@@ -1,6 +0,0 @@
{
"recommendations": [
"ms-vscode.cpptools",
"llvm-vs-code-extensions.vscode-clangd"
]
}

View File

@@ -1,20 +0,0 @@
{
"configurations": [
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
{
"name": "{{ executable.name }} ({{ build_type | capitalize }})",
"type": "cppvsdbg",
"request": "launch",
"program": "{{ executable.filename(build_type) | escape_path }}",
"args": [],
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"console": "integratedTerminal"
}
{% endfor %}
{% endfor %}
]
}

View File

@@ -1,69 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{% for executable in project.executables %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
{
"label": "{{ executable.name }} {{ build_type_name }}",
"type": "shell",
"command": "{{ scons_exe | escape_path }} -j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) | escape_path }} compile_commands.json",
"options": {
"cwd": "${workspaceFolder}"
},
"problemMatcher": [],
"group": {
"kind": "build",
"isDefault": false
}
},
{
"label": "{{ executable.name }} {{ build_type_name }} Clean",
"type": "shell",
"command": "{{ scons_exe | escape_path }} --build_type={{ build_type }} --unity=disable {{ executable.filename(build_type) | escape_path }} -c",
"options": {
"cwd": "${workspaceFolder}"
},
"problemMatcher": [],
"group": {
"kind": "build",
"isDefault": false
}
},
{% endfor %}
{% endfor %}
{% for library in project.libraries %}
{% for build_type in project.build_types %}
{% set build_type_name = build_type | capitalize -%}
{
"label": "{{ library.name }} {{ build_type_name }}",
"type": "shell",
"command": "{{ scons_exe | escape_path }} -j{{ nproc }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) | escape_path }} compile_commands.json",
"options": {
"cwd": "${workspaceFolder}"
},
"problemMatcher": [],
"group": {
"kind": "build",
"isDefault": false
}
},
{
"label": "{{ library.name }} {{ build_type_name }} Clean",
"type": "shell",
"command": "{{ scons_exe | escape_path }} --build_type={{ build_type }} --unity=disable {{ library.filename(build_type) | escape_path }} -c",
"options": {
"cwd": "${workspaceFolder}"
},
"problemMatcher": [],
"group": {
"kind": "build",
"isDefault": false
}
},
{% endfor %}
{% endfor %}
]
}