Compare commits

...

71 Commits

Author SHA1 Message Date
1e4bb17251 Added recipe for json and updated yaml-cpp recipe. 2024-10-25 08:29:25 +02:00
c461b5da39 Added recipes for curl, libidn2, libpsl and libunistring. 2024-10-23 23:37:37 +02:00
b7cb5f7c48 Added openssl recipe. 2024-08-21 09:36:17 +02:00
9c64f982fd Added recipe for winsock2 and target_os to dependency conditions. 2024-08-19 18:36:37 +02:00
378c6ba341 Fixed Catch2 recipe. 2024-08-18 17:28:26 +02:00
96fc1984cd Fixed compilation with MSVC. 2024-08-18 17:28:25 +02:00
396350b295 Allow settings COMPILATIONDB_FILTER_FILES via config. 2024-08-18 17:26:32 +02:00
5de1ac4444 Enable experimental library features (jthread) for clang. 2024-08-18 17:26:30 +02:00
d5712120df Moved check of SYSTEM_CACHE_DIR accessibility to before it is used. 2024-08-18 17:25:59 +02:00
267d06a997 Added CXXFLAGS and CFLAGS to config variables. 2024-08-18 17:24:41 +02:00
089ea25c10 Adjusted error description to make more sense. 2024-08-17 18:11:36 +02:00
e1404fee58 Fixed zlib recipe on linux. 2024-08-17 18:11:13 +02:00
c4200393fb Fixed compilation with MSVC. 2024-08-15 15:27:39 +02:00
0c82036300 Update to new recipe system (S++ 2.0). 2024-08-14 23:33:04 +02:00
35b38b8b6e Some more work on the new dependency resolution system. 2024-08-08 14:32:28 +02:00
8bea4a6db5 Some tests. 2024-08-06 09:33:42 +02:00
6c1ad82c16 Fixed cases when there are no libs. 2024-08-04 16:07:35 +02:00
7d070c7e68 Moved all the recipes that weren't actually recipes to addons. 2024-08-04 13:11:10 +02:00
bbfec6c98a More recipes (libjpeg, libz, imagemagick). 2024-08-04 12:53:07 +02:00
abc72895e6 Added recipe for boost. 2024-08-04 02:31:31 +02:00
6302d61d42 Added recipe for libpng. 2024-08-03 21:31:34 +02:00
cf2ba0c45e Apparently some Python versions didn't like the backslash in the f-string. 2024-08-03 20:24:45 +02:00
d8e58f99d5 Added missing psutil to requirements.txt. 2024-08-03 20:19:52 +02:00
0454186b5a Changed default C++ version to 23. 2024-08-03 20:04:22 +02:00
63b670e77c Added MakeInterface command that just copies dependencies include folders/defines. 2024-07-27 15:59:46 +02:00
34b2bc1e5b Added missing define and use_external_libfmt option to spdlog recipe. 2024-07-26 23:29:45 +02:00
28c0feb619 Added spdlog recipe. 2024-07-23 18:53:31 +02:00
27f6869a1f Added COMPILATIONDB_FILTER_FILES option and auto update if there was no update since boot. 2024-06-28 18:22:57 +02:00
3171d112ce Added MSVC/Windows compatibility to a few recipes (and the main script). 2024-06-26 10:14:57 +02:00
c8554282f9 Added another ignored warning for compiling libfmt. 2024-05-17 20:30:18 +02:00
a5ba3b5d10 Moved Jinja utility functions to S++. 2024-04-12 21:08:22 +02:00
7b94bc3fe0 Added git_ref parameter to libbacktrace recipe. 2024-04-10 20:54:05 +02:00
040571f74b Added MeCab recipe. 2024-04-10 20:53:52 +02:00
d126296283 Added recipe for iwa. 2024-04-06 14:20:44 +02:00
fa1768381f Also fetch tags. 2023-12-30 23:29:46 +01:00
447a694eb0 Added wrapper for Gch builder. And disabled another warning because weird things are happening. 2023-12-21 19:18:25 +01:00
e6b8062865 Added recipe for MikkTSpace. 2023-12-07 17:42:23 +01:00
4859f40f8f Added custom version of VulkanHeaders as an option to the recipe. 2023-12-07 17:42:08 +01:00
6e88c408b7 Moved some flags that were actually pure C++ flags to the correct array. 2023-12-07 17:41:33 +01:00
2dd5bd4c05 Added support for custom tools via config and wrapper for env.Depends(). 2023-12-02 14:54:07 +01:00
30e7e348c6 Added imgui and an option to inject source files into dependant projects. 2023-11-28 01:36:23 +01:00
d15baed4c4 Fixed library name in yaml-cpp recipe. 2023-11-26 16:12:23 +01:00
2ada4d0a41 Added recipe for yaml-cpp 2023-11-26 13:51:30 +01:00
41f8b81097 Added support for mewin.de branch of glm. 2023-11-26 13:13:27 +01:00
321f003b10 Added cgltf recipe. 2023-11-24 22:13:41 +01:00
3cac31bd81 Added libbacktrace and recursive dependencies. 2023-11-11 12:42:21 +01:00
0abc33d6f8 Fixed Glslang recipe to generate an include folder and avoid mixing with system headers. 2023-11-11 00:30:14 +01:00
14a080e618 Added project option for C++ standard. 2023-11-11 00:29:47 +01:00
dedad06419 Added stb. 2023-11-08 00:23:58 +01:00
76a7a5e35b Only run the generator script if necessary. 2023-11-06 20:45:22 +01:00
5998696860 Fixed glslang recipe even more, corrected path to SPIRV sources. 2023-11-05 10:33:33 +01:00
7f11cd544a Updated recipe for Glslang to build it via Scons instead of CMake so it is compiled with the same options (e.g. safe iterators) as the main project. 2023-11-04 22:34:41 +01:00
7050ec5e43 Added recipes for Glslang and GLM. 2023-11-04 19:34:16 +01:00
93dd09e324 This was actually a problem with GCC, not Clang. 2023-09-02 12:10:08 +02:00
3f4dbaf11e Fixed config file not being used. 2023-09-02 11:52:10 +02:00
55f57d55e6 Added --dump_env option. 2023-09-02 11:32:03 +02:00
02904158b7 Ignore warnings about maybe uninitialized variables in Clang. (Seems to be a false positive or a bug in the standard library. Just try clang-tidy.) 2023-08-30 19:55:28 +02:00
d2821a239d Ignore unknown warning options with clang to make it easier to support more compiler versions. 2023-08-21 09:36:47 +02:00
d5bfa0ee80 Use install dir instead of project root for build stamp file. Otherwise only one version will be built. 2023-08-06 14:07:30 +02:00
d1c48fbd8a Added common recipe for building CMake projects. And added Catch2 recipe. 2023-08-06 13:41:01 +02:00
1988bcc99b Added update_repositories command line argument to refresh git repositories. And added recipe for Criterion. 2023-08-05 18:01:31 +02:00
acffaa9928 Replaced mijin URL with the HTTP version. 2023-08-05 16:04:10 +02:00
2aa80afc51 Try to create SPP cache dir and use fallback if it fails. 2023-08-05 14:55:44 +02:00
b8ae0df0ae Use get() instead of in for os.environ to also skip if the value is empty. 2023-08-05 14:41:34 +02:00
2f3705bb12 Use fallback if HOME is not set. 2023-08-05 14:38:19 +02:00
3e370bfa52 Added --compiler option and made --config actually work. 2023-08-05 14:09:29 +02:00
b2a81cadb9 Added recipe for argparse. 2023-07-08 11:20:28 +02:00
2eeed1f551 Expose variant_dir to environment (otherwise it's useless). 2023-07-08 11:20:20 +02:00
d959540e0c Added recipes for fmt and magic_enum. 2023-06-25 13:53:04 +02:00
93aabf30b4 Fixed _RELEASE preprocessor define name. 2023-06-25 13:52:56 +02:00
227daa21d2 Removed __pycache__ from repo. 2023-06-25 13:52:39 +02:00
43 changed files with 1946 additions and 120 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
__pycache__

View File

@@ -1,91 +1,415 @@
import copy import copy
import glob
import json
import os import os
import psutil
import sys
import time
def _cook(env: Environment, recipe_name: str, *args, **kwargs):
class _VersionSpec:
minimum_version = None
maximum_version = None
def __init__(self, minimum_version = None, maximum_version = None):
self.minimum_version = minimum_version
self.maximum_version = maximum_version
def __str__(self):
return f'Min: {self.minimum_version}, Max: {self.maximum_version}'
class _Dependency:
name: str = ''
version = None
version_spec: _VersionSpec
recipe = None
depdeps: list = []
cook_result: dict = {}
class _Target:
builder = None
args: list = []
kwargs: dict = {}
dependencies: list = []
target = None
def _find_recipe(env: Environment, recipe_name: str):
    """Load the recipe module for `recipe_name`, caching it in env['SPP_RECIPES']."""
    cached = env['SPP_RECIPES'].get(recipe_name)
    if cached:
        return cached
    import importlib.util
    # Search every configured recipe folder for <folder>/<name>/recipe.py.
    recipe_path = None
    for folder in env['RECIPES_FOLDERS']:
        candidate = f'{folder.abspath}/{recipe_name}/recipe.py'
        if os.path.exists(candidate):
            recipe_path = candidate
            break
    if not recipe_path:
        raise Exception(f'Could not find recipe {recipe_name}.')
    spec = importlib.util.spec_from_file_location(recipe_name, recipe_path)
    recipe = importlib.util.module_from_spec(spec)
    # Recipes expect the environment as a module-level global before execution.
    recipe.env = env
    spec.loader.exec_module(recipe)
    env['SPP_RECIPES'][recipe_name] = recipe
    return recipe
def _cook(env: Environment, recipe_name: str):
    """Cook a previously registered dependency once and return its cached lib-conf."""
    dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
    if dependency is None:
        raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
    if not dependency.cook_result:
        # cook() runs at most once per dependency; later calls reuse the result.
        dependency.cook_result = dependency.recipe.cook(env, dependency.version)
    return dependency.cook_result
def _module(env: Environment, file: str):
    # Run a sub-SConscript with the shared environment, redirecting its build
    # output into the configured variant directory (None when no variant is set).
    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
    """Append a recipe's lib-conf (paths, defines, libs, Jinja search paths) to the environment."""
    env.Append(
        CPPPATH = lib_conf.get('CPPPATH', []),
        CPPDEFINES = lib_conf.get('CPPDEFINES', []),
        LIBPATH = lib_conf.get('LIBPATH', []),
        LIBS = lib_conf.get('LIBS', []),
        JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', []))
def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None: def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
if list_name not in dependency: if list_name not in dependency:
return return
if list_name not in kwargs: if list_name not in kwargs:
kwargs[list_name] = [] kwargs[list_name] = []
kwargs[list_name].extend(dependency[list_name]) # TODO: eliminiate duplicates? kwargs[list_name].extend(dependency[list_name]) # TODO: eliminate duplicates?
def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
    """Merge a dependency's lib-conf (paths, defines, libs, extra sources) into builder kwargs.

    `dependency` is either a plain lib-conf dict or a _Dependency (cooked on
    demand). Nested dependencies are injected without their ADDITIONAL_SOURCES.
    """
    if isinstance(dependency, dict):
        for list_name in ('CPPPATH', 'CPPDEFINES', 'LIBPATH', 'LIBS'):
            _inject_list(kwargs, dependency, list_name)
        if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
            kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
        for inner_dependency in dependency.get('DEPENDENCIES', []):
            _inject_dependency(inner_dependency, kwargs, False)
    elif isinstance(dependency, _Dependency):
        if not dependency.cook_result:
            # NOTE(review): relies on the script-level `env` global rather than
            # a parameter — TODO confirm this is intended.
            dependency.cook_result = dependency.recipe.cook(env, dependency.version)
        for list_name in ('CPPPATH', 'CPPDEFINES', 'LIBPATH', 'LIBS'):
            _inject_list(kwargs, dependency.cook_result, list_name)
        for depdep in dependency.depdeps:
            _inject_dependency(depdep, kwargs)
def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
    """Recursively glob `pattern` under `root_path` using SCons' env.Glob; returns sorted nodes."""
    matches = []
    pending = [root_path]
    while pending:
        current = pending.pop()
        # `srcnode()` must be used because `isdir()` doesn't work for entries
        # in variant dirs which haven't been copied yet.
        for entry in env.Glob(f'{current}/*', **kwargs):
            if entry.isdir() or (entry.srcnode() and entry.srcnode().isdir()):
                pending.append(entry)
        matches.extend(env.Glob(f'{current}/{pattern}', **kwargs))
    return sorted(matches)
def _safe_eval(condition: str, locals={}):
return eval(condition, {
'__builtins__': {
'abs': abs, 'all': all, 'any': any, 'ascii': ascii, 'bin': bin, 'bool': bool, 'chr': chr, 'complex': complex,
'dict': dict, 'divmod': divmod, 'enumerate': enumerate, 'filter': filter, 'float': float, 'format': format,
'hasattr': hasattr, 'hash': hash, 'hex': hex, 'id': id, 'int': int, 'isinstance': isinstance,
'issubclass': issubclass, 'len': len, 'list': list, 'map': map, 'max': max, 'min': min, 'next': next,
'oct': oct, 'ord': ord, 'pow': pow, 'range': range, 'reversed': reversed, 'round': round, 'set': set,
'slice': slice, 'sorted': sorted, 'str': str, 'sum': sum, 'tuple': tuple, 'type': type, 'zip': zip
}
}, locals)
def _deps_from_json(env: Environment, deps: dict) -> dict:
    """Filter and normalize a JSON dependency mapping in place and return it.

    Entries whose 'condition' expression evaluates false for the current
    environment are removed; 'min'/'max' version lists become tuples so they
    compare correctly against version tuples.
    """
    eval_locals = {
        'compiler_family': env['COMPILER_FAMILY'],
        'target_os': os.name,
        'getenv': lambda name: env.get(name)
    }
    rejected = [name for name, dep in deps.items()
                if 'condition' in dep and not _safe_eval(dep['condition'], dict(eval_locals))]
    for name in rejected:
        del deps[name]
    for dep in deps.values():
        for bound in ('min', 'max'):
            if isinstance(dep.get(bound), list):
                dep[bound] = tuple(dep[bound])
    return deps
def _make_interface(env: Environment, dependencies: list = None):
    """Collect only the include paths and defines of `dependencies` into a lib-conf.

    Used for header-only / interface targets: LIBPATH and LIBS are deliberately
    not propagated, only CPPPATH and CPPDEFINES.
    """
    # `None` sentinel instead of the original mutable default `[]`.
    kwargs = {}
    for dependency in (dependencies or []):
        _inject_dependency(dependency, kwargs)
    return {
        'CPPPATH': kwargs.get('CPPPATH', []),
        'CPPDEFINES': kwargs.get('CPPDEFINES', [])
    }
def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
if os.name == 'posix':
ext = {
'static': 'a',
'shared': 'so'
}[type]
return f'lib{name}.{ext}'
elif os.name == 'nt':
ext = {
'static': 'lib',
'shared': 'dll'
}[type]
return f'{name}.{ext}'
else:
raise Exception('What OS is this?')
def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'static', allow_fail: bool = False, use_glob: bool = False):
    """Search `paths` for the library `name`; return its path, or None when allow_fail is set.

    With `use_glob`, the library filename may contain wildcards; exactly one
    match is required per path.
    """
    fname = _lib_filename(env, name, type)
    for path in paths:
        candidate = os.path.join(path, fname)
        if use_glob:
            matches = glob.glob(candidate)
            if len(matches) == 1:
                return matches[0]
            if len(matches) > 1:
                raise Exception(f'Multiple candidates found for library with name {name} in paths: "{", ".join(paths)}" with name: "{", ".join(matches)}".')
        elif os.path.exists(candidate):
            return candidate
    if allow_fail:
        return None
    raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
def _error(env: Environment, message: str):
    """Print `message` to stderr and abort the SCons run with exit code 1."""
    print(message, file = sys.stderr)
    env.Exit(1)
def _find_common_depenency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
    """Intersect two version ranges; return the combined spec, or None if they are disjoint.

    (The historical typo in the function name is kept — callers reference it.)
    """
    lower_bounds = [v for v in (versionA.minimum_version, versionB.minimum_version) if v is not None]
    upper_bounds = [v for v in (versionA.maximum_version, versionB.maximum_version) if v is not None]
    combined = _VersionSpec(
        max(lower_bounds) if lower_bounds else None,
        min(upper_bounds) if upper_bounds else None)
    # An empty intersection means the two requirements are incompatible.
    if combined.minimum_version is not None and combined.maximum_version is not None \
            and combined.minimum_version > combined.maximum_version:
        return None
    return combined
def _parse_version_spec(version_spec: dict) -> _VersionSpec:
    """Build a _VersionSpec from a dict with optional 'min'/'max' keys."""
    return _VersionSpec(minimum_version = version_spec.get('min'),
                        maximum_version = version_spec.get('max'))
def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
    """Check whether `version_spec` is compatible with an already-registered dependency of `name`."""
    existing = env['SPP_DEPENDENCIES'].get(name)
    if existing is None:
        return True
    return _find_common_depenency_version(name, existing.version_spec, version_spec) is not None
def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
    """Register `name` with `version_spec`, merging with any existing registration.

    Raises when the specs are incompatible. A new or narrowed registration
    clears the _SPP_DEPENDENCIES_OKAY flag so _finalize re-runs resolution.
    """
    existing = env['SPP_DEPENDENCIES'].get(name)
    if existing is not None:
        merged = _find_common_depenency_version(name, existing.version_spec, version_spec)
        if merged is None:
            raise Exception(f'Incompatible versions detected for {name}: {existing.version_spec} and {version_spec}')
        if existing.version_spec != merged:
            env['_SPP_DEPENDENCIES_OKAY'] = False
            existing.version_spec = merged
        return existing
    dependency = _Dependency()
    dependency.name = name
    dependency.version_spec = version_spec
    dependency.recipe = _find_recipe(env, name)
    env['SPP_DEPENDENCIES'][name] = dependency
    env['_SPP_DEPENDENCIES_OKAY'] = False
    return dependency
def _sort_versions(versions: list) -> None:
import functools
def _compare(left, right):
if left < right:
return 1
elif left == right:
return 0
else:
return -1
versions.sort(key=functools.cmp_to_key(_compare))
def _version_matches(version, version_spec: _VersionSpec) -> bool:
if version_spec.minimum_version is not None and version < version_spec.minimum_version:
return False
if version_spec.maximum_version is not None and version > version_spec.maximum_version:
return False
return True
def _find_version(env: Environment, dependency: _Dependency):
    # Pick the newest version of `dependency` that satisfies its version spec
    # AND whose own transitive dependencies are compatible with everything
    # registered so far. First pass uses cached version lists; the second
    # (update=True) refreshes the recipe's sources and retries.
    for update in (False, True):
        versions = dependency.recipe.versions(env, update=update)
        _sort_versions(versions)  # newest first
        for version in versions:
            if _version_matches(version, dependency.version_spec):
                # Dry-run check: make sure every transitive dependency could be
                # registered before committing to this version.
                canadd = True
                for depname, depspec in dependency.recipe.dependencies(env, version).items():
                    if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
                        canadd = False
                        break
                if canadd:
                    # Commit: register the transitive dependencies and select this version.
                    depdeps = []
                    for depname, depspec in dependency.recipe.dependencies(env, version).items():
                        depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
                    dependency.version = version
                    dependency.depdeps = depdeps
                    return
    print(f'Available versions: \n{versions}')
    print(f'Required version: {dependency.version_spec}')
    raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')
def _wrap_builder(builder, is_lib: bool = False):
    """Wrap an SCons builder so calls record a deferred _Target instead of building.

    The wrapped builder accepts a `dependencies` mapping of recipe name ->
    version-spec dict; building is postponed until all dependency versions
    have been resolved in _finalize. Targets without dependencies are built
    immediately. `is_lib` is kept for interface compatibility with call sites.
    """
    def _wrapped(env, dependencies = None, *args, **kwargs):
        # `None` sentinel instead of the original mutable default `{}`.
        dependencies = dependencies or {}
        target_dependencies = [
            _add_dependency(env, name, _parse_version_spec(version_spec))
            for name, version_spec in dependencies.items()]
        # Callers that don't override these inherit *copies* of the environment
        # values so later injection doesn't mutate the shared environment.
        if 'CPPPATH' not in kwargs:
            kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
        if 'CPPDEFINES' not in kwargs:
            kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
        if 'LIBPATH' not in kwargs:
            kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
        if 'LIBS' not in kwargs and 'LIBS' in env:
            kwargs['LIBS'] = copy.copy(env['LIBS'])
        if 'source' in kwargs:
            # Normalize sources to a list of nodes now: plain path strings
            # would not resolve correctly once building is deferred.
            source = kwargs['source']
            if not isinstance(source, list):
                source = [source]
            kwargs['source'] = [env.Entry(src) if isinstance(src, str) else src
                                for src in source]
        target = _Target()
        target.builder = builder
        target.args = args
        target.kwargs = kwargs
        target.dependencies = target_dependencies
        env.Append(SPP_TARGETS = [target])
        if not target.dependencies:
            _build_target(target)
        return target
    return _wrapped
def _wrap_default(default):
    """Wrap SCons Default() so deferred _Target objects are registered, not built."""
    def _wrapped(env, arg):
        if isinstance(arg, _Target):
            # Remember it; _finalize marks the built node as default later.
            env.Append(SPP_DEFAULT_TARGETS = [arg])
            return
        if isinstance(arg, dict) and '_target' in arg:
            # Old-style lib-conf dicts carry their node under '_target'.
            default(arg['_target'])
            return
        default(arg)
    return _wrapped
def _wrap_depends(depends):
    """Wrap env.Depends() so it also works with deferred _Target objects.

    Fixes the original `elif` chain, which could unwrap at most ONE of the two
    arguments: when `dependant` was a '_target' dict, a '_target' dict in
    `dependency` was never unwrapped before calling the real depends().
    """
    def _wrapped(env, dependant, dependency):
        if isinstance(dependant, _Target) or isinstance(dependency, _Target):
            # Defer: the real nodes only exist after _finalize built the targets.
            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
            return
        if isinstance(dependant, dict) and '_target' in dependant:
            dependant = dependant['_target']
        if isinstance(dependency, dict) and '_target' in dependency:
            dependency = dependency['_target']
        depends(dependant, dependency)
    return _wrapped
def _build_target(target: _Target):
    """Inject resolved dependencies into the target's kwargs and run its builder."""
    for dependency in target.dependencies:
        _inject_dependency(dependency, target.kwargs)
    if 'LIBS' in target.kwargs:
        # Normalize LIBS entries: absolute path strings become File nodes and
        # nested deferred targets are built first; either kind is moved to the
        # end of the LIBS list. Iterate over a snapshot since we mutate it.
        for lib in list(target.kwargs['LIBS']):
            if isinstance(lib, str) and os.path.isabs(lib):
                target.kwargs['LIBS'].remove(lib)
                # NOTE(review): uses the script-level `env` global — TODO confirm intended.
                target.kwargs['LIBS'].append(env.File(lib))
            elif isinstance(lib, _Target):
                if not lib.target:
                    _build_target(lib)
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['LIBS'].append(lib.target)
    target.target = target.builder(*target.args, **target.kwargs)
def _version_to_string(version) -> str:
return '.'.join([str(v) for v in version])
def _finalize(env: Environment):
    # Resolve every registered dependency to a concrete version, persist the
    # selection to cache/versions.json, then build all deferred targets and
    # register the requested default targets.
    # Snapshot of the requirements BEFORE resolution, for the report file.
    version_requirements = {dep.name: {
        'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
        'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
    } for dep in env['SPP_DEPENDENCIES'].values()}
    env['_SPP_DEPENDENCIES_OKAY'] = False
    # _find_version may register new transitive dependencies (which clears the
    # flag), so iterate until a full pass leaves the dependency set unchanged.
    while not env['_SPP_DEPENDENCIES_OKAY']:
        env['_SPP_DEPENDENCIES_OKAY'] = True
        # list() snapshot: _find_version can add entries while we iterate.
        for dependency in list(env['SPP_DEPENDENCIES'].values()):
            if not dependency.version:
                _find_version(env, dependency)
    with open('cache/versions.json', 'w') as f:
        json.dump({
            'requirements': version_requirements,
            'selected': {
                dep.name: _version_to_string(dep.version) for dep in env['SPP_DEPENDENCIES'].values()
            }
        }, f)
    for target in env['SPP_TARGETS']:
        _build_target(target)
    for target in env['SPP_DEFAULT_TARGETS']:
        env.Default(target.target)
def _get_fallback_cache_dir() -> str:
    # Project-local cache directory, used when no system cache dir is usable.
    return Dir('#cache').abspath
def _find_system_cache_dir() -> str:
    """Locate the per-user cache directory, falling back to the project-local one."""
    if os.name == 'posix':
        xdg_cache = os.environ.get('XDG_CACHE_HOME')
        if xdg_cache:
            return xdg_cache
        home = os.environ.get('HOME')
        if home:
            return os.path.join(home, '.cache')
    elif os.name == 'nt':
        # TODO: just guessing
        return os.environ['LocalAppData']
    # fallback
    return _get_fallback_cache_dir()
Import('config') Import('config')
if not config.get('PROJECT_NAME'): if not config.get('PROJECT_NAME'):
config['PROJECT_NAME'] = 'PROJECT' config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'):
config['CXX_STANDARD'] = 'c++23'
if not config.get('PREPROCESSOR_PREFIX'): if not config.get('PREPROCESSOR_PREFIX'):
config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer? config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
if 'COMPILATIONDB_FILTER_FILES' not in config:
config['COMPILATIONDB_FILTER_FILES'] = True
if 'WINDOWS_DISABLE_DEFAULT_DEFINES' not in config:
config['WINDOWS_DISABLE_DEFAULT_DEFINES'] = False
AddOption( AddOption(
'--build_type', '--build_type',
dest = 'build_type', dest = 'build_type',
@@ -127,37 +451,121 @@ AddOption(
default = 'config.py' default = 'config.py'
) )
build_type = GetOption('build_type') AddOption(
unity_mode = GetOption('unity_mode') '--compiler',
variant = GetOption('variant') dest = 'compiler',
enable_asan = GetOption('enable_asan') type = 'choice',
config_file = GetOption('config_file') choices = ('auto', 'gcc', 'clang', 'msvc'),
nargs = 1,
action = 'store',
default = 'auto'
)
env = Environment(tools = ['default', 'compilation_db', 'unity_build']) AddOption(
'--update_repositories',
dest = 'update_repositories',
action = 'store_true'
)
AddOption(
'--dump_env',
dest = 'dump_env',
action = 'store_true'
)
build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode')
variant = GetOption('variant')
enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
dump_env = GetOption('dump_env')
default_CC = {
'gcc': 'gcc',
'clang': 'clang',
'msvc': 'cl.exe'
}.get(compiler, None)
default_CXX = {
'gcc': 'g++',
'clang': 'clang++',
'msvc': 'cl.exe'
}.get(compiler, None)
if not os.path.isabs(config_file):
config_file = os.path.join(Dir('#').abspath, config_file)
vars = Variables(config_file)
vars.Add('CC', 'The C Compiler', default_CC)
vars.Add('CXX', 'The C++ Compiler', default_CXX)
vars.Add('LINK', 'The Linker')
vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
vars.Add('CFLAGS', 'C Compiler Flags')
vars.Add('CXXFLAGS', 'C++ Compiler Flags')
vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
' project.', config['COMPILATIONDB_FILTER_FILES'])
tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config:
tools.extend(config['TOOLS'])
env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['RECIPES_FOLDERS'] = [Dir('recipes')] env['RECIPES_FOLDERS'] = [Dir('recipes')]
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache') env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}') print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except:
env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories
env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone
env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []
env['SHARED_CACHE_DIR'] = Dir(f'#cache').abspath
# allow compiling to variant directories (each gets their own bin/lib/cache dirs) # allow compiling to variant directories (each gets their own bin/lib/cache dirs)
if variant: if variant:
env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
variant_dir = f'{env["CACHE_DIR"]}/variant' env['VARIANT_DIR'] = f'{env["CACHE_DIR"]}/variant'
env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}']) env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
else: else:
variant_dir = None env['VARIANT_DIR'] = None
env['COMPILATIONDB_USE_ABSPATH'] = True
if env['COMPILATIONDB_FILTER_FILES']:
env['COMPILATIONDB_PATH_FILTER'] = f"{Dir('#').abspath}/*"
comp_db = env.CompilationDatabase(target = '#compile_commands.json') comp_db = env.CompilationDatabase(target = '#compile_commands.json')
Default(comp_db) Default(comp_db)
env['BIN_DIR'] = Dir('#bin').abspath env['BIN_DIR'] = Dir('#bin').abspath
env['LIB_DIR'] = Dir('#lib').abspath env['LIB_DIR'] = Dir('#lib').abspath
env['CACHE_DIR'] = Dir(f'#cache').abspath env['CACHE_DIR'] = env['SHARED_CACHE_DIR']
env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity') env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
env['BUILD_TYPE'] = build_type env['BUILD_TYPE'] = build_type
env.Append(LIBPATH = [env['LIB_DIR']]) # to allow submodules to link to each other without hassle env.Append(LIBPATH = [env['LIB_DIR']]) # to allow submodules to link to each other without hassle
# make sure these are all defined in case someone wants to use/copy them
env.Append(CCFLAGS = [])
env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])
# init SPP environment variables
env['SPP_TARGETS'] = []
env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {}
# create the cache dir # create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True) os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore' cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
@@ -165,11 +573,35 @@ if not os.path.exists(cache_gitignore):
with open(cache_gitignore, 'w') as f: with open(cache_gitignore, 'w') as f:
f.write('*\n') f.write('*\n')
if env['CACHE_DIR'] != env['SHARED_CACHE_DIR']:
os.makedirs(env['SHARED_CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["SHARED_CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
with open(cache_gitignore, 'w') as f:
f.write('*\n')
# check whether repositories where updated since last boot
update_stamp_file = f'{env["SHARED_CACHE_DIR"]}/last_update.stamp'
update_time = 0.0
if os.path.exists(update_stamp_file):
with open(update_stamp_file, 'r') as f:
try:
update_time = float(f.read())
except:
pass
boot_time = psutil.boot_time()
if boot_time > update_time:
print('Didn\'t update repositories since last boot, doing it now...')
env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
with open(update_stamp_file, 'w') as f:
f.write(str(time.time()))
# create the clone and system cache dirs # create the clone and system cache dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True) os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)
# try to detect what compiler we are using # try to detect what compiler we are using
compiler_exe = os.path.basename(env['CC']) compiler_exe = os.path.basename(env.subst(env['CC']))
if 'gcc' in compiler_exe: if 'gcc' in compiler_exe:
env['COMPILER_FAMILY'] = 'gcc' env['COMPILER_FAMILY'] = 'gcc'
elif 'clang' in compiler_exe: elif 'clang' in compiler_exe:
@@ -189,63 +621,114 @@ elif unity_mode == 'stress': # compile everything in one single file to stress t
# setup compiler specific options # setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang': if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic']) env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic'])
env.Append(CXXFLAGS = ['-std=c++20']) env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
if build_type != 'release': if build_type != 'release':
env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}']) env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group') env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
if env['COMPILER_FAMILY'] == 'gcc': if env['COMPILER_FAMILY'] == 'gcc':
# GCC complains about missing initializer for "<anonymous>" that doesn't exist :/ # GCC complains about missing initializer for "<anonymous>" that doesn't exist :/
# also GCC complains about some (compiler generated) fields in coroutines not having any linkage # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
# also -Wdangling-reference seems to produce a lot of false positives # also -Wdangling-reference seems to produce a lot of false positives
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-subobject-linkage', '-Wno-dangling-reference']) # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?))
else: # -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
# -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
else: # clang only
# no-gnu-anonymous-struct - we don't care
env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct']) env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
if build_type == 'debug': if build_type == 'debug':
env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG']) env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
env.Append(DEPS_CXXFLAGS = ['-D_GLIBCXX_DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile': elif build_type == 'release_debug' or build_type == 'profile':
env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = ['SEKIEI_RELEASE', 'NDEBUG']) env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
if build_type == 'profile': if build_type == 'profile':
if env['COMPILER_FAMILY'] == 'gcc': if env['COMPILER_FAMILY'] == 'gcc':
env.Append(CPPDEFINES = ['SEKIEI_GCC_INSTRUMENTING=1']) env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_GCC_INSTRUMENTING=1'])
env.Append(CCFLAGS = ['-finstrument-functions']) env.Append(CCFLAGS = ['-finstrument-functions'])
env.Append(LINKFLAGS = ['-rdynamic']) env.Append(LINKFLAGS = ['-rdynamic'])
elif build_type == 'release': elif build_type == 'release':
env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = ['SEKIEI_RELEASE', 'NDEBUG']) env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
if enable_asan: if enable_asan:
env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer']) env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(LINKFLAGS = ['-fsanitize=address']) env.Append(LINKFLAGS = ['-fsanitize=address'])
env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
elif env['COMPILER_FAMILY'] == 'cl': elif env['COMPILER_FAMILY'] == 'cl':
cxx_version_name = {
'c++14': 'c++14',
'c++17': 'c++17',
'c++20': 'c++20',
'c++23': 'c++latest',
'c++26': 'c++latest'
}.get(env['CXX_STANDARD'], 'c++14') # default to C++14 for older versions
# C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it # C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
# C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix? # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/std:c++20', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t']) # C4702: unreachable code, issued after MIJIN_FATAL macro
# C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
# C4275: same as above
env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
f'/std:{cxx_version_name}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t', '/utf-8'])
env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ... env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
env.Append(DEPS_CXXFLAGS = ['/EHsc', '/Zc:char8_t', '/utf-8', '/vmg'])
if build_type == 'debug': if build_type == 'debug':
env.Append(CCFLAGS = ['/Od', '/Zi'], LINKFLAGS = ' /DEBUG') env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile': elif build_type == 'release_debug' or build_type == 'profile':
env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG') env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
env.Append(DEPS_CXXFLAGS = ['/Zi'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
else: else:
env.Append(CCFLAGS = ['/O2']) env.Append(CCFLAGS = ['/O2'])
if env['COMPILER_FAMILY'] == 'gcc': if env['COMPILER_FAMILY'] == 'gcc':
env.Append(CCFLAGS = ['-Wno-volatile']) env.Append(CXXFLAGS = ['-Wno-volatile'])
elif env['COMPILER_FAMILY'] == 'clang': elif env['COMPILER_FAMILY'] == 'clang':
env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types']) env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])
# platform specific options
if os.name == 'nt':
if not config['WINDOWS_DISABLE_DEFAULT_DEFINES']:
env.Append(CDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'], CPPDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'])
env.AddMethod(_cook, 'Cook') env.AddMethod(_cook, 'Cook')
env.AddMethod(_parse_lib_conf, 'ParseLibConf') env.AddMethod(_parse_lib_conf, 'ParseLibConf')
env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_deps_from_json, 'DepsFromJson')
env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library') env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary') env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary') env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program), 'Program') env.AddMethod(_wrap_builder(env.Program), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default') env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')
env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram') env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary') env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary') env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary') env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')
env.AddMethod(_finalize, 'Finalize')
if hasattr(env, 'Gch'):
env.AddMethod(_wrap_builder(env.Gch), 'Gch')
for addon_file in env.Glob('addons/*.py'):
env = SConscript(addon_file, exports = 'env')
if dump_env:
print('==== Begin Environment Dump =====')
print(env.Dump())
print('==== End Environment Dump =====')
Return('env') Return('env')

View File

@@ -0,0 +1,53 @@
import os
import pathlib
import subprocess
import sys
from SCons.Script import *
_BUILT_STAMPFILE = '.spp_built'
Import('env')
def _autotools_project(env: 'Environment', project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure') -> dict:
    """Configure, build and install an autotools project out-of-tree.

    Builds into <project_root>/build_<BUILD_TYPE> and installs into
    <project_root>/install_<BUILD_TYPE>. A stamp file in the install dir
    skips rebuilds unless UPDATE_REPOSITORIES is set.

    Returns a dict with 'install_dir', 'LIBPATH' (the existing lib/lib64
    directories) and 'CPPPATH' (the install include directory).
    """
    config = env['BUILD_TYPE']
    build_dir = os.path.join(project_root, f'build_{config}')
    install_dir = os.path.join(project_root, f'install_{config}')
    is_built = os.path.exists(os.path.join(install_dir, _BUILT_STAMPFILE))
    if not is_built or env['UPDATE_REPOSITORIES']:
        print(f'Building {project_root}, config {config}')
        os.makedirs(build_dir, exist_ok=True)
        # Map the build type onto CFLAGS: no optimization in debug,
        # no debug symbols in release, '-O2 -g' for everything else.
        opt_level = {
            'debug': '-O0',
        }.get(config, '-O2')
        debug_symbols = {
            'release': ''
        }.get(config, '-g')
        jobs = env.GetOption('num_jobs')
        # FIX: the original rebound `env` to os.environ.copy(), shadowing the
        # SCons environment for the rest of the function. Use a separate name.
        build_env = os.environ.copy()
        build_env['CFLAGS'] = f'{opt_level} {debug_symbols}'
        config_script = os.path.join(project_root, configure_script_path)
        # NOTE(review): config_script is executed with cwd=build_dir; this
        # assumes project_root is an absolute path -- confirm with callers.
        if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
            # configure is not shipped but configure.ac is: generate configure.
            subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root, check=True)
        subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=build_env, stdout=sys.stdout, stderr=sys.stderr, check=True)
        subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
        subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
        pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
    # Only report library directories that were actually created.
    libpath = []
    for lib_folder in ('lib', 'lib64'):
        full_path = os.path.join(install_dir, lib_folder)
        if os.path.exists(full_path):
            libpath.append(full_path)
    return {
        'install_dir': install_dir,
        'LIBPATH': libpath,
        'CPPPATH': [os.path.join(install_dir, 'include')]
    }
# Expose the helper as env.AutotoolsProject(...) and hand the extended
# environment back to the including SConscript.
env.AddMethod(_autotools_project, 'AutotoolsProject')
Return('env')

105
addons/cmake_project.py Normal file
View File

@@ -0,0 +1,105 @@
import json
import pathlib
import shutil
from SCons.Script import *
_BUILT_STAMPFILE = '.spp_built'
_VERSION = 2 # bump if you change how the projects are build to trigger a clean build
Import('env')
def cmd_quote(s: str) -> str:
escaped = s.replace('\\', '\\\\')
return f'"{escaped}"'
def _generate_cmake_c_flags(env, dependencies: 'list[dict]') -> str:
parts = env['DEPS_CFLAGS'].copy()
for dependency in dependencies:
for path in dependency.get('CPPPATH', []):
parts.append(f'-I{path}')
return cmd_quote(' '.join(parts))
def _generate_cmake_cxx_flags(env, dependencies: 'list[dict]') -> str:
parts = env['DEPS_CXXFLAGS'].copy()
for dependency in dependencies:
for path in dependency.get('CPPPATH', []):
parts.append(f'-I{path}')
return cmd_quote(' '.join(parts))
def _get_cmake_cxx_standard(env: Environment) -> str:
return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX"
def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]':
args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}',
f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}',
f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}']
for dependency in dependencies:
for name, value in dependency.get('CMAKE_VARS', {}).items():
args.append(f'-D{name}={cmd_quote(value)}')
return args
def _calc_version_hash(dependencies: 'list[dict]') -> str:
return json.dumps({
'version': _VERSION,
'dependencies': dependencies
})
def _cmake_project(env: 'Environment', project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict:
    """Generate (Ninja), build and install a CMake project out-of-tree.

    Builds into <project_root>/build_<BUILD_TYPE> and installs into
    <project_root>/install_<BUILD_TYPE>. The stamp file stores a hash of the
    script version and dependency set; a mismatch forces a clean rebuild.

    Returns a dict with 'install_dir', 'LIBPATH' (the existing lib/lib64
    directories) and 'CPPPATH' (the install include directory).
    """
    config = env['BUILD_TYPE']
    build_dir = os.path.join(project_root, f'build_{config}')
    install_dir = os.path.join(project_root, f'install_{config}')
    version_hash = _calc_version_hash(dependencies)
    stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE)
    is_built = stamp_file.exists()
    if is_built:
        with stamp_file.open('r') as f:
            build_version = f.read()
        if build_version != version_hash:
            print(f'Rebuilding CMake project at {project_root} as the script version changed.')
            is_built = False
    if not is_built:
        # Wipe stale trees. FIX: pass ignore_errors -- on a first build (or
        # after a manual clean) these directories do not exist and rmtree
        # would raise FileNotFoundError.
        shutil.rmtree(build_dir, ignore_errors=True)
        shutil.rmtree(install_dir, ignore_errors=True)
    if not is_built or env['UPDATE_REPOSITORIES']:
        print(f'Building {project_root}, config {config}')
        os.makedirs(build_dir, exist_ok=True)
        # Map the S++ build type onto CMake's configuration names.
        build_type = {
            'debug': 'Debug',
            'release_debug': 'RelWithDebInfo',
            'release': 'Release',
            'profile': 'RelWithDebInfo'
        }.get(env['BUILD_TYPE'], 'RelWithDebInfo')
        def run_cmd(args):
            # env.Execute returns non-zero on failure; abort the build then.
            if env.Execute(' '.join([str(s) for s in args])):
                Exit(1)
        # TODO: is this a problem?
        # environ = os.environ.copy()
        # environ['CXXFLAGS'] = ' '.join(f'-D{define}' for define in env['CPPDEFINES']) # TODO: who cares about windows?
        run_cmd(['cmake', '-G', 'Ninja', '-B', build_dir, f'-DCMAKE_BUILD_TYPE={build_type}',
                 f'-DCMAKE_INSTALL_PREFIX={cmd_quote(install_dir)}', '-DBUILD_TESTING=OFF',
                 *_generate_cmake_args(env, dependencies), *generate_args, project_root])
        run_cmd(['cmake', '--build', *build_args, cmd_quote(build_dir)])
        run_cmd(['cmake', '--install', *install_args, cmd_quote(build_dir)])
        with pathlib.Path(install_dir, _BUILT_STAMPFILE).open('w') as f:
            f.write(version_hash)
    # Only report library directories that were actually created.
    libpath = []
    for lib_folder in ('lib', 'lib64'):
        full_path = os.path.join(install_dir, lib_folder)
        if os.path.exists(full_path):
            libpath.append(full_path)
    return {
        'install_dir': install_dir,
        'LIBPATH': libpath,
        'CPPPATH': [os.path.join(install_dir, 'include')]
    }
# Expose the helper as env.CMakeProject(...).
env.AddMethod(_cmake_project, 'CMakeProject')
Return('env')

View File

@@ -0,0 +1,76 @@
from enum import Enum
import hashlib
import pathlib
import tarfile
import zipfile
import urllib.request
from SCons.Script import *
Import('env')
class ArchiveType(Enum):
TAR_GZ = 0
ZIP = 1
def _detect_archive_type(url: str) -> ArchiveType:
if url.lower().endswith('.tar.gz'):
return ArchiveType.TAR_GZ
elif url.lower().endswith('.zip'):
return ArchiveType.ZIP
raise Exception('could not detect archive type from URL')
def _archive_type_ext(archive_type: ArchiveType) -> str:
if archive_type == ArchiveType.TAR_GZ:
return 'tar.gz'
elif archive_type == ArchiveType.ZIP:
return 'zip'
raise Exception('invalid archive type')
def _download_file(url: str, path: pathlib.Path) -> None:
if path.exists():
return
dl_path = path.with_suffix(f'{path.suffix}.tmp')
if dl_path.exists():
dl_path.unlink()
print(f'Downloading {url} to {dl_path}...')
urllib.request.urlretrieve(url, dl_path)
dl_path.rename(path)
def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int) -> None:
if archive_type == ArchiveType.TAR_GZ:
file = tarfile.open(str(path))
if skip_folders != 0:
def skip_filer(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
name_parts = member.name.split('/')
if len(name_parts) <= skip_folders:
return None
return member.replace(name = '/'.join(name_parts[skip_folders:]))
file.extraction_filter = skip_filer
file.extractall(output_dir)
file.close()
elif archive_type == ArchiveType.ZIP:
file = zipfile.open(str(path))
file.extractall(output_dir)
file.close()
else:
raise Exception('invalid archive type')
def _download_and_extract(env: 'Environment', repo_name: str, url: str, skip_folders: int = 0) -> dict:
    """Fetch the archive at *url* (cached by URL hash in DOWNLOAD_DIR) and
    unpack it under CLONE_DIR/download/<repo_name>.

    A '.spp_extracted' stamp file makes the operation idempotent.
    Returns {'extracted_root': <extraction directory>}.
    """
    kind = _detect_archive_type(url)
    url_digest = hashlib.shake_128(url.encode("utf-8")).hexdigest(6)
    archive_path = pathlib.Path(env['DOWNLOAD_DIR'], f'{url_digest}.{_archive_type_ext(kind)}')
    target_dir = pathlib.Path(env['CLONE_DIR'], 'download', repo_name)
    stamp = target_dir / '.spp_extracted'
    if not stamp.exists():
        _download_file(url, archive_path)
        _extract_file(archive_path, target_dir, kind, skip_folders)
        stamp.touch()
    return {
        'extracted_root': str(target_dir)
    }
# Expose the helper as env.DownloadAndExtract(...).
env.AddMethod(_download_and_extract, 'DownloadAndExtract')
Return('env')

95
addons/gitbranch.py Normal file
View File

@@ -0,0 +1,95 @@
from git import Repo
from git.exc import GitError
import hashlib
import re
from SCons.Script import *
Import('env')
def _clone(env: 'Environment', repo_name: str, remote_url: str):
    """Open (or lazily create) the bare mirror repository for *repo_name*.

    Returns a (repo, origin) pair where origin points at *remote_url*.
    """
    repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
    try:
        repo = Repo(repo_dir)
        return repo, repo.remotes['origin']
    except GitError:
        # Not a repository yet: create a fresh bare mirror.
        print(f'Initializing git repository at {repo_dir}.')
        repo = Repo.init(repo_dir, bare=True)
        return repo, repo.create_remote('origin', remote_url)
def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
    """Materialize git_ref of repo_name as a worktree of the shared bare mirror.

    Returns {'checkout_root': <worktree directory>}. Existing worktrees that
    are on a branch are pulled when UPDATE_REPOSITORIES is set; detached
    checkouts (tags/fixed commits) are left untouched.
    """
    repo, origin = _clone(env, repo_name, remote_url)
    # One worktree per ref, keyed by a short hash of the ref name.
    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
    if not os.path.exists(worktree_dir):
        print(f'Checking out into {worktree_dir}.')
        origin.fetch(tags=True)
        os.makedirs(worktree_dir)
        repo.git.worktree('add', worktree_dir, git_ref)
    elif env['UPDATE_REPOSITORIES']:
        worktree_repo = Repo(worktree_dir)
        if not worktree_repo.head.is_detached:
            print(f'Updating git repository at {worktree_dir}')
            worktree_origin = worktree_repo.remotes['origin']
            worktree_origin.pull()
        else:
            print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
    return {
        'checkout_root': worktree_dir
    }
def _git_tags(env: 'Environment', repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
    """Return the tag names known to the bare mirror of *repo_name*.

    Fetches tags from the remote first when force_fetch is given or the
    global UPDATE_REPOSITORIES flag is set.
    """
    repo, origin = _clone(env, repo_name, remote_url)
    should_fetch = force_fetch or env['UPDATE_REPOSITORIES']
    if should_fetch:
        origin.fetch(tags=True)
    return [tag.name for tag in repo.tags]
def _make_callable(val):
if callable(val):
return val
else:
return lambda env: val
def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None:
    """Generate a standard git-based recipe into *globals*.

    Injects the recipe entry points 'versions', 'dependencies' and 'cook'
    into the calling module's globals. repo_name/repo_url/tag_pattern may be
    plain values or callables taking the environment. Versions come from
    remote tags matching tag_pattern, else from the *versions* callback,
    else default to [(0, 0, 0)]. cook checks out the ref produced by tag_fn
    (a tag name) or ref_fn (an arbitrary ref) and delegates to cook_fn.
    """
    _repo_name = _make_callable(repo_name)
    _repo_url = _make_callable(repo_url)
    _tag_pattern = _make_callable(tag_pattern)
    # only wrap when a versions callback was actually supplied
    versions_cb = versions and _make_callable(versions)
    def _versions(env: Environment, update: bool = False):
        pattern = _tag_pattern(env)
        if pattern:
            tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update)
            result = []
            for tag in tags:
                match = pattern.match(tag)
                if match:
                    # unmatched optional groups are dropped from the version tuple
                    result.append(tuple(int(part) for part in match.groups() if part is not None))
            if len(result) == 0 and not update:
                # nothing matched in the local cache: retry once with a fetch
                return _versions(env, update=True)
            return result
        elif versions_cb:
            return versions_cb(env)
        else:
            return [(0, 0, 0)]
    def _dependencies(env: Environment, version) -> 'dict':
        # static dependency set, independent of the version
        return dependencies
    def _cook(env: Environment, version) -> dict:
        if tag_fn:
            git_ref = f'refs/tags/{tag_fn(version)}'
        else:
            assert ref_fn
            git_ref = ref_fn(env, version)
        repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref)
        return cook_fn(env, repo)
    globals['versions'] = _versions
    globals['dependencies'] = _dependencies
    globals['cook'] = _cook
# Register the git helpers on the construction environment.
env.AddMethod(_git_branch, 'GitBranch')
env.AddMethod(_git_tags, 'GitTags')
env.AddMethod(_git_recipe, 'GitRecipe')
Return('env')

38
addons/jinja.py Normal file
View File

@@ -0,0 +1,38 @@
import pathlib
Import('env')
if not hasattr(env, 'Jinja'):
Return('env')
def _jinja_load_config(env, config_name):
searched_paths = []
for scons_path in env['JINJA_CONFIG_SEARCHPATH']:
if hasattr(scons_path, 'abspath'):
scons_path = scons_path.abspath
path = pathlib.Path(scons_path) / f'{config_name}.yml'
if path.exists():
with path.open('r') as file:
import yaml
return yaml.safe_load(file)
searched_paths.append(f'\n{path}')
joined_paths = ''.join(searched_paths)
raise Exception(f'Could not find Jinja config file "{config_name}.yml". Searched: {joined_paths}')
def _wrap_jinja(orig_jinja):
def _wrapped(env, target, **kwargs):
if 'source' not in kwargs:
kwargs['source'] = f'{target}.jinja'
target = orig_jinja(**kwargs)
if 'depends' in kwargs:
for dependency in kwargs['depends']:
env.Depends(target, dependency)
# env.Depends(alias_prepare, target)
return target
return _wrapped
# Re-wrap the underlying Jinja builder and register the config-loading
# filter plus default template/config search paths.
env.AddMethod(_wrap_jinja(env.Jinja), 'Jinja')
env.Append(JINJA_FILTERS = {'load_config': _jinja_load_config})
env.Append(JINJA_TEMPLATE_SEARCHPATH = ['data/jinja'])
env['JINJA_CONFIG_SEARCHPATH'] = [env.Dir('#data/config')]
Return('env')

31
recipes/Catch2/recipe.py Normal file
View File

@@ -0,0 +1,31 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
checkout_root = repo['checkout_root']
build_result = env.CMakeProject(project_root=checkout_root)
lib_name = {
'debug': 'Catch2d'
}.get(env['BUILD_TYPE'], 'Catch2')
libs = []
if not env.get('CATCH2_OWN_MAIN'):
libs.append({
'debug': 'Catch2Maind'
}.get(env['BUILD_TYPE'], 'Catch2Main'))
libs.append(lib_name)
return {
'CPPPATH': build_result['CPPPATH'],
'LIBS': [env.FindLib(lib, paths=build_result['LIBPATH']) for lib in libs]
}
# Register the Catch2 recipe; GitRecipe injects versions()/dependencies()/
# cook() into this module's globals, driven by upstream 'vX.Y.Z' tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'Catch2',
    repo_url = 'https://github.com/catchorg/Catch2.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@@ -1,26 +0,0 @@
from git import Repo
from git.exc import GitError
import hashlib
import os
from SCons.Script import *
def cook(env: Environment, repo_name: str, remote_url: str, git_ref: str = "main") -> dict:
repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
try:
repo = Repo(repo_dir)
origin = repo.remotes['origin']
except GitError:
print(f'Initializing git repository for SDL at {repo_dir}.')
repo = Repo.init(repo_dir, bare=True)
origin = repo.create_remote('origin', remote_url)
worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right?
if not os.path.exists(worktree_dir):
print(f'Checking out into {worktree_dir}.')
origin.fetch()
os.makedirs(worktree_dir)
repo.git.worktree('add', worktree_dir, git_ref)
return {
'checkout_root': worktree_dir
}

View File

@@ -0,0 +1,31 @@
import re
from SCons.Script import *
_REPO_NAME = 'ImageMagick'
_REPO_URL = 'https://github.com/ImageMagick/ImageMagick.git'
# ImageMagick tags look like '7.1.1-29': major.minor.patch-build.
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)-([0-9]+)$')
def versions(env: 'Environment', update: bool = False):
    """Return all released ImageMagick versions as 4-tuples parsed from git tags."""
    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
    result = []
    for tag in tags:
        match = _TAG_PATTERN.match(tag)
        if match:
            result.append(tuple(int(group) for group in match.groups()))
    return result
def dependencies(env: 'Environment', version) -> 'dict':
    """ImageMagick currently declares no recipe dependencies."""
    return {}
def cook(env: Environment, version) -> dict:
    """Build ImageMagick -- not implemented yet.

    Raises:
        Exception: always; the autotools-based build sketched below still
        needs to be finished.
    """
    # FIX: typo in the error message ('property' -> 'properly').
    raise Exception('this still needs to be implemented properly :/')
    # git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}-{version[3]}'
    # repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
    # checkout_root = repo['checkout_root']
    # build_result = env.AutotoolsProject(checkout_root)
    # return {
    #     'LIBPATH': build_result['LIBPATH'],
    #     'CPPPATH': build_result['CPPPATH'],
    #     'LIBS': ['backtrace']
    # }

View File

@@ -1,40 +1,37 @@
import os import platform
import subprocess import re
import sys
from SCons.Script import * from SCons.Script import *
def cook(env: Environment, git_ref: str = "main") -> dict:
repo = env.Cook('GitBranch', repo_name = 'SDL', remote_url = 'https://github.com/libsdl-org/SDL.git', git_ref = git_ref) def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root'] checkout_root = repo['checkout_root']
build_result = env.CMakeProject(project_root=checkout_root, generate_args = ['-DSDL_STATIC=ON', '-DSDL_SHARED=OFF'])
config = env['BUILD_TYPE'] libs = []
build_dir = os.path.join(checkout_root, f'build_{config}') if platform.system() == 'Windows':
install_dir = os.path.join(checkout_root, f'install_{config}') if env['BUILD_TYPE'] == 'debug':
lib_fname = { libs.append('SDL2-staticd')
'debug': 'libSDL2d.a' else:
}.get(env['BUILD_TYPE'], 'libSDL2.a') # TODO: who cares about windows? libs.append('SDL2-static')
is_built = os.path.exists(os.path.join(build_dir, lib_fname)) # TODO! libs.extend(('kernel32', 'user32', 'gdi32', 'winmm', 'imm32', 'ole32', 'oleaut32', 'version', 'uuid', 'advapi32', 'setupapi', 'shell32', 'dinput8'))
if not is_built: else:
print(f'Building SDL, config {config}') if env['BUILD_TYPE'] == 'debug':
os.makedirs(build_dir, exist_ok=True) libs.append('SDL2d')
build_type = { else:
'debug': 'Debug', libs.append('SDL2')
'release_debug': 'RelWithDebInfo',
'release': 'Release',
'profile': 'RelWithDebInfo'
}.get(env['BUILD_TYPE'], 'RelWithDebInfo')
subprocess.run(('cmake', '-G', 'Ninja', '-B', build_dir, f'-DCMAKE_BUILD_TYPE={build_type}', '-DSDL_STATIC=ON', '-DSDL_SHARED=OFF', f'-DCMAKE_INSTALL_PREFIX={install_dir}', checkout_root), stdout=sys.stdout, stderr=sys.stderr, check=True)
subprocess.run(('cmake', '--build', build_dir), stdout=sys.stdout, stderr=sys.stderr, check=True)
subprocess.run(('cmake', '--install', build_dir), stdout=sys.stdout, stderr=sys.stderr, check=True)
lib_name = {
'debug': 'SDL2d'
}.get(env['BUILD_TYPE'], 'SDL2')
return { return {
'LIBPATH': [os.path.join(install_dir, 'lib')], 'LIBPATH': build_result['LIBPATH'],
'CPPPATH': [os.path.join(install_dir, 'include')], 'CPPPATH': [os.path.join(build_result['install_dir'], 'include/SDL2')], # SDL is really weird about include paths ...
'LIBS': [lib_name, 'm'] 'LIBS': libs
} }
env.GitRecipe(
globals = globals(),
repo_name = 'SDL',
repo_url = 'https://github.com/libsdl-org/SDL.git',
tag_pattern = re.compile(r'^release-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
tag_fn = lambda version: f'release-{version[0]}.{version[1]}.{version[2]}',
cook_fn = _git_cook
)

View File

@@ -1,9 +1,45 @@
import os import re
from SCons.Script import * from SCons.Script import *
def cook(env: Environment, git_ref: str = "main") -> dict: _REPO_NAMES = {
repo = env.Cook('GitBranch', repo_name = 'VulkanHeaders', remote_url = 'https://github.com/KhronosGroup/Vulkan-Headers.git', git_ref = git_ref) 'default': 'VulkanHeaders',
checkout_root = repo['checkout_root'] 'mewin': 'VulkanHeaders_mewin'
env.Append(CPPPATH = [os.path.join(checkout_root, 'include')]) }
_REPO_URLS = {
'default': 'https://github.com/KhronosGroup/Vulkan-Headers.git',
'mewin': 'https://git.mewin.de/mewin/vulkan-headers.git'
}
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
return _REPO_NAMES[env.get('VULKANHEADERS_REMOTE', 'default')]
def _get_repo_url(env: Environment) -> str:
return _REPO_URLS[env.get('VULKANHEADERS_REMOTE', 'default')]
def versions(env: Environment, update: bool = False):
if env.get('VULKANHEADERS_REMOTE') == 'mewin':
return [(0, 0, 0)]
tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
result = []
for tag in tags:
match = _TAG_PATTERN.match(tag)
if match:
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
return result
def dependencies(env: Environment, version) -> 'dict':
return {} return {}
def cook(env: Environment, version) -> dict:
if env.get('VULKANHEADERS_REMOTE') == 'mewin':
git_ref = 'main'
else:
git_ref = f'refs/tags/v{version[0]}.{version[1]}.{version[2]}'
repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
checkout_root = repo['checkout_root']
return {
'CPPPATH': [os.path.join(checkout_root, 'include')]
}

View File

@@ -0,0 +1,18 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
checkout_root = repo['checkout_root']
return {
'CPPPATH': [os.path.join(checkout_root, 'include')]
}
# Register the argparse recipe; versions are derived from 'vX.Y' tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'argparse',
    repo_url = 'https://github.com/p-ranav/argparse.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
    cook_fn = _git_cook
)

67
recipes/boost/recipe.py Normal file
View File

@@ -0,0 +1,67 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://api.github.com/repos/boostorg/boost/releases'
_VERSION_PATTERN = re.compile(r'^boost-([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
versions_file = os.path.join(env['DOWNLOAD_DIR'], 'boost_versions.json')
if update or not os.path.exists(versions_file):
req = requests.get(_VERSIONS_URL)
versions_data = json.loads(req.text)
result = []
for version_data in versions_data:
match = _VERSION_PATTERN.match(version_data['name'])
if not match:
continue
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
with open(versions_file, 'w') as f:
json.dump(result, f)
return result
else:
try:
with open(versions_file, 'r') as f:
return [tuple(v) for v in json.load(f)]
except:
print('boost_versions.json is empty or broken, redownloading.')
return versions(env, update=True)
def dependencies(env: 'Environment', version) -> 'dict':
    """Boost builds self-contained; no recipe dependencies."""
    return {}
def cook(env: Environment, version) -> dict:
    """Download the boost release tarball, build it with CMake and return
    include paths plus the requested libraries.

    Requires env['BOOST_LIBS']: a list of boost library names to link, or
    '*' to link every library found in the build output.
    """
    if env.get('BOOST_LIBS') is None:
        raise Exception('BOOST_LIBS not set. Set to a list of boost libs to link or "*" to link everything.')
    # from 1.85.0 on boost publishes a CMake-ready source archive
    if version >= (1, 85, 0):
        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}-cmake.tar.gz'
    else:
        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    # skip_folders=1 strips the 'boost-X.Y.Z/' top-level directory
    repo = env.DownloadAndExtract(f'boost_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.CMakeProject(checkout_root)
    libs = []
    if '*' in env['BOOST_LIBS']:
        # link every file in the first library output directory
        lib_dir = build_result['LIBPATH'][0]
        for lib_file in os.listdir(lib_dir):
            fname = os.path.join(lib_dir, lib_file)
            if not os.path.isfile(fname):
                continue
            libs.append(fname)
    else:
        # resolve each requested library by its platform-specific file name
        for lib in set(env['BOOST_LIBS']):
            if os.name == 'posix':
                libs.append(env.FindLib(f'boost_{lib}', paths=build_result['LIBPATH']))
            elif os.name == 'nt':
                # windows file names embed toolset/version tags -> glob
                libs.append(env.FindLib(f'libboost_{lib}-*', paths=build_result['LIBPATH'], use_glob=True))
            else:
                raise Exception('Boost not supported on this platform.')
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': libs
    }

19
recipes/cgltf/recipe.py Normal file
View File

@@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo) -> dict:
    """cgltf is header-only: expose the checkout root on the include path."""
    return {'CPPPATH': [repo['checkout_root']]}
# Register cgltf with the generic git recipe machinery: versions are
# discovered from tags shaped like "vMAJOR.MINOR" and cooked by _git_cook.
env.GitRecipe(
    globals = globals(),
    repo_name = 'cgltf',
    repo_url = 'https://github.com/jkuhlmann/cgltf.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
    cook_fn = _git_cook
)

39
recipes/curl/recipe.py Normal file
View File

@@ -0,0 +1,39 @@
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
    """Basename of the curl library produced by the build for this platform.

    NOTE(review): this file does not `import os`; it relies on `os` leaking
    through `from SCons.Script import *` — confirm, or add an explicit import.
    """
    if os.name == 'nt':
        raise Exception('TODO')
    if os.name != 'posix':
        raise Exception('curl is not supported yet on this OS')
    # Debug builds produce a "-d"-suffixed library.
    return 'curl-d' if env['BUILD_TYPE'] == 'debug' else 'curl'
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build curl via CMake as a static, HTTP-only library and link it."""
    cmake_flags = [
        '-DBUILD_CURL_EXE=OFF',
        '-DBUILD_SHARED_LIBS=OFF',
        '-DBUILD_STATIC_LIBS=ON',
        '-DHTTP_ONLY=ON',
        '-DCURL_USE_LIBSSH2=OFF',
    ]
    built = env.CMakeProject(repo['checkout_root'], generate_args=cmake_flags)
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib(_build_lib_name(env), paths=built['LIBPATH'])],
    }
# Register curl: versions come from "curl-X_Y_Z" release tags; cooking also
# requires the openssl, zlib and psl recipes (resolved first as dependencies).
env.GitRecipe(
    globals = globals(),
    repo_name = 'curl',
    repo_url = 'https://github.com/curl/curl.git',
    tag_pattern = re.compile(r'^curl-([0-9]+)_([0-9]+)_([0-9]+)$'),
    tag_fn = lambda version: f'curl-{version[0]}_{version[1]}_{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'openssl': {},
        'zlib': {},
        'psl': {}
    }
)

27
recipes/fmt/recipe.py Normal file
View File

@@ -0,0 +1,27 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build {fmt} with CMake; debug builds produce "fmtd" instead of "fmt"."""
    built = env.CMakeProject(repo['checkout_root'])
    if env['BUILD_TYPE'] == 'debug':
        lib_name = 'fmtd'
    else:
        lib_name = 'fmt'
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=built['LIBPATH'])],
    }
# Register {fmt}: plain "X.Y.Z" tags map directly to versions.
env.GitRecipe(
    globals = globals(),
    repo_name = 'fmt',
    repo_url = 'https://github.com/fmtlib/fmt.git',
    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

52
recipes/glm/recipe.py Normal file
View File

@@ -0,0 +1,52 @@
import re
from SCons.Script import *
# glm can be consumed from upstream or from the mewin fork; GLM_REMOTE
# ('default' / 'mewin') selects which repo name/URL pair is used.
_REPO_NAMES = {
    'default': 'glm',
    'mewin': 'glm_mewin'
}
_REPO_URLS = {
    'default': 'https://github.com/g-truc/glm.git',
    'mewin': 'https://git.mewin.de/mewin/glm.git'
}
# Modern tags: "X.Y.Z"; legacy tags: "0.X.Y.Z" (four components).
_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
_TAG_PATTERN_ALT = re.compile(r'^0\.([0-9]+)\.([0-9]+)\.([0-9]+)$')
def _get_repo_name(env: Environment) -> str:
    """Repository name for the configured GLM_REMOTE (defaults to upstream)."""
    remote = env.get('GLM_REMOTE', 'default')
    return _REPO_NAMES[remote]
def _get_repo_url(env: Environment) -> str:
    """Repository URL for the configured GLM_REMOTE (defaults to upstream)."""
    remote = env.get('GLM_REMOTE', 'default')
    return _REPO_URLS[remote]
def versions(env: Environment, update: bool = False):
    """List available glm versions as (major, minor, patch) tuples."""
    if env.get('GLM_REMOTE') == 'mewin':
        # The fork is tracked by branch, not by tags; expose one placeholder.
        return [(0, 0, 0)]
    tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
    result = []
    for tag in tags:
        m = _TAG_PATTERN.match(tag)
        if m:
            major, minor, patch = m.groups()
            result.append((int(major), int(minor), int(patch)))
            continue
        m = _TAG_PATTERN_ALT.match(tag)
        if m:
            # Legacy 0.x.y.z tags are folded into (0, x, y*10+z) so they
            # stay comparable with three-component versions.
            a, b, c = m.groups()
            result.append((0, int(a), int(b) * 10 + int(c)))
    return result
def dependencies(env: Environment, version) -> 'dict':
    """glm is header-only and needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Check out glm (header-only) at the requested version."""
    if env.get('GLM_REMOTE') == 'mewin':
        git_ref = 'master'
    elif version[0] == 0:
        # Reverse the legacy encoding: (0, x, y*10+z) -> tag "0.x.y.z".
        git_ref = f'refs/tags/0.{version[1]}.{int(version[2]/10)}.{version[2]%10}'
    else:
        git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
    repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
    return {
        'CPPPATH': [repo['checkout_root']],
    }

111
recipes/glslang/recipe.py Normal file
View File

@@ -0,0 +1,111 @@
import glob
import pathlib
import platform
import re
import shutil
from SCons.Script import *
# Stamp file dropped into the checkout once update_glslang_sources.py has run.
_SCRIPT_STAMPFILE = '.spp_script_run'
def _git_cook(env: Environment, repo) -> dict:
    """Build glslang (including SPIRV) from source into one static library.

    Steps: run the repo's own dependency-fetch script once (stamped),
    generate glslang/build_info.h, compile the relevant source trees into
    'glslang_full', and stage a clean include/ tree of the public headers.
    """
    checkout_root = repo['checkout_root']
    # TODO: windows?
    # The update script is run at most once per checkout; the stamp file marks it done.
    did_run_script = os.path.exists(os.path.join(repo['checkout_root'], _SCRIPT_STAMPFILE))
    if not did_run_script or env['UPDATE_REPOSITORIES']:
        python_exe = os.path.realpath(sys.executable)
        script_file = os.path.join(repo['checkout_root'], 'update_glslang_sources.py')
        # The script expects to run from the repo root, so chdir around the call.
        prev_cwd = os.getcwd()
        os.chdir(repo['checkout_root'])
        if env.Execute(f'"{python_exe}" {script_file}'):
            env.Exit(1)
        os.chdir(prev_cwd)
        pathlib.Path(repo['checkout_root'], _SCRIPT_STAMPFILE).touch()
    # generate the build_info.h
    generator_script = os.path.join(repo['checkout_root'], 'build_info.py')
    generator_script_input = os.path.join(repo['checkout_root'], 'build_info.h.tmpl')
    generator_script_output = os.path.join(repo['checkout_root'], 'glslang/build_info.h')
    # CHANGES.md is listed as a source so the header regenerates on version bumps.
    env.Command(
        target = generator_script_output,
        source = [generator_script, generator_script_input, os.path.join(repo['checkout_root'], 'CHANGES.md')],
        action = f'"$PYTHON" "{generator_script}" "{repo["checkout_root"]}" -i "{generator_script_input}" -o "$TARGET"'
    )
    # OS-dependent sources: anything that is not Windows uses the Unix variant.
    platform_source_dir = {
        'Linux': 'Unix',
        'Windows': 'Windows',
        'Darwin': 'Unix'
    }.get(platform.system(), 'Unix')
    glslang_source_files = env.RGlob(os.path.join(repo['checkout_root'], 'glslang/GenericCodeGen/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/MachineIndependent/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/OGLCompilersDLL/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'glslang/ResourceLimits/'), '*.cpp') \
        + env.RGlob(os.path.join(repo['checkout_root'], 'SPIRV/'), '*.cpp') \
        + [os.path.join(repo['checkout_root'], f'glslang/OSDependent/{platform_source_dir}/ossource.cpp')]
    # disable warnings (third-party code; not ours to fix)
    additional_cxx_flags = {
        'clang': ['-w'],
        'gcc': ['-w'],
        'cl': ['/w']
    }.get(env['COMPILER_FAMILY'], [])
    env.StaticLibrary(
        CCFLAGS = env['CCFLAGS'] + additional_cxx_flags,
        CPPPATH = repo['checkout_root'],
        target = env['LIB_DIR'] + '/glslang_full',
        source = glslang_source_files
    )
    # build the include folder: mirror only the public *.h files into include/
    include_dir = os.path.join(checkout_root, 'include')
    if not os.path.exists(include_dir) or env['UPDATE_REPOSITORIES']:
        def copy_headers(dst, src):
            # Copy only the top-level *.h files of src into dst (no recursion).
            os.makedirs(dst, exist_ok=True)
            for file in glob.glob(os.path.join(src, '*.h')):
                shutil.copy(file, dst)
        copy_headers(os.path.join(include_dir, 'glslang/HLSL'), os.path.join(checkout_root, 'glslang/HLSL'))
        copy_headers(os.path.join(include_dir, 'glslang/Include'), os.path.join(checkout_root, 'glslang/Include'))
        copy_headers(os.path.join(include_dir, 'glslang/MachineIndependent'), os.path.join(checkout_root, 'glslang/MachineIndependent'))
        copy_headers(os.path.join(include_dir, 'glslang/Public'), os.path.join(checkout_root, 'glslang/Public'))
        copy_headers(os.path.join(include_dir, 'glslang/SPIRV'), os.path.join(checkout_root, 'SPIRV'))
    return {
        'CPPPATH': [include_dir],
        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('glslang_full'))]
    }
# glslang can be consumed from upstream or from the mewin fork;
# GLSLANG_REMOTE ('default' / 'mewin') selects the pair used below.
_REPO_NAMES = {
    'default': 'glslang',
    'mewin': 'glslang_mewin'
}
_REPO_URLS = {
    'default': 'https://github.com/KhronosGroup/glslang.git',
    'mewin': 'https://git.mewin.de/mewin/glslang.git'
}
# The fork has no version tags, hence no tag pattern for 'mewin'.
_TAG_PATTERNS = {
    'default': re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    'mewin': None
}
def _ref_fn(env: Environment, version) -> str:
    """Git ref to check out for the selected glslang remote."""
    remote = env.get('GLSLANG_REMOTE', 'default')
    if remote == 'mewin':
        # The fork is consumed straight from its master branch.
        return 'master'
    if remote == 'default':
        return f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
    raise Exception('invalid glslang remote')
# Register glslang. Repo location and tag scheme depend on GLSLANG_REMOTE,
# so they are resolved lazily via callables instead of fixed values.
env.GitRecipe(
    globals = globals(),
    repo_name = lambda env: _REPO_NAMES[env.get('GLSLANG_REMOTE', 'default')],
    repo_url = lambda env: _REPO_URLS[env.get('GLSLANG_REMOTE', 'default')],
    tag_pattern = lambda env: _TAG_PATTERNS[env.get('GLSLANG_REMOTE', 'default')],
    cook_fn = _git_cook,
    ref_fn = _ref_fn
)

48
recipes/idn2/recipe.py Normal file
View File

@@ -0,0 +1,48 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://gitlab.com/api/v4/projects/2882658/releases'
_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libidn2_versions.json')
if update or not os.path.exists(versions_file):
req = requests.get(_VERSIONS_URL)
versions_data = json.loads(req.text)
result = []
for version_data in versions_data:
match = _VERSION_PATTERN.match(version_data['name'])
if not match:
continue
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
with open(versions_file, 'w') as f:
json.dump(result, f)
return result
else:
try:
with open(versions_file, 'r') as f:
return [tuple(v) for v in json.load(f)]
except:
print('libidn2_versions.json is empty or broken, redownloading.')
return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libidn2 needs libunistring at build and link time."""
    return {'unistring': {}}
def cook(env: Environment, version) -> dict:
    """Download the libidn2 release tarball, build with autotools, link it."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    tarball = env.DownloadAndExtract(
        f'libidn2_{ver_str}',
        url = f'https://ftp.gnu.org/gnu/libidn/libidn2-{ver_str}.tar.gz',
        skip_folders = 1,
    )
    built = env.AutotoolsProject(tarball['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('idn2', paths=built['LIBPATH'])]
    }

38
recipes/imgui/recipe.py Normal file
View File

@@ -0,0 +1,38 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Compile Dear ImGui (plus any configured backends) into a static lib."""
    root = repo['checkout_root']
    core_sources = [
        os.path.join(root, name)
        for name in ('imgui.cpp', 'imgui_draw.cpp', 'imgui_tables.cpp', 'imgui_widgets.cpp')
    ]
    # Optional backend translation units, selected via IMGUI_BACKENDS.
    backend_sources = [
        f'backends/imgui_impl_{backend}.cpp'
        for backend in env.get('IMGUI_BACKENDS', [])
    ]
    env.StaticLibrary(
        CPPPATH = [root],
        CPPDEFINES = ['IMGUI_IMPL_VULKAN_NO_PROTOTYPES=1'],
        target = env['LIB_DIR'] + '/imgui',
        source = core_sources,
        add_source = backend_sources
    )
    return {
        'CPPPATH': [root],
        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('imgui'))]
    }
# Register Dear ImGui: versions come from "vX.Y.Z" tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'imgui',
    repo_url = 'https://github.com/ocornut/imgui.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

20
recipes/iwa/recipe.py Normal file
View File

@@ -0,0 +1,20 @@
import json
from SCons.Script import *
# iwa is an internal S++ module, consumed straight from its master branch.
_REPO_NAME = 'iwa'
_REPO_URL = 'https://git.mewin.de/mewin/iwa.git'
def versions(env: Environment, update: bool = False):
    """iwa is tracked by branch, not tags; expose a single placeholder version."""
    placeholder = (0, 0, 0)
    return [placeholder]
def dependencies(env: Environment, version) -> 'dict':
    """Read iwa's dependency set from the dependencies.json in its repo."""
    checkout = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    manifest = os.path.join(checkout['checkout_root'], 'dependencies.json')
    with open(manifest, 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    """Build iwa as an S++ module from the SModule script in its repo."""
    checkout = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    return env.Module(os.path.join(checkout['checkout_root'], 'SModule'))

22
recipes/json/recipe.py Normal file
View File

@@ -0,0 +1,22 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """nlohmann/json is header-only; CMake just stages the include dir."""
    built = env.CMakeProject(project_root=repo['checkout_root'])
    return {'CPPPATH': built['CPPPATH']}
# Register nlohmann/json: versions come from "vX.Y.Z" tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'json',
    repo_url = 'https://github.com/nlohmann/json.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

View File

@@ -0,0 +1,21 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    """libbacktrace is untagged upstream; expose a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """libbacktrace needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build libbacktrace (gcc/clang only) from master via autotools."""
    if env['COMPILER_FAMILY'] not in ('gcc', 'clang'):
        env.Error('libbacktrace requires gcc or clang.')
    checkout = env.GitBranch(repo_name = 'libbacktrace', remote_url = 'https://github.com/ianlancetaylor/libbacktrace.git', git_ref = 'master')
    built = env.AutotoolsProject(checkout['checkout_root'])
    return {
        'LIBPATH': built['LIBPATH'],
        'CPPPATH': built['CPPPATH'],
        'LIBS': ['backtrace']
    }

View File

@@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build libjpeg-turbo with CMake and link the jpeg library."""
    built = env.CMakeProject(repo['checkout_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('jpeg', paths=built['LIBPATH'])],
    }
# Register libjpeg-turbo: plain "X.Y.Z" tags map directly to versions.
env.GitRecipe(
    globals = globals(),
    repo_name = 'libjpeg-turbo',
    repo_url = 'https://github.com/libjpeg-turbo/libjpeg-turbo.git',
    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

39
recipes/libpng/recipe.py Normal file
View File

@@ -0,0 +1,39 @@
import os
import re
from SCons.Script import *
def _build_lib_name(env: Environment) -> str:
    """libpng library basename for the current platform and build type."""
    debug = env['BUILD_TYPE'] == 'debug'
    if os.name == 'posix':
        return 'png16d' if debug else 'png16'
    if os.name == 'nt':
        return 'libpng16_staticd' if debug else 'libpng16_static'
    raise Exception('libpng is not supported yet on this OS')
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build libpng with CMake against the cooked zlib and link it."""
    zlib = env.Cook('zlib')
    built = env.CMakeProject(repo['checkout_root'], dependencies = [zlib])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib(_build_lib_name(env), paths=built['LIBPATH'])]
    }
# Register libpng: "vX.Y.Z" tags; zlib is cooked first as a dependency.
env.GitRecipe(
    globals = globals(),
    repo_name = 'libpng',
    repo_url = 'https://git.code.sf.net/p/libpng/code.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'zlib': {}
    }
)

View File

@@ -0,0 +1,19 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """magic_enum is header-only: expose its include/ directory."""
    include_dir = os.path.join(repo['checkout_root'], 'include')
    return {'CPPPATH': [include_dir]}
# Register magic_enum: versions come from "vX.Y.Z" tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'magic_enum',
    repo_url = 'https://github.com/Neargye/magic_enum.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

21
recipes/mecab/recipe.py Normal file
View File

@@ -0,0 +1,21 @@
from SCons.Script import *
import os
def versions(env: Environment, update: bool = False):
    """mecab is consumed from master; expose a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """mecab needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Build mecab from master; the autotools project lives in the mecab/ subdir."""
    checkout = env.GitBranch(repo_name = 'mecab', remote_url = 'https://github.com/taku910/mecab.git', git_ref = 'master')
    built = env.AutotoolsProject(os.path.join(checkout['checkout_root'], 'mecab'))
    return {
        'LIBPATH': built['LIBPATH'],
        'CPPPATH': built['CPPPATH'],
        'LIBS': ['mecab']
    }

View File

@@ -1,8 +1,20 @@
import json
from SCons.Script import *
_REPO_NAME = 'mijin'
_REPO_URL = 'https://git.mewin.de/mewin/mijin2.git'
def versions(env: Environment, update: bool = False):
    return [(0, 0, 0)]
def dependencies(env: Environment, version) -> 'dict':
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
        return env.DepsFromJson(json.load(f))
def cook(env: Environment, version) -> dict:
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    checkout_root = repo['checkout_root']
    return env.Module(os.path.join(checkout_root, 'SModule'))

View File

@@ -0,0 +1,27 @@
from SCons.Script import *
def versions(env: Environment, update: bool = False):
    """MikkTSpace is consumed from master; expose a single pseudo-version."""
    return [(1, 0)]
def dependencies(env: Environment, version) -> 'dict':
    """MikkTSpace needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Compile MikkTSpace's single C source file into a static library."""
    repo = env.GitBranch(repo_name = 'mikktspace', remote_url = 'https://github.com/mmikk/MikkTSpace.git', git_ref = 'master')
    root = repo['checkout_root']
    flags = env['CCFLAGS'].copy()
    if env['COMPILER_FAMILY'] == 'cl':
        # MSVC: silence warning C4456 (declaration hides previous local).
        flags.append('/wd4456')
    built_lib = env.StaticLibrary(
        CCFLAGS = flags,
        CPPPATH = [root],
        target = env['LIB_DIR'] + '/mikktspace',
        source = [os.path.join(root, 'mikktspace.c')]
    )
    return {
        'CPPPATH': [root],
        'LIBS': [built_lib]
    }

21
recipes/openssl/recipe.py Normal file
View File

@@ -0,0 +1,21 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Configure/build OpenSSL statically and link both ssl and crypto."""
    built = env.AutotoolsProject(
        repo['checkout_root'],
        config_args = ['no-shared', 'no-tests', 'no-docs'],
        # OpenSSL ships its own "Configure" script instead of "configure".
        configure_script_path='Configure',
    )
    libs = []
    for name in ('ssl', 'crypto'):
        libs.append(env.FindLib(name, paths=built['LIBPATH']))
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': libs
    }
# Register OpenSSL: versions come from "openssl-X.Y.Z" tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'openssl',
    repo_url = 'https://github.com/openssl/openssl.git',
    tag_pattern = re.compile(r'^openssl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'openssl-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

70
recipes/psl/recipe.py Normal file
View File

@@ -0,0 +1,70 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://api.github.com/repos/rockdaboot/libpsl/releases'
_VERSION_PATTERN = re.compile(r'^Release v([0-9]+)\.([0-9]+)\.([0-9]+)$')
def versions(env: Environment, update: bool = False):
versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libpsl_versions.json')
if update or not os.path.exists(versions_file):
req = requests.get(_VERSIONS_URL)
versions_data = json.loads(req.text)
result = []
for version_data in versions_data:
match = _VERSION_PATTERN.match(version_data['name'])
if not match:
continue
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
with open(versions_file, 'w') as f:
json.dump(result, f)
return result
else:
try:
with open(versions_file, 'r') as f:
return [tuple(v) for v in json.load(f)]
except:
print('libpsl_versions.json is empty or broken, redownloading.')
return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libpsl needs libidn2 and libunistring at build and link time."""
    return {
        'idn2': {},
        'unistring': {},
    }
def cook(env: Environment, version) -> dict:
    """Download the libpsl release tarball, build with autotools, link it."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    tarball = env.DownloadAndExtract(
        f'libpsl_{ver_str}',
        url = f'https://github.com/rockdaboot/libpsl/releases/download/{ver_str}/libpsl-{ver_str}.tar.gz',
        skip_folders = 1,
    )
    built = env.AutotoolsProject(tarball['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('psl', paths=built['LIBPATH'])]
    }
#def _git_cook(env: Environment, repo: dict) -> dict:
# checkout_root = repo['checkout_root']
# subprocess.run((os.path.join(checkout_root, 'autogen.sh'),), cwd=checkout_root)
# build_result = env.AutotoolsProject(checkout_root)
# return {
# 'CPPPATH': build_result['CPPPATH'],
# 'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
# }
#
#env.GitRecipe(
# globals = globals(),
# repo_name = 'psl',
# repo_url = 'https://github.com/rockdaboot/libpsl.git',
# tag_pattern = re.compile(r'^libpsl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
# tag_fn = lambda version: f'libpsl-{version[0]}.{version[1]}.{version[2]}',
# cook_fn = _git_cook,
# dependencies = {
# 'idn2': {},
# 'unistring': {}
# }
#)

34
recipes/spdlog/recipe.py Normal file
View File

@@ -0,0 +1,34 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build spdlog with CMake against the cooked {fmt} and link it."""
    fmt_lib = env.Cook('fmt')
    built = env.CMakeProject(project_root=repo['checkout_root'], dependencies=[fmt_lib])
    lib_name = 'spdlogd' if env['BUILD_TYPE'] == 'debug' else 'spdlog'
    return {
        'CPPPATH': built['CPPPATH'],
        # Consumers must compile with the same config spdlog was built with:
        # compiled (non-header-only) mode and the external {fmt}.
        'CPPDEFINES': ['SPDLOG_COMPILE_LIB=1', 'SPDLOG_FMT_EXTERNAL=1'],
        'LIBS': [env.FindLib(lib_name, paths=built['LIBPATH'])]
    }
# Register spdlog: "vX.Y.Z" tags; {fmt} is cooked first as a dependency.
env.GitRecipe(
    globals = globals(),
    repo_name = 'spdlog',
    repo_url = 'https://github.com/gabime/spdlog.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'fmt': {}
    }
)

19
recipes/stb/recipe.py Normal file
View File

@@ -0,0 +1,19 @@
from SCons.Script import *
# stb single-file headers, consumed header-only from master.
_REPO_NAME = 'stb'
_REPO_URL = 'https://github.com/nothings/stb.git'
def versions(env: Environment, update: bool = False):
    """stb is tracked by branch, not tags; expose a single placeholder version."""
    placeholder = (0, 0, 0)
    return [placeholder]
def dependencies(env: Environment, version) -> 'dict':
    """stb needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Check out stb from master and expose the repo root as include path."""
    checkout = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
    return {'CPPPATH': [checkout['checkout_root']]}

View File

@@ -0,0 +1,42 @@
import json
import os
import re
import requests
from SCons.Script import *
_VERSIONS_URL = 'https://ftp.gnu.org/gnu/libunistring/?F=0'
_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')
def versions(env: Environment, update: bool = False):
versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libunistring_versions.json')
if update or not os.path.exists(versions_file):
req = requests.get(_VERSIONS_URL)
result = []
for match in _VERSION_PATTERN.finditer(req.text):
result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
with open(versions_file, 'w') as f:
json.dump(result, f)
return result
else:
try:
with open(versions_file, 'r') as f:
return [tuple(v) for v in json.load(f)]
except:
print('libunistring_versions.json is empty or broken, redownloading.')
return versions(env, update=True)
def dependencies(env: Environment, version) -> 'dict':
    """libunistring needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Download the libunistring tarball, build with autotools, link it."""
    ver_str = f'{version[0]}.{version[1]}.{version[2]}'
    tarball = env.DownloadAndExtract(
        f'libunistring_{ver_str}',
        url = f'https://ftp.gnu.org/gnu/libunistring/libunistring-{ver_str}.tar.gz',
        skip_folders = 1,
    )
    built = env.AutotoolsProject(tarball['extracted_root'])
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib('unistring', paths=built['LIBPATH'])]
    }

View File

@@ -0,0 +1,23 @@
import os
from SCons.Script import *
def available(env: Environment):
    """Winsock2 only exists on Windows; return a reason string elsewhere."""
    if os.name == 'nt':
        return None
    return 'Winsock2 is only available on Windows.'
def versions(env: Environment, update: bool = False):
    """A single pseudo-version on Windows; no versions anywhere else."""
    return [(0, 0, 0)] if os.name == 'nt' else []
def dependencies(env: Environment, version) -> 'dict':
    """Winsock2 needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Winsock2 ships with Windows; just link the system Ws2_32 library."""
    return dict(LIBS = ['Ws2_32'])

View File

@@ -0,0 +1,26 @@
import re
from SCons.Script import *
def _git_cook(env: Environment, repo: dict) -> dict:
    """Build yaml-cpp with CMake; debug builds produce "yaml-cppd"."""
    built = env.CMakeProject(project_root=repo['checkout_root'])
    lib_name = 'yaml-cppd' if env['BUILD_TYPE'] == 'debug' else 'yaml-cpp'
    return {
        'CPPPATH': built['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=built['LIBPATH'])]
    }
# Register yaml-cpp: versions come from "yaml-cpp-X.Y.Z" tags.
env.GitRecipe(
    globals = globals(),
    repo_name = 'yaml-cpp',
    repo_url = 'https://github.com/jbeder/yaml-cpp.git',
    tag_pattern = re.compile(r'^yaml-cpp-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'yaml-cpp-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

49
recipes/zlib/recipe.py Normal file
View File

@@ -0,0 +1,49 @@
import os
import re
from SCons.Script import *
# Upstream zlib repository; tags are "vX.Y" or "vX.Y.Z" (patch optional).
_REPO_NAME = 'zlib'
_REPO_URL = 'https://github.com/madler/zlib.git'
_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)(?:\.([0-9]+))?$')
def _build_lib_name(env: Environment) -> str:
if os.name == 'posix':
return 'z'
elif os.name == 'nt':
return {
'debug': 'zlibstaticd'
}.get(env['BUILD_TYPE'], 'zlibstatic')
else:
raise Exception('libpng is not supported yet on this OS')
def versions(env: Environment, update: bool = False):
    """List zlib versions from upstream tags; a "vX.Y" tag counts as (X, Y, 0)."""
    result = []
    for tag in env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update):
        match = _TAG_PATTERN.match(tag)
        if not match:
            continue
        major, minor, patch = match.groups()
        # The patch group is optional and may be None.
        result.append((int(major), int(minor), int(patch or 0)))
    return result
def dependencies(env: Environment, version) -> 'dict':
    """zlib needs no other recipes."""
    return dict()
def cook(env: Environment, version) -> dict:
    """Check out the tagged zlib release, build via CMake, and also export
    CMake hint variables so dependent recipes (e.g. libpng) can locate it."""
    # Zero patch versions were tagged as plain "vX.Y".
    git_ref = f'refs/tags/v{version[0]}.{version[1]}'
    if version[2] != 0:
        git_ref += f'.{version[2]}'
    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
    built = env.CMakeProject(project_root=repo['checkout_root'])
    include_dir = os.path.join(built['install_dir'], 'include')
    lib_file = env.FindLib(_build_lib_name(env), paths=built['LIBPATH'])
    return {
        'CPPPATH': [include_dir],
        'LIBS': [lib_file],
        'CMAKE_VARS': {
            'ZLIB_LIBRARY': lib_file,
            'ZLIB_INCLUDE_DIR': include_dir
        }
    }

View File

@@ -1 +1,2 @@
GitPython
psutil