Compare commits

16 commits: 1e4bb17251, c461b5da39, b7cb5f7c48, 9c64f982fd, 378c6ba341, 96fc1984cd, 396350b295, 5de1ac4444, d5712120df, 267d06a997, 089ea25c10, e1404fee58, c4200393fb, 0c82036300, 35b38b8b6e, 8bea4a6db5

SConscript (367 lines changed)
@@ -1,11 +1,42 @@
 import copy
+import glob
+import json
 import os
 import psutil
 import sys
 import time

-def _cook(env: Environment, recipe_name: str, *args, **kwargs):
+
+class _VersionSpec:
+    minimum_version = None
+    maximum_version = None
+
+    def __init__(self, minimum_version = None, maximum_version = None):
+        self.minimum_version = minimum_version
+        self.maximum_version = maximum_version
+
+    def __str__(self):
+        return f'Min: {self.minimum_version}, Max: {self.maximum_version}'
+
+
+class _Dependency:
+    name: str = ''
+    version = None
+    version_spec: _VersionSpec
+    recipe = None
+    depdeps: list = []
+    cook_result: dict = {}
+
+
+class _Target:
+    builder = None
+    args: list = []
+    kwargs: dict = {}
+    dependencies: list = []
+    target = None
+
+
+def _find_recipe(env: Environment, recipe_name: str):
+    if recipe_name in env['SPP_RECIPES']:
+        return env['SPP_RECIPES'][recipe_name]
     import importlib.util
     source_file = None
     for folder in env['RECIPES_FOLDERS']:
@@ -17,8 +48,21 @@ def _cook(env: Environment, recipe_name: str, *args, **kwargs):
         raise Exception(f'Could not find recipe {recipe_name}.')
     spec = importlib.util.spec_from_file_location(recipe_name, source_file)
     recipe = importlib.util.module_from_spec(spec)
+    recipe.env = env
     spec.loader.exec_module(recipe)
-    return recipe.cook(env, *args, **kwargs)
+    env['SPP_RECIPES'][recipe_name] = recipe
+    return recipe
+
+
+def _cook(env: Environment, recipe_name: str):
+    dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
+    if not dependency:
+        raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
+    if not dependency.cook_result:
+        dependency.cook_result = dependency.recipe.cook(env, dependency.version)
+    return dependency.cook_result
+
+
+def _module(env: Environment, file: str):
+    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
+
+
 def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
     env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
@@ -45,6 +89,15 @@ def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
         if 'DEPENDENCIES' in dependency:
             for inner_dependency in dependency['DEPENDENCIES']:
                 _inject_dependency(inner_dependency, kwargs, False)
+    elif isinstance(dependency, _Dependency):
+        if not dependency.cook_result:
+            dependency.cook_result = dependency.recipe.cook(env, dependency.version)
+        _inject_list(kwargs, dependency.cook_result, 'CPPPATH')
+        _inject_list(kwargs, dependency.cook_result, 'CPPDEFINES')
+        _inject_list(kwargs, dependency.cook_result, 'LIBPATH')
+        _inject_list(kwargs, dependency.cook_result, 'LIBS')
+        for depdep in dependency.depdeps:
+            _inject_dependency(depdep, kwargs)
+

 def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
     result_nodes = []
@@ -56,6 +109,37 @@ def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
         result_nodes.extend(env.Glob(f'{path}/{pattern}', **kwargs))
     return sorted(result_nodes)
+
+
+def _safe_eval(condition: str, locals={}):
+    return eval(condition, {
+        '__builtins__': {
+            'abs': abs, 'all': all, 'any': any, 'ascii': ascii, 'bin': bin, 'bool': bool, 'chr': chr, 'complex': complex,
+            'dict': dict, 'divmod': divmod, 'enumerate': enumerate, 'filter': filter, 'float': float, 'format': format,
+            'hasattr': hasattr, 'hash': hash, 'hex': hex, 'id': id, 'int': int, 'isinstance': isinstance,
+            'issubclass': issubclass, 'len': len, 'list': list, 'map': map, 'max': max, 'min': min, 'next': next,
+            'oct': oct, 'ord': ord, 'pow': pow, 'range': range, 'reversed': reversed, 'round': round, 'set': set,
+            'slice': slice, 'sorted': sorted, 'str': str, 'sum': sum, 'tuple': tuple, 'type': type, 'zip': zip
+        }
+    }, locals)
+
+
+def _deps_from_json(env: Environment, deps: dict) -> dict:
+    to_remove = []
+    for key, dep in deps.items():
+        if 'condition' in dep:
+            if not _safe_eval(dep['condition'], {
+                'compiler_family': env['COMPILER_FAMILY'],
+                'target_os': os.name,
+                'getenv': lambda name: env.get(name)
+            }):
+                to_remove.append(key)
+                continue
+        if 'min' in dep and isinstance(dep['min'], list):
+            dep['min'] = tuple(dep['min'])
+        if 'max' in dep and isinstance(dep['max'], list):
+            dep['max'] = tuple(dep['max'])
+    for key in to_remove:
+        del deps[key]
+    return deps
+
+
 def _make_interface(env: Environment, dependencies: list = []):
     kwargs = {}
     for dependency in dependencies:
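Note (illustration only, not part of the diff): judging from the code above, _deps_from_json / env.DepsFromJson appears to take a plain dict of dependency specs, drop entries whose 'condition' evaluates to false under the restricted locals, and turn 'min'/'max' lists into tuples. A hypothetical input might look like this; the dependency names and keys are invented:

# Hypothetical argument for env.DepsFromJson(); key names follow the code above.
deps = {
    'fmt':    {'min': [10, 0, 0]},                      # the 'min' list becomes the tuple (10, 0, 0)
    'curl':   {'condition': 'target_os == "posix"'},    # dropped on non-POSIX hosts
    'catch2': {'condition': 'getenv("ENABLE_TESTS")'}   # dropped unless ENABLE_TESTS is set in the environment
}
# deps = env.DepsFromJson(deps)  # presumably fed to the dependencies= argument of a wrapped builder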
@@ -65,27 +149,139 @@ def _make_interface(env: Environment, dependencies: list = []):
         'CPPDEFINES': kwargs.get('CPPDEFINES', [])
     }


-def _lib_filename(name: str, type: str = 'static') -> str:
-    # TODO: windows
+def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
+    if os.name == 'posix':
         ext = {
             'static': 'a',
             'shared': 'so'
         }[type]
         return f'lib{name}.{ext}'
+    elif os.name == 'nt':
+        ext = {
+            'static': 'lib',
+            'shared': 'dll'
+        }[type]
+        return f'{name}.{ext}'
+    else:
+        raise Exception('What OS is this?')


-def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'static'):
+def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'static', allow_fail: bool = False, use_glob: bool = False):
+    fname = _lib_filename(env, name, type)
     for path in paths:
-        lib_path = os.path.join(path, _lib_filename(name, type))
-        if os.path.exists(lib_path):
+        lib_path = os.path.join(path, fname)
+        if use_glob:
+            files = glob.glob(lib_path)
+            if len(files) == 1:
+                return files[0]
+            elif len(files) > 1:
+                raise Exception(f'Multiple candidates found for library with name {name} in paths: "{", ".join(paths)}" with name: "{", ".join(files)}".')
+        elif os.path.exists(lib_path):
             return lib_path
+    if allow_fail:
         return None
+    raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')


 def _error(env: Environment, message: str):
     print(message, file=sys.stderr)
     env.Exit(1)
+
+
+def _find_common_depenency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
+    result_version = _VersionSpec()
+    if versionA.minimum_version is not None:
+        if versionB.minimum_version is not None:
+            result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version)
+        else:
+            result_version.minimum_version = versionA.minimum_version
+    else:
+        result_version.minimum_version = versionB.minimum_version
+
+    if versionA.maximum_version is not None:
+        if versionB.maximum_version is not None:
+            result_version.maximum_version = min(versionA.maximum_version, versionB.maximum_version)
+        else:
+            result_version.maximum_version = versionA.maximum_version
+    else:
+        result_version.maximum_version = versionB.maximum_version
+
+    if result_version.minimum_version is not None and result_version.maximum_version is not None \
+            and (result_version.minimum_version > result_version.maximum_version):
+        return None
+    return result_version
+
+
+def _parse_version_spec(version_spec: dict) -> _VersionSpec:
+    return _VersionSpec(version_spec.get('min'), version_spec.get('max'))
+
+
+def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
+    if name not in env['SPP_DEPENDENCIES']:
+        return True
+    dependency = env['SPP_DEPENDENCIES'][name]
+    common_version_spec = _find_common_depenency_version(name, dependency.version_spec, version_spec)
+    return common_version_spec is not None
+
+
+def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
+    if name in env['SPP_DEPENDENCIES']:
+        dependency = env['SPP_DEPENDENCIES'][name]
+        common_version_spec = _find_common_depenency_version(name, dependency.version_spec, version_spec)
+        if common_version_spec is None:
+            raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}')
+        if dependency.version_spec != common_version_spec:
+            env['_SPP_DEPENDENCIES_OKAY'] = False
+            dependency.version_spec = common_version_spec
+        return dependency
+    dependency = _Dependency()
+    dependency.name = name
+    dependency.version_spec = version_spec
+    dependency.recipe = _find_recipe(env, name)
+    env['SPP_DEPENDENCIES'][name] = dependency
+    env['_SPP_DEPENDENCIES_OKAY'] = False
+    return dependency
+
+
+def _sort_versions(versions: list) -> None:
+    import functools
+    def _compare(left, right):
+        if left < right:
+            return 1
+        elif left == right:
+            return 0
+        else:
+            return -1
+    versions.sort(key=functools.cmp_to_key(_compare))
+
+
+def _version_matches(version, version_spec: _VersionSpec) -> bool:
+    if version_spec.minimum_version is not None and version < version_spec.minimum_version:
+        return False
+    if version_spec.maximum_version is not None and version > version_spec.maximum_version:
+        return False
+    return True
+
+
+def _find_version(env: Environment, dependency: _Dependency):
+    for update in (False, True):
+        versions = dependency.recipe.versions(env, update=update)
+        _sort_versions(versions)
+        for version in versions:
+            if _version_matches(version, dependency.version_spec):
+                canadd = True
+                for depname, depspec in dependency.recipe.dependencies(env, version).items():
+                    if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
+                        canadd = False
+                        break
+                if canadd:
+                    depdeps = []
+                    for depname, depspec in dependency.recipe.dependencies(env, version).items():
+                        depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
+                    dependency.version = version
+                    dependency.depdeps = depdeps
+                    return
+    print(f'Available versions: \n{versions}')
+    print(f'Required version: {dependency.version_spec}')
+    raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')
+
+
 def _wrap_builder(builder, is_lib: bool = False):
-    def _wrapped(env, dependencies = [], *args, **kwargs):
+    def _wrapped(env, dependencies = {}, *args, **kwargs):
+        target_dependencies = []
+        for name, version_spec in dependencies.items():
+            target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec)))
+
         if 'CPPPATH' not in kwargs:
             kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
         if 'CPPDEFINES' not in kwargs:
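Note (plain-Python illustration of the helpers above; the dependency name and version tuples are invented, not taken from the diff): two specs for the same dependency are merged by taking the larger minimum and the smaller maximum, and an empty intersection makes _add_dependency raise.

# Hypothetical specs for one dependency requested from two places.
a = _VersionSpec((1, 2, 0), None)          # >= 1.2.0
b = _VersionSpec((1, 0, 0), (1, 9, 9))     # >= 1.0.0, <= 1.9.9
merged = _find_common_depenency_version('example', a, b)
# merged.minimum_version == (1, 2, 0), merged.maximum_version == (1, 9, 9)
c = _VersionSpec((2, 0, 0), None)
# _find_common_depenency_version('example', merged, c) returns None -> reported as incompatible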
@@ -94,32 +290,34 @@ def _wrap_builder(builder, is_lib: bool = False):
             kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
         if 'LIBS' not in kwargs and 'LIBS' in env:
             kwargs['LIBS'] = copy.copy(env['LIBS'])
-        for dependency in dependencies:
-            _inject_dependency(dependency, kwargs)
-        if 'LIBS' in kwargs:
-            libs_copy = list(kwargs['LIBS'])
-            for lib in libs_copy:
-                if isinstance(lib, str) and os.path.isabs(lib):
-                    kwargs['LIBS'].remove(lib)
-                    kwargs['source'].append(lib)
-        result = builder(*args, **kwargs)
-        if is_lib:
-            # generate a new libconf
-            return {
-                'CPPPATH': kwargs.get('CPPPATH', []),
-                'CPPDEFINES': kwargs.get('CPPDEFINES', []),
-                'LIBPATH': kwargs.get('LIBPATH', []),
-                'LIBS': result + kwargs.get('LIBS', []),
-                'ADDITIONAL_SOURCES': kwargs.get('add_source', []),
-                '_target': result
-            }
-        return result
+        if 'source' in kwargs:
+            source = kwargs['source']
+            if not isinstance(source, list):
+                source = [source]
+            new_source = []
+            for src in source:
+                if isinstance(src, str):
+                    new_source.append(env.Entry(src))
+                else:
+                    new_source.append(src)
+            kwargs['source'] = new_source
+
+        target = _Target()
+        target.builder = builder
+        target.args = args
+        target.kwargs = kwargs
+        target.dependencies = target_dependencies
+        env.Append(SPP_TARGETS = [target])
+        if not target.dependencies:
+            _build_target(target)
+        return target
     return _wrapped


 def _wrap_default(default):
     def _wrapped(env, arg):
-        if isinstance(arg, dict) and '_target' in arg:
+        if isinstance(arg, _Target):
+            env.Append(SPP_DEFAULT_TARGETS = [arg])
+        elif isinstance(arg, dict) and '_target' in arg:
             default(arg['_target'])
         else:
             default(arg)
@@ -127,13 +325,60 @@ def _wrap_default(default):


 def _wrap_depends(depends):
     def _wrapped(env, dependant, dependency):
-        if isinstance(dependant, dict) and '_target' in dependant:
+        if isinstance(dependant, _Target) or isinstance(dependency, _Target):
+            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
+        elif isinstance(dependant, dict) and '_target' in dependant:
             dependant = dependant['_target']
-        if isinstance(dependency, dict) and '_target' in dependency:
+        elif isinstance(dependency, dict) and '_target' in dependency:
             dependency = dependency['_target']
         depends(dependant, dependency)
     return _wrapped
+
+
+def _build_target(target: _Target):
+    for dependency in target.dependencies:
+        _inject_dependency(dependency, target.kwargs)
+    if 'LIBS' in target.kwargs:
+        libs_copy = list(target.kwargs['LIBS'])
+        for lib in libs_copy:
+            if isinstance(lib, str) and os.path.isabs(lib):
+                target.kwargs['LIBS'].remove(lib)
+                target.kwargs['LIBS'].append(env.File(lib))
+                pass
+            elif isinstance(lib, _Target):
+                if not lib.target:
+                    _build_target(lib)
+                target.kwargs['LIBS'].remove(lib)
+                target.kwargs['LIBS'].append(lib.target)
+    target.target = target.builder(*target.args, **target.kwargs)
+
+
+def _version_to_string(version) -> str:
+    return '.'.join([str(v) for v in version])
+
+
+def _finalize(env: Environment):
+    version_requirements = {dep.name: {
+        'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
+        'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
+    } for dep in env['SPP_DEPENDENCIES'].values()}
+    env['_SPP_DEPENDENCIES_OKAY'] = False
+    while not env['_SPP_DEPENDENCIES_OKAY']:
+        env['_SPP_DEPENDENCIES_OKAY'] = True
+        for dependency in list(env['SPP_DEPENDENCIES'].values()):
+            if not dependency.version:
+                _find_version(env, dependency)
+    with open('cache/versions.json', 'w') as f:
+        json.dump({
+            'requirements': version_requirements,
+            'selected': {
+                dep.name: _version_to_string(dep.version) for dep in env['SPP_DEPENDENCIES'].values()
+            }
+        }, f)
+
+    for target in env['SPP_TARGETS']:
+        _build_target(target)
+    for target in env['SPP_DEFAULT_TARGETS']:
+        env.Default(target.target)
+
+
 def _get_fallback_cache_dir() -> str:
     return Dir('#cache').abspath

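Note (hedged sketch, not from the diff): given the json.dump call above, the cache/versions.json written by _finalize() would look roughly like the following; the dependency names and version numbers are invented.

{
    "requirements": {
        "fmt":    {"min": "10.0.0", "max": null},
        "catch2": {"min": null, "max": null}
    },
    "selected": {
        "fmt": "10.2.1",
        "catch2": "3.5.4"
    }
}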
@@ -159,6 +404,12 @@ if not config.get('CXX_STANDARD'):
 if not config.get('PREPROCESSOR_PREFIX'):
     config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?

+if 'COMPILATIONDB_FILTER_FILES' not in config:
+    config['COMPILATIONDB_FILTER_FILES'] = True
+
+if 'WINDOWS_DISABLE_DEFAULT_DEFINES' not in config:
+    config['WINDOWS_DISABLE_DEFAULT_DEFINES'] = False
+
 AddOption(
     '--build_type',
     dest = 'build_type',
@@ -250,9 +501,12 @@ vars.Add('CC', 'The C Compiler', default_CC)
 vars.Add('CXX', 'The C++ Compiler', default_CXX)
 vars.Add('LINK', 'The Linker')
 vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
+vars.Add('CFLAGS', 'C Compiler Flags')
+vars.Add('CXXFLAGS', 'C++ Compiler Flags')
 vars.Add('LINKFLAGS', 'Linker Flags')
 vars.Add('PYTHON', 'Python Executable', 'python')
-vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current project.', True)
+vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
+         ' project.', config['COMPILATIONDB_FILTER_FILES'])

 tools = ['default', 'compilation_db', 'unity_build']
 if 'TOOLS' in config:
@@ -261,18 +515,20 @@ if 'TOOLS' in config:
 env = Environment(tools = tools, variables = vars, ENV = os.environ)
 env['RECIPES_FOLDERS'] = [Dir('recipes')]
 env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
-env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
-env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
-env['UPDATE_REPOSITORIES'] = update_repositories
-
 print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
 try:
     os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
 except:
     env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
-    env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
     print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
     os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
+env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
+env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
+env['UPDATE_REPOSITORIES'] = update_repositories
+env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone
+env['DEPS_CFLAGS'] = []
+env['DEPS_CXXFLAGS'] = []
+env['DEPS_LINKFLAGS'] = []

 env['SHARED_CACHE_DIR'] = Dir(f'#cache').abspath
 # allow compiling to variant directories (each gets their own bin/lib/cache dirs)
@@ -303,6 +559,13 @@ env.Append(CPPPATH = [])
 env.Append(CPPDEFINES = [])
 env.Append(LINKFLAGS = [])

+# init SPP environment variables
+env['SPP_TARGETS'] = []
+env['SPP_DEFAULT_TARGETS'] = []
+env['SPP_TARGET_DEPENDENCIES'] = []
+env['SPP_DEPENDENCIES'] = {}
+env['SPP_RECIPES'] = {}
+
 # create the cache dir
 os.makedirs(env['CACHE_DIR'], exist_ok=True)
 cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
@@ -372,11 +635,13 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
         env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
         env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
-    else:
+    else: # clang only
         # no-gnu-anonymous-struct - we don't care
         env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
+        env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
     if build_type == 'debug':
         env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
+        env.Append(DEPS_CXXFLAGS = ['-D_GLIBCXX_DEBUG'])
     elif build_type == 'release_debug' or build_type == 'profile':
         env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
         if build_type == 'profile':
@@ -391,20 +656,35 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
     if enable_asan:
         env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
         env.Append(LINKFLAGS = ['-fsanitize=address'])
+        env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
+        env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
 elif env['COMPILER_FAMILY'] == 'cl':
+    cxx_version_name = {
+        'c++14': 'c++14',
+        'c++17': 'c++17',
+        'c++20': 'c++20',
+        'c++23': 'c++latest',
+        'c++26': 'c++latest'
+    }.get(env['CXX_STANDARD'], 'c++14') # default to C++14 for older versions
     # C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
     # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
     # C4702: unreachable code, issued after MIJIN_FATAL macro
     # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
     # C4275: same as above
-    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', f'/std:{config["CXX_STANDARD"]}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t'])
+    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
+                          f'/std:{cxx_version_name}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t', '/utf-8'])
     env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
+    env.Append(DEPS_CXXFLAGS = ['/EHsc', '/Zc:char8_t', '/utf-8', '/vmg'])
     if build_type == 'debug':
         env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
         env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
+        env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
+        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     elif build_type == 'release_debug' or build_type == 'profile':
         env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
+        env.Append(DEPS_CXXFLAGS = ['/Zi'])
+        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     else:
         env.Append(CCFLAGS = ['/O2'])
@@ -413,10 +693,17 @@ if env['COMPILER_FAMILY'] == 'gcc':
 elif env['COMPILER_FAMILY'] == 'clang':
     env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])

+# platform specific options
+if os.name == 'nt':
+    if not config['WINDOWS_DISABLE_DEFAULT_DEFINES']:
+        env.Append(CDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'], CPPDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'])
+
 env.AddMethod(_cook, 'Cook')
 env.AddMethod(_parse_lib_conf, 'ParseLibConf')
 env.AddMethod(_rglob, 'RGlob')
+env.AddMethod(_deps_from_json, 'DepsFromJson')
 env.AddMethod(_make_interface, 'MakeInterface')
+env.AddMethod(_lib_filename, 'LibFilename')
 env.AddMethod(_find_lib, 'FindLib')
 env.AddMethod(_error, 'Error')
 env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
@@ -430,6 +717,8 @@ env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
 env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
 env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
 env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
+env.AddMethod(_module, 'Module')
+env.AddMethod(_finalize, 'Finalize')

 if hasattr(env, 'Gch'):
     env.AddMethod(_wrap_builder(env.Gch), 'Gch')
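Note (hedged sketch of how a project build script might use the reworked API after this change; target names, sources and version numbers are placeholders, while the method names match the AddMethod calls above):

# Hypothetical project SConscript using the wrapped, deferred builders.
Import('env')

lib = env.Library(
    target = 'mylib',
    source = env.RGlob('src', '*.cpp'),
    dependencies = {'fmt': {'min': (10, 0, 0)}}    # resolved later by env.Finalize()
)
app = env.Program(
    target = 'myapp',
    source = ['main.cpp'],
    LIBS = [lib],                                  # _Target entries in LIBS are resolved in _build_target()
    dependencies = {'catch2': {}}
)
env.Default(app)
env.Finalize()   # picks versions, cooks recipes and builds the deferred targets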
@@ -9,7 +9,7 @@ _BUILT_STAMPFILE = '.spp_built'

 Import('env')

-def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = []) -> dict:
+def _autotools_project(env: Environment, project_root: str, config_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], configure_script_path: str = 'configure') -> dict:
     config = env['BUILD_TYPE']
     build_dir = os.path.join(project_root, f'build_{config}')
     install_dir = os.path.join(project_root, f'install_{config}')
@@ -28,14 +28,24 @@ def _autotools_project(...)
     env = os.environ.copy()
     env['CFLAGS'] = cflags

-    subprocess.run((os.path.join(project_root, 'configure'), '--prefix', install_dir, *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
+    config_script = os.path.join(project_root, configure_script_path)
+    if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
+        subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root)
+
+    subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
     subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
     subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
     pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
+
+    libpath = []
+    for lib_folder in ('lib', 'lib64'):
+        full_path = os.path.join(install_dir, lib_folder)
+        if os.path.exists(full_path):
+            libpath.append(full_path)
+
     return {
         'install_dir': install_dir,
-        'LIBPATH': [os.path.join(install_dir, 'lib')],
+        'LIBPATH': libpath,
         'CPPPATH': [os.path.join(install_dir, 'include')]
     }
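Note (minimal, hypothetical call site for the extended signature; the recipe context and arguments are invented): projects whose configure script lives elsewhere, or that only ship configure.ac, can now be driven like this.

# Inside a recipe's cook function; 'repo' would come from env.GitBranch(...).
build_result = env.AutotoolsProject(
    repo['checkout_root'],
    config_args = ['--disable-shared'],
    configure_script_path = 'configure'   # autoreconf is run first when only configure.ac exists
)
# build_result['LIBPATH'] now lists every install lib/ and lib64/ directory that exists.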
@@ -1,9 +1,12 @@
-import os
+import json
 import pathlib
+import shutil

 from SCons.Script import *

 _BUILT_STAMPFILE = '.spp_built'
+_VERSION = 2 # bump if you change how the projects are build to trigger a clean build

 Import('env')
@@ -11,11 +14,57 @@ def cmd_quote(s: str) -> str:
     escaped = s.replace('\\', '\\\\')
     return f'"{escaped}"'

-def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = []) -> dict:
+def _generate_cmake_c_flags(env, dependencies: 'list[dict]') -> str:
+    parts = env['DEPS_CFLAGS'].copy()
+    for dependency in dependencies:
+        for path in dependency.get('CPPPATH', []):
+            parts.append(f'-I{path}')
+    return cmd_quote(' '.join(parts))
+
+
+def _generate_cmake_cxx_flags(env, dependencies: 'list[dict]') -> str:
+    parts = env['DEPS_CXXFLAGS'].copy()
+    for dependency in dependencies:
+        for path in dependency.get('CPPPATH', []):
+            parts.append(f'-I{path}')
+    return cmd_quote(' '.join(parts))
+
+
+def _get_cmake_cxx_standard(env: Environment) -> str:
+    return env['CXX_STANDARD'][3:] # we use "C++XX", CMake just "XX"
+
+
+def _generate_cmake_args(env: Environment, dependencies: 'list[dict]') -> 'list[str]':
+    args = [f'-DCMAKE_C_FLAGS={_generate_cmake_c_flags(env, dependencies)}',
+            f'-DCMAKE_CXX_FLAGS={_generate_cmake_cxx_flags(env, dependencies)}',
+            f'-DCMAKE_CXX_STANDARD={_get_cmake_cxx_standard(env)}']
+    for dependency in dependencies:
+        for name, value in dependency.get('CMAKE_VARS', {}).items():
+            args.append(f'-D{name}={cmd_quote(value)}')
+    return args
+
+
+def _calc_version_hash(dependencies: 'list[dict]') -> str:
+    return json.dumps({
+        'version': _VERSION,
+        'dependencies': dependencies
+    })
+
+
+def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str]' = [], build_args : 'list[str]' = [], install_args : 'list[str]' = [], dependencies: 'list[dict]' = []) -> dict:
     config = env['BUILD_TYPE']
     build_dir = os.path.join(project_root, f'build_{config}')
     install_dir = os.path.join(project_root, f'install_{config}')
-    is_built = os.path.exists(os.path.join(install_dir, _BUILT_STAMPFILE))
+    version_hash = _calc_version_hash(dependencies)
+    stamp_file = pathlib.Path(install_dir, _BUILT_STAMPFILE)
+    is_built = stamp_file.exists()
+
+    if is_built:
+        with stamp_file.open('r') as f:
+            build_version = f.read()
+        if build_version != version_hash:
+            print(f'Rebuilding CMake project at {project_root} as the script version changed.')
+            is_built = False
+        if not is_built:
+            shutil.rmtree(build_dir)
+            shutil.rmtree(install_dir)
+
     if not is_built or env['UPDATE_REPOSITORIES']:
         print(f'Building {project_root}, config {config}')
         os.makedirs(build_dir, exist_ok=True)
@@ -26,14 +75,19 @@ def _cmake_project(...)
             'profile': 'RelWithDebInfo'
         }.get(env['BUILD_TYPE'], 'RelWithDebInfo')
         def run_cmd(args):
-            env.Execute(' '.join([str(s) for s in args]))
+            if env.Execute(' '.join([str(s) for s in args])):
+                Exit(1)
         # TODO: is this a problem?
         # environ = os.environ.copy()
         # environ['CXXFLAGS'] = ' '.join(f'-D{define}' for define in env['CPPDEFINES']) # TODO: who cares about windows?
-        run_cmd(['cmake', '-G', 'Ninja', '-B', build_dir, f'-DCMAKE_BUILD_TYPE={build_type}', f'-DCMAKE_INSTALL_PREFIX={cmd_quote(install_dir)}', '-DBUILD_TESTING=OFF', *generate_args, project_root])
+        run_cmd(['cmake', '-G', 'Ninja', '-B', build_dir, f'-DCMAKE_BUILD_TYPE={build_type}',
+                 f'-DCMAKE_INSTALL_PREFIX={cmd_quote(install_dir)}', '-DBUILD_TESTING=OFF',
+                 *_generate_cmake_args(env, dependencies), *generate_args, project_root])
         run_cmd(['cmake', '--build', *build_args, cmd_quote(build_dir)])
         run_cmd(['cmake', '--install', *install_args, cmd_quote(build_dir)])
-        pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
+
+        with pathlib.Path(install_dir, _BUILT_STAMPFILE).open('w') as f:
+            f.write(version_hash)

     libpath = []
     for lib_folder in ('lib', 'lib64'):
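Note (hedged, hypothetical example of the new dependencies parameter; the recipe, variables and values are invented): cook results of other dependencies can be forwarded so that their include paths end up in CMAKE_C(XX)_FLAGS and extra cache variables are set; the same list also feeds the version hash in the stamp file, so changing it forces a rebuild.

# Hypothetical forwarding of an already-cooked dependency into a CMake build,
# assuming 'zlib' was declared as a dependency and checkout_root is the project source dir.
zlib_result = env.Cook('zlib')   # assumed to return a dict with at least 'CPPPATH'
build_result = env.CMakeProject(
    project_root = checkout_root,
    generate_args = ['-DBUILD_SHARED_LIBS=OFF'],
    dependencies = [dict(zlib_result, CMAKE_VARS = {'ZLIB_USE_STATIC_LIBS': 'ON'})]
)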
@@ -2,12 +2,12 @@
 from git import Repo
 from git.exc import GitError
 import hashlib
-import os
+import re
 from SCons.Script import *

 Import('env')

-def _gitbranch(env: Environment, repo_name: str, remote_url: str, git_ref: str = "main") -> dict:
+def _clone(env: Environment, repo_name: str, remote_url: str):
     repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
     try:
         repo = Repo(repo_dir)
@@ -16,6 +16,10 @@ def _gitbranch(...)
         print(f'Initializing git repository at {repo_dir}.')
         repo = Repo.init(repo_dir, bare=True)
         origin = repo.create_remote('origin', remote_url)
+    return repo, origin
+
+
+def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
+    repo, origin = _clone(env, repo_name, remote_url)
     worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
     if not os.path.exists(worktree_dir):
         print(f'Checking out into {worktree_dir}.')
@@ -34,6 +38,58 @@ def _gitbranch(...)
         'checkout_root': worktree_dir
     }
+
+
+def _git_tags(env: Environment, repo_name: str, remote_url: str, force_fetch: bool = False) -> 'list[str]':
+    repo, origin = _clone(env, repo_name, remote_url)
+    if force_fetch or env['UPDATE_REPOSITORIES']:
+        origin.fetch(tags=True)
+    return [t.name for t in repo.tags]
+
+
-env.AddMethod(_gitbranch, 'GitBranch')
+def _make_callable(val):
+    if callable(val):
+        return val
+    else:
+        return lambda env: val
+
+
+def _git_recipe(env: Environment, globals: dict, repo_name, repo_url, cook_fn, versions = None, tag_pattern = None, tag_fn = None, ref_fn = None, dependencies: dict = {}) -> None:
+    _repo_name = _make_callable(repo_name)
+    _repo_url = _make_callable(repo_url)
+    _tag_pattern = _make_callable(tag_pattern)
+    versions_cb = versions and _make_callable(versions)
+
+    def _versions(env: Environment, update: bool = False):
+        pattern = _tag_pattern(env)
+        if pattern:
+            tags = env.GitTags(repo_name = _repo_name(env), remote_url = _repo_url(env), force_fetch=update)
+            result = []
+            for tag in tags:
+                match = pattern.match(tag)
+                if match:
+                    result.append(tuple(int(part) for part in match.groups() if part is not None))
+            if len(result) == 0 and not update:
+                return _versions(env, update=True)
+            return result
+        elif versions_cb:
+            return versions_cb(env)
+        else:
+            return [(0, 0, 0)]
+
+    def _dependencies(env: Environment, version) -> 'dict':
+        return dependencies
+
+    def _cook(env: Environment, version) -> dict:
+        if tag_fn:
+            git_ref = f'refs/tags/{tag_fn(version)}'
+        else:
+            assert ref_fn
+            git_ref = ref_fn(env, version)
+        repo = env.GitBranch(repo_name = _repo_name(env), remote_url = _repo_url(env), git_ref = git_ref)
+        return cook_fn(env, repo)
+
+    globals['versions'] = _versions
+    globals['dependencies'] = _dependencies
+    globals['cook'] = _cook
+
+
+env.AddMethod(_git_branch, 'GitBranch')
+env.AddMethod(_git_tags, 'GitTags')
+env.AddMethod(_git_recipe, 'GitRecipe')
 Return('env')
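Note (hedged sketch, not part of the diff): besides the tag_fn-based recipes that follow, the ref_fn hook appears to allow tracking a branch instead of tags; a hypothetical recipe (name and URL invented) might look like this.

# Hypothetical branch-tracking recipe: no tag pattern, one placeholder version, a fixed branch ref.
import re
from SCons.Script import *

def _git_cook(env: Environment, repo: dict) -> dict:
    return {'CPPPATH': [repo['checkout_root']]}

env.GitRecipe(
    globals = globals(),
    repo_name = 'somelib',
    repo_url = 'https://example.com/somelib.git',
    versions = lambda env: [(0, 0, 0)],             # single placeholder version
    ref_fn = lambda env, version: 'refs/heads/main',
    cook_fn = _git_cook
)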
@@ -1,21 +1,31 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = 'master', own_main: bool = False) -> dict:
-    repo = env.GitBranch(repo_name = 'catch2', remote_url = 'https://github.com/catchorg/Catch2.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo) -> dict:
     checkout_root = repo['checkout_root']
     build_result = env.CMakeProject(project_root=checkout_root)
+
     lib_name = {
         'debug': 'Catch2d'
     }.get(env['BUILD_TYPE'], 'Catch2')
-    libs = [lib_name]
-    if not own_main:
+    libs = []
+    if not env.get('CATCH2_OWN_MAIN'):
         libs.append({
             'debug': 'Catch2Maind'
         }.get(env['BUILD_TYPE'], 'Catch2Main'))
+    libs.append(lib_name)
     return {
-        'LIBPATH': build_result['LIBPATH'],
         'CPPPATH': build_result['CPPPATH'],
-        'LIBS': libs
+        'LIBS': [env.FindLib(lib, paths=build_result['LIBPATH']) for lib in libs]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'Catch2',
+    repo_url = 'https://github.com/catchorg/Catch2.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)
@@ -1,12 +1,31 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref = 'main') -> dict:
-    repo = env.GitBranch(repo_name = 'ImageMagick', remote_url = 'https://github.com/ImageMagick/ImageMagick.git', git_ref = git_ref)
-    checkout_root = repo['checkout_root']
-    build_result = env.AutotoolsProject(checkout_root)
-    return {
-        'LIBPATH': build_result['LIBPATH'],
-        'CPPPATH': build_result['CPPPATH'],
-        'LIBS': ['backtrace']
-    }
+_REPO_NAME = 'ImageMagick'
+_REPO_URL = 'https://github.com/ImageMagick/ImageMagick.git'
+_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)-([0-9]+)$')
+
+def versions(env: Environment, update: bool = False):
+    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
+    result = []
+    for tag in tags:
+        match = _TAG_PATTERN.match(tag)
+        if match:
+            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2]), int(match.groups()[3])))
+    return result
+
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+
+def cook(env: Environment, version) -> dict:
+    raise Exception('this still needs to be implemented property :/')
+    # git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}-{version[3]}'
+    # repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
+    # checkout_root = repo['checkout_root']
+    # build_result = env.AutotoolsProject(checkout_root)
+    # return {
+    #     'LIBPATH': build_result['LIBPATH'],
+    #     'CPPPATH': build_result['CPPPATH'],
+    #     'LIBS': ['backtrace']
+    # }
@@ -1,10 +1,10 @@
-import os
 import platform
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "main") -> dict:
-    repo = env.GitBranch(repo_name = 'SDL', remote_url = 'https://github.com/libsdl-org/SDL.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
     build_result = env.CMakeProject(project_root=checkout_root, generate_args = ['-DSDL_STATIC=ON', '-DSDL_SHARED=OFF'])
     libs = []
@@ -25,3 +25,13 @@ def cook(env: Environment, git_ref: str = "main") -> dict:
         'LIBS': libs
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'SDL',
+    repo_url = 'https://github.com/libsdl-org/SDL.git',
+    tag_pattern = re.compile(r'^release-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'release-{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)
@@ -1,12 +1,44 @@
-import os
+import re
 from SCons.Script import *

-def cook(env: Environment, remote: str = 'github', git_ref: str = 'main') -> dict:
-    if remote == 'mewin':
-        repo = env.GitBranch(repo_name = 'VulkanHeaders_mewin', remote_url = 'https://git.mewin.de/mewin/vulkan-headers.git', git_ref = git_ref)
+_REPO_NAMES = {
+    'default': 'VulkanHeaders',
+    'mewin': 'VulkanHeaders_mewin'
+}
+_REPO_URLS = {
+    'default': 'https://github.com/KhronosGroup/Vulkan-Headers.git',
+    'mewin': 'https://git.mewin.de/mewin/vulkan-headers.git'
+}
+_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$')
+
+
+def _get_repo_name(env: Environment) -> str:
+    return _REPO_NAMES[env.get('VULKANHEADERS_REMOTE', 'default')]
+
+
+def _get_repo_url(env: Environment) -> str:
+    return _REPO_URLS[env.get('VULKANHEADERS_REMOTE', 'default')]
+
+
+def versions(env: Environment, update: bool = False):
+    if env.get('VULKANHEADERS_REMOTE') == 'mewin':
+        return [(0, 0, 0)]
+    tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
+    result = []
+    for tag in tags:
+        match = _TAG_PATTERN.match(tag)
+        if match:
+            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
+    return result
+
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+
+def cook(env: Environment, version) -> dict:
+    if env.get('VULKANHEADERS_REMOTE') == 'mewin':
+        git_ref = 'main'
     else:
-        repo = env.GitBranch(repo_name = 'VulkanHeaders', remote_url = 'https://github.com/KhronosGroup/Vulkan-Headers.git', git_ref = git_ref)
+        git_ref = f'refs/tags/v{version[0]}.{version[1]}.{version[2]}'
+    repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
     checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [os.path.join(checkout_root, 'include')]
@@ -1,10 +1,18 @@
-import os
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'argparse', remote_url = 'https://github.com/p-ranav/argparse.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [os.path.join(checkout_root, 'include')]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'argparse',
+    repo_url = 'https://github.com/p-ranav/argparse.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
+    cook_fn = _git_cook
+)
@@ -1,12 +1,67 @@
+import json
 import os
+import re
+import requests
 from SCons.Script import *

-def cook(env: Environment, version: str = "1.85.0") -> dict:
-    # TODO: build binaries?
-    url = f'https://archives.boost.io/release/{version}/source/boost_{version.replace(".", "_")}.tar.gz'
-    repo = env.DownloadAndExtract(f'boost_{version}', url = url, skip_folders = 1)
+_VERSIONS_URL = 'https://api.github.com/repos/boostorg/boost/releases'
+_VERSION_PATTERN = re.compile(r'^boost-([0-9]+)\.([0-9]+)\.([0-9]+)$')
+
+def versions(env: Environment, update: bool = False):
+    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'boost_versions.json')
+    if update or not os.path.exists(versions_file):
+        req = requests.get(_VERSIONS_URL)
+        versions_data = json.loads(req.text)
+        result = []
+        for version_data in versions_data:
+            match = _VERSION_PATTERN.match(version_data['name'])
+            if not match:
+                continue
+            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
+        with open(versions_file, 'w') as f:
+            json.dump(result, f)
+        return result
+    else:
+        try:
+            with open(versions_file, 'r') as f:
+                return [tuple(v) for v in json.load(f)]
+        except:
+            print('boost_versions.json is empty or broken, redownloading.')
+            return versions(env, update=True)
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    if env.get('BOOST_LIBS') is None:
+        raise Exception('BOOST_LIBS not set. Set to a list of boost libs to link or "*" to link everything.')
+    if version >= (1, 85, 0):
+        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}-cmake.tar.gz'
+    else:
+        url = f'https://github.com/boostorg/boost/releases/download/boost-{version[0]}.{version[1]}.{version[2]}/boost-{version[0]}.{version[1]}.{version[2]}.tar.gz'
+    repo = env.DownloadAndExtract(f'boost_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
     checkout_root = repo['extracted_root']
+    build_result = env.CMakeProject(checkout_root)
+
+    libs = []
+    if '*' in env['BOOST_LIBS']:
+        lib_dir = build_result['LIBPATH'][0]
+        for lib_file in os.listdir(lib_dir):
+            fname = os.path.join(lib_dir, lib_file)
+            if not os.path.isfile(fname):
+                continue
+            libs.append(fname)
+    else:
+        for lib in set(env['BOOST_LIBS']):
+            if os.name == 'posix':
+                libs.append(env.FindLib(f'boost_{lib}', paths=build_result['LIBPATH']))
+            elif os.name == 'nt':
+                libs.append(env.FindLib(f'libboost_{lib}-*', paths=build_result['LIBPATH'], use_glob=True))
+            else:
+                raise Exception('Boost not supported on this platform.')
     return {
-        'CPPPATH': [checkout_root]
+        'CPPPATH': build_result['CPPPATH'],
+        'LIBS': libs
     }

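Note on usage: the reworked boost recipe no longer takes a version string; it resolves a version tuple through versions() and refuses to cook unless BOOST_LIBS is set. A minimal consumer sketch, assuming a project SConscript whose env already has the recipe machinery loaded — BOOST_LIBS and the returned CPPPATH/LIBS keys come from this changeset, while the Cook() call site and the Program target are illustrative assumptions:

    # Hypothetical project fragment; only BOOST_LIBS and the result keys
    # are taken directly from the boost recipe above.
    env['BOOST_LIBS'] = ['filesystem', 'system']   # or ['*'] to link everything
    boost = env.Cook('boost')                      # -> {'CPPPATH': [...], 'LIBS': [...]}
    env.Program(
        target = 'demo',
        source = ['demo.cpp'],
        CPPPATH = boost['CPPPATH'],
        LIBS = boost['LIBS'],
    )
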
@@ -1,9 +1,19 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'cgltf', remote_url = 'https://github.com/jkuhlmann/cgltf.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo) -> dict:
     checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [checkout_root]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'cgltf',
+    repo_url = 'https://github.com/jkuhlmann/cgltf.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}',
+    cook_fn = _git_cook
+)

recipes/curl/recipe.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import re
from SCons.Script import *

def _build_lib_name(env: Environment) -> str:
    if os.name == 'posix':
        return {
            'debug': 'curl-d'
        }.get(env['BUILD_TYPE'], 'curl')
    elif os.name == 'nt':
        raise Exception('TODO')
    else:
        raise Exception('curl is not supported yet on this OS')

def _git_cook(env: Environment, repo: dict) -> dict:
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root, generate_args=['-DBUILD_CURL_EXE=OFF', '-DBUILD_SHARED_LIBS=OFF',
                                                                  '-DBUILD_STATIC_LIBS=ON', '-DHTTP_ONLY=ON',
                                                                  '-DCURL_USE_LIBSSH2=OFF'])
    lib_name = _build_lib_name(env)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])],
    }


env.GitRecipe(
    globals = globals(),
    repo_name = 'curl',
    repo_url = 'https://github.com/curl/curl.git',
    tag_pattern = re.compile(r'^curl-([0-9]+)_([0-9]+)_([0-9]+)$'),
    tag_fn = lambda version: f'curl-{version[0]}_{version[1]}_{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'openssl': {},
        'zlib': {},
        'psl': {}
    }
)

@@ -1,16 +1,27 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = 'master') -> dict:
-    repo = env.GitBranch(repo_name = 'fmt', remote_url = 'https://github.com/fmtlib/fmt.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
-    build_result = env.CMakeProject(project_root=checkout_root, generate_args = ['-DFMT_TEST=OFF'])
+    build_result = env.CMakeProject(checkout_root)

     lib_name = {
         'debug': 'fmtd'
     }.get(env['BUILD_TYPE'], 'fmt')
     return {
-        'LIBPATH': build_result['LIBPATH'],
         'CPPPATH': build_result['CPPPATH'],
-        'LIBS': [lib_name]
+        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'fmt',
+    repo_url = 'https://github.com/fmtlib/fmt.git',
+    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)

@@ -1,13 +1,52 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, remote: str = 'github', git_ref: str = "master") -> dict:
-    if remote == 'mewin':
-        repo = env.GitBranch(repo_name = 'glm_mewin', remote_url = 'https://git.mewin.de/mewin/glm.git', git_ref = git_ref)
-    else:
-        repo = env.GitBranch(repo_name = 'glm', remote_url = 'https://github.com/g-truc/glm.git', git_ref = git_ref)
-    checkout_root = repo['checkout_root']
+_REPO_NAMES = {
+    'default': 'glm',
+    'mewin': 'glm_mewin'
+}
+_REPO_URLS = {
+    'default': 'https://github.com/g-truc/glm.git',
+    'mewin': 'https://git.mewin.de/mewin/glm.git'
+}
+_TAG_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')
+_TAG_PATTERN_ALT = re.compile(r'^0\.([0-9]+)\.([0-9]+)\.([0-9]+)$')
+
+def _get_repo_name(env: Environment) -> str:
+    return _REPO_NAMES[env.get('GLM_REMOTE', 'default')]
+
+def _get_repo_url(env: Environment) -> str:
+    return _REPO_URLS[env.get('GLM_REMOTE', 'default')]
+
+def versions(env: Environment, update: bool = False):
+    if env.get('GLM_REMOTE') == 'mewin':
+        return [(0, 0, 0)]
+
+    tags = env.GitTags(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), force_fetch=update)
+    result = []
+    for tag in tags:
+        match = _TAG_PATTERN.match(tag)
+        if match:
+            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
+        else:
+            match = _TAG_PATTERN_ALT.match(tag)
+            if match:
+                result.append((0, int(match.groups()[0]), int(match.groups()[1]) * 10 + int(match.groups()[2])))
+    return result
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    if env.get('GLM_REMOTE') == 'mewin':
+        git_ref = 'master'
+    elif version[0] == 0:
+        git_ref = f'refs/tags/0.{version[1]}.{int(version[2]/10)}.{version[2]%10}'
+    else:
+        git_ref = f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
+    repo = env.GitBranch(repo_name = _get_repo_name(env), remote_url = _get_repo_url(env), git_ref = git_ref)
+    checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [checkout_root],
     }

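Note on the glm version encoding: four-part 0.x.y.z tags are folded into three-part tuples so they sort alongside the newer x.y.z tags, by packing the last two digits into one number; cook() then unpacks them to rebuild the git ref. A small worked example in plain Python (the tag value is hypothetical, and the packing assumes the final tag component is a single digit):

    import re

    _TAG_PATTERN_ALT = re.compile(r'^0\.([0-9]+)\.([0-9]+)\.([0-9]+)$')

    tag = '0.9.9.8'                       # hypothetical upstream tag
    m = _TAG_PATTERN_ALT.match(tag)
    version = (0, int(m.group(1)), int(m.group(2)) * 10 + int(m.group(3)))
    assert version == (0, 9, 98)

    # cook() reverses the packing when it rebuilds the ref:
    git_ref = f'refs/tags/0.{version[1]}.{int(version[2] / 10)}.{version[2] % 10}'
    assert git_ref == 'refs/tags/0.9.9.8'
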
@@ -1,20 +1,15 @@
-from SCons.Script import *
-
 import glob
-import os
 import pathlib
 import platform
+import re
 import shutil
-import sys
+from SCons.Script import *

 _SCRIPT_STAMPFILE = '.spp_script_run'

-def cook(env: Environment, remote: str = 'github', git_ref: str = '') -> dict:
-    if remote == 'mewin':
-        repo = env.GitBranch(repo_name = 'glslang_mewin', remote_url = 'https://git.mewin.de/mewin/glslang.git', git_ref = git_ref or 'master')
-    else:
-        repo = env.GitBranch(repo_name = 'glslang', remote_url = 'https://github.com/KhronosGroup/glslang.git', git_ref = git_ref or 'main')
+def _git_cook(env: Environment, repo) -> dict:
     checkout_root = repo['checkout_root']

     # TODO: windows?
@@ -51,10 +46,11 @@ def cook(env: Environment, remote: str = 'github', git_ref: str = '') -> dict:
         + env.RGlob(os.path.join(repo['checkout_root'], 'SPIRV/'), '*.cpp') \
         + [os.path.join(repo['checkout_root'], f'glslang/OSDependent/{platform_source_dir}/ossource.cpp')]

-    # disable a few warnings when compiling with clang
+    # disable warnings
     additional_cxx_flags = {
-        'clang': ['-Wno-deprecated-copy', '-Wno-missing-field-initializers', '-Wno-gnu-redeclared-enum',
-                  '-Wno-unused-but-set-variable', '-Wno-deprecated-enum-enum-conversion']
+        'clang': ['-w'],
+        'gcc': ['-w'],
+        'cl': ['/w']
     }.get(env['COMPILER_FAMILY'], [])
     env.StaticLibrary(
         CCFLAGS = env['CCFLAGS'] + additional_cxx_flags,
@@ -79,5 +75,37 @@ def cook(env: Environment, remote: str = 'github', git_ref: str = '') -> dict:
     return {
         'CPPPATH': [include_dir],
-        'LIBS': ['glslang_full']
+        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('glslang_full'))]
     }
+
+
+_REPO_NAMES = {
+    'default': 'glslang',
+    'mewin': 'glslang_mewin'
+}
+_REPO_URLS = {
+    'default': 'https://github.com/KhronosGroup/glslang.git',
+    'mewin': 'https://git.mewin.de/mewin/glslang.git'
+}
+_TAG_PATTERNS = {
+    'default': re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    'mewin': None
+}
+
+def _ref_fn(env: Environment, version) -> str:
+    remote = env.get('GLSLANG_REMOTE', 'default')
+    if remote == 'default':
+        return f'refs/tags/{version[0]}.{version[1]}.{version[2]}'
+    elif remote == 'mewin':
+        return 'master'
+    else:
+        raise Exception('invalid glslang remote')
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = lambda env: _REPO_NAMES[env.get('GLSLANG_REMOTE', 'default')],
+    repo_url = lambda env: _REPO_URLS[env.get('GLSLANG_REMOTE', 'default')],
+    tag_pattern = lambda env: _TAG_PATTERNS[env.get('GLSLANG_REMOTE', 'default')],
+    cook_fn = _git_cook,
+    ref_fn = _ref_fn
+)

recipes/idn2/recipe.py (new file, 48 lines)
@@ -0,0 +1,48 @@
import json
import os
import re
import requests
from SCons.Script import *

_VERSIONS_URL = 'https://gitlab.com/api/v4/projects/2882658/releases'
_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')

def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libidn2_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libidn2_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)

def dependencies(env: Environment, version) -> 'dict':
    return {
        'unistring': {}
    }

def cook(env: Environment, version) -> dict:
    url = f'https://ftp.gnu.org/gnu/libidn/libidn2-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libidn2_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('idn2', paths=build_result['LIBPATH'])]
    }

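Note: this cache-or-fetch versions() logic is the same pattern used by the boost recipe above and the psl and unistring recipes below. A possible shared helper is sketched here; it is not part of this changeset, and the name cached_versions and its placement are hypothetical:

    import json
    import os

    def cached_versions(env, cache_name, fetch_fn, update = False):
        # fetch_fn() downloads and parses the upstream version list; the
        # result is cached as JSON under DOWNLOAD_DIR, mirroring what the
        # boost/idn2/psl/unistring recipes each do individually.
        versions_file = os.path.join(env['DOWNLOAD_DIR'], cache_name)
        if update or not os.path.exists(versions_file):
            result = fetch_fn()
            with open(versions_file, 'w') as f:
                json.dump(result, f)
            return result
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except (json.JSONDecodeError, OSError):
            print(f'{cache_name} is empty or broken, redownloading.')
            return cached_versions(env, cache_name, fetch_fn, update = True)
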
@@ -1,10 +1,9 @@
+import re
 from SCons.Script import *

-import os
-
-def cook(env: Environment, backends: list = [], git_ref: str = '') -> dict:
-    repo = env.GitBranch(repo_name = 'imgui', remote_url = 'https://github.com/ocornut/imgui.git', git_ref = git_ref or 'master')
+def _git_cook(env: Environment, repo: dict) -> dict:

     imgui_source_files = [
         os.path.join(repo['checkout_root'], 'imgui.cpp'),
         os.path.join(repo['checkout_root'], 'imgui_draw.cpp'),
@@ -13,18 +12,27 @@ def cook(env: Environment, backends: list = [], git_ref: str = '') -> dict:
     ]

     imgui_add_sources = []
-    backend_sources = {
-        'vulkan': os.path.join(repo['checkout_root'], 'backends/imgui_impl_vulkan.cpp'),
-        'sdl2': os.path.join(repo['checkout_root'], 'backends/imgui_impl_sdl2.cpp')
-    }
-    for backend in backends:
-        imgui_add_sources.append(backend_sources[backend])
+    for backend in env.get('IMGUI_BACKENDS', []):
+        imgui_add_sources.append(f'backends/imgui_impl_{backend}.cpp')

-    lib_imgui = env.StaticLibrary(
+    env.StaticLibrary(
         CPPPATH = [repo['checkout_root']],
         CPPDEFINES = ['IMGUI_IMPL_VULKAN_NO_PROTOTYPES=1'],
         target = env['LIB_DIR'] + '/imgui',
         source = imgui_source_files,
         add_source = imgui_add_sources
     )
-    return lib_imgui
+    return {
+        'CPPPATH': [repo['checkout_root']],
+        'LIBS': [os.path.join(env['LIB_DIR'], env.LibFilename('imgui'))]
+    }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'imgui',
+    repo_url = 'https://github.com/ocornut/imgui.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)

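Note on usage: the imgui recipe now reads its backend list from the environment rather than from a cook() parameter, so the selection has to happen before the recipe is cooked. A minimal sketch — the 'vulkan' and 'sdl2' names come from the backend sources the old recipe listed, while the Cook() call site is an assumption:

    # Select which imgui backends get compiled into the static library;
    # the recipe turns each name into backends/imgui_impl_<name>.cpp.
    env['IMGUI_BACKENDS'] = ['vulkan', 'sdl2']
    imgui = env.Cook('imgui')   # -> {'CPPPATH': [...], 'LIBS': [...]}
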
@@ -1,8 +1,20 @@
-import os
+import json
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'iwa', remote_url = 'https://git.mewin.de/mewin/iwa.git', git_ref = git_ref)
+_REPO_NAME = 'iwa'
+_REPO_URL = 'https://git.mewin.de/mewin/iwa.git'
+
+def versions(env: Environment, update: bool = False):
+    return [(0, 0, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
     checkout_root = repo['checkout_root']
-    return SConscript(os.path.join(checkout_root, 'LibConf'), exports = ['env'])
+    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
+        return env.DepsFromJson(json.load(f))
+
+def cook(env: Environment, version) -> dict:
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
+    checkout_root = repo['checkout_root']
+    return env.Module(os.path.join(checkout_root, 'SModule'))

recipes/json/recipe.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import re
from SCons.Script import *


def _git_cook(env: Environment, repo: dict) -> dict:
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(project_root=checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH']
    }


env.GitRecipe(
    globals = globals(),
    repo_name = 'json',
    repo_url = 'https://github.com/nlohmann/json.git',
    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

@@ -1,10 +1,17 @@
 from SCons.Script import *

-def cook(env: Environment, git_ref = 'master') -> dict:
+def versions(env: Environment, update: bool = False):
+    return [(1, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
     if env['COMPILER_FAMILY'] not in ('gcc', 'clang'):
         env.Error('libbacktrace requires gcc or clang.')
-    repo = env.GitBranch(repo_name = 'libbacktrace', remote_url = 'https://github.com/ianlancetaylor/libbacktrace.git', git_ref = git_ref)
+    repo = env.GitBranch(repo_name = 'libbacktrace', remote_url = 'https://github.com/ianlancetaylor/libbacktrace.git', git_ref = 'master')
     checkout_root = repo['checkout_root']
     build_result = env.AutotoolsProject(checkout_root)
     return {

@@ -1,11 +1,21 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref = 'main') -> dict:
-    repo = env.GitBranch(repo_name = 'libjpeg-turbo', remote_url = 'https://github.com/libjpeg-turbo/libjpeg-turbo.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
     build_result = env.CMakeProject(checkout_root)
     return {
         'CPPPATH': build_result['CPPPATH'],
         'LIBS': [env.FindLib('jpeg', paths=build_result['LIBPATH'])],
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'libjpeg-turbo',
+    repo_url = 'https://github.com/libjpeg-turbo/libjpeg-turbo.git',
+    tag_pattern = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)

@@ -1,13 +1,39 @@
+import os
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref = 'master') -> dict:
-    lib_z = env.Cook('zlib')
-    repo = env.GitBranch(repo_name = 'libpng', remote_url = 'https://git.code.sf.net/p/libpng/code.git', git_ref = git_ref)
+def _build_lib_name(env: Environment) -> str:
+    if os.name == 'posix':
+        return {
+            'debug': 'png16d'
+        }.get(env['BUILD_TYPE'], 'png16')
+    elif os.name == 'nt':
+        return {
+            'debug': 'libpng16_staticd'
+        }.get(env['BUILD_TYPE'], 'libpng16_static')
+    else:
+        raise Exception('libpng is not supported yet on this OS')
+
+def _git_cook(env: Environment, repo: dict) -> dict:
+    lib_zlib = env.Cook('zlib')
     checkout_root = repo['checkout_root']
-    build_result = env.AutotoolsProject(checkout_root)
+    build_result = env.CMakeProject(checkout_root, dependencies = [lib_zlib])
+    lib_name = _build_lib_name(env)
     return {
         'CPPPATH': build_result['CPPPATH'],
-        'LIBS': [env.FindLib('png16', paths=build_result['LIBPATH'])],
-        'DEPENDENCIES': [lib_z]
+        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'libpng',
+    repo_url = 'https://git.code.sf.net/p/libpng/code.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook,
+    dependencies = {
+        'zlib': {}
+    }
+)

@@ -1,10 +1,19 @@
-import os
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'magic_enum', remote_url = 'https://github.com/Neargye/magic_enum.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [os.path.join(checkout_root, 'include')]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'magic_enum',
+    repo_url = 'https://github.com/Neargye/magic_enum.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)

@@ -3,8 +3,15 @@ from SCons.Script import *
 import os

-def cook(env: Environment, git_ref = 'master') -> dict:
-    repo = env.GitBranch(repo_name = 'mecab', remote_url = 'https://github.com/taku910/mecab.git', git_ref = git_ref)
+def versions(env: Environment, update: bool = False):
+    return [(1, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    repo = env.GitBranch(repo_name = 'mecab', remote_url = 'https://github.com/taku910/mecab.git', git_ref = 'master')
     checkout_root = repo['checkout_root']
     build_result = env.AutotoolsProject(os.path.join(checkout_root, 'mecab'))
     return {

@@ -1,8 +1,20 @@
-import os
+import json
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'mijin', remote_url = 'https://git.mewin.de/mewin/mijin2.git', git_ref = git_ref)
+_REPO_NAME = 'mijin'
+_REPO_URL = 'https://git.mewin.de/mewin/mijin2.git'
+
+def versions(env: Environment, update: bool = False):
+    return [(0, 0, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
     checkout_root = repo['checkout_root']
-    return SConscript(os.path.join(checkout_root, 'LibConf'), exports = ['env'])
+    with open(os.path.join(checkout_root, 'dependencies.json'), 'r') as f:
+        return env.DepsFromJson(json.load(f))
+
+def cook(env: Environment, version) -> dict:
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
+    checkout_root = repo['checkout_root']
+    return env.Module(os.path.join(checkout_root, 'SModule'))

@@ -1,12 +1,27 @@
-import os
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = 'master') -> dict:
-    repo = env.GitBranch(repo_name = 'mikktspace', remote_url = 'https://github.com/mmikk/MikkTSpace.git', git_ref = git_ref)
+def versions(env: Environment, update: bool = False):
+    return [(1, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    repo = env.GitBranch(repo_name = 'mikktspace', remote_url = 'https://github.com/mmikk/MikkTSpace.git', git_ref = 'master')
+    checkout_root = repo['checkout_root']
+    ccflags = env['CCFLAGS'].copy()
+    if env['COMPILER_FAMILY'] == 'cl':
+        ccflags.append('/wd4456')
     lib_mikktspace = env.StaticLibrary(
-        CPPPATH = [repo['checkout_root']],
+        CCFLAGS = ccflags,
+        CPPPATH = [checkout_root],
         target = env['LIB_DIR'] + '/mikktspace',
         source = [os.path.join(repo['checkout_root'], 'mikktspace.c')]
     )
-    return lib_mikktspace
+    return {
+        'CPPPATH': [checkout_root],
+        'LIBS': [lib_mikktspace]
+    }

recipes/openssl/recipe.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import re
from SCons.Script import *

def _git_cook(env: Environment, repo: dict) -> dict:
    checkout_root = repo['checkout_root']
    build_result = env.AutotoolsProject(checkout_root, config_args = ['no-shared', 'no-tests', 'no-docs'], configure_script_path='Configure')
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(libname, paths=build_result['LIBPATH']) for libname in ('ssl', 'crypto')]
    }

env.GitRecipe(
    globals = globals(),
    repo_name = 'openssl',
    repo_url = 'https://github.com/openssl/openssl.git',
    tag_pattern = re.compile(r'^openssl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
    tag_fn = lambda version: f'openssl-{version[0]}.{version[1]}.{version[2]}',
    cook_fn = _git_cook
)

recipes/psl/recipe.py (new file, 70 lines)
@@ -0,0 +1,70 @@
import json
import os
import re
import requests
from SCons.Script import *

_VERSIONS_URL = 'https://api.github.com/repos/rockdaboot/libpsl/releases'
_VERSION_PATTERN = re.compile(r'^Release v([0-9]+)\.([0-9]+)\.([0-9]+)$')

def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libpsl_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libpsl_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)

def dependencies(env: Environment, version) -> 'dict':
    return {
        'idn2': {},
        'unistring': {}
    }

def cook(env: Environment, version) -> dict:
    url = f'https://github.com/rockdaboot/libpsl/releases/download/{version[0]}.{version[1]}.{version[2]}/libpsl-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libpsl_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
    }


#def _git_cook(env: Environment, repo: dict) -> dict:
#    checkout_root = repo['checkout_root']
#    subprocess.run((os.path.join(checkout_root, 'autogen.sh'),), cwd=checkout_root)
#    build_result = env.AutotoolsProject(checkout_root)
#    return {
#        'CPPPATH': build_result['CPPPATH'],
#        'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
#    }
#
#env.GitRecipe(
#    globals = globals(),
#    repo_name = 'psl',
#    repo_url = 'https://github.com/rockdaboot/libpsl.git',
#    tag_pattern = re.compile(r'^libpsl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
#    tag_fn = lambda version: f'libpsl-{version[0]}.{version[1]}.{version[2]}',
#    cook_fn = _git_cook,
#    dependencies = {
#        'idn2': {},
#        'unistring': {}
#    }
#)

@@ -1,22 +1,34 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = 'v1.x', use_external_libfmt = False) -> dict:
-    repo = env.GitBranch(repo_name = 'spdlog', remote_url = 'https://github.com/gabime/spdlog.git', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
+    lib_fmt = env.Cook('fmt')
     checkout_root = repo['checkout_root']
-    build_result = env.CMakeProject(project_root=checkout_root)
+    build_result = env.CMakeProject(project_root=checkout_root, dependencies=[lib_fmt])

     lib_name = {
         'debug': 'spdlogd'
     }.get(env['BUILD_TYPE'], 'spdlog')

-    cppdefines = ['SPDLOG_COMPILE_LIB=1']
-    if use_external_libfmt:
-        cppdefines.append('SPDLOG_FMT_EXTERNAL=1')
+    cppdefines = ['SPDLOG_COMPILE_LIB=1', 'SPDLOG_FMT_EXTERNAL=1']

     return {
-        'LIBPATH': build_result['LIBPATH'],
         'CPPPATH': build_result['CPPPATH'],
         'CPPDEFINES': cppdefines,
-        'LIBS': [lib_name]
+        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'spdlog',
+    repo_url = 'https://github.com/gabime/spdlog.git',
+    tag_pattern = re.compile(r'^v([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'v{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook,
+    dependencies = {
+        'fmt': {}
+    }
+)

@@ -1,8 +1,18 @@
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'stb', remote_url = 'https://github.com/nothings/stb.git', git_ref = git_ref)
+_REPO_NAME = 'stb'
+_REPO_URL = 'https://github.com/nothings/stb.git'
+
+
+def versions(env: Environment, update: bool = False):
+    return [(0, 0, 0)]
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = 'master')
     checkout_root = repo['checkout_root']
     return {
         'CPPPATH': [checkout_root]

recipes/unistring/recipe.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import json
import os
import re
import requests
from SCons.Script import *

_VERSIONS_URL = 'https://ftp.gnu.org/gnu/libunistring/?F=0'
_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')

def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libunistring_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        result = []
        for match in _VERSION_PATTERN.finditer(req.text):
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libunistring_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)

def dependencies(env: Environment, version) -> 'dict':
    return {}

def cook(env: Environment, version) -> dict:
    url = f'https://ftp.gnu.org/gnu/libunistring/libunistring-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libunistring_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('unistring', paths=build_result['LIBPATH'])]
    }

recipes/winsock2/recipe.py (new file, 23 lines)
@@ -0,0 +1,23 @@
import os
from SCons.Script import *


def available(env: Environment):
    if os.name != 'nt':
        return 'Winsock2 is only available on Windows.'

def versions(env: Environment, update: bool = False):
    if os.name == 'nt':
        return [(0, 0, 0)]
    else:
        return []

def dependencies(env: Environment, version) -> 'dict':
    return {}

def cook(env: Environment, version) -> dict:
    return {
        'LIBS': ['Ws2_32']
    }

@@ -1,15 +1,26 @@
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = "master") -> dict:
-    repo = env.GitBranch(repo_name = 'yaml-cpp', remote_url = 'https://github.com/jbeder/yaml-cpp', git_ref = git_ref)
+def _git_cook(env: Environment, repo: dict) -> dict:
     checkout_root = repo['checkout_root']
     build_result = env.CMakeProject(project_root=checkout_root)
     lib_name = {
         'debug': 'yaml-cppd'
     }.get(env['BUILD_TYPE'], 'yaml-cpp')
     return {
-        'LIBPATH': build_result['LIBPATH'],
         'CPPPATH': build_result['CPPPATH'],
-        'LIBS': [lib_name]
+        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])]
     }
+
+
+env.GitRecipe(
+    globals = globals(),
+    repo_name = 'yaml-cpp',
+    repo_url = 'https://github.com/jbeder/yaml-cpp.git',
+    tag_pattern = re.compile(r'^yaml-cpp-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
+    tag_fn = lambda version: f'yaml-cpp-{version[0]}.{version[1]}.{version[2]}',
+    cook_fn = _git_cook
+)

@@ -1,12 +1,49 @@
 import os
+import re
 from SCons.Script import *

-def cook(env: Environment, git_ref: str = 'master') -> dict:
-    repo = env.GitBranch(repo_name = 'zlib', remote_url = 'https://github.com/madler/zlib.git', git_ref = git_ref)
-    extracted_root = repo['checkout_root']
-    build_result = env.CMakeProject(project_root=extracted_root)
-    return {
-        'CPPPATH': [os.path.join(build_result['install_dir'], 'install')],
-        'LIBS': [env.FindLib('z', paths=build_result['LIBPATH'])]
-    }
+_REPO_NAME = 'zlib'
+_REPO_URL = 'https://github.com/madler/zlib.git'
+_TAG_PATTERN = re.compile(r'^v([0-9]+)\.([0-9]+)(?:\.([0-9]+))?$')
+
+def _build_lib_name(env: Environment) -> str:
+    if os.name == 'posix':
+        return 'z'
+    elif os.name == 'nt':
+        return {
+            'debug': 'zlibstaticd'
+        }.get(env['BUILD_TYPE'], 'zlibstatic')
+    else:
+        raise Exception('libpng is not supported yet on this OS')
+
+def versions(env: Environment, update: bool = False):
+    tags = env.GitTags(repo_name = _REPO_NAME, remote_url = _REPO_URL, force_fetch=update)
+    result = []
+    for tag in tags:
+        match = _TAG_PATTERN.match(tag)
+        if match:
+            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2] or 0)))
+    return result
+
+def dependencies(env: Environment, version) -> 'dict':
+    return {}
+
+def cook(env: Environment, version) -> dict:
+    git_ref = f'refs/tags/v{version[0]}.{version[1]}'
+    if version[2] != 0:
+        git_ref = git_ref + f'.{version[2]}'
+    repo = env.GitBranch(repo_name = _REPO_NAME, remote_url = _REPO_URL, git_ref = git_ref)
+    checkout_root = repo['checkout_root']
+    build_result = env.CMakeProject(project_root=checkout_root)
+    include_dir = os.path.join(build_result['install_dir'], 'include')
+    lib_name = _build_lib_name(env)
+    lib_file = env.FindLib(lib_name, paths=build_result['LIBPATH'])
+    return {
+        'CPPPATH': [include_dir],
+        'LIBS': [lib_file],
+        'CMAKE_VARS': {
+            'ZLIB_LIBRARY': lib_file,
+            'ZLIB_INCLUDE_DIR': include_dir
+        }
+    }

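Note: the zlib recipe now exports CMAKE_VARS alongside the usual include and library results, and the libpng change above passes the cooked zlib dict to env.CMakeProject as a dependency, which is presumably how those variables reach the dependent CMake configure step (the merging done inside CMakeProject is not shown in this diff). A sketch of the handoff as it appears from the recipes themselves, taken from the libpng recipe above:

    # Inside a recipe's cook function, where checkout_root is already set.
    lib_zlib = env.Cook('zlib')
    # lib_zlib['CMAKE_VARS'] carries ZLIB_LIBRARY and ZLIB_INCLUDE_DIR.
    build_result = env.CMakeProject(checkout_root, dependencies = [lib_zlib])
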