import copy
import glob
import json
import os
import psutil
import sys
import time

class _VersionSpec:
    def __init__(self, minimum_version = None, maximum_version = None):
        self.minimum_version = minimum_version
        self.maximum_version = maximum_version

    def __eq__(self, other):
        # compare by value; the default identity comparison would make every
        # freshly computed spec look "different" in _add_dependency
        return isinstance(other, _VersionSpec) \
            and self.minimum_version == other.minimum_version \
            and self.maximum_version == other.maximum_version

    def __str__(self):
        return f'Min: {self.minimum_version}, Max: {self.maximum_version}'

class _Dependency:
    def __init__(self):
        # instance attributes instead of shared mutable class attributes
        self.name: str = ''
        self.version = None
        self.version_spec = None
        self.recipe = None
        self.depdeps: list = []  # resolved _Dependency objects of this dependency's own dependencies
        self.cook_result: dict = {}

class _Target:
    def __init__(self):
        self.builder = None
        self.args: list = []
        self.kwargs: dict = {}
        self.dependencies: list = []
        self.target = None  # the actual SCons node(s), set once built

def _find_recipe(env: Environment, recipe_name: str):
    if recipe_name in env['SPP_RECIPES']:
        return env['SPP_RECIPES'][recipe_name]

    import importlib.util
    source_file = None
    for folder in env['RECIPES_FOLDERS']:
        try_source_file = f'{folder.abspath}/{recipe_name}/recipe.py'
        if os.path.exists(try_source_file):
            source_file = try_source_file
            break
    if not source_file:
        raise Exception(f'Could not find recipe {recipe_name}.')
    spec = importlib.util.spec_from_file_location(recipe_name, source_file)
    recipe = importlib.util.module_from_spec(spec)
    recipe.env = env  # give the recipe module access to the environment before it runs
    spec.loader.exec_module(recipe)
    env['SPP_RECIPES'][recipe_name] = recipe
    return recipe
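
# A recipe module is expected to look roughly like this (illustrative sketch; the
# `mylib` recipe and all its values are made up). `versions` returns comparable
# version tuples, `dependencies` maps recipe names to version-spec dicts, and
# `cook` returns a lib_conf dict as consumed by _inject_dependency below:
#
#     # recipes/mylib/recipe.py
#     def versions(env, update = False):
#         return [(1, 2, 3), (1, 2, 4)]
#
#     def dependencies(env, version):
#         return {}  # e.g. {'zlib': {'min': (1, 2, 0)}}
#
#     def cook(env, version):
#         return {'CPPPATH': ['...'], 'LIBPATH': ['...'], 'LIBS': ['mylib']}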

def _cook(env: Environment, recipe_name: str):
    dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
    if not dependency:
        raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
    if not dependency.cook_result:
        dependency.cook_result = dependency.recipe.cook(env, dependency.version)
    return dependency.cook_result
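
# Usage sketch (illustrative; 'zlib' is a made-up recipe name): once a dependency
# has been declared somewhere, its cooked configuration can be fetched and applied:
#
#     zlib_conf = env.Cook('zlib')
#     env.ParseLibConf(zlib_conf)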

def _module(env: Environment, file: str):
    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')


def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
    env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
               CPPDEFINES = lib_conf.get('CPPDEFINES', []),
               LIBPATH = lib_conf.get('LIBPATH', []),
               LIBS = lib_conf.get('LIBS', []),
               JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', []))
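
# A lib_conf is plain data; a minimal hand-written example (paths are made up):
#
#     env.ParseLibConf({
#         'CPPPATH': ['/opt/mylib/include'],
#         'LIBPATH': ['/opt/mylib/lib'],
#         'LIBS': ['mylib'],
#     })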

def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
    if list_name not in dependency:
        return
    if list_name not in kwargs:
        kwargs[list_name] = []
    kwargs[list_name].extend(dependency[list_name])  # TODO: eliminate duplicates?


def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
    if isinstance(dependency, dict):
        _inject_list(kwargs, dependency, 'CPPPATH')
        _inject_list(kwargs, dependency, 'CPPDEFINES')
        _inject_list(kwargs, dependency, 'LIBPATH')
        _inject_list(kwargs, dependency, 'LIBS')
        if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
            kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
        if 'DEPENDENCIES' in dependency:
            for inner_dependency in dependency['DEPENDENCIES']:
                _inject_dependency(inner_dependency, kwargs, False)
    elif isinstance(dependency, _Dependency):
        if not dependency.cook_result:
            # relies on the module-level `env` defined further down in this script
            dependency.cook_result = dependency.recipe.cook(env, dependency.version)
        _inject_list(kwargs, dependency.cook_result, 'CPPPATH')
        _inject_list(kwargs, dependency.cook_result, 'CPPDEFINES')
        _inject_list(kwargs, dependency.cook_result, 'LIBPATH')
        _inject_list(kwargs, dependency.cook_result, 'LIBS')
        for depdep in dependency.depdeps:
            _inject_dependency(depdep, kwargs)

def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
    result_nodes = []
    paths = [root_path]
    while paths:
        path = paths.pop()
        all_nodes = env.Glob(f'{path}/*', **kwargs)
        # `srcnode()` must be used because `isdir()` doesn't work for entries in
        # variant dirs which haven't been copied yet.
        paths.extend(entry for entry in all_nodes if entry.isdir() or (entry.srcnode() and entry.srcnode().isdir()))
        result_nodes.extend(env.Glob(f'{path}/{pattern}', **kwargs))
    return sorted(result_nodes)
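
# Example (illustrative): recursively collect all C++ sources below `src`,
# including entries in variant dirs that haven't been copied yet:
#
#     sources = env.RGlob('src', '*.cpp')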

def _safe_eval(condition: str, local_vars = None):
    # evaluate `condition` with a restricted set of builtins and no access to globals
    return eval(condition, {
        '__builtins__': {
            'abs': abs, 'all': all, 'any': any, 'ascii': ascii, 'bin': bin, 'bool': bool, 'chr': chr, 'complex': complex,
            'dict': dict, 'divmod': divmod, 'enumerate': enumerate, 'filter': filter, 'float': float, 'format': format,
            'hasattr': hasattr, 'hash': hash, 'hex': hex, 'id': id, 'int': int, 'isinstance': isinstance,
            'issubclass': issubclass, 'len': len, 'list': list, 'map': map, 'max': max, 'min': min, 'next': next,
            'oct': oct, 'ord': ord, 'pow': pow, 'range': range, 'reversed': reversed, 'round': round, 'set': set,
            'slice': slice, 'sorted': sorted, 'str': str, 'sum': sum, 'tuple': tuple, 'type': type, 'zip': zip
        }
    }, local_vars or {})
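
# Example: _safe_eval("compiler_family == 'gcc'", {'compiler_family': 'clang'})
# evaluates to False; the expression has no access to imports, file I/O or any
# builtins beyond the whitelist above.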

def _deps_from_json(env: Environment, deps: dict) -> dict:
    to_remove = []
    for key, dep in deps.items():
        if 'condition' in dep:
            if not _safe_eval(dep['condition'], {
                'compiler_family': env['COMPILER_FAMILY']
            }):
                to_remove.append(key)
                continue
        # version bounds are compared as tuples, but JSON only knows lists
        if 'min' in dep and isinstance(dep['min'], list):
            dep['min'] = tuple(dep['min'])
        if 'max' in dep and isinstance(dep['max'], list):
            dep['max'] = tuple(dep['max'])
    for key in to_remove:
        del deps[key]
    return deps
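
# Example input (illustrative; recipe names are made up), e.g. loaded from a JSON
# file. The second entry is dropped entirely when not building with gcc:
#
#     deps = env.DepsFromJson({
#         'fmt': {'min': [9, 0, 0]},
#         'libbacktrace': {'condition': "compiler_family == 'gcc'"}
#     })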

def _make_interface(env: Environment, dependencies: list = []):
    kwargs = {}
    for dependency in dependencies:
        _inject_dependency(dependency, kwargs)
    return {
        'CPPPATH': kwargs.get('CPPPATH', []),
        'CPPDEFINES': kwargs.get('CPPDEFINES', [])
    }

def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
    if os.name == 'posix':
        ext = {
            'static': 'a',
            'shared': 'so'
        }[type]
        return f'lib{name}.{ext}'
    elif os.name == 'nt':
        ext = {
            'static': 'lib',
            'shared': 'dll'
        }[type]
        return f'{name}.{ext}'
    else:
        raise Exception(f'Unsupported OS: {os.name}')
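
# Examples: on posix, _lib_filename(env, 'foo') yields 'libfoo.a' and
# _lib_filename(env, 'foo', 'shared') yields 'libfoo.so'; on nt, 'foo.lib' / 'foo.dll'.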

def _find_lib(env: Environment, name: str, paths: 'list[str]', type: str = 'static', allow_fail: bool = False, use_glob: bool = False):
    fname = _lib_filename(env, name, type)
    for path in paths:
        lib_path = os.path.join(path, fname)
        if use_glob:
            files = glob.glob(lib_path)
            if len(files) == 1:
                return files[0]
            elif len(files) > 1:
                raise Exception(f'Multiple candidates found for library {name} in paths: "{", ".join(paths)}": "{", ".join(files)}".')
        elif os.path.exists(lib_path):
            return lib_path
    if allow_fail:
        return None
    raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')

def _error(env: Environment, message: str):
    print(message, file=sys.stderr)
    env.Exit(1)

def _find_common_dependency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
    result_version = _VersionSpec()
    if versionA.minimum_version is not None:
        if versionB.minimum_version is not None:
            result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version)
        else:
            result_version.minimum_version = versionA.minimum_version
    else:
        result_version.minimum_version = versionB.minimum_version

    if versionA.maximum_version is not None:
        if versionB.maximum_version is not None:
            result_version.maximum_version = min(versionA.maximum_version, versionB.maximum_version)
        else:
            result_version.maximum_version = versionA.maximum_version
    else:
        result_version.maximum_version = versionB.maximum_version

    # an empty intersection means the two requirements are incompatible
    if result_version.minimum_version is not None and result_version.maximum_version is not None \
            and (result_version.minimum_version > result_version.maximum_version):
        return None
    return result_version
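
# Worked example: intersecting (min (1, 2), max None) with (min (1, 0), max (1, 5))
# yields (min (1, 2), max (1, 5)); intersecting (min (2, 0), max None) with
# (min None, max (1, 5)) yields None, i.e. the requirements are incompatible.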

def _parse_version_spec(version_spec: dict) -> _VersionSpec:
    return _VersionSpec(version_spec.get('min'), version_spec.get('max'))


def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
    if name not in env['SPP_DEPENDENCIES']:
        return True
    dependency = env['SPP_DEPENDENCIES'][name]
    common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
    return common_version_spec is not None

def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
    if name in env['SPP_DEPENDENCIES']:
        dependency = env['SPP_DEPENDENCIES'][name]
        common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
        if common_version_spec is None:
            raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}')
        if dependency.version_spec != common_version_spec:
            # the requirements narrowed, so version resolution has to run again
            env['_SPP_DEPENDENCIES_OKAY'] = False
            dependency.version_spec = common_version_spec
        return dependency
    dependency = _Dependency()
    dependency.name = name
    dependency.version_spec = version_spec
    dependency.recipe = _find_recipe(env, name)
    env['SPP_DEPENDENCIES'][name] = dependency
    env['_SPP_DEPENDENCIES_OKAY'] = False
    return dependency

def _sort_versions(versions: list) -> None:
    import functools

    # sort in descending order, i.e. newest version first
    def _compare(left, right):
        if left < right:
            return 1
        elif left == right:
            return 0
        else:
            return -1
    versions.sort(key=functools.cmp_to_key(_compare))

def _version_matches(version, version_spec: _VersionSpec) -> bool:
    if version_spec.minimum_version is not None and version < version_spec.minimum_version:
        return False
    if version_spec.maximum_version is not None and version > version_spec.maximum_version:
        return False
    return True

def _find_version(env: Environment, dependency: _Dependency):
    # first try the locally known versions, then update the repository and try again
    for update in (False, True):
        versions = dependency.recipe.versions(env, update = update)
        _sort_versions(versions)
        for version in versions:
            if _version_matches(version, dependency.version_spec):
                canadd = True
                for depname, depspec in dependency.recipe.dependencies(env, version).items():
                    if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
                        canadd = False
                        break
                if canadd:
                    depdeps = []
                    for depname, depspec in dependency.recipe.dependencies(env, version).items():
                        depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
                    dependency.version = version
                    dependency.depdeps = depdeps
                    return
    print(f'Available versions:\n{versions}')
    print(f'Required version: {dependency.version_spec}')
    raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')

def _wrap_builder(builder, is_lib: bool = False):
    def _wrapped(env, dependencies = {}, *args, **kwargs):
        target_dependencies = []
        for name, version_spec in dependencies.items():
            target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec)))

        # copy the environment defaults so per-target injection doesn't leak back into `env`
        if 'CPPPATH' not in kwargs:
            kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
        if 'CPPDEFINES' not in kwargs:
            kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
        if 'LIBPATH' not in kwargs:
            kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
        if 'LIBS' not in kwargs and 'LIBS' in env:
            kwargs['LIBS'] = copy.copy(env['LIBS'])
        if 'source' in kwargs:
            source = kwargs['source']
            if not isinstance(source, list):
                source = [source]
            new_source = []
            for src in source:
                if isinstance(src, str):
                    new_source.append(env.Entry(src))
                else:
                    new_source.append(src)
            kwargs['source'] = new_source

        target = _Target()
        target.builder = builder
        target.args = args
        target.kwargs = kwargs
        target.dependencies = target_dependencies
        env.Append(SPP_TARGETS = [target])
        # targets without dependencies can be built right away; the rest wait for Finalize()
        if not target.dependencies:
            _build_target(target)
        return target
    return _wrapped
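
# Usage sketch (illustrative; the target name and the 'fmt' recipe are made up):
#
#     prog = env.Program(
#         dependencies = {'fmt': {'min': (9, 0, 0)}},
#         target = 'bin/app',
#         source = env.RGlob('src', '*.cpp'))
#
# Targets with dependencies are deferred until env.Finalize() has resolved versions.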

def _wrap_default(default):
    def _wrapped(env, arg):
        if isinstance(arg, _Target):
            env.Append(SPP_DEFAULT_TARGETS = [arg])
        elif isinstance(arg, dict) and '_target' in arg:
            default(arg['_target'])
        else:
            default(arg)
    return _wrapped


def _wrap_depends(depends):
    def _wrapped(env, dependant, dependency):
        if isinstance(dependant, _Target) or isinstance(dependency, _Target):
            # at least one side is a deferred target; record the pair instead of resolving now
            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
            return
        if isinstance(dependant, dict) and '_target' in dependant:
            dependant = dependant['_target']
        if isinstance(dependency, dict) and '_target' in dependency:
            dependency = dependency['_target']
        depends(dependant, dependency)
    return _wrapped

def _build_target(target: _Target):
    for dependency in target.dependencies:
        _inject_dependency(dependency, target.kwargs)
    if 'LIBS' in target.kwargs:
        libs_copy = list(target.kwargs['LIBS'])
        for lib in libs_copy:
            if isinstance(lib, str) and os.path.isabs(lib):
                # absolute paths are passed as sources so the linker gets the exact file
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['source'].append(lib)
            elif isinstance(lib, _Target):
                if not lib.target:
                    _build_target(lib)
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['LIBS'].append(lib.target)
    target.target = target.builder(*target.args, **target.kwargs)


def _version_to_string(version) -> str:
    return '.'.join([str(v) for v in version])

def _finalize(env: Environment):
    version_requirements = {dep.name: {
        'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
        'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
    } for dep in env['SPP_DEPENDENCIES'].values()}

    # resolve versions until the dependency set reaches a fixed point
    env['_SPP_DEPENDENCIES_OKAY'] = False
    while not env['_SPP_DEPENDENCIES_OKAY']:
        env['_SPP_DEPENDENCIES_OKAY'] = True
        for dependency in list(env['SPP_DEPENDENCIES'].values()):
            if not dependency.version:
                _find_version(env, dependency)

    with open('cache/versions.json', 'w') as f:
        json.dump({
            'requirements': version_requirements,
            'selected': {
                dep.name: _version_to_string(dep.version) for dep in env['SPP_DEPENDENCIES'].values()
            }
        }, f)

    for target in env['SPP_TARGETS']:
        _build_target(target)
    for target in env['SPP_DEFAULT_TARGETS']:
        env.Default(target.target)
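
# A typical SConstruct tail might look like this (illustrative):
#
#     env.Module('src/SConscript')
#     env.Finalize()
#
# Finalize() has to run after all targets are declared: it resolves dependency
# versions to a fixed point and only then builds the deferred targets.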

def _get_fallback_cache_dir() -> str:
    return Dir('#cache').abspath


def _find_system_cache_dir() -> str:
    if os.name == 'posix':
        if os.environ.get('XDG_CACHE_HOME'):
            return os.environ['XDG_CACHE_HOME']
        elif os.environ.get('HOME'):
            return os.path.join(os.environ['HOME'], '.cache')
    elif os.name == 'nt':
        # TODO: just guessing
        return os.environ['LocalAppData']
    # fallback
    return _get_fallback_cache_dir()

Import('config')

if not config.get('PROJECT_NAME'):
    config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'):
    config['CXX_STANDARD'] = 'c++23'

if not config.get('PREPROCESSOR_PREFIX'):
    config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper()  # TODO: could be nicer

if 'COMPILATIONDB_FILTER_FILES' not in config:
    config['COMPILATIONDB_FILTER_FILES'] = True

AddOption(
    '--build_type',
    dest = 'build_type',
    type = 'choice',
    choices = ('debug', 'release_debug', 'release', 'profile'),
    nargs = 1,
    action = 'store',
    default = 'debug'
)

AddOption(
    '--unity',
    dest = 'unity_mode',
    type = 'choice',
    choices = ('enable', 'disable', 'stress'),
    nargs = 1,
    action = 'store',
    default = 'enable'
)

AddOption(
    '--variant',
    dest = 'variant',
    nargs = 1,
    action = 'store'
)

AddOption(
    '--asan',
    dest = 'enable_asan',
    action = 'store_true'
)

AddOption(
    '--config_file',
    dest = 'config_file',
    nargs = 1,
    action = 'store',
    default = 'config.py'
)

AddOption(
    '--compiler',
    dest = 'compiler',
    type = 'choice',
    choices = ('auto', 'gcc', 'clang', 'msvc'),
    nargs = 1,
    action = 'store',
    default = 'auto'
)

AddOption(
    '--update_repositories',
    dest = 'update_repositories',
    action = 'store_true'
)

AddOption(
    '--dump_env',
    dest = 'dump_env',
    action = 'store_true'
)
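
# Example invocations (illustrative):
#
#     scons --build_type=release --unity=disable
#     scons --variant=arm64 --compiler=clang --asan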

build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode')
variant = GetOption('variant')
enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
dump_env = GetOption('dump_env')

# map the --compiler choice to default executables; 'auto' keeps SCons' own detection
default_CC = {
    'gcc': 'gcc',
    'clang': 'clang',
    'msvc': 'cl.exe'
}.get(compiler, None)
default_CXX = {
    'gcc': 'g++',
    'clang': 'clang++',
    'msvc': 'cl.exe'
}.get(compiler, None)

if not os.path.isabs(config_file):
    config_file = os.path.join(Dir('#').abspath, config_file)

vars = Variables(config_file)
vars.Add('CC', 'The C Compiler', default_CC)
vars.Add('CXX', 'The C++ Compiler', default_CXX)
vars.Add('LINK', 'The Linker')
vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
vars.Add('CFLAGS', 'C Compiler Flags')
vars.Add('CXXFLAGS', 'C++ Compiler Flags')
vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
         ' project.', config['COMPILATIONDB_FILTER_FILES'])
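
# The config file is a plain Python file read by Variables(); for example
# (illustrative values):
#
#     # config.py
#     CXX = 'clang++'
#     CXXFLAGS = '-stdlib=libc++'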

tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config:
    tools.extend(config['TOOLS'])

env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['RECIPES_FOLDERS'] = [Dir('recipes')]
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
env['UPDATE_REPOSITORIES'] = update_repositories
env['CXX_STANDARD'] = config['CXX_STANDARD']  # make it available to everyone
env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []

print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except OSError:
    env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
    print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)  # no more safeguards!
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')

env['SHARED_CACHE_DIR'] = Dir('#cache').abspath
# allow compiling to variant directories (each gets its own bin/lib/cache dirs)
if variant:
    env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
    env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
    env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
    env['VARIANT_DIR'] = f'{env["CACHE_DIR"]}/variant'
    env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
else:
    env['VARIANT_DIR'] = None
    env['COMPILATIONDB_USE_ABSPATH'] = True
    if env['COMPILATIONDB_FILTER_FILES']:
        env['COMPILATIONDB_PATH_FILTER'] = f"{Dir('#').abspath}/*"
    comp_db = env.CompilationDatabase(target = '#compile_commands.json')
    Default(comp_db)
    env['BIN_DIR'] = Dir('#bin').abspath
    env['LIB_DIR'] = Dir('#lib').abspath
    env['CACHE_DIR'] = env['SHARED_CACHE_DIR']
env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
env['BUILD_TYPE'] = build_type
env.Append(LIBPATH = [env['LIB_DIR']])  # to allow submodules to link to each other without hassle

# make sure these are all defined in case someone wants to use/copy them
env.Append(CCFLAGS = [])
env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])

# init SPP environment variables
env['SPP_TARGETS'] = []
env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {}

# create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
    with open(cache_gitignore, 'w') as f:
        f.write('*\n')

if env['CACHE_DIR'] != env['SHARED_CACHE_DIR']:
    os.makedirs(env['SHARED_CACHE_DIR'], exist_ok=True)
    cache_gitignore = f'{env["SHARED_CACHE_DIR"]}/.gitignore'
    if not os.path.exists(cache_gitignore):
        with open(cache_gitignore, 'w') as f:
            f.write('*\n')

# check whether repositories were updated since the last boot
update_stamp_file = f'{env["SHARED_CACHE_DIR"]}/last_update.stamp'
update_time = 0.0
if os.path.exists(update_stamp_file):
    with open(update_stamp_file, 'r') as f:
        try:
            update_time = float(f.read())
        except ValueError:
            pass
boot_time = psutil.boot_time()
if boot_time > update_time:
    print('Didn\'t update repositories since last boot, doing it now...')
    env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
    with open(update_stamp_file, 'w') as f:
        f.write(str(time.time()))

# create the clone and download dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)

# try to detect which compiler we are using
compiler_exe = os.path.basename(env.subst(env['CC']))
if 'gcc' in compiler_exe:
    env['COMPILER_FAMILY'] = 'gcc'
elif 'clang' in compiler_exe:
    env['COMPILER_FAMILY'] = 'clang'
elif 'cl' in compiler_exe:
    env['COMPILER_FAMILY'] = 'cl'
else:
    env['COMPILER_FAMILY'] = 'unknown'

# setup unity build depending on mode
if unity_mode == 'disable':
    env['UNITY_DISABLE'] = True
elif unity_mode == 'stress':
    # compile everything in one single file to stress test the unity build
    env['UNITY_MAX_SOURCES'] = 100000  # I'll hopefully never reach this
    env['UNITY_MIN_FILES'] = 1

# setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic'])
    env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
    if build_type != 'release':
        env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
    env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
    if env['COMPILER_FAMILY'] == 'gcc':
        # GCC complains about a missing initializer for "<anonymous>" that doesn't exist :/
        # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
        # also -Wdangling-reference seems to produce a lot of false positives
        # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?)
        # -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
        # -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
        env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
    else:  # clang only
        # no-gnu-anonymous-struct - we don't care
        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
        env.Append(CXXFLAGS = ['-fexperimental-library'])  # enable std::jthread
    if build_type == 'debug':
        env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
        env.Append(DEPS_CXXFLAGS = ['-D_GLIBCXX_DEBUG'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
        if build_type == 'profile':
            if env['COMPILER_FAMILY'] == 'gcc':
                env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_GCC_INSTRUMENTING=1'])
                env.Append(CCFLAGS = ['-finstrument-functions'])
                env.Append(LINKFLAGS = ['-rdynamic'])
    elif build_type == 'release':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])

    if enable_asan:
        env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
        env.Append(LINKFLAGS = ['-fsanitize=address'])
        env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
        env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
elif env['COMPILER_FAMILY'] == 'cl':
    cxx_version_name = {
        'c++14': 'c++14',
        'c++17': 'c++17',
        'c++20': 'c++20',
        'c++23': 'c++latest',
        'c++26': 'c++latest'
    }.get(env['CXX_STANDARD'], 'c++14')  # default to C++14 for older standards
    # C4201: nonstandard extension used: nameless struct/union - I use it and want to continue using it
    # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
    # C4702: unreachable code, issued after MIJIN_FATAL macro
    # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
    # C4275: same as above
    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
                          f'/std:{cxx_version_name}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t', '/utf-8'])
    env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS'])  # I'd rather not use the MSVC specific versions of functions just because they are "safer" ...
    env.Append(DEPS_CXXFLAGS = ['/EHsc', '/Zc:char8_t', '/utf-8', '/vmg'])
    if build_type == 'debug':
        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
        env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
        env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
        env.Append(DEPS_CXXFLAGS = ['/Zi'])
        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
    else:
        env.Append(CCFLAGS = ['/O2'])

if env['COMPILER_FAMILY'] == 'gcc':
    env.Append(CXXFLAGS = ['-Wno-volatile'])
elif env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])

env.AddMethod(_cook, 'Cook')
env.AddMethod(_parse_lib_conf, 'ParseLibConf')
env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_deps_from_json, 'DepsFromJson')
env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')

env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')
env.AddMethod(_finalize, 'Finalize')

if hasattr(env, 'Gch'):
    env.AddMethod(_wrap_builder(env.Gch), 'Gch')

for addon_file in env.Glob('addons/*.py'):
    env = SConscript(addon_file, exports = 'env')

if dump_env:
    print('==== Begin Environment Dump =====')
    print(env.Dump())
    print('==== End Environment Dump =====')

Return('env')