import copy
from dataclasses import dataclass, field
import enum
import glob
import inspect
import json
import multiprocessing
import os
import pathlib
import psutil
import shutil
import sys
import time
import uuid
from typing import Any
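
# This SConscript implements "spp", a small dependency/build layer on top of
# SCons: recipes resolve external dependencies to versions and compile flags,
# _Target records defer builder invocations until env.Finalize(), and the
# bottom half of the file configures compilers, caches and project generation.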

class TargetType(enum.Enum):
    PROGRAM = 0
    STATIC_LIBRARY = 1
    SHARED_LIBRARY = 2


class _VersionSpec:
    def __init__(self, minimum_version=None, maximum_version=None, options=None):
        self.minimum_version = minimum_version
        self.maximum_version = maximum_version
        # a literal {} default would be shared between all calls, so create the dict per instance
        self.options = options if options is not None else {}

    def __str__(self):
        return f'Min: {self.minimum_version}, Max: {self.maximum_version}, Options: {self.options}'
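
# Versions are plain tuples (see _deps_from_json, which converts JSON lists to
# tuples) compared lexicographically. An illustrative spec accepting any 1.x
# release with a recipe option might look like:
#   _VersionSpec(minimum_version=(1, 0), maximum_version=(2, 0), options={'shared': True})
# The 'shared' option name is hypothetical; valid options depend on the recipe.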

@dataclass(eq=False)
class _Dependency:
    # eq=False keeps identity comparison; default_factory avoids sharing one
    # list/dict between all instances via class attributes
    name: str = ''
    version: 'tuple|None' = None
    version_spec: '_VersionSpec|None' = None
    recipe: 'object|None' = None
    depdeps: list = field(default_factory=list)
    cook_result: dict = field(default_factory=dict)

@dataclass(eq=False)
class _Target:
    name: str = ''
    target_type: 'TargetType|None' = None
    builder: 'object|None' = None
    args: tuple = ()
    kwargs: dict = field(default_factory=dict)
    dependencies: list = field(default_factory=list)
    target: 'object|None' = None

@dataclass
class _Module:
    name: str
    description: str
    cxx_namespace: str

def _find_recipe(env: Environment, recipe_name: str):
    if recipe_name in env['SPP_RECIPES']:
        return env['SPP_RECIPES'][recipe_name]
    import importlib.util
    source_file = None

    if not env['SPP_RECIPES_FOLDERS']:
        env.Error('No recipe repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
    for folder in env['SPP_RECIPES_FOLDERS']:
        from SCons import Node
        if isinstance(folder, Node.Node):
            folder = folder.abspath
        try_source_file = f'{folder}/{recipe_name}/recipe.py'
        if os.path.exists(try_source_file):
            source_file = try_source_file
            break
    if not source_file:
        env.Error(f'Could not find recipe for {recipe_name}.')
    spec = importlib.util.spec_from_file_location(recipe_name, source_file)
    recipe = importlib.util.module_from_spec(spec)
    recipe.env = env
    spec.loader.exec_module(recipe)
    env['SPP_RECIPES'][recipe_name] = recipe
    return recipe

def _run_cook(dependency: _Dependency):
    if not dependency.cook_result:
        cook_signature = inspect.signature(dependency.recipe.cook)
        kwargs = {}
        if 'options' in cook_signature.parameters:
            kwargs['options'] = dependency.version_spec.options
        # `env` is the global environment created below; SConscript files share one namespace
        dependency.cook_result = dependency.recipe.cook(env, dependency.version, **kwargs)


def _cook(env: Environment, recipe_name: str):
    dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
    if not dependency:
        raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
    _run_cook(dependency)
    return dependency.cook_result
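
# Recipe modules are duck-typed; as used here a recipe is expected to expose:
#   versions(env, update, [options])      -> list of version tuples
#   dependencies(env, version, [options]) -> dict of name -> version spec
#   cook(env, version, [options])         -> lib-conf dict (CPPPATH, LIBS, ...)
# The optional `options` parameter is passed only if the function declares it
# (detected via inspect.signature).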

def _normalize_module_path(env: Environment, path: str) -> str:
    module_root = env.Dir('#/private').abspath
    return os.path.relpath(path, module_root)


def _module(env: Environment, file: str):
    folder = _normalize_module_path(env, env.File(file).dir.abspath)
    dirname = os.path.basename(folder)
    env.Append(SPP_MODULES = {folder: _Module(
        name=dirname,
        description='',
        cxx_namespace=dirname
    )})
    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')


def _module_config(env: Environment, **kwargs) -> None:
    module_folder = _normalize_module_path(env, env.Dir('.').abspath)
    module = env['SPP_MODULES'].get(module_folder)
    if module is None:
        env.Warn(f'No module config found for module at {module_folder}')
        return
    module.__dict__.update(kwargs)

def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
    env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
               CPPDEFINES = lib_conf.get('CPPDEFINES', []),
               LIBPATH = lib_conf.get('LIBPATH', []),
               LIBS = lib_conf.get('LIBS', []),
               LINKFLAGS = lib_conf.get('LINKFLAGS', []),
               JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', []))


def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
    if list_name not in dependency:
        return
    if list_name not in kwargs:
        kwargs[list_name] = []
    kwargs[list_name].extend(dependency[list_name])  # TODO: eliminate duplicates?

def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
    if isinstance(dependency, dict):
        _inject_list(kwargs, dependency, 'CPPPATH')
        _inject_list(kwargs, dependency, 'CPPDEFINES')
        _inject_list(kwargs, dependency, 'LIBPATH')
        _inject_list(kwargs, dependency, 'LIBS')
        _inject_list(kwargs, dependency, 'LINKFLAGS')
        if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
            kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
        if 'DEPENDENCIES' in dependency:
            for inner_dependency in dependency['DEPENDENCIES']:
                _inject_dependency(inner_dependency, kwargs, False)
    elif isinstance(dependency, _Dependency):
        _run_cook(dependency)
        _inject_list(kwargs, dependency.cook_result, 'CPPPATH')
        _inject_list(kwargs, dependency.cook_result, 'CPPDEFINES')
        _inject_list(kwargs, dependency.cook_result, 'LIBPATH')
        _inject_list(kwargs, dependency.cook_result, 'LIBS')
        _inject_list(kwargs, dependency.cook_result, 'LINKFLAGS')
        for depdep in dependency.depdeps:
            _inject_dependency(depdep, kwargs)
    elif isinstance(dependency, _Target):
        _inject_list(kwargs, dependency.kwargs, 'CPPPATH')
        _inject_list(kwargs, dependency.kwargs, 'CPPDEFINES')
        _inject_list(kwargs, dependency.kwargs, 'LIBPATH')
        _inject_list(kwargs, dependency.kwargs, 'LIBS')
        _inject_list(kwargs, {'LIBS': [dependency]}, 'LIBS')
        _inject_list(kwargs, dependency.kwargs, 'LINKFLAGS')
        for depdep in dependency.dependencies:
            _inject_dependency(depdep, kwargs)
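
# _inject_dependency accepts three shapes: a plain lib-conf dict (as returned
# by cook()), a _Dependency (cooked on demand), or a sibling _Target (whose
# kwargs are propagated and which is itself appended to LIBS). Each form also
# injects its transitive dependencies.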

def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
    result_nodes = []
    paths = [root_path]
    while paths:
        path = paths.pop()
        all_nodes = env.Glob(f'{path}/*', **kwargs)
        # `srcnode()` must be used because `isdir()` doesn't work for entries in
        # variant dirs which haven't been copied yet.
        paths.extend(entry for entry in all_nodes if entry.isdir() or (entry.srcnode() and entry.srcnode().isdir()))
        result_nodes.extend(env.Glob(f'{path}/{pattern}', **kwargs))
    return sorted(result_nodes)
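
# Illustrative usage (the 'src' folder name is a placeholder):
#   sources = env.RGlob('src', '*.cpp')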

def _safe_eval(condition: str, locals=None):
    # evaluate with a restricted set of builtins so recipe/config conditions
    # cannot call anything dangerous
    return eval(condition, {
        '__builtins__': {
            'abs': abs, 'all': all, 'any': any, 'ascii': ascii, 'bin': bin, 'bool': bool, 'chr': chr, 'complex': complex,
            'dict': dict, 'divmod': divmod, 'enumerate': enumerate, 'filter': filter, 'float': float, 'format': format,
            'hasattr': hasattr, 'hash': hash, 'hex': hex, 'id': id, 'int': int, 'isinstance': isinstance,
            'issubclass': issubclass, 'len': len, 'list': list, 'map': map, 'max': max, 'min': min, 'next': next,
            'oct': oct, 'ord': ord, 'pow': pow, 'range': range, 'reversed': reversed, 'round': round, 'set': set,
            'slice': slice, 'sorted': sorted, 'str': str, 'sum': sum, 'tuple': tuple, 'type': type, 'zip': zip
        }
    }, locals if locals is not None else {})
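
# Example of a condition string as evaluated by _deps_from_json below (the
# names on the left are the locals it provides):
#   "compiler_family == 'gcc' and target_os == 'posix'"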

def _deps_from_json(env: Environment, deps: dict) -> dict:
    to_remove = []
    for key, dep in deps.items():
        if 'condition' in dep:
            if not _safe_eval(dep['condition'], {
                'compiler_family': env['COMPILER_FAMILY'],
                'target_os': os.name,
                'getenv': lambda name: env.get(name)
            }):
                to_remove.append(key)
                continue
        if 'min' in dep and isinstance(dep['min'], list):
            dep['min'] = tuple(dep['min'])
        if 'max' in dep and isinstance(dep['max'], list):
            dep['max'] = tuple(dep['max'])
    for key in to_remove:
        del deps[key]
    return deps
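
# A deps dict as consumed above might look like this (shape inferred from this
# function and _parse_version_spec; the dependency names are placeholders):
#   {
#       "fmt": {"min": [10, 0], "options": {"header_only": True}},
#       "zlib": {"condition": "target_os == 'posix'"}
#   }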

def _make_interface(env: Environment, dependencies: list = []):
    kwargs = {}
    for dependency in dependencies:
        _inject_dependency(dependency, kwargs)
    return {
        'CPPPATH': kwargs.get('CPPPATH', []),
        'CPPDEFINES': kwargs.get('CPPDEFINES', [])
    }

def _exe_filename(env: Environment, name: str, type: str = 'static') -> str:
    if os.name == 'posix':
        return name
    elif os.name == 'nt':
        return f'{name}.exe'
    else:
        raise Exception('What OS is this?')

def _find_executable(env: Environment, name: str, paths: 'list[str]', type: str = 'static', allow_fail: bool = False, use_glob: bool = False):
    fname = _exe_filename(env, name, type)
    for path in paths:
        lib_path = os.path.join(path, fname)
        if use_glob:
            files = glob.glob(lib_path)
            if len(files) == 1:
                return files[0]
            elif len(files) > 1:
                raise Exception(f'Multiple candidates found for executable with name {name} in paths: "{", ".join(paths)}", candidates: "{", ".join(files)}".')
        elif os.path.exists(lib_path):
            return lib_path
    if allow_fail:
        return None
    raise Exception(f'Could not find executable with name {name} in paths: "{", ".join(paths)}", filename: "{fname}".')

def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
    if os.name == 'posix':
        ext = {
            'static': 'a',
            'shared': 'so'
        }[type]
        return f'lib{name}.{ext}'
    elif os.name == 'nt':
        ext = {
            'static': 'lib',
            'shared': 'dll'
        }[type]
        return f'{name}.{ext}'
    else:
        raise Exception('What OS is this?')

def _find_lib(env: Environment, name: str, paths: 'list[str]', type: str = 'static', allow_fail: bool = False, use_glob: bool = False):
    fname = _lib_filename(env, name, type)
    for path in paths:
        lib_path = os.path.join(path, fname)
        if use_glob:
            files = glob.glob(lib_path)
            if len(files) == 1:
                return files[0]
            elif len(files) > 1:
                raise Exception(f'Multiple candidates found for library with name {name} in paths: "{", ".join(paths)}", candidates: "{", ".join(files)}".')
        elif os.path.exists(lib_path):
            return lib_path
    if allow_fail:
        return None
    raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}", filename: "{fname}".')
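
# Illustrative usage (paths are placeholders):
#   env.FindLib('z', ['/usr/lib', '/usr/local/lib'], type='shared', allow_fail=True)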

def _info(env: Environment, message: str) -> None:
    if not GetOption('silent'):
        print(message)


def _warn(env: Environment, message: str) -> None:
    print(message, file=sys.stderr)


def _error(env: Environment, message: str) -> None:
    print(message, file=sys.stderr)
    Exit(1)

def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
    result = {}
    for key, valueA in dictA.items():
        if key in dictB:
            valueB = dictB[key]
            if type(valueA) != type(valueB):
                return None
            elif type(valueA) == list:
                result[key] = valueA + valueB
            elif type(valueA) == dict:
                mergedValue = _try_merge_dicts(valueA, valueB)
                if mergedValue is None:
                    return None
                result[key] = mergedValue
            elif valueA != valueB:
                return None
            else:
                result[key] = valueA
        else:
            # keys only present in dictA are kept as-is
            result[key] = valueA
    for key, valueB in dictB.items():
        if key not in result:
            result[key] = valueB
    return result

def _find_common_dependency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
    options = _try_merge_dicts(versionA.options, versionB.options)
    if options is None:
        return None
    result_version = _VersionSpec(options=options)
    if versionA.minimum_version is not None:
        if versionB.minimum_version is not None:
            result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version)
        else:
            result_version.minimum_version = versionA.minimum_version
    else:
        result_version.minimum_version = versionB.minimum_version

    if versionA.maximum_version is not None:
        if versionB.maximum_version is not None:
            result_version.maximum_version = min(versionA.maximum_version, versionB.maximum_version)
        else:
            result_version.maximum_version = versionA.maximum_version
    else:
        result_version.maximum_version = versionB.maximum_version

    if result_version.minimum_version is not None and result_version.maximum_version is not None \
            and (result_version.minimum_version > result_version.maximum_version):
        return None
    return result_version
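
# Example: merging specs (min=(1, 2), max=None) and (min=(1, 4), max=(2, 0))
# yields (min=(1, 4), max=(2, 0)); disjoint ranges or conflicting options
# yield None, which _add_dependency reports as an incompatibility.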

def _parse_version_spec(version_spec: dict) -> _VersionSpec:
    return _VersionSpec(version_spec.get('min'), version_spec.get('max'), version_spec.get('options', {}))


def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
    if name not in env['SPP_DEPENDENCIES']:
        return True
    dependency = env['SPP_DEPENDENCIES'][name]
    common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
    return common_version_spec is not None

def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
    if name in env['SPP_DEPENDENCIES']:
        dependency = env['SPP_DEPENDENCIES'][name]
        common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
        if common_version_spec is None:
            raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}')
        if dependency.version_spec != common_version_spec:
            env['_SPP_DEPENDENCIES_OKAY'] = False
            dependency.version_spec = common_version_spec
        return dependency
    dependency = _Dependency()
    dependency.name = name
    dependency.version_spec = version_spec
    dependency.recipe = _find_recipe(env, name)
    env['SPP_DEPENDENCIES'][name] = dependency
    env['_SPP_DEPENDENCIES_OKAY'] = False
    return dependency

def _sort_versions(versions: list) -> None:
    import functools

    # sorts in descending order, so the newest matching version is tried first
    def _compare(left, right):
        if left < right:
            return 1
        elif left == right:
            return 0
        else:
            return -1
    versions.sort(key=functools.cmp_to_key(_compare))

def _version_matches(version, version_spec: _VersionSpec) -> bool:
    if version_spec.minimum_version is not None and version < version_spec.minimum_version:
        return False
    if version_spec.maximum_version is not None and version > version_spec.maximum_version:
        return False
    return True

def _find_version(env: Environment, dependency: _Dependency):
    for update in (False, True):
        versions_signature = inspect.signature(dependency.recipe.versions)
        kwargs = {}
        if 'options' in versions_signature.parameters:
            kwargs['options'] = dependency.version_spec.options
        versions = dependency.recipe.versions(env, update=update, **kwargs)
        _sort_versions(versions)
        for version in versions:
            kwargs = {}
            dependencies_signature = inspect.signature(dependency.recipe.dependencies)
            if 'options' in dependencies_signature.parameters:
                kwargs['options'] = dependency.version_spec.options
            if _version_matches(version, dependency.version_spec):
                canadd = True
                for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items():
                    if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
                        canadd = False
                        break
                if canadd:
                    depdeps = []
                    for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items():
                        depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
                    dependency.version = version
                    dependency.depdeps = depdeps
                    return
    print(f'Available versions: \n{versions}')
    print(f'Required version: {dependency.version_spec}')
    raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')

def _wrap_builder(builder, target_type: TargetType):
    def _wrapped(env, dependencies = {}, *args, **kwargs):
        target_dependencies = []
        for name, version_spec in dependencies.items():
            # an empty spec is basically a shortcut to adding targets from other
            # modules without having to save them in the env
            if version_spec == {} and name not in env['SPP_DEPENDENCIES']:
                dep_target = _find_target(env, name)
                if dep_target is not None and dep_target.target_type != TargetType.PROGRAM:
                    target_dependencies.append(dep_target)
                    # TODO: there might be an issue here with dependencies not being injected this way :/
                    continue
            target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec)))

        if 'CPPPATH' not in kwargs:
            kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
        if 'CPPDEFINES' not in kwargs:
            kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
        if 'LIBPATH' not in kwargs:
            kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
        if 'LIBS' not in kwargs and 'LIBS' in env:
            kwargs['LIBS'] = copy.copy(env['LIBS'])
        if 'source' in kwargs:
            source = kwargs['source']
            if not isinstance(source, list):
                source = [source]
            new_source = []
            for src in source:
                if isinstance(src, str):
                    new_source.append(env.Entry(src))
                else:
                    new_source.append(src)
            kwargs['source'] = new_source

        target = _Target()
        if 'name' in kwargs:
            target.name = kwargs['name']
        else:
            trgt = _target_entry(kwargs.get('target'))
            if trgt is not None:
                target.name = str(trgt.name)
            else:
                target.name = 'Unknown target'
        target.target_type = target_type
        target.builder = builder
        target.args = args
        target.kwargs = kwargs
        target.dependencies = target_dependencies
        env.Append(SPP_TARGETS = [target])
        if not target.dependencies:
            _build_target(target)
        return target
    return _wrapped
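
# Typical usage from a module SConscript (illustrative; the names are
# placeholders):
#   env.Program(target='demo', source=['main.cpp'],
#               dependencies={'fmt': {'min': (10, 0)}, 'mylib': {}})
# An empty spec ({}) is first resolved against targets from other modules via
# _find_target before falling back to a recipe.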

def _wrap_default(default):
    def _wrapped(env, arg):
        if isinstance(arg, _Target):
            env.Append(SPP_DEFAULT_TARGETS = [arg])
        elif isinstance(arg, dict) and '_target' in arg:
            default(arg['_target'])
        else:
            default(arg)
    return _wrapped

def _wrap_depends(depends):
    def _wrapped(env, dependant, dependency):
        if isinstance(dependant, _Target) or isinstance(dependency, _Target):
            env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
            return
        # both arguments may be wrapped, so unwrap them independently
        if isinstance(dependant, dict) and '_target' in dependant:
            dependant = dependant['_target']
        if isinstance(dependency, dict) and '_target' in dependency:
            dependency = dependency['_target']
        depends(dependant, dependency)
    return _wrapped

def _build_target(target: _Target):
    for dependency in target.dependencies:
        _inject_dependency(dependency, target.kwargs)
    if 'LIBS' in target.kwargs:
        libs_copy = list(target.kwargs['LIBS'])
        for lib in libs_copy:
            if isinstance(lib, str) and os.path.isabs(lib):
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['LIBS'].append(env.File(lib))
            elif isinstance(lib, _Target):
                if not lib.target:
                    _build_target(lib)
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['LIBS'].append(lib.target)
    new_kwargs = target.kwargs.copy()
    if 'target' in new_kwargs:  # there should always be a target, right?
        new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
    target.target = target.builder(*target.args, **new_kwargs)

def _version_to_string(version) -> str:
    return '.'.join([str(v) for v in version])

def _finalize(env: Environment):
    if dump is not None:
        _dump()
    if generate_project:
        _generate_project(generate_project)
        Exit(0)

    version_requirements = {dep.name: {
        'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
        'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
    } for dep in env['SPP_DEPENDENCIES'].values()}
    env['_SPP_DEPENDENCIES_OKAY'] = False
    while not env['_SPP_DEPENDENCIES_OKAY']:
        env['_SPP_DEPENDENCIES_OKAY'] = True
        for dependency in list(env['SPP_DEPENDENCIES'].values()):
            if not dependency.version:
                _find_version(env, dependency)
    with open('cache/versions.json', 'w') as f:
        json.dump({
            'requirements': version_requirements,
            'selected': {
                dep.name: _version_to_string(dep.version) for dep in env['SPP_DEPENDENCIES'].values()
            }
        }, f)

    for target in env['SPP_TARGETS']:
        _build_target(target)
    for target in env['SPP_DEFAULT_TARGETS']:
        env.Default(target.target)
    for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']:
        if isinstance(dependant, _Target):
            dependant = dependant.target
        if isinstance(dependency, _Target):
            dependency = dependency.target
        env.Depends(dependant, dependency)
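
# The build is two-phase: targets with dependencies are only recorded when
# declared, and env.Finalize() first solves dependency versions to a fixpoint
# (adding a dependency may invalidate earlier choices), then invokes the real
# SCons builders.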

def _find_target(env: Environment, target_name: str) -> '_Target|None':
    for target in env['SPP_TARGETS']:
        if target.name == target_name:
            return target
    return None

def _get_fallback_cache_dir() -> str:
    return Dir('#cache').abspath


def _find_system_cache_dir() -> str:
    if os.name == 'posix':
        if os.environ.get('XDG_CACHE_HOME'):
            return os.environ['XDG_CACHE_HOME']
        elif os.environ.get('HOME'):
            return os.path.join(os.environ['HOME'], '.cache')
    elif os.name == 'nt':
        # TODO: just guessing
        return os.environ['LocalAppData']
    # fallback
    return _get_fallback_cache_dir()

def _target_entry(target_value):
    if target_value is None:
        return None
    if not isinstance(target_value, list):
        target_value = [target_value]
    if len(target_value) < 1:
        return None
    if isinstance(target_value[0], str):
        target_value[0] = env.Entry(target_value[0])
    return target_value[0]

def _generate_project(project_type: str) -> None:
    try:
        import jinja2
    except ImportError:
        _error(None, 'Project generation requires the jinja2 package to be installed.')

    source_folder, target_folder = {
        'clion': (os.path.join(_spp_dir.abspath, 'util', 'clion_project_template'), Dir('#.idea').abspath),
        'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath)
    }.get(project_type, (None, None))
    if not source_folder:
        _error(None, 'Invalid project type option.')

    uuid_cache_file = pathlib.Path(env['SHARED_CACHE_DIR'], 'uuids.json')
    uuid_cache = {}
    save_uuid_cache = False
    if uuid_cache_file.exists():
        try:
            with uuid_cache_file.open('r') as f:
                uuid_cache = json.load(f)
        except Exception as e:
            print(f'Error loading UUID cache: {e}')

    def _generate_uuid(name: str = '') -> str:
        nonlocal save_uuid_cache
        if name and name in uuid_cache:
            return uuid_cache[name]
        new_uuid = str(uuid.uuid4())
        if name:
            uuid_cache[name] = new_uuid
            save_uuid_cache = True
        return new_uuid

    root_path = pathlib.Path(env.Dir('#').abspath)

    def _get_executables() -> list:
        result = []
        for target in env['SPP_TARGETS']:
            if target.target_type == TargetType.PROGRAM:
                trgt = _target_entry(target.kwargs['target'])

                # bind trgt as a default argument; a plain closure would see the
                # last loop value for every entry
                def _exe_path(build_type, trgt=trgt) -> str:
                    exe_path = pathlib.Path(trgt.abspath).relative_to(root_path)
                    exe_path = exe_path.parent / f'{env.subst("$PROGPREFIX")}{exe_path.name}-{build_type}{env.subst("$PROGSUFFIX")}'
                    return str(exe_path)
                result.append({
                    'name': target.name,
                    'filename': _exe_path
                })
        return result

    def _get_libraries() -> list:
        result = []
        for target in env['SPP_TARGETS']:
            if target.target_type == TargetType.STATIC_LIBRARY:
                trgt = _target_entry(target.kwargs['target'])

                def _lib_path(build_type, trgt=trgt) -> str:
                    lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
                    lib_path = lib_path.parent / f'{env.subst("$LIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$LIBSUFFIX")}'
                    return str(lib_path)
                result.append({
                    'name': target.name,
                    'filename': _lib_path
                })
            elif target.target_type == TargetType.SHARED_LIBRARY:
                trgt = _target_entry(target.kwargs['target'])

                def _lib_path(build_type, trgt=trgt) -> str:
                    lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
                    lib_path = lib_path.parent / f'{env.subst("$SHLIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$SHLIBSUFFIX")}'
                    return str(lib_path)
                result.append({
                    'name': target.name,
                    'filename': _lib_path
                })
        return result

    def _escape_path(input: str) -> str:
        return input.replace('\\', '\\\\')

    jinja_env = jinja2.Environment()
    jinja_env.globals['generate_uuid'] = _generate_uuid
    jinja_env.globals['project'] = {
        'name': env.Dir('#').name,
        'executables': _get_executables(),
        'libraries': _get_libraries(),
        'build_types': ['debug', 'release_debug', 'release', 'profile']
    }
    jinja_env.globals['scons_exe'] = shutil.which('scons')
    jinja_env.globals['nproc'] = multiprocessing.cpu_count()

    jinja_env.filters['escape_path'] = _escape_path

    source_path = pathlib.Path(source_folder)
    target_path = pathlib.Path(target_folder)

    for source_file in source_path.rglob('*'):
        if source_file.is_file():
            target_file = target_path / (source_file.relative_to(source_path))
            target_file.parent.mkdir(parents=True, exist_ok=True)
            if source_file.suffix != '.jinja':
                shutil.copyfile(source_file, target_file)
                continue
            with source_file.open('r') as f:
                templ = jinja_env.from_string(f.read())
            target_file = target_file.with_suffix('')
            with target_file.open('w') as f:
                f.write(templ.render())

    if save_uuid_cache:
        try:
            with uuid_cache_file.open('w') as f:
                json.dump(uuid_cache, f)
        except Exception as e:
            print(f'Error writing uuid cache: {e}')

def _dump() -> None:
    def _dump_as_text(data: Any) -> str:
        from pprint import pformat
        dump_name = {
            'env': 'Environment',
            'config': 'Configuration',
            'modules': 'Modules'
        }[dump]

        return '\n'.join((
            f'==== Begin {dump_name} Dump ====',
            pformat(data),
            f'==== End {dump_name} Dump ===='
        ))

    def _dump_as_json(data: Any) -> str:
        class _Encoder(json.JSONEncoder):
            def default(self, o) -> dict:
                # fall back to the object's attribute dict where one exists
                if hasattr(o, '__dict__'):
                    return o.__dict__
                return super().default(o)
        return json.dumps(data, cls=_Encoder)

    data = {
        'env': env.Dictionary,
        'config': lambda: config,
        'modules': lambda: env['SPP_MODULES']
    }[dump]()
    dump_fn = {
        'text': _dump_as_text,
        'json': _dump_as_json
    }[dump_format]
    print(dump_fn(data))
    Exit(0)

Import('config')

if not config.get('PROJECT_NAME'):
    config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'):
    config['CXX_STANDARD'] = 'c++23'
if not config.get('CXX_NO_EXCEPTIONS'):
    config['CXX_NO_EXCEPTIONS'] = False

if not config.get('PREPROCESSOR_PREFIX'):
    config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper()  # TODO: may be nicer?

if 'COMPILATIONDB_FILTER_FILES' not in config:
    config['COMPILATIONDB_FILTER_FILES'] = True

if 'WINDOWS_DISABLE_DEFAULT_DEFINES' not in config:
    config['WINDOWS_DISABLE_DEFAULT_DEFINES'] = False

AddOption(
    '--build_type',
    dest = 'build_type',
    type = 'choice',
    choices = ('debug', 'release_debug', 'release', 'profile'),
    nargs = 1,
    action = 'store',
    default = 'debug'
)

AddOption(
    '--unity',
    dest = 'unity_mode',
    type = 'choice',
    choices = ('enable', 'disable', 'stress'),
    nargs = 1,
    action = 'store',
    default = 'enable'
)

AddOption(
    '--variant',
    dest = 'variant',
    nargs = 1,
    action = 'store'
)

AddOption(
    '--asan',
    dest = 'enable_asan',
    action = 'store_true'
)

AddOption(
    '--config_file',
    dest = 'config_file',
    nargs = 1,
    action = 'store',
    default = 'config.py'
)

AddOption(
    '--compiler',
    dest = 'compiler',
    type = 'choice',
    choices = ('auto', 'gcc', 'clang', 'msvc'),
    nargs = 1,
    action = 'store',
    default = 'auto'
)

AddOption(
    '--update_repositories',
    dest = 'update_repositories',
    action = 'store_true'
)

AddOption(
    '--disable_auto_update',
    dest = 'disable_auto_update',
    action = 'store_true'
)

AddOption(
    '--dump',
    dest = 'dump',
    type = 'choice',
    choices = ('env', 'config', 'modules'),
    nargs = 1,
    action = 'store'
)

AddOption(
    '--dump_format',
    dest = 'dump_format',
    type = 'choice',
    choices = ('text', 'json'),
    nargs = 1,
    action = 'store',
    default = 'text'
)

AddOption(
    '--generate_project',
    dest = 'generate_project',
    type = 'choice',
    choices = ('clion', 'vscode'),
    nargs = 1,
    action = 'store'
)

_spp_dir = Dir('.')

build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode')
variant = GetOption('variant')
enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
disable_auto_update = GetOption('disable_auto_update')
dump = GetOption('dump')
dump_format = GetOption('dump_format')
generate_project = GetOption('generate_project')

default_CC = {
    'gcc': 'gcc',
    'clang': 'clang',
    'msvc': 'cl.exe'
}.get(compiler, None)
default_CXX = {
    'gcc': 'g++',
    'clang': 'clang++',
    'msvc': 'cl.exe'
}.get(compiler, None)

if not os.path.isabs(config_file):
    config_file = os.path.join(Dir('#').abspath, config_file)

vars = Variables(config_file)
vars.Add('CC', 'The C Compiler', default_CC)
vars.Add('CXX', 'The C++ Compiler', default_CXX)
vars.Add('LINK', 'The Linker')
vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
vars.Add('CFLAGS', 'C Compiler Flags')
vars.Add('CXXFLAGS', 'C++ Compiler Flags')
vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
         ' project.', config['COMPILATIONDB_FILTER_FILES'])
vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
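
# A minimal config.py read via Variables() above might contain (illustrative
# values):
#   CXX = 'clang++'
#   CCFLAGS = '-march=native'
#   PYTHON = 'python3'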

tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config:
    tools.extend(config['TOOLS'])

env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['SPP_RECIPES_FOLDERS'] = []
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')

_info(None, f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except OSError:
    env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
    _info(None, f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)  # no more safeguards!

# derived paths are set only after SYSTEM_CACHE_DIR is final
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories

env['CXX_STANDARD'] = config['CXX_STANDARD']  # make it available to everyone
env['CXX_NO_EXCEPTIONS'] = config['CXX_NO_EXCEPTIONS']
env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []

env['SHARED_CACHE_DIR'] = Dir('#cache').abspath
# allow compiling to variant directories (each gets their own bin/lib/cache dirs)
if variant:
    env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
    env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
    env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
    env['VARIANT_DIR'] = f'{env["CACHE_DIR"]}/variant'
    env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
else:
    env['VARIANT_DIR'] = None
    env['COMPILATIONDB_USE_ABSPATH'] = True
    if env['COMPILATIONDB_FILTER_FILES']:
        env['COMPILATIONDB_PATH_FILTER'] = f"{Dir('#').abspath}/*"
    comp_db = env.CompilationDatabase(target = '#compile_commands.json')
    Default(comp_db)
    env['BIN_DIR'] = Dir('#bin').abspath
    env['LIB_DIR'] = Dir('#lib').abspath
    env['CACHE_DIR'] = env['SHARED_CACHE_DIR']
env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
env['BUILD_TYPE'] = build_type
env.Append(LIBPATH = [env['LIB_DIR']])  # to allow submodules to link to each other without hassle

# make sure these are all defined in case someone wants to use/copy them
env.Append(CCFLAGS = [])
env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])

# init SPP environment variables
env['SPP_DIR'] = _spp_dir.abspath
env['SPP_TARGETS'] = []
env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {}
env['SPP_MODULES'] = {}  # maps from folder to _Module

env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
if variant:
    env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
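
# Decorating OBJSUFFIX with build type (and variant) keeps object files for
# different configurations from colliding in the shared cache directories.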

# create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
    with open(cache_gitignore, 'w') as f:
        f.write('*\n')

if env['CACHE_DIR'] != env['SHARED_CACHE_DIR']:
    os.makedirs(env['SHARED_CACHE_DIR'], exist_ok=True)
    cache_gitignore = f'{env["SHARED_CACHE_DIR"]}/.gitignore'
    if not os.path.exists(cache_gitignore):
        with open(cache_gitignore, 'w') as f:
            f.write('*\n')

# check whether repositories were updated since last boot
update_stamp_file = f'{env["SHARED_CACHE_DIR"]}/last_update.stamp'
update_time = 0.0
if os.path.exists(update_stamp_file):
    with open(update_stamp_file, 'r') as f:
        try:
            update_time = float(f.read())
        except ValueError:
            pass
boot_time = psutil.boot_time()
if not disable_auto_update and boot_time > update_time:
    _info(None, 'Repositories haven\'t been updated since last boot, doing it now...')
    env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
    with open(update_stamp_file, 'w') as f:
        f.write(str(time.time()))

# create the clone and download cache dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)

# try to detect what compiler we are using
compiler_exe = os.path.basename(env.subst(env['CC']))
if 'gcc' in compiler_exe:
    env['COMPILER_FAMILY'] = 'gcc'
elif 'clang' in compiler_exe:
    env['COMPILER_FAMILY'] = 'clang'
elif 'cl' in compiler_exe:
    env['COMPILER_FAMILY'] = 'cl'
else:
    env['COMPILER_FAMILY'] = 'unknown'

# setup unity build depending on mode
if unity_mode == 'disable':
    env['UNITY_DISABLE'] = True
elif unity_mode == 'stress':  # compile everything in one single file to stress test the unity build
    env['UNITY_MAX_SOURCES'] = 100000  # I'll hopefully never reach this
    env['UNITY_MIN_FILES'] = 1

# setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic', '-fvisibility=hidden'])
    env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
    if env['CXX_NO_EXCEPTIONS']:
        env.Append(CXXFLAGS = ['-fno-exceptions'])
    if build_type != 'release':
        env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
    env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
    if env['COMPILER_FAMILY'] == 'gcc':
        # GCC complains about missing initializer for "<anonymous>" that doesn't exist :/
        # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
        # also -Wdangling-reference seems to produce a lot of false positives
        # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?)
        # -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
        # -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
        env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
    else:  # clang only
        # no-gnu-anonymous-struct - we don't care
        # no-missing-field-initializers - useful in some cases, annoying in most
        # no-ambiguous-reversed-operator - should be quite useful, but we get a false positive, apparently?
        # no-parentheses-equality - false positive for fold expressions and doesn't seem too useful anyway
        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator', '-Wno-parentheses-equality'])
        env.Append(CXXFLAGS = ['-fexperimental-library'])  # enable std::jthread
    if build_type == 'debug':
        env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
        env.Append(DEPS_CXXFLAGS = ['-D_GLIBCXX_DEBUG'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
        if build_type == 'profile':
            if env['COMPILER_FAMILY'] == 'gcc':
                env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_GCC_INSTRUMENTING=1'])
                env.Append(CCFLAGS = ['-finstrument-functions'])
                env.Append(LINKFLAGS = ['-rdynamic'])
    elif build_type == 'release':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])

    if env['ENABLE_ASAN']:
        env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
        env.Append(LINKFLAGS = ['-fsanitize=address'])
        env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
        env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])

elif env['COMPILER_FAMILY'] == 'cl':
    cxx_version_name = {
        'c++14': 'c++14',
        'c++17': 'c++17',
        'c++20': 'c++20',
        'c++23': 'c++latest',
        'c++26': 'c++latest'
    }.get(env['CXX_STANDARD'], 'c++14')  # default to C++14 for older versions
    # C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
    # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
    # C4702: unreachable code, issued after MIJIN_FATAL macro
    # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
    # C4275: same as above
    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
                          f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8', '/Zc:preprocessor'])
    env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS'])  # I'd like to not use MSVC specific versions of functions because they are "safer" ...
    env.Append(DEPS_CXXFLAGS = ['/Zc:char8_t', '/utf-8', '/vmg'])
    if env['CXX_NO_EXCEPTIONS']:
        env.Append(CPPDEFINES = ['_HAS_EXCEPTIONS=0'])
    else:
        env.Append(CXXFLAGS = ['/EHsc'])
        env.Append(DEPS_CXXFLAGS = ['/EHsc'])
    if env['SHOW_INCLUDES']:
        env.Append(CCFLAGS = ['/showIncludes'])
    if build_type == 'debug':
        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ['/DEBUG'])
        env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
        env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ['/DEBUG'])
        env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
        env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
    else:
        env.Append(CCFLAGS = ['/O2', '/MD'])
        env.Append(DEPS_CXXFLAGS = ['/MD'])

    if env['ENABLE_ASAN']:
        env.Append(CCFLAGS = ['/fsanitize=address'])

if env['COMPILER_FAMILY'] == 'gcc':
    env.Append(CXXFLAGS = ['-Wno-volatile'])
elif env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])

# platform specific options
if os.name == 'nt':
    if not config['WINDOWS_DISABLE_DEFAULT_DEFINES']:
        env.Append(CDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'], CPPDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'])

env.AddMethod(_cook, 'Cook')
env.AddMethod(_parse_lib_conf, 'ParseLibConf')
env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_deps_from_json, 'DepsFromJson')
env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_executable, 'FindExecutable')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_info, 'Info')
env.AddMethod(_warn, 'Warn')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')

env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')
env.AddMethod(_module_config, 'ModuleConfig')
env.AddMethod(_finalize, 'Finalize')
env.AddMethod(_find_target, 'FindTarget')

if hasattr(env, 'Gch'):
    env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch')

for addon_file in env.Glob('addons/*.py'):
    env = SConscript(addon_file, exports = 'env')

Return('env')