# scons-plus-plus/SConscript
import copy
from dataclasses import dataclass, field
import enum
import glob
import inspect
import json
import multiprocessing
import os
import pathlib
import platform
import psutil
import shutil
import sys
import time
from typing import Any
import uuid
from SCons.Node import Node
_GCC_CPU_FEATURES_MAP = {
'mmx': '-mmmx',
'sse': '-msse',
'sse2': '-msse2',
'sse3': '-msse3',
'ssse3': '-mssse3',
'sse4': '-msse4',
'sse4a': '-msse4a',
'sse4.1': '-msse4.1',
'sse4.2': '-msse4.2',
'avx': '-mavx',
'avx2': '-mavx2'
}
class TargetType(enum.Enum):
PROGRAM = 0
STATIC_LIBRARY = 1
SHARED_LIBRARY = 2
class _VersionSpec:
    def __init__(self, minimum_version = None, maximum_version = None, options = None):
        self.minimum_version = minimum_version
        self.maximum_version = maximum_version
        self.options = options if options is not None else {}  # avoid sharing one mutable default dict between instances
    def __eq__(self, other):
        # value equality; _add_dependency compares a freshly merged spec against the stored
        # one, so the default identity compare would report a change on every merge
        return isinstance(other, _VersionSpec) and self.__dict__ == other.__dict__
def __str__(self):
return f'Min: {self.minimum_version}, Max: {self.maximum_version}, Options: {self.options}'
@dataclass(eq=False)  # eq=False keeps identity comparisons intact; default_factory avoids shared mutable class attributes
class _Dependency:
    name: str = ''
    version: Any = None
    version_spec: _VersionSpec = None
    recipe: Any = None
    depdeps: list = field(default_factory=list)  # resolved _Dependency objects of this dependency's own dependencies
    cook_result: dict = field(default_factory=dict)
@dataclass
class _Module:
name: str
folder: str
description: str
cxx_namespace: str
@dataclass(eq=False)  # eq=False: targets are looked up and removed from LIBS lists by identity
class _Target:
    name: str = ''
    target_type: TargetType = None
    builder: Any = None
    args: list = field(default_factory=list)
    kwargs: dict = field(default_factory=dict)
    dependencies: list = field(default_factory=list)
    target: Any = None  # the built SCons node(s), filled in by _build_target
    module: _Module = None
def _find_recipe(env: Environment, recipe_name: str):
if recipe_name in env['SPP_RECIPES']:
return env['SPP_RECIPES'][recipe_name]
import importlib.util
source_file = None
if not env['SPP_RECIPES_FOLDERS']:
env.Error('No recipes repositories set. Add one using env.RecipeRepo(<name>, <url>, <branch>).')
    for folder in env['SPP_RECIPES_FOLDERS']:
        if isinstance(folder, Node):  # `folder is Node` compared against the class object itself and was never true
            folder = folder.abspath
try_source_file = f'{folder}/{recipe_name}/recipe.py'
if os.path.exists(try_source_file):
source_file = try_source_file
break
if not source_file:
env.Error(f'Could not find recipe for {recipe_name}.')
spec = importlib.util.spec_from_file_location(recipe_name, source_file)
recipe = importlib.util.module_from_spec(spec)
recipe.env = env
spec.loader.exec_module(recipe)
env['SPP_RECIPES'][recipe_name] = recipe
return recipe
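# A recipe module is expected to expose three functions. This is an illustrative sketch
# inferred from the call sites in this file; the authoritative contract is the recipes themselves:
#
#     def versions(env, update=False, options={}) -> list:   # available version tuples, e.g. [(1, 2, 3), ...]
#     def dependencies(env, version, options={}) -> dict:    # {recipe_name: {'min': ..., 'max': ..., 'options': {...}}}
#     def cook(env, version, options={}) -> dict:            # lib conf, e.g. {'CPPPATH': [...], 'LIBS': [...]}
#
# `options` is optional on all three; callers inspect each signature and only pass it
# when the recipe declares the parameter.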
def _run_cook(dependency: _Dependency):  # note: relies on the SConscript-global `env`
if not dependency.cook_result:
cook_signature = inspect.signature(dependency.recipe.cook)
kwargs = {}
if 'options' in cook_signature.parameters:
kwargs['options'] = dependency.version_spec.options
dependency.cook_result = dependency.recipe.cook(env, dependency.version, **kwargs)
def _cook(env: Environment, recipe_name: str):
dependency = env['SPP_DEPENDENCIES'].get(recipe_name)
if not dependency:
raise Exception(f'Cannot cook {recipe_name} as it was not listed as a dependency.')
_run_cook(dependency)
return dependency.cook_result
def _normalize_module_path(env: Environment, path: str) -> str|None:
module_root = env.Dir('#/private').abspath
try:
relative = os.path.relpath(path, module_root)
if relative[:2] == '..':
return None
return relative
except ValueError: # may be thrown on Windows if the module is on a different drive than the project
return None
def _module(env: Environment, file: str):
folder = _normalize_module_path(env, env.File(file).dir.abspath)
if folder is not None: # only include modules inside the source tree
dirname = os.path.basename(folder)
env.Append(SPP_MODULES = {folder: _Module(
name=dirname,
folder=folder,
description='',
cxx_namespace=dirname
)})
return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
def _module_config(env: Environment, **kwargs) -> None:
module_folder = _normalize_module_path(env, env.Dir('.').abspath)
module = env['SPP_MODULES'].get(module_folder)
if module is None:
env.Warn(f'No module config found for module at {module_folder}')
return
module.__dict__.update(kwargs)
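# Illustrative usage: the top-level SConscript registers a module, and the module's own
# SConscript describes itself (names below are hypothetical):
#
#     env.Module('private/engine/SConscript')  # registers a module named 'engine'
#
#     # inside private/engine/SConscript:
#     env.ModuleConfig(description='Core engine code', cxx_namespace='engine')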
def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
CPPDEFINES = lib_conf.get('CPPDEFINES', []),
LIBPATH = lib_conf.get('LIBPATH', []),
LIBS = lib_conf.get('LIBS', []),
LINKFLAGS = lib_conf.get('LINKFLAGS', []),
JINJA_TEMPLATE_SEARCHPATH = lib_conf.get('JINJA_TEMPLATE_SEARCHPATH', []))
def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
if list_name not in dependency:
return
if list_name not in kwargs:
kwargs[list_name] = []
kwargs[list_name].extend(dependency[list_name]) # TODO: eliminate duplicates?
def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
if isinstance(dependency, dict):
_inject_list(kwargs, dependency, 'CPPPATH')
_inject_list(kwargs, dependency, 'CPPDEFINES')
_inject_list(kwargs, dependency, 'LIBPATH')
_inject_list(kwargs, dependency, 'LIBS')
_inject_list(kwargs, dependency, 'LINKFLAGS')
if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
if 'DEPENDENCIES' in dependency:
for inner_dependency in dependency['DEPENDENCIES']:
_inject_dependency(inner_dependency, kwargs, False)
elif isinstance(dependency, _Dependency):
_run_cook(dependency)
_inject_list(kwargs, dependency.cook_result, 'CPPPATH')
_inject_list(kwargs, dependency.cook_result, 'CPPDEFINES')
_inject_list(kwargs, dependency.cook_result, 'LIBPATH')
_inject_list(kwargs, dependency.cook_result, 'LIBS')
_inject_list(kwargs, dependency.cook_result, 'LINKFLAGS')
for depdep in dependency.depdeps:
_inject_dependency(depdep, kwargs)
elif isinstance(dependency, _Target):
_inject_list(kwargs, dependency.kwargs, 'CPPPATH')
_inject_list(kwargs, dependency.kwargs, 'CPPDEFINES')
_inject_list(kwargs, dependency.kwargs, 'LIBPATH')
_inject_list(kwargs, dependency.kwargs, 'LIBS')
_inject_list(kwargs, {'LIBS': [dependency]}, 'LIBS')
_inject_list(kwargs, dependency.kwargs, 'LINKFLAGS')
for depdep in dependency.dependencies:
_inject_dependency(depdep, kwargs)
def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
result_nodes = []
paths = [root_path]
while paths:
path = paths.pop()
all_nodes = env.Glob(f'{path}/*', **kwargs)
paths.extend(entry for entry in all_nodes if entry.isdir() or (entry.srcnode() and entry.srcnode().isdir())) # `srcnode()` must be used because `isdir()` doesn't work for entries in variant dirs which haven't been copied yet.
result_nodes.extend(env.Glob(f'{path}/{pattern}', **kwargs))
return sorted(result_nodes)
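# Illustrative usage: recursively collect sources below the current directory, including
# entries that only exist in a variant dir:
#
#     sources = env.RGlob('.', '*.cpp')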
def _safe_eval(condition: str, locals = None):
return eval(condition, {
'__builtins__': {
'abs': abs, 'all': all, 'any': any, 'ascii': ascii, 'bin': bin, 'bool': bool, 'chr': chr, 'complex': complex,
'dict': dict, 'divmod': divmod, 'enumerate': enumerate, 'filter': filter, 'float': float, 'format': format,
'hasattr': hasattr, 'hash': hash, 'hex': hex, 'id': id, 'int': int, 'isinstance': isinstance,
'issubclass': issubclass, 'len': len, 'list': list, 'map': map, 'max': max, 'min': min, 'next': next,
'oct': oct, 'ord': ord, 'pow': pow, 'range': range, 'reversed': reversed, 'round': round, 'set': set,
'slice': slice, 'sorted': sorted, 'str': str, 'sum': sum, 'tuple': tuple, 'type': type, 'zip': zip
}
    }, locals or {})
def _deps_from_json(env: Environment, deps: dict) -> dict:
to_remove = []
for key, dep in deps.items():
if 'condition' in dep:
if not _safe_eval(dep['condition'], {
'compiler_family': env['COMPILER_FAMILY'],
'target_os': os.name,
'getenv': lambda name: env.get(name)
}):
to_remove.append(key)
continue
if 'min' in dep and isinstance(dep['min'], list):
dep['min'] = tuple(dep['min'])
if 'max' in dep and isinstance(dep['max'], list):
dep['max'] = tuple(dep['max'])
for key in to_remove:
del deps[key]
return deps
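# Illustrative shape of the JSON consumed by DepsFromJson (recipe names are hypothetical):
#
#     {
#         "fmt": {"min": [10, 0, 0]},
#         "vulkan": {"options": {"use_wsi": true}, "condition": "compiler_family != 'cl'"},
#         "zlib": {"max": [1, 3, 0]}
#     }
#
# `condition` is evaluated with `compiler_family`, `target_os` and `getenv` in scope;
# `min`/`max` lists become tuples so they compare cleanly against version tuples.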
def _make_interface(env: Environment, dependencies: list = []):
kwargs = {}
for dependency in dependencies:
_inject_dependency(dependency, kwargs)
return {
'CPPPATH': kwargs.get('CPPPATH', []),
'CPPDEFINES': kwargs.get('CPPDEFINES', [])
}
def _exe_filename(env: Environment, name: str, type: str = 'static') -> str:
    if os.name == 'posix':
        return name
    elif os.name == 'nt':
        return f'{name}.exe'
    else:
        raise Exception(f'Unsupported OS: {os.name}')
def _find_executable(env: Environment, name: str, paths: 'list[str]', type : str = 'static', allow_fail: bool = False, use_glob: bool = False):
fname = _exe_filename(env, name, type)
for path in paths:
lib_path = os.path.join(path, fname)
if use_glob:
files = glob.glob(lib_path)
if len(files) == 1:
return files[0]
elif len(files) > 1:
                raise Exception(f'Multiple candidates found for executable {name} in paths "{", ".join(paths)}": {", ".join(files)}.')
elif os.path.exists(lib_path):
return lib_path
if allow_fail:
return None
raise Exception(f'Could not find executable with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
if os.name == 'posix':
ext = {
'static': 'a',
'shared': 'so'
}[type]
return f'lib{name}.{ext}'
elif os.name == 'nt':
return f'{name}.lib'
    else:
        raise Exception(f'Unsupported OS: {os.name}')
def _find_lib(env: Environment, name: str, paths: 'list[str]', type : str = 'static', allow_fail: bool = False, use_glob: bool = False):
fname = _lib_filename(env, name, type)
for path in paths:
lib_path = os.path.join(path, fname)
if use_glob:
files = glob.glob(lib_path)
if len(files) == 1:
return files[0]
elif len(files) > 1:
                raise Exception(f'Multiple candidates found for library {name} in paths "{", ".join(paths)}": {", ".join(files)}.')
elif os.path.exists(lib_path):
return lib_path
if allow_fail:
return None
raise Exception(f'Could not find library with name {name} in paths: "{", ".join(paths)}" filename: "{fname}".')
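# Illustrative usage from a recipe's cook step (`install_dir` is hypothetical):
#
#     zlib = env.FindLib('z', [os.path.join(install_dir, 'lib')], type='static')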
def _info(env: Environment, message: str) -> None:
if not GetOption('silent'):
print(message)
def _warn(env: Environment, message: str) -> None:
print(message, file=sys.stderr)
def _error(env: Environment, message: str) -> None:
print(message, file=sys.stderr)
Exit(1)
def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
    result = {}
    for key, valueA in dictA.items():
        if key in dictB:
            valueB = dictB[key]
            if type(valueA) != type(valueB):
                return None
            elif type(valueA) == list:
                result[key] = valueA + valueB
            elif type(valueA) == dict:
                mergedValue = _try_merge_dicts(valueA, valueB)
                if mergedValue is None:
                    return None
                result[key] = mergedValue
            elif valueA != valueB:
                return None
            else:
                result[key] = valueA
        else:  # keys only in dictA are kept as-is
            result[key] = valueA
    for key, valueB in dictB.items():
        if key not in result:
            result[key] = valueB
    return result
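# Illustrative merge semantics: lists concatenate, nested dicts merge recursively, equal
# scalars pass through, and a scalar conflict fails the whole merge:
#
#     _try_merge_dicts({'defines': ['A'], 'shared': True}, {'defines': ['B'], 'shared': True})
#     # -> {'defines': ['A', 'B'], 'shared': True}
#     _try_merge_dicts({'shared': True}, {'shared': False})
#     # -> None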
def _find_common_dependency_version(name: str, versionA: _VersionSpec, versionB: _VersionSpec) -> _VersionSpec:
options = _try_merge_dicts(versionA.options, versionB.options)
if options is None:
return None
result_version = _VersionSpec(options=options)
if versionA.minimum_version is not None:
if versionB.minimum_version is not None:
result_version.minimum_version = max(versionA.minimum_version, versionB.minimum_version)
else:
result_version.minimum_version = versionA.minimum_version
else:
result_version.minimum_version = versionB.minimum_version
if versionA.maximum_version is not None:
if versionB.maximum_version is not None:
result_version.maximum_version = min(versionA.maximum_version, versionB.maximum_version)
else:
result_version.maximum_version = versionA.maximum_version
else:
result_version.maximum_version = versionB.maximum_version
if result_version.minimum_version is not None and result_version.maximum_version is not None \
and (result_version.minimum_version > result_version.maximum_version):
return None
return result_version
def _parse_version_spec(version_spec: dict) -> _VersionSpec:
return _VersionSpec(version_spec.get('min'), version_spec.get('max'), version_spec.get('options', {}))
def _can_add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> bool:
if name not in env['SPP_DEPENDENCIES']:
return True
dependency = env['SPP_DEPENDENCIES'][name]
common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
return common_version_spec is not None
def _add_dependency(env: Environment, name: str, version_spec: _VersionSpec) -> _Dependency:
if name in env['SPP_DEPENDENCIES']:
dependency = env['SPP_DEPENDENCIES'][name]
common_version_spec = _find_common_dependency_version(name, dependency.version_spec, version_spec)
if common_version_spec is None:
raise Exception(f'Incompatible versions detected for {name}: {dependency.version_spec} and {version_spec}')
if dependency.version_spec != common_version_spec:
env['_SPP_DEPENDENCIES_OKAY'] = False
dependency.version_spec = common_version_spec
return dependency
dependency = _Dependency()
dependency.name = name
dependency.version_spec = version_spec
dependency.recipe = _find_recipe(env, name)
env['SPP_DEPENDENCIES'][name] = dependency
env['_SPP_DEPENDENCIES_OKAY'] = False
return dependency
def _sort_versions(versions: list) -> None:
    versions.sort(reverse=True)  # newest version first
def _version_matches(version, version_spec: _VersionSpec) -> bool:
if version_spec.minimum_version is not None and version < version_spec.minimum_version:
return False
if version_spec.maximum_version is not None and version > version_spec.maximum_version:
return False
return True
def _find_version(env: Environment, dependency: _Dependency):
for update in (False, True):
versions_signature = inspect.signature(dependency.recipe.versions)
kwargs = {}
if 'options' in versions_signature.parameters:
kwargs['options'] = dependency.version_spec.options
versions = dependency.recipe.versions(env, update=update, **kwargs)
_sort_versions(versions)
for version in versions:
kwargs = {}
dependencies_signature = inspect.signature(dependency.recipe.dependencies)
if 'options' in dependencies_signature.parameters:
kwargs['options'] = dependency.version_spec.options
if _version_matches(version, dependency.version_spec):
canadd = True
for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items():
if not _can_add_dependency(env, depname, _parse_version_spec(depspec)):
canadd = False
break
if canadd:
depdeps = []
for depname, depspec in dependency.recipe.dependencies(env, version, **kwargs).items():
depdeps.append(_add_dependency(env, depname, _parse_version_spec(depspec)))
dependency.version = version
dependency.depdeps = depdeps
return
print(f'Available versions: \n{versions}')
print(f'Required version: {dependency.version_spec}')
raise Exception(f'Could not find a suitable version for dependency {dependency.name}.')
def _wrap_builder(builder, target_type: TargetType):
def _wrapped(env, dependencies = {}, *args, **kwargs):
target_dependencies = []
for name, version_spec in dependencies.items():
if version_spec == {} and name not in env['SPP_DEPENDENCIES']: # this is basically a shortcut to adding targets from other modules without having to save them in the env
dep_target = _find_target(env, name)
if dep_target is not None and dep_target.target_type != TargetType.PROGRAM:
target_dependencies.append(dep_target)
# TODO: there might be an issue here with dependencies not being injected this way :/
continue
target_dependencies.append(_add_dependency(env, name, _parse_version_spec(version_spec)))
if 'CPPPATH' not in kwargs:
kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
if 'CPPDEFINES' not in kwargs:
kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
if 'LIBPATH' not in kwargs:
kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
if 'LIBS' not in kwargs and 'LIBS' in env:
kwargs['LIBS'] = copy.copy(env['LIBS'])
if 'source' in kwargs:
source = kwargs['source']
if not isinstance(source, list):
source = [source]
new_source = []
for src in source:
if isinstance(src, str):
new_source.append(env.Entry(src))
else:
new_source.append(src)
kwargs['source'] = new_source
target = _Target()
if 'name' in kwargs:
target.name = kwargs['name']
else:
trgt = _target_entry(kwargs.get('target'))
if trgt is not None:
target.name = str(trgt.name)
else:
target.name = 'Unknown target'
target.target_type = target_type
target.builder = builder
target.args = args
target.kwargs = kwargs
target.dependencies = target_dependencies
module_folder = _normalize_module_path(env, env.Dir('.').abspath)
if module_folder is not None:
module = env['SPP_MODULES'].get(module_folder)
if module is None:
env.Warn(f'No module config found for target {target.name} at {module_folder}')
else:
target.module = module
env.Append(SPP_TARGETS = [target])
if not target.dependencies:
_build_target(target)
return target
return _wrapped
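# Illustrative usage of a wrapped builder. `dependencies` maps recipe names to version
# specs; an empty spec may also pull in another target of this project by name
# (the target and recipe names below are hypothetical):
#
#     lib = env.Library(target='engine', source=env.RGlob('.', '*.cpp'),
#                       dependencies={'fmt': {'min': (10, 0, 0)}, 'utils': {}})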
def _wrap_default(default):
def _wrapped(env, arg):
if isinstance(arg, _Target):
env.Append(SPP_DEFAULT_TARGETS = [arg])
elif isinstance(arg, dict) and '_target' in arg:
default(arg['_target'])
else:
default(arg)
return _wrapped
def _wrap_depends(depends):
def _wrapped(env, dependant, dependency):
if isinstance(dependant, _Target) or isinstance(dependency, _Target):
env.Append(SPP_TARGET_DEPENDENCIES = [(dependant, dependency)])
return
elif isinstance(dependant, dict) and '_target' in dependant:
dependant = dependant['_target']
elif isinstance(dependency, dict) and '_target' in dependency:
dependency = dependency['_target']
depends(dependant, dependency)
return _wrapped
def _build_target(target: _Target):
for dependency in target.dependencies:
_inject_dependency(dependency, target.kwargs)
if 'LIBS' in target.kwargs:
libs_copy = list(target.kwargs['LIBS'])
for lib in libs_copy:
            if isinstance(lib, str) and os.path.isabs(lib):
                target.kwargs['LIBS'].remove(lib)
                target.kwargs['LIBS'].append(env.File(lib))
elif isinstance(lib, _Target):
if not lib.target:
_build_target(lib)
target.kwargs['LIBS'].remove(lib)
target.kwargs['LIBS'].append(lib.target)
new_kwargs = target.kwargs.copy()
if 'target' in new_kwargs: # there should always be a target, right?
new_kwargs['target'] = f"{new_kwargs['target']}-{build_type}"
target.target = target.builder(*target.args, **new_kwargs)
def _version_to_string(version) -> str:
return '.'.join([str(v) for v in version])
def _finalize(env: Environment):
if dump is not None:
_dump()
if generate_project:
_generate_project(generate_project)
Exit(0)
version_requirements = {dep.name: {
'min': dep.version_spec.minimum_version and _version_to_string(dep.version_spec.minimum_version),
'max': dep.version_spec.maximum_version and _version_to_string(dep.version_spec.maximum_version),
} for dep in env['SPP_DEPENDENCIES'].values()}
env['_SPP_DEPENDENCIES_OKAY'] = False
while not env['_SPP_DEPENDENCIES_OKAY']:
env['_SPP_DEPENDENCIES_OKAY'] = True
for dependency in list(env['SPP_DEPENDENCIES'].values()):
if not dependency.version:
_find_version(env, dependency)
with open('cache/versions.json', 'w') as f:
json.dump({
'requirements': version_requirements,
'selected': {
dep.name: _version_to_string(dep.version) for dep in env['SPP_DEPENDENCIES'].values()
}
}, f)
for target in env['SPP_TARGETS']:
_build_target(target)
for target in env['SPP_DEFAULT_TARGETS']:
env.Default(target.target)
for dependant, dependency in env['SPP_TARGET_DEPENDENCIES']:
if isinstance(dependant, _Target):
dependant = dependant.target
if isinstance(dependency, _Target):
dependency = dependency.target
env.Depends(dependant, dependency)
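# The project's top-level SConscript is expected to call env.Finalize() once, after all
# modules and targets have been declared: dependency version resolution, the deferred
# target builds and the default/depends bookkeeping all happen here.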
def _find_target(env: Environment, target_name: str) -> '_Target|None':
for target in env['SPP_TARGETS']:
if target.name == target_name:
return target
return None
def _get_fallback_cache_dir() -> str:
return Dir('#cache').abspath
def _find_system_cache_dir() -> str:
if os.name == 'posix':
if os.environ.get('XDG_CACHE_HOME'):
return os.environ['XDG_CACHE_HOME']
elif os.environ.get('HOME'):
return os.path.join(os.environ['HOME'], '.cache')
elif os.name == 'nt':
# TODO: just guessing
return os.environ['LocalAppData']
# fallback
return _get_fallback_cache_dir()
def _target_entry(target_value):
if target_value is None:
return None
if not isinstance(target_value, list):
target_value = [target_value]
if len(target_value) < 1:
return None
if isinstance(target_value[0], str):
target_value[0] = env.Entry(target_value[0])
return target_value[0]
def _generate_project(project_type: str) -> None:
try:
import jinja2
except ImportError:
        _error(None, 'Project generation requires the jinja2 package to be installed.')
source_folder, target_folder = {
'clion': (os.path.join(_spp_dir.abspath, 'util', 'clion_project_template'), Dir('#.idea').abspath),
'vscode': (os.path.join(_spp_dir.abspath, 'util', 'vscode_project_template'), Dir('#.vscode').abspath),
'vs': (os.path.join(_spp_dir.abspath, 'util', 'vs_project_template'), Dir('#').abspath)
}.get(project_type, (None, None))
if not source_folder:
_error(None, 'Invalid project type option.')
uuid_cache_file = pathlib.Path(env['SHARED_CACHE_DIR'], 'uuids.json')
uuid_cache = {}
save_uuid_cache = False
if uuid_cache_file.exists():
try:
with uuid_cache_file.open('r') as f:
uuid_cache = json.load(f)
except Exception as e:
print(f'Error loading UUID cache: {e}')
def _generate_uuid(name: str = '', ms_style: bool = False) -> str:
nonlocal save_uuid_cache
if name and name in uuid_cache:
result = uuid_cache[name]
else:
result = str(uuid.uuid4())
if name:
uuid_cache[name] = result
save_uuid_cache = True
if ms_style:
return f'{{{result.upper()}}}'
return result
root_path = pathlib.Path(env.Dir('#').abspath)
def _get_executables() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.PROGRAM:
trgt = _target_entry(target.kwargs['target'])
                def _exe_path(build_type, trgt=trgt) -> str:  # bind now: a plain closure would see only the last loop iteration's trgt
exe_path = pathlib.Path(trgt.abspath).relative_to(root_path)
exe_path = exe_path.parent / f'{env.subst("$PROGPREFIX")}{exe_path.name}-{build_type}{env.subst("$PROGSUFFIX")}'
return str(exe_path)
result.append({
'name': target.name,
'filename': _exe_path,
'target': target,
'type': 'executable',
'module': target.module
})
return result
def _get_libraries() -> list:
result = []
for target in env['SPP_TARGETS']:
if target.target_type == TargetType.STATIC_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
                def _lib_path(build_type, trgt=trgt) -> str:  # bind now to avoid the late-binding closure bug
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$LIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$LIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path,
'target': target,
'type': 'static_library',
'module': target.module
})
elif target.target_type == TargetType.SHARED_LIBRARY:
trgt = _target_entry(target.kwargs['target'])
                def _lib_path(build_type, trgt=trgt) -> str:  # bind now to avoid the late-binding closure bug
lib_path = pathlib.Path(trgt.abspath).relative_to(root_path)
lib_path = lib_path.parent / f'{env.subst("$SHLIBPREFIX")}{lib_path.name}-{build_type}{env.subst("$SHLIBSUFFIX")}'
return str(lib_path)
result.append({
'name': target.name,
'filename': _lib_path,
'target': target,
                    'type': 'shared_library',
'module': target.module
})
return result
def _get_modules() -> list:
result = []
for folder, config in env['SPP_MODULES'].items():
result.append({
'name': config.name,
'private_folder': os.path.join('private', folder),
'public_folder': os.path.join('public', folder),
'description': config.description,
'cxx_namespace': config.cxx_namespace
})
return result
    def _escape_path(path: str) -> str:
        return path.replace('\\', '\\\\')
def _strip_path_prefix(path: str, skip_eles: int) -> str:
for _ in range(skip_eles):
pos = path.find(os.sep)
if pos < 0:
return ''
path = path[pos+1:]
return path
def _folder_list(file_list: list[str], skip_eles: int = 0) -> list[str]:
result = {}
for file in file_list:
folder = os.path.dirname(file)
folder = _strip_path_prefix(folder, skip_eles)
if folder == '':
continue
while True:
result[folder] = True
# also add all parents
sep_pos = folder.rfind(os.sep)
if sep_pos < 0:
break
folder = folder[0:sep_pos]
return list(result.keys())
def _get_sources(target_dict: dict) -> list[str]:
target : _Target = target_dict['target']
sources = target.kwargs.get('source')
return [str(pathlib.Path(source.abspath).relative_to(root_path)) for source in sources]
def _get_headers(folder: str) -> list[str]:
result = []
for root, _, files in os.walk(folder):
for file in files:
_, ext = os.path.splitext(file)
if ext in ('.h', '.hpp', '.inl', '.hxx'):
result.append(os.path.join(root, file))
return result
def _get_target_property(build_type: str, target: str, path: str) -> Any:
import subprocess
output = subprocess.check_output((shutil.which('scons'), '--silent', f'--build_type={build_type}', '--dump=targets', '--dump_format=json', f'--dump_path={target}/{path}'), text=True).strip()
return json.loads(output)
executables = _get_executables()
libraries = _get_libraries()
modules = _get_modules()
jinja_env = jinja2.Environment()
jinja_env.globals['generate_uuid'] = _generate_uuid
jinja_env.globals['get_sources'] = _get_sources
jinja_env.globals['get_headers'] = _get_headers
jinja_env.globals['get_target_property'] = _get_target_property
jinja_env.globals['project'] = {
'name': env.Dir('#').name,
'executables': executables,
'libraries': libraries,
'modules': modules,
'build_types': ['debug', 'release_debug', 'release', 'profile'],
'cxx_standard': env['CXX_STANDARD']
}
jinja_env.globals['scons_exe'] = shutil.which('scons')
jinja_env.globals['nproc'] = multiprocessing.cpu_count()
jinja_env.filters['escape_path'] = _escape_path
jinja_env.filters['strip_path_prefix'] = _strip_path_prefix
jinja_env.filters['folder_list'] = _folder_list
jinja_env.filters['basename'] = os.path.basename
jinja_env.filters['dirname'] = os.path.dirname
source_path = pathlib.Path(source_folder)
target_path = pathlib.Path(target_folder)
config = {}
config_file = source_path / 'template.json'
if config_file.exists():
with config_file.open('r') as f:
config = json.load(f)
files_config = config.get('files', {})
for source_file in source_path.rglob('*'):
if source_file == config_file:
continue
if not source_file.is_file():
continue
source_file_relative = source_file.relative_to(source_path)
file_config = files_config.get(str(source_file_relative).replace('\\', '/'), {})
one_per = file_config.get('one_per', 'project')
def generate_file_once() -> None:
is_jinja = (source_file.suffix == '.jinja')
if 'rename_to' in file_config:
new_filename = jinja_env.from_string(file_config['rename_to']).render()
target_file = target_path / new_filename
else:
target_file = target_path / source_file_relative
if is_jinja:
target_file = target_file.with_suffix('')
target_file.parent.mkdir(parents=True, exist_ok=True)
if not is_jinja:
shutil.copyfile(source_file, target_file)
return
with source_file.open('r') as f:
try:
templ = jinja_env.from_string(f.read())
except jinja2.TemplateSyntaxError as e:
e.filename = str(source_file)
raise e
with target_file.open('w') as f:
f.write(templ.render())
try:
if one_per == 'project':
generate_file_once()
elif one_per == 'target':
for executable in executables:
jinja_env.globals['target'] = executable
generate_file_once()
for library in libraries:
jinja_env.globals['target'] = library
generate_file_once()
else:
raise ValueError(f'invalid value for "one_per": {one_per}')
except jinja2.TemplateSyntaxError as e:
env.Error(f'Jinja syntax error at {e.filename}:{e.lineno}: {e.message}')
Exit(1)
if save_uuid_cache:
try:
with uuid_cache_file.open('w') as f:
json.dump(uuid_cache, f)
except Exception as e:
print(f'Error writing uuid cache: {e}')
def _dump() -> None:
def _dump_as_text(data: Any) -> str:
from pprint import pformat
dump_name = {
'env': 'Environment',
'config': 'Configuration',
'modules': 'Modules',
'targets': 'Targets'
}[dump]
return '\n'.join((
f'==== Begin {dump_name} Dump ====',
pformat(data),
f'==== End {dump_name} Dump ===='
))
def _dump_as_json(data: Any) -> str:
        class _Encoder(json.JSONEncoder):
            def default(self, o):
                # check Node first, then generic iterables, then plain objects
                if isinstance(o, Node):
                    return o.abspath
                if hasattr(o, '__iter__'):
                    return list(o)
                if hasattr(o, '__dict__'):
                    return o.__dict__
                return super().default(o)
return json.dumps(data, cls=_Encoder)
    def _apply_path(data: Any, path: str) -> Any:
        # _error exits the build, so no explicit return is needed after the error calls
        for part in path.split('/'):
            if isinstance(data, dict):
                if part not in data:
                    _error(None, f'Invalid path specified. No key {part} in dict {data}.')
                data = data[part]
            elif isinstance(data, list):
                try:
                    part = int(part)
                except ValueError:
                    _error(None, f'Invalid path specified. {part} is not a valid list index.')
                if part < 0 or part >= len(data):
                    _error(None, f'Invalid path specified. {part} is out of list range.')
                data = data[part]
            elif isinstance(data, object):
                data = data.__dict__
                if part not in data:
                    _error(None, f'Invalid path specified. No attribute {part} in object {data}.')
                data = data[part]
            else:
                _error(None, f'Invalid path specified. {data} has no properties.')
        return data
def _targets() -> dict:
result = {}
for target in env['SPP_TARGETS']:
kwargs = target.kwargs.copy()
for dependency in target.dependencies:
_inject_dependency(dependency, kwargs)
result[target.name] = {
'target_type': target.target_type.name,
'args': target.args,
# 'kwargs': kwargs, <- circular dependency here and the json encoder doesn't like that
'CPPDEFINES': kwargs.get('CPPDEFINES', env['CPPDEFINES']),
'CPPPATH': kwargs.get('CPPPATH', env['CPPPATH'])
}
return result
data = {
'env': env.Dictionary,
'config': lambda: config,
'modules': lambda: env['SPP_MODULES'],
'targets': _targets
}[dump]()
global dump_path
dump_path = dump_path.strip()
if dump_path != '':
data = _apply_path(data, dump_path)
dump_fn = {
'text': _dump_as_text,
'json': _dump_as_json
}[dump_format]
print(dump_fn(data))
Exit(0)
Import('config')
if not config.get('PROJECT_NAME'):
config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'):
config['CXX_STANDARD'] = 'c++23'
if not config.get('CXX_NO_EXCEPTIONS'):
config['CXX_NO_EXCEPTIONS'] = False
if not config.get('PREPROCESSOR_PREFIX'):
config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
if 'COMPILATIONDB_FILTER_FILES' not in config:
config['COMPILATIONDB_FILTER_FILES'] = True
if 'WINDOWS_DISABLE_DEFAULT_DEFINES' not in config:
config['WINDOWS_DISABLE_DEFAULT_DEFINES'] = False
AddOption(
'--build_type',
dest = 'build_type',
type = 'choice',
choices = ('debug', 'release_debug', 'release', 'profile'),
nargs = 1,
action = 'store',
default = 'debug'
)
AddOption(
'--unity',
dest = 'unity_mode',
type = 'choice',
choices = ('enable', 'disable', 'stress'),
nargs = 1,
action = 'store',
default = 'enable'
)
AddOption(
'--variant',
dest = 'variant',
nargs = 1,
action = 'store'
)
AddOption(
'--asan',
dest = 'enable_asan',
action = 'store_true'
)
AddOption(
'--config_file',
dest = 'config_file',
nargs = 1,
action = 'store',
default = 'config.py'
)
AddOption(
'--compiler',
dest = 'compiler',
type = 'choice',
choices = ('auto', 'gcc', 'clang', 'msvc'),
nargs = 1,
action = 'store',
default = 'auto'
)
AddOption(
'--update_repositories',
dest = 'update_repositories',
action = 'store_true'
)
AddOption(
'--disable_auto_update',
dest = 'disable_auto_update',
action = 'store_true'
)
AddOption(
'--dump',
dest = 'dump',
type = 'choice',
choices = ('env', 'config', 'modules', 'targets'),
nargs = 1,
action = 'store'
)
AddOption(
'--dump_format',
dest = 'dump_format',
type = 'choice',
choices = ('text', 'json'),
nargs = 1,
action = 'store',
default = 'text'
)
AddOption(
'--dump_path',
dest = 'dump_path',
nargs = 1,
action = 'store',
default = ''
)
AddOption(
'--generate_project',
dest = 'generate_project',
type = 'choice',
choices = ('clion', 'vscode', 'vs'),
nargs = 1,
action = 'store'
)
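# Illustrative invocations (target and variant names are hypothetical):
#
#     scons --build_type=release --compiler=clang
#     scons --variant=headless --unity=disable
#     scons --dump=targets --dump_format=json --dump_path=engine/CPPPATH
#     scons --generate_project=clion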
_spp_dir = Dir('.')
build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode')
variant = GetOption('variant')
enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
disable_auto_update = GetOption('disable_auto_update')
dump = GetOption('dump')
dump_format = GetOption('dump_format')
dump_path = GetOption('dump_path')
generate_project = GetOption('generate_project')
default_CC = {
'gcc': 'gcc',
'clang': 'clang',
'msvc': 'cl.exe'
}.get(compiler, None)
default_CXX = {
'gcc': 'g++',
'clang': 'clang++',
'msvc': 'cl.exe'
}.get(compiler, None)
if not os.path.isabs(config_file):
config_file = os.path.join(Dir('#').abspath, config_file)
vars = Variables(config_file)
vars.Add('CC', 'The C Compiler', default_CC)
vars.Add('CXX', 'The C++ Compiler', default_CXX)
vars.Add('LINK', 'The Linker')
vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
vars.Add('CFLAGS', 'C Compiler Flags')
vars.Add('CXXFLAGS', 'C++ Compiler Flags')
vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current'
' project.', config['COMPILATIONDB_FILTER_FILES'])
vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())
if 'VARIABLES' in config:
for vardef in config['VARIABLES']:
vars.Add(*vardef)
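# Illustrative config.py (loaded through SCons Variables; all keys optional):
#
#     CC = 'clang'
#     CXX = 'clang++'
#     CCFLAGS = '-march=native'
#     SHOW_INCLUDES = False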
tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config:
tools.extend(config['TOOLS'])
env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['SPP_RECIPES_FOLDERS'] = []
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
_info(None, f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except:
env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
_info(None, f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories
env['CXX_STANDARD'] = config['CXX_STANDARD'] # make it available to everyone
env['CXX_NO_EXCEPTIONS'] = config['CXX_NO_EXCEPTIONS']
env['DEPS_CFLAGS'] = []
env['DEPS_CXXFLAGS'] = []
env['DEPS_LINKFLAGS'] = []
env['SHARED_CACHE_DIR'] = Dir('#cache').abspath
# allow compiling to variant directories (each gets their own bin/lib/cache dirs)
if variant:
env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
env['VARIANT_DIR'] = f'{env["CACHE_DIR"]}/variant'
env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
else:
env['VARIANT_DIR'] = None
env['COMPILATIONDB_USE_ABSPATH'] = True
if env['COMPILATIONDB_FILTER_FILES']:
env['COMPILATIONDB_PATH_FILTER'] = f"{Dir('#').abspath}/*"
comp_db = env.CompilationDatabase(target = '#compile_commands.json')
Default(comp_db)
env['BIN_DIR'] = Dir('#bin').abspath
env['LIB_DIR'] = Dir('#lib').abspath
env['CACHE_DIR'] = env['SHARED_CACHE_DIR']
env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
env['BUILD_TYPE'] = build_type
env.Append(LIBPATH = [env['LIB_DIR']]) # to allow submodules to link to each other without hassle
# make sure these are all defined in case someone wants to use/copy them
env.Append(CCFLAGS = [])
env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])
# init SPP environment variables
env['SPP_DIR'] = _spp_dir.abspath
env['SPP_TARGETS'] = []
env['SPP_DEFAULT_TARGETS'] = []
env['SPP_TARGET_DEPENDENCIES'] = []
env['SPP_DEPENDENCIES'] = {}
env['SPP_RECIPES'] = {}
env['SPP_MODULES'] = {} # maps from folder to _Module
env['SPP_CPU_FEATURES'] = config.get('USE_CPU_FEATURES', [])
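# Illustrative: vector-instruction flags come from the project config, e.g.
#
#     config['USE_CPU_FEATURES'] = ['sse4.2', 'avx2']  # mapped to -msse4.2 -mavx2 on GCC/Clang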
env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
if variant:
env['OBJSUFFIX'] = f".{variant}{env['OBJSUFFIX']}"
# create the cache dir
os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
with open(cache_gitignore, 'w') as f:
f.write('*\n')
if env['CACHE_DIR'] != env['SHARED_CACHE_DIR']:
os.makedirs(env['SHARED_CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["SHARED_CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
with open(cache_gitignore, 'w') as f:
f.write('*\n')
# check whether repositories were updated since last boot
update_stamp_file = f'{env["SHARED_CACHE_DIR"]}/last_update.stamp'
update_time = 0.0
if os.path.exists(update_stamp_file):
with open(update_stamp_file, 'r') as f:
try:
update_time = float(f.read())
except:
pass
boot_time = psutil.boot_time()
if not disable_auto_update and boot_time > update_time:
    _info(None, 'Repositories have not been updated since the last boot, updating now...')
env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
with open(update_stamp_file, 'w') as f:
f.write(str(time.time()))
# create the clone and system cache dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)
# try to detect what compiler we are using
compiler_exe = os.path.basename(env.subst(env['CC']))
if 'gcc' in compiler_exe:
env['COMPILER_FAMILY'] = 'gcc'
elif 'clang' in compiler_exe:
env['COMPILER_FAMILY'] = 'clang'
elif 'cl' in compiler_exe:
env['COMPILER_FAMILY'] = 'cl'
else:
env['COMPILER_FAMILY'] = 'unknown'
# setup unity build depending on mode
if unity_mode == 'disable':
env['UNITY_DISABLE'] = True
elif unity_mode == 'stress': # compile everything in one single file to stress test the unity build
env['UNITY_MAX_SOURCES'] = 100000 # I'll hopefully never reach this
env['UNITY_MIN_FILES'] = 1
# setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic', '-fvisibility=hidden'])
env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
if env['CXX_NO_EXCEPTIONS']:
        env.Append(CXXFLAGS = ['-fno-exceptions'])
if build_type != 'release':
env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
if env['COMPILER_FAMILY'] == 'gcc':
# GCC complains about missing initializer for "<anonymous>" that doesn't exist :/
# also GCC complains about some (compiler generated) fields in coroutines not having any linkage
# also -Wdangling-reference seems to produce a lot of false positives
        # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?)
# -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
# -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
else: # clang only
# no-gnu-anonymous-struct - we don't care
# no-missing-field-initializers - useful in some cases, annoying in most
# no-ambiguous-reversed-operator - should be quite useful, but we get a false positive, apparently?
        # no-parentheses-equality - false positive for fold expressions and doesn't seem too useful anyway
env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct', '-Wno-missing-field-initializers', '-Wno-ambiguous-reversed-operator', '-Wno-parentheses-equality'])
env.Append(CXXFLAGS = ['-fexperimental-library']) # enable std::jthread
if build_type == 'debug':
env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
env.Append(DEPS_CXXFLAGS = ['-D_GLIBCXX_DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile':
env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
if build_type == 'profile':
if env['COMPILER_FAMILY'] == 'gcc':
env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_GCC_INSTRUMENTING=1'])
env.Append(CCFLAGS = ['-finstrument-functions'])
env.Append(LINKFLAGS = ['-rdynamic'])
elif build_type == 'release':
env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
if env['ENABLE_ASAN']:
env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(LINKFLAGS = ['-fsanitize=address'])
env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
for feature in env['SPP_CPU_FEATURES']:
flag = _GCC_CPU_FEATURES_MAP.get(feature)
if flag is None:
_warn(None, f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
else:
env.Append(CCFLAGS = [flag])
elif env['COMPILER_FAMILY'] == 'cl':
cxx_version_name = {
'c++14': 'c++14',
'c++17': 'c++17',
'c++20': 'c++20',
'c++23': 'c++latest',
'c++26': 'c++latest'
}.get(env['CXX_STANDARD'], 'c++14') # default to C++14 for older versions
# C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
# C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
# C4702: unreachable code, issued after MIJIN_FATAL macro
# C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
# C4275: same as above
env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', '/vmg',
f'/std:{cxx_version_name}', '/permissive-', '/FS', '/Zc:char8_t', '/utf-8', '/Zc:preprocessor'])
env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
env.Append(DEPS_CXXFLAGS = ['/Zc:char8_t', '/utf-8', '/vmg'])
if env['CXX_NO_EXCEPTIONS']:
env.Append(CPPDEFINES = ['_HAS_EXCEPTIONS=0'])
else:
env.Append(CXXFLAGS = ['/EHsc'])
env.Append(DEPS_CXXFLAGS = ['/EHsc'])
if env['SHOW_INCLUDES']:
env.Append(CCFLAGS = ['/showIncludes'])
if build_type == 'debug':
env['PDB'] = env.File('#bin/full.pdb')
        env.Append(CCFLAGS = ['/Od', '/MDd'], LINKFLAGS = ['/DEBUG'])
env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
elif build_type == 'release_debug' or build_type == 'profile':
env['PDB'] = env.File('#bin/full.pdb')
        env.Append(CCFLAGS = ['/O2', '/MD'], LINKFLAGS = ['/DEBUG'])
env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
else:
env.Append(CCFLAGS = ['/O2', '/MD'])
env.Append(DEPS_CXXFLAGS = ['/MD'])
if env['ENABLE_ASAN']:
env.Append(CCFLAGS = ['/fsanitize=address'])
if env['COMPILER_FAMILY'] == 'gcc':
env.Append(CXXFLAGS = ['-Wno-volatile'])
elif env['COMPILER_FAMILY'] == 'clang':
env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])
# platform specific options
if os.name == 'nt':
if not config['WINDOWS_DISABLE_DEFAULT_DEFINES']:
env.Append(CDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'], CPPDEFINES = ['WIN32_LEAN_AND_MEAN', 'NOMINMAX', 'STRICT', 'UNICODE'])
env.AddMethod(_cook, 'Cook')
env.AddMethod(_parse_lib_conf, 'ParseLibConf')
env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_deps_from_json, 'DepsFromJson')
env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_lib_filename, 'LibFilename')
env.AddMethod(_find_executable, 'FindExecutable')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_info, 'Info')
env.AddMethod(_warn, 'Warn')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, TargetType.STATIC_LIBRARY), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, TargetType.STATIC_LIBRARY), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, TargetType.SHARED_LIBRARY), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program, TargetType.PROGRAM), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')
env.AddMethod(_wrap_builder(env.UnityProgram, TargetType.PROGRAM), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, TargetType.STATIC_LIBRARY), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, TargetType.STATIC_LIBRARY), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, TargetType.SHARED_LIBRARY), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')
env.AddMethod(_module_config, 'ModuleConfig')
env.AddMethod(_finalize, 'Finalize')
env.AddMethod(_find_target, 'FindTarget')
if hasattr(env, 'Gch'):
env.AddMethod(_wrap_builder(env.Gch, TargetType.STATIC_LIBRARY), 'Gch')
for addon_file in env.Glob('addons/*.py'):
env = SConscript(addon_file, exports = 'env')
Return('env')