503 lines
19 KiB
Python
503 lines
19 KiB
Python
|
|
import copy
|
|
import os
|
|
import psutil
|
|
import sys
|
|
import time
|
|
|
|
import SCons.Script
|
|
import SCons.Warnings
|
|
|
|
class _Dependency:
|
|
name: str = ''
|
|
version: str = ''
|
|
|
|
class _Target:
|
|
builder = None
|
|
target = None
|
|
source = None
|
|
args: list = []
|
|
kwargs: dict = {}
|
|
dependencies: list = []
|
|
|
|
def _cook(env: Environment, recipe_name: str, *args, **kwargs):
    """Locate <recipe_name>/recipe.py in the RECIPES_FOLDERS search path,
    import it as a standalone module and delegate to its cook() entry point."""
    import importlib.util

    candidates = (f'{folder.abspath}/{recipe_name}/recipe.py'
                  for folder in env['RECIPES_FOLDERS'])
    source_file = next((path for path in candidates if os.path.exists(path)), None)
    if not source_file:
        raise Exception(f'Could not find recipe {recipe_name}.')

    # Standard importlib dance: build a spec from the file, materialize the
    # module, then execute it so recipe.cook is defined.
    spec = importlib.util.spec_from_file_location(recipe_name, source_file)
    recipe = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(recipe)
    return recipe.cook(env, *args, **kwargs)
|
|
|
|
def _module(env: Environment, file: str):
    # Run a sub-SConscript with this environment exported, building into the
    # configured variant directory (if any) while sources stay rooted at '.'.
    return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
|
|
|
|
def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
    """Merge a library configuration dict into the environment.

    Every key is optional; a missing key contributes an empty list, which is
    a no-op for env.Append.
    """
    keys = ('CPPPATH', 'CPPDEFINES', 'LIBPATH', 'LIBS', 'JINJA_TEMPLATE_SEARCHPATH')
    env.Append(**{key: lib_conf.get(key, []) for key in keys})
|
|
|
|
def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
|
|
if list_name not in dependency:
|
|
return
|
|
if list_name not in kwargs:
|
|
kwargs[list_name] = []
|
|
kwargs[list_name].extend(dependency[list_name]) # TODO: eliminate duplicates?
|
|
|
|
def _inject_dependency(dependency, kwargs: dict, add_sources: bool = True) -> None:
    """Fold a dependency dict (and its transitive 'DEPENDENCIES') into builder
    kwargs. Non-dict dependencies are ignored."""
    if not isinstance(dependency, dict):
        return
    for list_name in ('CPPPATH', 'CPPDEFINES', 'LIBPATH', 'LIBS'):
        _inject_list(kwargs, dependency, list_name)
    # Extra sources are only taken from the direct dependency (not transitive
    # ones), and only when the caller's 'source' kwarg is list-like.
    if add_sources and 'ADDITIONAL_SOURCES' in dependency and hasattr(kwargs['source'], 'extend'):
        kwargs['source'].extend(dependency['ADDITIONAL_SOURCES'])
    for inner_dependency in dependency.get('DEPENDENCIES', ()):
        _inject_dependency(inner_dependency, kwargs, False)
|
|
|
|
def _rglob(env: Environment, root_path: str, pattern: str, **kwargs):
    # Recursive Glob: walk the tree under root_path (depth-first via a stack)
    # and collect every node matching `pattern`. Result is sorted, so the
    # traversal order does not affect the output.
    result_nodes = []
    paths = [root_path]
    while paths:
        path = paths.pop()
        all_nodes = env.Glob(f'{path}/*', **kwargs)
        paths.extend(entry for entry in all_nodes if entry.isdir() or (entry.srcnode() and entry.srcnode().isdir())) # `srcnode()` must be used because `isdir()` doesn't work for entries in variant dirs which haven't been copied yet.
        result_nodes.extend(env.Glob(f'{path}/{pattern}', **kwargs))
    return sorted(result_nodes)
|
|
|
|
def _make_interface(env: Environment, dependencies: list = []):
|
|
kwargs = {}
|
|
for dependency in dependencies:
|
|
_inject_dependency(dependency, kwargs)
|
|
return {
|
|
'CPPPATH': kwargs.get('CPPPATH', []),
|
|
'CPPDEFINES': kwargs.get('CPPDEFINES', [])
|
|
}
|
|
|
|
def _lib_filename(name: str, type: str = 'static') -> str:
|
|
# TODO: windows
|
|
ext = {
|
|
'static': 'a',
|
|
'shared': 'so'
|
|
}[type]
|
|
return f'lib{name}.{ext}'
|
|
|
|
def _find_lib(env: Environment, name: str, paths: 'list[str]', type: str = 'static'):
    """Return the first existing library file for `name` in `paths`, else None."""
    filename = _lib_filename(name, type)
    candidates = (os.path.join(path, filename) for path in paths)
    return next((candidate for candidate in candidates if os.path.exists(candidate)), None)
|
|
|
|
def _error(env: Environment, message: str):
    # Print to stderr and abort the whole SCons run with a failure exit code.
    print(message, file=sys.stderr)
    env.Exit(1)
|
|
|
|
def _build_action(target, source, env):
    # SCons action used by _Builder: replay the builder invocation captured in
    # the env's '_target' slot (a _Target, see _wrap_builder).
    the_target = env['_target']
    # NOTE(review): assumes the stored builder exposes a `.method` attribute
    # (SCons MethodWrapper) — confirm against the _wrap_builder call sites.
    the_target.builder.method(env=env, *the_target.args, **the_target.kwargs)
|
|
|
|
# Builder whose action replays a captured builder invocation; only referenced
# from the commented-out return in _wrap_builder at the moment.
_Builder = Builder(action=Action(_build_action, None))
|
|
|
|
def _add_dependency(name: str, version: str) -> _Dependency:
    """Create a _Dependency record for the given name/version pair."""
    result = _Dependency()
    result.name = name
    result.version = version
    return result
|
|
|
|
def _wrap_builder(builder, is_lib: bool = False):
    """Wrap an SCons builder so calls inherit env defaults and are recorded.

    The wrapper copies CPPPATH/CPPDEFINES/LIBPATH/LIBS from the environment
    into the call's kwargs when the caller didn't supply them, moves
    absolute-path LIBS entries into the sources (so they are linked by path
    rather than via -l resolution), records a _Target describing the
    invocation, and finally delegates to the real builder.

    NOTE(review): `is_lib` is accepted but not read anywhere in this block —
    presumably reserved for future use; confirm before removing.
    """
    def _wrapped(env, dependencies=None, *args, **kwargs):
        # `dependencies` maps name -> version; default is None instead of the
        # previous shared mutable `{}`.
        target_dependencies = [_add_dependency(name, version)
                               for name, version in (dependencies or {}).items()]

        # Inherit environment defaults for any list the caller didn't supply.
        if 'CPPPATH' not in kwargs:
            kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
        if 'CPPDEFINES' not in kwargs:
            kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
        if 'LIBPATH' not in kwargs:
            kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
        if 'LIBS' not in kwargs and 'LIBS' in env:
            kwargs['LIBS'] = copy.copy(env['LIBS'])

        # Absolute library paths are passed as plain source nodes so they are
        # linked by full path instead of through LIBS/-l lookup.
        if 'LIBS' in kwargs:
            for lib in list(kwargs['LIBS']):
                if isinstance(lib, str) and os.path.isabs(lib):
                    kwargs['LIBS'].remove(lib)
                    # NOTE(review): assumes 'source' is present and list-like
                    # whenever absolute LIBS appear — confirm with callers.
                    kwargs['source'].append(lib)

        # Record the invocation; currently only needed by the (disabled)
        # _Builder-based replay below.
        target = _Target()
        target.target = kwargs.get('target', None)
        target.source = kwargs.get('source', None)
        target.builder = builder
        target.args = args
        target.kwargs = kwargs
        target.dependencies = target_dependencies
        # return _Builder(target=kwargs.get('target', None), source=kwargs.get('source', None), env=env, _target=target)
        return builder(*args, **kwargs)
    return _wrapped
|
|
|
|
def _wrap_default(default):
|
|
print(default)
|
|
def _wrapped(env, arg):
|
|
if isinstance(arg, dict) and '_target' in arg:
|
|
default(arg['_target'])
|
|
else:
|
|
default(arg)
|
|
return _wrapped
|
|
|
|
def _wrap_depends(depends):
|
|
def _wrapped(env, dependant, dependency):
|
|
if isinstance(dependant, dict) and '_target' in dependant:
|
|
dependant = dependant['_target']
|
|
if isinstance(dependency, dict) and '_target' in dependency:
|
|
dependency = dependency['_target']
|
|
depends(dependant, dependency)
|
|
return _wrapped
|
|
|
|
def _get_fallback_cache_dir() -> str:
    # Project-local cache directory (#cache) used when no per-user system
    # cache location is available or writable.
    return Dir('#cache').abspath
|
|
|
|
def _find_system_cache_dir() -> str:
    """Return the per-user cache directory for this platform.

    POSIX: $XDG_CACHE_HOME, then $HOME/.cache. Windows: %LocalAppData%.
    Any miss falls through to the project-local fallback directory.
    """
    if os.name == 'posix':
        if os.environ.get('XDG_CACHE_HOME'):
            return os.environ['XDG_CACHE_HOME']
        elif os.environ.get('HOME'):
            return os.path.join(os.environ['HOME'], '.cache')
    elif os.name == 'nt':
        # TODO: just guessing
        # Use .get() so a missing variable falls through to the fallback
        # instead of raising KeyError (previously os.environ['LocalAppData']).
        if os.environ.get('LocalAppData'):
            return os.environ['LocalAppData']
    # fallback
    return _get_fallback_cache_dir()
|
|
|
|
# Pull in the project configuration dict provided by the including SConstruct.
Import('config')

# Fill in defaults for settings the project didn't specify.
if not config.get('PROJECT_NAME'):
    config['PROJECT_NAME'] = 'PROJECT'
if not config.get('CXX_STANDARD'):
    config['CXX_STANDARD'] = 'c++23'

# Prefix used for project-specific preprocessor defines (e.g. <PREFIX>_RELEASE).
if not config.get('PREPROCESSOR_PREFIX'):
    config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
|
|
|
|
# Command-line options understood by this build script.

# Optimization/debug profile of the build.
AddOption(
    '--build_type',
    dest = 'build_type',
    type = 'choice',
    choices = ('debug', 'release_debug', 'release', 'profile'),
    nargs = 1,
    action = 'store',
    default = 'debug'
)

# Unity (jumbo) build behaviour; 'stress' funnels everything into one TU.
AddOption(
    '--unity',
    dest = 'unity_mode',
    type = 'choice',
    choices = ('enable', 'disable', 'stress'),
    nargs = 1,
    action = 'store',
    default = 'enable'
)

# Optional variant name; a variant gets its own bin/lib/cache directories.
AddOption(
    '--variant',
    dest = 'variant',
    nargs = 1,
    action = 'store'
)

# Enable AddressSanitizer instrumentation (gcc/clang only, see flags below).
AddOption(
    '--asan',
    dest = 'enable_asan',
    action = 'store_true'
)

# Variables file with compiler/tool overrides (fed to Variables() below).
AddOption(
    '--config_file',
    dest = 'config_file',
    nargs = 1,
    action = 'store',
    default = 'config.py'
)

# Compiler family to use; 'auto' defers to SCons' own detection.
AddOption(
    '--compiler',
    dest = 'compiler',
    type = 'choice',
    choices = ('auto', 'gcc', 'clang', 'msvc'),
    nargs = 1,
    action = 'store',
    default = 'auto'
)

# Force re-fetching of cloned/downloaded third-party repositories.
AddOption(
    '--update_repositories',
    dest = 'update_repositories',
    action = 'store_true'
)

# Dump the fully-constructed environment at the end of configuration.
AddOption(
    '--dump_env',
    dest = 'dump_env',
    action = 'store_true'
)
|
|
|
|
# Snapshot all option values into plain locals.
build_type = GetOption('build_type')
unity_mode = GetOption('unity_mode')
variant = GetOption('variant')
enable_asan = GetOption('enable_asan')
config_file = GetOption('config_file')
compiler = GetOption('compiler')
update_repositories = GetOption('update_repositories')
dump_env = GetOption('dump_env')
|
|
|
|
# Map the --compiler choice to concrete executables; 'auto' (or anything
# unknown) yields None, leaving the choice to SCons' default detection.
default_CC = {
    'gcc': 'gcc',
    'clang': 'clang',
    'msvc': 'cl.exe'
}.get(compiler, None)
default_CXX = {
    'gcc': 'g++',
    'clang': 'clang++',
    'msvc': 'cl.exe'
}.get(compiler, None)

# Resolve a relative --config_file against the project root ('#').
if not os.path.isabs(config_file):
    config_file = os.path.join(Dir('#').abspath, config_file)
|
|
|
|
# Build-variable definitions, optionally overridden by the config file.
# NOTE(review): `vars` shadows the `vars()` builtin; harmless at script level.
vars = Variables(config_file)
vars.Add('CC', 'The C Compiler', default_CC)
vars.Add('CXX', 'The C++ Compiler', default_CXX)
vars.Add('LINK', 'The Linker')
vars.Add('CCFLAGS', 'C/C++ Compiler Flags')
vars.Add('LINKFLAGS', 'Linker Flags')
vars.Add('PYTHON', 'Python Executable', 'python')
vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilation DB that are not from the current project.', True)

# Tool set: SCons defaults plus compilation-db and unity-build, extended by
# any project-specific TOOLS listed in the config.
tools = ['default', 'compilation_db', 'unity_build']
if 'TOOLS' in config:
    tools.extend(config['TOOLS'])
|
|
|
|
# Construct the build environment; the host environment is inherited so
# compilers, PATH etc. behave as in the invoking shell.
env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['RECIPES_FOLDERS'] = [Dir('recipes')]  # search path used by _cook
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')          # git clones of third-party repos
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')   # downloaded archives
env['UPDATE_REPOSITORIES'] = update_repositories
|
|
|
|
print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
try:
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True)
except OSError:
    # Creating the system-wide cache failed (permissions, read-only FS, ...):
    # fall back to the project-local cache. Narrowed from a bare `except:` so
    # real bugs (e.g. KeyboardInterrupt) are not swallowed.
    env['SYSTEM_CACHE_DIR'] = os.path.join(_get_fallback_cache_dir(), 'spp_cache')
    env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
    # NOTE(review): DOWNLOAD_DIR is not re-pointed at the fallback here —
    # looks like an oversight; confirm before changing behavior.
    print(f'Creating spp cache dir failed, using fallback: {env["SYSTEM_CACHE_DIR"]}.')
    os.makedirs(env['SYSTEM_CACHE_DIR'], exist_ok=True) # no more safeguards!
|
|
|
|
env['SHARED_CACHE_DIR'] = Dir(f'#cache').abspath
# allow compiling to variant directories (each gets their own bin/lib/cache dirs)
if variant:
    env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
    env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
    env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
    env['VARIANT_DIR'] = f'{env["CACHE_DIR"]}/variant'
    # Expose the active variant to the preprocessor.
    env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
else:
    env['VARIANT_DIR'] = None
    # Only the default (non-variant) build produces compile_commands.json.
    env['COMPILATIONDB_USE_ABSPATH'] = True
    if env['COMPILATIONDB_FILTER_FILES']:
        # Restrict DB entries to files under the project root.
        env['COMPILATIONDB_PATH_FILTER'] = f"{Dir('#').abspath}/*"
    comp_db = env.CompilationDatabase(target = '#compile_commands.json')
    Default(comp_db)
    env['BIN_DIR'] = Dir('#bin').abspath
    env['LIB_DIR'] = Dir('#lib').abspath
    env['CACHE_DIR'] = env['SHARED_CACHE_DIR']
env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
env['BUILD_TYPE'] = build_type
env.Append(LIBPATH = [env['LIB_DIR']]) # to allow submodules to link to each other without hassle
|
|
|
|
# make sure these are all defined in case someone wants to use/copy them
env.Append(CCFLAGS = [])
env.Append(CXXFLAGS = [])
env.Append(CPPPATH = [])
env.Append(CPPDEFINES = [])
env.Append(LINKFLAGS = [])

# create the cache dir, with a .gitignore so its contents never get committed
os.makedirs(env['CACHE_DIR'], exist_ok=True)
cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
if not os.path.exists(cache_gitignore):
    with open(cache_gitignore, 'w') as f:
        f.write('*\n')
|
|
|
|
# A variant build has its own CACHE_DIR; make sure the shared cache also
# exists and is git-ignored.
if env['CACHE_DIR'] != env['SHARED_CACHE_DIR']:
    os.makedirs(env['SHARED_CACHE_DIR'], exist_ok=True)
    cache_gitignore = f'{env["SHARED_CACHE_DIR"]}/.gitignore'
    if not os.path.exists(cache_gitignore):
        with open(cache_gitignore, 'w') as f:
            f.write('*\n')
|
|
|
|
# Check whether repositories were updated since last boot; if not, force an
# update pass now and refresh the stamp file.
update_stamp_file = f'{env["SHARED_CACHE_DIR"]}/last_update.stamp'
update_time = 0.0
if os.path.exists(update_stamp_file):
    with open(update_stamp_file, 'r') as f:
        try:
            update_time = float(f.read())
        except (ValueError, OSError):
            # Corrupt or unreadable stamp: treat as "never updated".
            # Narrowed from a bare `except:`.
            pass
boot_time = psutil.boot_time()
if boot_time > update_time:
    print('Didn\'t update repositories since last boot, doing it now...')
    env['UPDATE_REPOSITORIES'] = True
if env['UPDATE_REPOSITORIES']:
    # Record the update time so subsequent runs in this boot session skip it.
    with open(update_stamp_file, 'w') as f:
        f.write(str(time.time()))

# create the clone and system cache dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)
|
|
|
|
# try to detect what compiler we are using, based on the executable name
compiler_exe = os.path.basename(env.subst(env['CC']))
if 'gcc' in compiler_exe:
    env['COMPILER_FAMILY'] = 'gcc'
elif 'clang' in compiler_exe:
    # checked before 'cl' so "clang" isn't misclassified as MSVC
    env['COMPILER_FAMILY'] = 'clang'
elif 'cl' in compiler_exe:
    env['COMPILER_FAMILY'] = 'cl'
else:
    env['COMPILER_FAMILY'] = 'unknown'
|
|
|
|
# setup unity build depending on mode (variables read by the unity_build tool)
if unity_mode == 'disable':
    env['UNITY_DISABLE'] = True
elif unity_mode == 'stress': # compile everything in one single file to stress test the unity build
    env['UNITY_MAX_SOURCES'] = 100000 # I'll hopefully never reach this
    env['UNITY_MIN_FILES'] = 1
|
|
|
|
# setup compiler specific options
if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic'])
    env.Append(CXXFLAGS = [f'-std={config["CXX_STANDARD"]}'])
    if build_type != 'release':
        # Non-release binaries locate the project's shared libs via rpath.
        env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
    # Wrap libraries in --start-group/--end-group so link order doesn't matter.
    env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
    if env['COMPILER_FAMILY'] == 'gcc':
        # GCC complains about missing initializer for "<anonymous>" that doesn't exist :/
        # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
        # also -Wdangling-reference seems to produce a lot of false positives
        # also -Wmaybe-uninitialized seems to produce false positives (or a bug in the standard library?))
        # -Winit-list-lifetime triggers in vulkan.hpp even though it is disabled via pragma :/
        # -Wtautological-compare triggers in libfmt and doesn't seem too useful anyway
        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-maybe-uninitialized'])
        env.Append(CXXFLAGS = ['-Wno-subobject-linkage', '-Wno-dangling-reference', '-Wno-init-list-lifetime', '-Wno-tautological-compare'])
    else:
        # no-gnu-anonymous-struct - we don't care
        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
    if build_type == 'debug':
        env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])
        if build_type == 'profile':
            if env['COMPILER_FAMILY'] == 'gcc':
                # GCC-only function instrumentation for profiling builds.
                env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_GCC_INSTRUMENTING=1'])
                env.Append(CCFLAGS = ['-finstrument-functions'])
                env.Append(LINKFLAGS = ['-rdynamic'])
    elif build_type == 'release':
        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_RELEASE', 'NDEBUG'])

    if enable_asan:
        env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
        env.Append(LINKFLAGS = ['-fsanitize=address'])

elif env['COMPILER_FAMILY'] == 'cl':
    # C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
    # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
    # C4702: unreachable code, issued after MIJIN_FATAL macro
    # C4251: missing dll-interface of some std types, yaml-cpp doesn't compile with this enabled
    # C4275: same as above
    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/wd4702', '/wd4251', '/wd4275', '/bigobj', f'/std:{config["CXX_STANDARD"]}', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t'])
    env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
    if build_type == 'debug':
        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
        env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
    elif build_type == 'release_debug' or build_type == 'profile':
        env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
    else:
        env.Append(CCFLAGS = ['/O2'])

# Volatile-deprecation warnings (and a few others) differ between GCC and Clang.
if env['COMPILER_FAMILY'] == 'gcc':
    env.Append(CXXFLAGS = ['-Wno-volatile'])
elif env['COMPILER_FAMILY'] == 'clang':
    env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types', '-Wno-unknown-warning-option'])
|
|
|
|
# Register helpers and wrapped builders as environment methods.
env.AddMethod(_cook, 'Cook')
env.AddMethod(_parse_lib_conf, 'ParseLibConf')
env.AddMethod(_rglob, 'RGlob')
env.AddMethod(_make_interface, 'MakeInterface')
env.AddMethod(_find_lib, 'FindLib')
env.AddMethod(_error, 'Error')
env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary')
env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary')
env.AddMethod(_wrap_builder(env.Program), 'Program')
env.AddMethod(_wrap_default(env.Default), 'Default')
env.AddMethod(_wrap_depends(env.Depends), 'Depends')

# Unity-build variants provided by the 'unity_build' tool.
env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
env.AddMethod(_module, 'Module')

# Precompiled-header support is optional (only present if a tool provides it).
if hasattr(env, 'Gch'):
    env.AddMethod(_wrap_builder(env.Gch), 'Gch')

# Let add-on scripts extend or replace the environment.
for addon_file in env.Glob('addons/*.py'):
    env = SConscript(addon_file, exports = 'env')
|
|
|
|
# Optional diagnostic dump of the final environment (--dump_env).
if dump_env:
    print('==== Begin Environment Dump =====')
    print(env.Dump())
    print('==== End Environment Dump =====')
|
|
|
|
# Keep a reference to the original warning-string handler so the wrapper
# installed at the bottom of this file can delegate to it.
_old_fn = SCons.Warnings.process_warn_strings

import SCons.Util
|
|
class _FrameWrapper(SCons.Util.Proxy):
    # NOTE(review): looks like debugging scaffolding for inspecting SConscript
    # call frames — the 'YAY' print fires whenever 'retval' is accessed.
    # Confirm whether this whole proxy is still needed.
    def __init__(self, subject):
        super().__init__(subject)

    def __getattr__(self, name):
        if name == 'retval':
            print('YAY')
        # Delegate everything to the wrapped frame object.
        return super().__getattr__(name)
|
|
|
|
|
|
# NOTE(review): debugging leftovers — wraps the root SConscript frame with the
# _FrameWrapper proxy and prints the call stack on every run.
SCons.Script.call_stack[0] = _FrameWrapper(SCons.Script.call_stack[0])

print(SCons.Script.call_stack)
|
|
def _wrapped(*args, **kwargs):
    # Log all requested build targets, then delegate to the original
    # warning-string handler saved in _old_fn.
    for target in SCons.Script.BUILD_TARGETS:
        if hasattr(target, 'abspath'):
            print('Target: ', target.abspath)
        else:
            print('Target: ', target)
    _old_fn(*args, **kwargs)

# Install the logging wrapper in place of the stock handler.
SCons.Warnings.process_warn_strings = _wrapped

# Hand the configured environment back to the including SConstruct.
Return('env')
|