Merge branch 'master' of https://git.mewin.de/mewin/scons-plus-plus
SConscript (77 changed lines)

@@ -8,6 +8,7 @@ import json
 import multiprocessing
 import os
 import pathlib
+import platform
 import psutil
 import shutil
 import sys
@@ -17,6 +18,20 @@ import uuid
 
 from SCons.Node import Node
 
+_GCC_CPU_FEATURES_MAP = {
+    'mmx': '-mmmx',
+    'sse': '-msse',
+    'sse2': '-msse2',
+    'sse3': '-msse3',
+    'ssse3': '-mssse3',
+    'sse4': '-msse4',
+    'sse4a': '-msse4a',
+    'sse4.1': '-msse4.1',
+    'sse4.2': '-msse4.2',
+    'avx': '-mavx',
+    'avx2': '-mavx2'
+}
+
 class TargetType(enum.Enum):
     PROGRAM = 0
     STATIC_LIBRARY = 1
@@ -49,7 +64,6 @@ class _Module:
     folder: str
     description: str
     cxx_namespace: str
-    targets: list['_Target'] = field(default_factory=list)
 
 class _Target:
     name: str
@@ -101,22 +115,26 @@ def _cook(env: Environment, recipe_name: str):
         _run_cook(dependency)
     return dependency.cook_result
 
-def _normalize_module_path(env: Environment, path: str) -> str:
+def _normalize_module_path(env: Environment, path: str) -> str|None:
     module_root = env.Dir('#/private').abspath
     try:
-        return os.path.relpath(path, module_root)
+        relative = os.path.relpath(path, module_root)
+        if relative[:2] == '..':
+            return None
+        return relative
     except ValueError: # may be thrown on Windows if the module is on a different drive than the project
-        return os.path.normpath(path) # just use the absolute path then
+        return None
 
 def _module(env: Environment, file: str):
     folder = _normalize_module_path(env, env.File(file).dir.abspath)
-    dirname = os.path.basename(folder)
-    env.Append(SPP_MODULES = {folder: _Module(
-        name=dirname,
-        folder=folder,
-        description='',
-        cxx_namespace=dirname
-    )})
+    if folder is not None: # only include modules inside the source tree
+        dirname = os.path.basename(folder)
+        env.Append(SPP_MODULES = {folder: _Module(
+            name=dirname,
+            folder=folder,
+            description='',
+            cxx_namespace=dirname
+        )})
     return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
 
 def _module_config(env: Environment, **kwargs) -> None:
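
For reference, the new check keys off the fact that os.path.relpath returns a path starting with '..' when the target lies outside the reference directory. A standalone illustration (paths are made up):

    import os.path

    module_root = '/work/project/private'
    print(os.path.relpath('/work/project/private/core', module_root))  # 'core' -> kept
    print(os.path.relpath('/work/other/lib', module_root))             # '../../other/lib' -> now returns None
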
@@ -255,11 +273,7 @@ def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
         }[type]
         return f'lib{name}.{ext}'
     elif os.name == 'nt':
-        ext = {
-            'static': 'lib',
-            'shared': 'dll'
-        }[type]
-        return f'{name}.{ext}'
+        return f'{name}.lib'
     else:
         raise Exception('What OS is this?')
 
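
The effect of this hunk is that Windows targets always get a plain .lib name. A minimal standalone sketch of the resulting scheme; the first branch's condition and extension table are not visible in the hunk and are assumed here:

    import os

    def lib_filename(name: str, type: str = 'static') -> str:
        if os.name == 'posix':                            # assumed condition for the first branch
            ext = {'static': 'a', 'shared': 'so'}[type]   # assumed extension table
            return f'lib{name}.{ext}'
        elif os.name == 'nt':
            return f'{name}.lib'                          # static and import libraries both use .lib
        else:
            raise Exception('What OS is this?')
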
@@ -307,8 +321,7 @@ def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
                 result[key] = mergedValue
             elif valueA != valueB:
                 return None
-            else:
-                result[key] = valueA
+            result[key] = valueA
     for key, valueB in dictB.items():
         if key not in result:
             result[key] = valueB
@@ -464,12 +477,12 @@ def _wrap_builder(builder, target_type: TargetType):
         target.kwargs = kwargs
         target.dependencies = target_dependencies
         module_folder = _normalize_module_path(env, env.Dir('.').abspath)
-        module = env['SPP_MODULES'].get(module_folder)
-        if module is None:
-            env.Warn(f'No module config found for target {target.name} at {module_folder}')
-        else:
-            target.module = module
-            module.targets.append(target)
+        if module_folder is not None:
+            module = env['SPP_MODULES'].get(module_folder)
+            if module is None:
+                env.Warn(f'No module config found for target {target.name} at {module_folder}')
+            else:
+                target.module = module
         env.Append(SPP_TARGETS = [target])
         if not target.dependencies:
             _build_target(target)
@@ -928,7 +941,6 @@ if not config.get('CXX_STANDARD'):
     config['CXX_STANDARD'] = 'c++23'
 if not config.get('CXX_NO_EXCEPTIONS'):
     config['CXX_NO_EXCEPTIONS'] = False
-
 if not config.get('PREPROCESSOR_PREFIX'):
     config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
 
@@ -1079,6 +1091,7 @@ vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilatio
     ' project.', config['COMPILATIONDB_FILTER_FILES'])
 vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
 vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
+vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())
 
 if 'VARIABLES' in config:
     for vardef in config['VARIABLES']:
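
The default for the new TARGET_PLATFORM variable comes from the standard library; platform.system() returns the host OS name, typically 'Linux', 'Windows' or 'Darwin':

    import platform
    print(platform.system())  # e.g. 'Linux' on a Linux host
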
@@ -1154,6 +1167,7 @@ env['SPP_TARGET_DEPENDENCIES'] = []
 env['SPP_DEPENDENCIES'] = {}
 env['SPP_RECIPES'] = {}
 env['SPP_MODULES'] = {} # maps from folder to _Module
+env['SPP_CPU_FEATURES'] = config.get('USE_CPU_FEATURES', [])
 
 env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
 if variant:
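
The feature list is read from the project configuration under the USE_CPU_FEATURES key introduced here. A hypothetical configuration fragment (only the key names come from this diff, the surrounding shape is assumed):

    config = {
        'PROJECT_NAME': 'example',               # key also referenced in the PREPROCESSOR_PREFIX hunk above
        'USE_CPU_FEATURES': ['sse4.2', 'avx2']   # ends up in env['SPP_CPU_FEATURES']
    }
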
@@ -1256,6 +1270,13 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
         env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
         env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
 
+    for feature in env['SPP_CPU_FEATURES']:
+        flag = _GCC_CPU_FEATURES_MAP.get(feature)
+        if flag is None:
+            _warn(None, f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
+        else:
+            env.Append(CCFLAGS = [flag])
+
 elif env['COMPILER_FAMILY'] == 'cl':
     cxx_version_name = {
         'c++14': 'c++14',
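
For GCC and Clang each requested feature is looked up in _GCC_CPU_FEATURES_MAP and the matching -m switch is appended to CCFLAGS. A standalone sketch of that lookup, using an excerpt of the map defined earlier in this diff:

    _GCC_CPU_FEATURES_MAP = {'sse4.2': '-msse4.2', 'avx2': '-mavx2'}  # excerpt of the full map

    def cpu_feature_flags(features):
        flags = []
        for feature in features:
            flag = _GCC_CPU_FEATURES_MAP.get(feature)
            if flag is not None:   # unknown features only produce a warning in the SConscript
                flags.append(flag)
        return flags

    print(cpu_feature_flags(['sse4.2', 'avx2', 'neon']))  # ['-msse4.2', '-mavx2']
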
@@ -1281,12 +1302,14 @@ elif env['COMPILER_FAMILY'] == 'cl':
     if env['SHOW_INCLUDES']:
         env.Append(CCFLAGS = ['/showIncludes'])
     if build_type == 'debug':
-        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
+        env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/Od', '/MDd'], LINKFLAGS = ' /DEBUG')
         env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     elif build_type == 'release_debug' or build_type == 'profile':
-        env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ' /DEBUG')
+        env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/O2', '/MD'], LINKFLAGS = ' /DEBUG')
         env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     else:
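
Dropping /Zi here while setting env['PDB'] relies on SCons behaviour: when the PDB construction variable is set, the MSVC tool adds its own debug-information and /DEBUG switches to the compile and link command lines. A minimal SConstruct sketch under that assumption:

    # SConstruct (assumes the stock SCons msvc tool on a Windows host)
    env = Environment()
    env['PDB'] = env.File('#bin/full.pdb')  # SCons emits the debug-info flags itself
    env.Program('hello', ['hello.c'])
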
(second changed file in this commit; its name is not shown in this excerpt)

@@ -105,6 +105,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
         libpath.append(full_path)
 
     return {
+        'build_dir': build_dir,
         'install_dir': install_dir,
         'BINPATH': [os.path.join(install_dir, 'bin')],
         'LIBPATH': libpath,
(third changed file in this commit; its name is not shown in this excerpt)

@@ -3,6 +3,8 @@ from git import Repo
 from git.exc import GitError
 import hashlib
 import inspect
+import os
+import shutil
 from SCons.Script import *
 
 Import('env')
@@ -20,7 +22,41 @@ def _clone(env: Environment, repo_name: str, remote_url: str):
 
 def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
     repo, origin = _clone(env, repo_name, remote_url)
-    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
+    old_worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
+    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, git_ref.replace('/', '_'))
+    if os.path.exists(old_worktree_dir) and not os.path.islink(old_worktree_dir):
+        if not os.path.exists(worktree_dir):
+            print(f'Found old Git worktree at {old_worktree_dir}, moving it to {worktree_dir}.')
+            try:
+                repo.git.worktree('move', old_worktree_dir, worktree_dir)
+            except GitError:
+                print('Error while moving worktree, manually moving and repairing it instead.')
+                shutil.move(old_worktree_dir, worktree_dir)
+                try:
+                    repo.git.worktree('repair', worktree_dir)
+                except GitError:
+                    print('Also didn\'t work, removing and redownloading it.')
+                    try:
+                        repo.git.worktree('remove', '-f', worktree_dir)
+                    except GitError: ...
+
+                    try:
+                        repo.git.worktree('remove', '-f', old_worktree_dir)
+                    except GitError: ...
+
+                    if os.path.exists(worktree_dir):
+                        shutil.rmtree(worktree_dir, ignore_errors=True)
+                    # this is all we can do, I guess
+        else:
+            print(f'Found old Git worktree at {old_worktree_dir}, but the new one at {worktree_dir} already exists. Removing the old one.')
+            repo.git.worktree('remove', '-f', old_worktree_dir)
+
+        print('Attempting to create a symlink for older S++ versions.')
+        try:
+            os.symlink(worktree_dir, old_worktree_dir, target_is_directory=True)
+        except Exception as e:
+            print(f'Failed: {e}')
+
     update_submodules = False
     if not os.path.exists(worktree_dir):
         print(f'Checking out into {worktree_dir}.')
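
The worktree path changes from a shake_128 digest of the ref to a readable, filesystem-safe form of the ref itself, with the old digest location migrated or symlinked for compatibility. A small illustration of the new naming (values are made up):

    import os

    clone_dir, repo_name, git_ref = '/tmp/clones', 'somelib', 'feature/foo'
    print(os.path.join(clone_dir, 'git', repo_name, git_ref.replace('/', '_')))
    # /tmp/clones/git/somelib/feature_foo
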
@@ -38,9 +74,17 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
             update_submodules = True
         else:
             print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
+    else:
+        worktree_repo = Repo(worktree_dir)
     if update_submodules:
         for submodule in worktree_repo.submodules:
             submodule.update(init=True)
+    for submodule in worktree_repo.submodules:
+        if os.listdir(submodule.abspath) == ['.git']:
+            print(f'Submodule {submodule.name} seems borked, attempting to fix it.')
+            worktree_repo.git.submodule('deinit', '-f', submodule.path)
+            worktree_repo.git.submodule('init', submodule.path)
+            worktree_repo.git.submodule('update', submodule.path)
     return {
         'checkout_root': worktree_dir,
         'repo': repo,