Patrick Wuttke 2025-07-14 18:51:46 +02:00
commit 7fc8518db4
3 changed files with 96 additions and 28 deletions

View File

@@ -8,6 +8,7 @@ import json
 import multiprocessing
 import os
 import pathlib
+import platform
 import psutil
 import shutil
 import sys
@@ -17,6 +18,20 @@ import uuid
 from SCons.Node import Node
 
+_GCC_CPU_FEATURES_MAP = {
+    'mmx': '-mmmx',
+    'sse': '-msse',
+    'sse2': '-msse2',
+    'sse3': '-msse3',
+    'ssse3': '-mssse3',
+    'sse4': '-msse4',
+    'sse4a': '-msse4a',
+    'sse4.1': '-msse4.1',
+    'sse4.2': '-msse4.2',
+    'avx': '-mavx',
+    'avx2': '-mavx2'
+}
+
 class TargetType(enum.Enum):
     PROGRAM = 0
     STATIC_LIBRARY = 1
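The map above covers the x86 SIMD levels that GCC and Clang accept as -m<feature> switches. A hypothetical, self-contained sketch of the intended flow (the USE_CPU_FEATURES config key and the warning for unmapped features appear further down in this diff; the helper name and the printed warning here are stand-ins, not part of the commit):

_GCC_CPU_FEATURES_MAP_EXCERPT = {'sse4.2': '-msse4.2', 'avx2': '-mavx2'}

def cpu_feature_flags(features):
    flags = []
    for feature in features:
        flag = _GCC_CPU_FEATURES_MAP_EXCERPT.get(feature)
        if flag is None:
            print(f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
        else:
            flags.append(flag)
    return flags

print(cpu_feature_flags(['sse4.2', 'avx2', 'neon']))  # ['-msse4.2', '-mavx2'], plus a warning for 'neon'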
@@ -49,7 +64,6 @@ class _Module:
     folder: str
     description: str
     cxx_namespace: str
-    targets: list['_Target'] = field(default_factory=list)
 
 class _Target:
     name: str
@@ -101,22 +115,26 @@ def _cook(env: Environment, recipe_name: str):
         _run_cook(dependency)
     return dependency.cook_result
 
-def _normalize_module_path(env: Environment, path: str) -> str:
+def _normalize_module_path(env: Environment, path: str) -> str|None:
     module_root = env.Dir('#/private').abspath
     try:
-        return os.path.relpath(path, module_root)
+        relative = os.path.relpath(path, module_root)
+        if relative[:2] == '..':
+            return None
+        return relative
     except ValueError: # may be thrown on Windows if the module is on a different drive than the project
-        return os.path.normpath(path) # just use the absolute path then
+        return None
 
 def _module(env: Environment, file: str):
     folder = _normalize_module_path(env, env.File(file).dir.abspath)
-    dirname = os.path.basename(folder)
-    env.Append(SPP_MODULES = {folder: _Module(
-        name=dirname,
-        folder=folder,
-        description='',
-        cxx_namespace=dirname
-    )})
+    if folder is not None: # only include modules inside the source tree
+        dirname = os.path.basename(folder)
+        env.Append(SPP_MODULES = {folder: _Module(
+            name=dirname,
+            folder=folder,
+            description='',
+            cxx_namespace=dirname
+        )})
     return SConscript(file, exports = 'env', variant_dir = env['VARIANT_DIR'], src_dir = '.')
 
 def _module_config(env: Environment, **kwargs) -> None:
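_normalize_module_path now signals "not part of the source tree" with None instead of falling back to an absolute path. A minimal sketch of the new contract, using a hypothetical module root in place of env.Dir('#/private').abspath:

import os

def normalize_module_path(path, module_root='/project/private'):  # module_root is a stand-in
    try:
        relative = os.path.relpath(path, module_root)
        if relative[:2] == '..':
            return None  # outside the source tree
        return relative
    except ValueError:  # different drive on Windows
        return None

print(normalize_module_path('/project/private/audio/codec'))   # 'audio/codec' -> a module gets registered
print(normalize_module_path('/opt/some/external/dependency'))  # None -> _module() skips registration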
@@ -255,11 +273,7 @@ def _lib_filename(env: Environment, name: str, type: str = 'static') -> str:
         }[type]
         return f'lib{name}.{ext}'
     elif os.name == 'nt':
-        ext = {
-            'static': 'lib',
-            'shared': 'dll'
-        }[type]
-        return f'{name}.{ext}'
+        return f'{name}.lib'
     else:
         raise Exception('What OS is this?')
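On Windows the linker never consumes a .dll directly; both static libraries and the import libraries that accompany DLLs are passed as .lib files, which is why the 'shared' special case could be dropped. A small sketch of the simplified behaviour (the POSIX extensions shown here are assumed, since that branch is outside the hunk):

import os

def lib_filename(name, type='static'):
    if os.name == 'posix':
        ext = {'static': 'a', 'shared': 'so'}[type]  # assumed mapping
        return f'lib{name}.{ext}'
    elif os.name == 'nt':
        return f'{name}.lib'  # the DLL's import library is what the linker sees, so .lib covers both cases
    raise Exception('What OS is this?')

print(lib_filename('zlib', 'shared'))  # 'libzlib.so' on Linux, 'zlib.lib' on Windows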
@@ -307,8 +321,7 @@ def _try_merge_dicts(dictA: dict, dictB: dict) -> 'dict|None':
             result[key] = mergedValue
         elif valueA != valueB:
             return None
-        else:
-            result[key] = valueA
+        result[key] = valueA
     for key, valueB in dictB.items():
         if key not in result:
             result[key] = valueB
@@ -464,12 +477,12 @@ def _wrap_builder(builder, target_type: TargetType):
         target.kwargs = kwargs
         target.dependencies = target_dependencies
         module_folder = _normalize_module_path(env, env.Dir('.').abspath)
-        module = env['SPP_MODULES'].get(module_folder)
-        if module is None:
-            env.Warn(f'No module config found for target {target.name} at {module_folder}')
-        else:
-            target.module = module
-            module.targets.append(target)
+        if module_folder is not None:
+            module = env['SPP_MODULES'].get(module_folder)
+            if module is None:
+                env.Warn(f'No module config found for target {target.name} at {module_folder}')
+            else:
+                target.module = module
         env.Append(SPP_TARGETS = [target])
         if not target.dependencies:
             _build_target(target)
@@ -928,7 +941,6 @@ if not config.get('CXX_STANDARD'):
     config['CXX_STANDARD'] = 'c++23'
 if not config.get('CXX_NO_EXCEPTIONS'):
     config['CXX_NO_EXCEPTIONS'] = False
-
 if not config.get('PREPROCESSOR_PREFIX'):
     config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
@@ -1079,6 +1091,7 @@ vars.Add('COMPILATIONDB_FILTER_FILES', 'Removes source files from the compilatio
         ' project.', config['COMPILATIONDB_FILTER_FILES'])
 vars.Add('SHOW_INCLUDES', 'Show include hierarchy (for debugging).', False)
 vars.Add('ENABLE_ASAN', 'Enable address sanitization.', bool(enable_asan))
+vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())
 
 if 'VARIABLES' in config:
     for vardef in config['VARIABLES']:
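TARGET_PLATFORM joins the existing build variables and defaults to the host via platform.system(). A minimal sketch of how such a variable is declared and overridden on the command line (e.g. `scons TARGET_PLATFORM=Windows`), using plain SCons APIs inside an SConstruct rather than this project's wrappers:

import platform
from SCons.Script import ARGUMENTS, Environment, Variables

vars = Variables(None, ARGUMENTS)                                    # picks up NAME=value command-line pairs
vars.Add('TARGET_PLATFORM', 'Target platform.', platform.system())  # defaults to 'Linux', 'Windows' or 'Darwin'
env = Environment(variables=vars)
print(env['TARGET_PLATFORM'])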
@@ -1154,6 +1167,7 @@ env['SPP_TARGET_DEPENDENCIES'] = []
 env['SPP_DEPENDENCIES'] = {}
 env['SPP_RECIPES'] = {}
 env['SPP_MODULES'] = {} # maps from folder to _Module
+env['SPP_CPU_FEATURES'] = config.get('USE_CPU_FEATURES', [])
 
 env['OBJSUFFIX'] = f".{env['BUILD_TYPE']}{env['OBJSUFFIX']}"
 if variant:
@@ -1256,6 +1270,13 @@ if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
         env.Append(DEPS_CXXFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
         env.Append(DEPS_LINKFLAGS = ['-fsanitize=address'])
+    for feature in env['SPP_CPU_FEATURES']:
+        flag = _GCC_CPU_FEATURES_MAP.get(feature)
+        if flag is None:
+            _warn(None, f'Unknown or unsupported cpu feature "{feature}" for GCC/Clang.')
+        else:
+            env.Append(CCFLAGS = [flag])
+
 elif env['COMPILER_FAMILY'] == 'cl':
     cxx_version_name = {
         'c++14': 'c++14',
@@ -1281,12 +1302,14 @@
     if env['SHOW_INCLUDES']:
         env.Append(CCFLAGS = ['/showIncludes'])
     if build_type == 'debug':
-        env.Append(CCFLAGS = ['/Od', '/Zi', '/MDd'], LINKFLAGS = ' /DEBUG')
+        env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/Od', '/MDd'], LINKFLAGS = ' /DEBUG')
         env.Append(CPPDEFINES = ['_DEBUG', '_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_CXXFLAGS = ['/MDd', '/Zi', '/D_DEBUG', '/D_ITERATOR_DEBUG_LEVEL=2'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     elif build_type == 'release_debug' or build_type == 'profile':
-        env.Append(CCFLAGS = ['/O2', '/MD', '/Zi'], LINKFLAGS = ' /DEBUG')
+        env['PDB'] = env.File('#bin/full.pdb')
+        env.Append(CCFLAGS = ['/O2', '/MD'], LINKFLAGS = ' /DEBUG')
         env.Append(DEPS_CXXFLAGS = ['/Zi', '/MD'])
         env.Append(DEPS_LINKFLAGS = ['/DEBUG'])
     else:

View File

@@ -105,6 +105,7 @@ def _cmake_project(env: Environment, project_root: str, generate_args: 'list[str
             libpath.append(full_path)
     return {
         'build_dir': build_dir,
+        'install_dir': install_dir,
         'BINPATH': [os.path.join(install_dir, 'bin')],
         'LIBPATH': libpath,
View File

@@ -3,6 +3,8 @@ from git import Repo
 from git.exc import GitError
 import hashlib
 import inspect
 import os
+import shutil
 from SCons.Script import *
 
 Import('env')
@@ -20,7 +22,41 @@ def _clone(env: Environment, repo_name: str, remote_url: str):
 
 def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str = 'main') -> dict:
     repo, origin = _clone(env, repo_name, remote_url)
-    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right? -> not if it's a branch!
+    old_worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
+    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, git_ref.replace('/', '_'))
+    if os.path.exists(old_worktree_dir) and not os.path.islink(old_worktree_dir):
+        if not os.path.exists(worktree_dir):
+            print(f'Found old Git worktree at {old_worktree_dir}, moving it to {worktree_dir}.')
+            try:
+                repo.git.worktree('move', old_worktree_dir, worktree_dir)
+            except GitError:
+                print('Error while moving worktree, manually moving and repairing it instead.')
+                shutil.move(old_worktree_dir, worktree_dir)
+                try:
+                    repo.git.worktree('repair', worktree_dir)
+                except GitError:
+                    print('Also didn\'t work, removing and redownloading it.')
+                    try:
+                        repo.git.worktree('remove', '-f', worktree_dir)
+                    except GitError: ...
+                    try:
+                        repo.git.worktree('remove', '-f', old_worktree_dir)
+                    except GitError: ...
+                    if os.path.exists(worktree_dir):
+                        shutil.rmtree(worktree_dir, ignore_errors=True)
+                    # this is all we can do, I guess
+        else:
+            print(f'Found old Git worktree at {old_worktree_dir}, but the new one at {worktree_dir} already exists. Removing the old one.')
+            repo.git.worktree('remove', '-f', old_worktree_dir)
+        print('Attempting to create a symlink for older S++ versions.')
+        try:
+            os.symlink(worktree_dir, old_worktree_dir, target_is_directory=True)
+        except Exception as e:
+            print(f'Failed: {e}')
+
     update_submodules = False
     if not os.path.exists(worktree_dir):
         print(f'Checking out into {worktree_dir}.')
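The worktree directory is now named after the ref itself rather than a shake_128 digest of it, which keeps the clone cache readable; the block above migrates (moves, repairs, or recreates) any worktree still sitting under the old hashed name and leaves a symlink behind for older S++ versions. A small comparison of the two naming schemes, with hypothetical values:

import hashlib
import os

clone_dir, repo_name, git_ref = '/cache/clones', 'spdlog', 'release/1.4'  # hypothetical
old_dir = os.path.join(clone_dir, 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6))
new_dir = os.path.join(clone_dir, 'git', repo_name, git_ref.replace('/', '_'))
print(old_dir)  # .../spdlog/<12 opaque hex chars>
print(new_dir)  # .../spdlog/release_1.4 (slashes in the ref are flattened, so no nested directories appear)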
@@ -38,9 +74,17 @@ def _git_branch(env: Environment, repo_name: str, remote_url: str, git_ref: str
             update_submodules = True
         else:
             print(f'Not updating git repository {worktree_dir} as it is not on a branch.')
+    else:
+        worktree_repo = Repo(worktree_dir)
     if update_submodules:
         for submodule in worktree_repo.submodules:
             submodule.update(init=True)
+    for submodule in worktree_repo.submodules:
+        if os.listdir(submodule.abspath) == ['.git']:
+            print(f'Submodule {submodule.name} seems borked, attempting to fix it.')
+            worktree_repo.git.submodule('deinit', '-f', submodule.path)
+            worktree_repo.git.submodule('init', submodule.path)
+            worktree_repo.git.submodule('update', submodule.path)
     return {
         'checkout_root': worktree_dir,
         'repo': repo,
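The added loop treats a submodule directory that contains nothing but its .git link file as "registered but never populated" and re-runs deinit/init/update for just that path. A minimal sketch of the detection heuristic; the directory layout is hypothetical:

import os

def submodule_looks_borked(submodule_abspath):
    # a healthy checkout has real content next to '.git'; a broken one has only the link file
    return os.listdir(submodule_abspath) == ['.git']

# After the fix sequence, worktree_repo.git.submodule('update', submodule.path) repopulates the directory.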