Add recipe support.
parent feb87f3cf2
commit 0071b4942e

SConscript (179 lines changed)
@@ -1,11 +1,91 @@
+import copy
 import os
+
+
+def _cook(env: Environment, recipe_name: str, *args, **kwargs):
+    import importlib.util
+    for folder in env['RECIPES_FOLDERS']:
+        source_file = f'{folder.abspath}/{recipe_name}/recipe.py'
+        if os.path.exists(source_file):
+            break
+    else:
+        raise Exception(f'Could not find recipe {recipe_name}.')
+    spec = importlib.util.spec_from_file_location(recipe_name, source_file)
+    recipe = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(recipe)
+    return recipe.cook(env, *args, **kwargs)
+
+
+def _parse_lib_conf(env: Environment, lib_conf: dict) -> None:
+    env.Append(CPPPATH = lib_conf.get('CPPPATH', []),
+               CPPDEFINES = lib_conf.get('CPPDEFINES', []),
+               LIBPATH = lib_conf.get('LIBPATH', []),
+               LIBS = lib_conf.get('LIBS', []))
+
+
+def _inject_list(kwargs: dict, dependency: dict, list_name: str) -> None:
+    if list_name not in dependency:
+        return
+    if list_name not in kwargs:
+        kwargs[list_name] = []
+    kwargs[list_name].extend(dependency[list_name]) # TODO: eliminate duplicates?
+
+
+def _inject_dependency(dependency, kwargs: dict) -> None:
+    if isinstance(dependency, dict):
+        _inject_list(kwargs, dependency, 'CPPPATH')
+        _inject_list(kwargs, dependency, 'CPPDEFINES')
+        _inject_list(kwargs, dependency, 'LIBPATH')
+        _inject_list(kwargs, dependency, 'LIBS')
+
+
+def _wrap_builder(builder, is_lib: bool = False):
+    def _wrapped(env, dependencies = [], *args, **kwargs):
+        if 'CPPPATH' not in kwargs:
+            kwargs['CPPPATH'] = copy.copy(env['CPPPATH'])
+        if 'CPPDEFINES' not in kwargs:
+            kwargs['CPPDEFINES'] = copy.copy(env['CPPDEFINES'])
+        if 'LIBPATH' not in kwargs:
+            kwargs['LIBPATH'] = copy.copy(env['LIBPATH'])
+        for dependency in dependencies:
+            _inject_dependency(dependency, kwargs)
+        result = builder(*args, **kwargs)
+        if is_lib:
+            # generate a new libconf
+            return {
+                'CPPPATH': kwargs.get('CPPPATH', []),
+                'CPPDEFINES': kwargs.get('CPPDEFINES', []),
+                'LIBPATH': kwargs.get('LIBPATH', []),
+                'LIBS': result + kwargs.get('LIBS', []),
+                '_target': result
+            }
+        return result
+    return _wrapped
+
+
+def _wrap_default(default):
+    def _wrapped(env, arg):
+        if isinstance(arg, dict) and '_target' in arg:
+            default(arg['_target'])
+        else:
+            default(arg)
+    return _wrapped
+
+
+def _find_system_cache_dir() -> str:
+    if os.name == 'posix':
+        if 'XDG_CACHE_HOME' in os.environ:
+            return os.environ['XDG_CACHE_HOME']
+        else:
+            return os.path.join(os.environ['HOME'], '.cache')
+    elif os.name == 'nt':
+        # TODO: just guessing
+        return os.environ['LocalAppData']
+    else: # fallback
+        return Dir('#cache').abspath
+
+
 Import('config')
 
 if not config.get('PROJECT_NAME'):
     config['PROJECT_NAME'] = 'PROJECT'
 
+if not config.get('PREPROCESSOR_PREFIX'):
+    config['PREPROCESSOR_PREFIX'] = config['PROJECT_NAME'].upper() # TODO: may be nicer?
+
 AddOption(
     '--build_type',
     dest = 'build_type',
@@ -53,24 +133,41 @@ variant = GetOption('variant')
 enable_asan = GetOption('enable_asan')
 config_file = GetOption('config_file')
 
-env = Environment(tools = ['default', 'compilation_db'])
+env = Environment(tools = ['default', 'compilation_db', 'unity_build'])
+env['RECIPES_FOLDERS'] = [Dir('recipes')]
+env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
+env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
+
+print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
+
 # allow compiling to variant directories (each gets their own bin/lib/cache dirs)
 if variant:
-    variant_dir = f'cache/variant/{variant}'
-    env['BIN_DIR'] = Dir(f'bin_{variant}').abspath
-    env['LIB_DIR'] = Dir(f'lib_{variant}').abspath
-    env['UNITY_CACHE_DIR'] = Dir(f'cache/variant/{variant}/unity')
-    env.Append(CPPDEFINES = [f'{config["PROJECT_NAME"]}_VARIANT={variant}'])
+    env['BIN_DIR'] = Dir(f'#bin_{variant}').abspath
+    env['LIB_DIR'] = Dir(f'#lib_{variant}').abspath
+    env['CACHE_DIR'] = Dir(f'#cache_{variant}').abspath
+    variant_dir = f'{env["CACHE_DIR"]}/variant'
+    env.Append(CPPDEFINES = [f'{config["PREPROCESSOR_PREFIX"]}_VARIANT={variant}'])
 else:
     variant_dir = None
-    env.CompilationDatabase()
-    env['BIN_DIR'] = Dir('bin').abspath
-    env['LIB_DIR'] = Dir('lib').abspath
-    env['UNITY_CACHE_DIR'] = Dir('cache/unity')
+    comp_db = env.CompilationDatabase(target = '#compile_commands.json')
+    Default(comp_db)
+    env['BIN_DIR'] = Dir('#bin').abspath
+    env['LIB_DIR'] = Dir('#lib').abspath
+    env['CACHE_DIR'] = Dir(f'#cache').abspath
+env['UNITY_CACHE_DIR'] = Dir(f'{env["CACHE_DIR"]}/unity')
 env['BUILD_TYPE'] = build_type
 env.Append(LIBPATH = [env['LIB_DIR']]) # to allow submodules to link to each other without hassle
 
+# create the cache dir
+os.makedirs(env['CACHE_DIR'], exist_ok=True)
+cache_gitignore = f'{env["CACHE_DIR"]}/.gitignore'
+if not os.path.exists(cache_gitignore):
+    with open(cache_gitignore, 'w') as f:
+        f.write('*\n')
+
+# create the clone and system cache dirs
+os.makedirs(env['CLONE_DIR'], exist_ok=True)
+
 # try to detect what compiler we are using
 compiler_exe = os.path.basename(env['CC'])
 if 'gcc' in compiler_exe:
@@ -89,4 +186,66 @@ elif unity_mode == 'stress': # compile everything in one single file to stress t
 env['UNITY_MAX_SOURCES'] = 100000 # I'll hopefully never reach this
 env['UNITY_MIN_FILES'] = 1
+
+# setup compiler specific options
+if env['COMPILER_FAMILY'] == 'gcc' or env['COMPILER_FAMILY'] == 'clang':
+    env.Append(CCFLAGS = ['-Wall', '-Wextra', '-Werror', '-Wstrict-aliasing', '-pedantic'])
+    env.Append(CXXFLAGS = ['-std=c++20'])
+    if build_type != 'release':
+        env.Append(LINKFLAGS = [f'-Wl,-rpath,{env["LIB_DIR"]}'])
+
+    env['LINKCOM'] = env['LINKCOM'].replace('$_LIBFLAGS', '-Wl,--start-group $_LIBFLAGS -Wl,--end-group')
+    if env['COMPILER_FAMILY'] == 'gcc':
+        # GCC complains about missing initializer for "<anonymous>" that doesn't exist :/
+        # also GCC complains about some (compiler generated) fields in coroutines not having any linkage
+        # also -Wdangling-reference seems to produce a lot of false positives
+        env.Append(CCFLAGS = ['-Wno-missing-field-initializers', '-Wno-subobject-linkage', '-Wno-dangling-reference'])
+    else:
+        env.Append(CCFLAGS = ['-Wno-gnu-anonymous-struct'])
+    if build_type == 'debug':
+        env.Append(CCFLAGS = ['-g', '-O0'], CPPDEFINES = ['_GLIBCXX_DEBUG'])
+    elif build_type == 'release_debug' or build_type == 'profile':
+        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-g', '-O2'], CPPDEFINES = ['SEKIEI_RELEASE', 'NDEBUG'])
+        if build_type == 'profile':
+            if env['COMPILER_FAMILY'] == 'gcc':
+                env.Append(CPPDEFINES = ['SEKIEI_GCC_INSTRUMENTING=1'])
+                env.Append(CCFLAGS = ['-finstrument-functions'])
+                env.Append(LINKFLAGS = ['-rdynamic'])
+
+    elif build_type == 'release':
+        env.Append(CCFLAGS = ['-Wno-unused-variable', '-Wno-unused-parameter', '-Wno-unused-but-set-variable', '-Wno-unused-local-typedef', '-Wno-unused-local-typedefs', '-O2'], CPPDEFINES = ['SEKIEI_RELEASE', 'NDEBUG'])
+
+    if enable_asan:
+        env.Append(CCFLAGS = ['-fsanitize=address', '-fno-omit-frame-pointer'])
+        env.Append(LINKFLAGS = ['-fsanitize=address'])
+
+elif env['COMPILER_FAMILY'] == 'cl':
+    # C4201: nonstandard extension used : nameless struct/union - I use it and want to continue using it
+    # C4127: conditional expression is constant - some libs (CRC, format) don't compile with this enabled # TODO: fix?
+    env.Append(CCFLAGS = ['/W4', '/WX', '/wd4201', '/wd4127', '/std:c++20', '/permissive-', '/EHsc', '/FS', '/Zc:char8_t'])
+    env.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS']) # I'd like to not use MSVC specific versions of functions because they are "safer" ...
+    if build_type == 'debug':
+        env.Append(CCFLAGS = ['/Od', '/Zi'], LINKFLAGS = ' /DEBUG')
+    elif build_type == 'release_debug' or build_type == 'profile':
+        env.Append(CCFLAGS = ['/O2', '/Zi'], LINKFLAGS = ' /DEBUG')
+    else:
+        env.Append(CCFLAGS = ['/O2'])
+
+if env['COMPILER_FAMILY'] == 'gcc':
+    env.Append(CCFLAGS = ['-Wno-volatile'])
+elif env['COMPILER_FAMILY'] == 'clang':
+    env.Append(CCFLAGS = ['-Wno-deprecated-volatile', '-Wno-nested-anon-types'])
+
+env.AddMethod(_cook, 'Cook')
+env.AddMethod(_parse_lib_conf, 'ParseLibConf')
+env.AddMethod(_wrap_builder(env.Library, is_lib = True), 'Library')
+env.AddMethod(_wrap_builder(env.StaticLibrary, is_lib = True), 'StaticLibrary')
+env.AddMethod(_wrap_builder(env.SharedLibrary, is_lib = True), 'SharedLibrary')
+env.AddMethod(_wrap_builder(env.Program), 'Program')
+env.AddMethod(_wrap_default(env.Default), 'Default')
+
+env.AddMethod(_wrap_builder(env.UnityProgram), 'UnityProgram')
+env.AddMethod(_wrap_builder(env.UnityLibrary, is_lib = True), 'UnityLibrary')
+env.AddMethod(_wrap_builder(env.UnityStaticLibrary, is_lib = True), 'UnityStaticLibrary')
+env.AddMethod(_wrap_builder(env.UnitySharedLibrary, is_lib = True), 'UnitySharedLibrary')
+
 Return('env')
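As a rough orientation for how the new methods compose (a hedged sketch, not part of the commit; the target name, source list and git_ref are placeholders), a project SConscript built on top of this environment might look like:

Import('env')

# cook a recipe once; it returns a lib-conf dict (CPPPATH/LIBPATH/LIBS/...)
sdl_conf = env.Cook('SDL', git_ref = 'main')

# the wrapped Program builder merges each dependency's lib conf into the call
game = env.Program(target = f'{env["BIN_DIR"]}/game',
                   source = ['src/main.cpp'],
                   dependencies = [sdl_conf])

# the wrapped Default unwraps '_target' from lib confs and passes plain nodes through
env.Default(game)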
recipes/GitBranch/__pycache__/recipe.cpython-311.pyc (new binary file, not shown)
recipes/GitBranch/recipe.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+from git import Repo
+from git.exc import GitError
+import hashlib
+import os
+from SCons.Script import *
+
+
+def cook(env: Environment, repo_name: str, remote_url: str, git_ref: str = "main") -> dict:
+    repo_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, '_bare')
+    try:
+        repo = Repo(repo_dir)
+        origin = repo.remotes['origin']
+    except GitError:
+        print(f'Initializing git repository for {repo_name} at {repo_dir}.')
+        repo = Repo.init(repo_dir, bare=True)
+        origin = repo.create_remote('origin', remote_url)
+    worktree_dir = os.path.join(env['CLONE_DIR'], 'git', repo_name, hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)) # TODO: commit hash would be better, right?
+    if not os.path.exists(worktree_dir):
+        print(f'Checking out into {worktree_dir}.')
+        origin.fetch()
+        os.makedirs(worktree_dir)
+        repo.git.worktree('add', worktree_dir, git_ref)
+
+    return {
+        'checkout_root': worktree_dir
+    }
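The worktree directory name above is just a short SHAKE-128 digest of the requested ref. A standalone sketch (not part of the commit) of the same derivation, handy for locating a cached checkout by hand:

import hashlib

def worktree_dir_name(git_ref: str) -> str:
    # 6 bytes of SHAKE-128 rendered as 12 hex characters, mirroring cook() above
    return hashlib.shake_128(git_ref.encode('utf-8')).hexdigest(6)

print(worktree_dir_name('main'))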
recipes/SDL/__pycache__/recipe.cpython-311.pyc (new binary file, not shown)
recipes/SDL/recipe.py (new file, 40 lines)

@@ -0,0 +1,40 @@
+import os
+import subprocess
+import sys
+from SCons.Script import *
+
+
+def cook(env: Environment, git_ref: str = "main") -> dict:
+    repo = env.Cook('GitBranch', repo_name = 'SDL', remote_url = 'https://github.com/libsdl-org/SDL.git', git_ref = git_ref)
+    checkout_root = repo['checkout_root']
+
+    config = env['BUILD_TYPE']
+    build_dir = os.path.join(checkout_root, f'build_{config}')
+    install_dir = os.path.join(checkout_root, f'install_{config}')
+    lib_fname = {
+        'debug': 'libSDL2d.a'
+    }.get(env['BUILD_TYPE'], 'libSDL2.a') # TODO: who cares about windows?
+    is_built = os.path.exists(os.path.join(build_dir, lib_fname)) # TODO!
+    if not is_built:
+        print(f'Building SDL, config {config}')
+        os.makedirs(build_dir, exist_ok=True)
+        build_type = {
+            'debug': 'Debug',
+            'release_debug': 'RelWithDebInfo',
+            'release': 'Release',
+            'profile': 'RelWithDebInfo'
+        }.get(env['BUILD_TYPE'], 'RelWithDebInfo')
+        subprocess.run(('cmake', '-G', 'Ninja', '-B', build_dir, f'-DCMAKE_BUILD_TYPE={build_type}', '-DSDL_STATIC=ON', '-DSDL_SHARED=OFF', f'-DCMAKE_INSTALL_PREFIX={install_dir}', checkout_root), stdout=sys.stdout, stderr=sys.stderr, check=True)
+        subprocess.run(('cmake', '--build', build_dir), stdout=sys.stdout, stderr=sys.stderr, check=True)
+        subprocess.run(('cmake', '--install', build_dir), stdout=sys.stdout, stderr=sys.stderr, check=True)
+
+
+    lib_name = {
+        'debug': 'SDL2d'
+    }.get(env['BUILD_TYPE'], 'SDL2')
+    return {
+        'LIBPATH': [os.path.join(install_dir, 'lib')],
+        'CPPPATH': [os.path.join(install_dir, 'include')],
+        'LIBS': [lib_name, 'm']
+    }
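The dict returned by this recipe is an ordinary lib conf, so a consumer can apply it either globally or per target; a hedged sketch (target and source names are placeholders, not from the commit):

sdl = env.Cook('SDL')

# option 1: apply SDL's paths and libs to the whole environment
env.ParseLibConf(sdl)

# option 2: scope them to the targets that actually need SDL
demo = env.Program(target = 'sdl_demo', source = ['demo.cpp'], dependencies = [sdl])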
recipes/VulkanHeaders/__pycache__/recipe.cpython-311.pyc (new binary file, not shown)
recipes/VulkanHeaders/recipe.py (new file, 9 lines)

@@ -0,0 +1,9 @@
+import os
+from SCons.Script import *
+
+
+def cook(env: Environment, git_ref: str = "main") -> dict:
+    repo = env.Cook('GitBranch', repo_name = 'VulkanHeaders', remote_url = 'https://github.com/KhronosGroup/Vulkan-Headers.git', git_ref = git_ref)
+    checkout_root = repo['checkout_root']
+    env.Append(CPPPATH = [os.path.join(checkout_root, 'include')])
+    return {}
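Because this recipe appends the include directory straight to the shared environment and returns an empty lib conf, cooking it once is all a consumer needs; a minimal sketch (the ref is a placeholder):

env.Cook('VulkanHeaders', git_ref = 'main')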
recipes/mijin/__pycache__/recipe.cpython-311.pyc (new binary file, not shown)
recipes/mijin/recipe.py (new file, 8 lines)

@@ -0,0 +1,8 @@
+import os
+from SCons.Script import *
+
+
+def cook(env: Environment, git_ref: str = "master") -> dict:
+    repo = env.Cook('GitBranch', repo_name = 'mijin', remote_url = 'ssh://git@git.mewin.de:10022/mewin/mijin2.git', git_ref = git_ref)
+    checkout_root = repo['checkout_root']
+    return SConscript(os.path.join(checkout_root, 'LibConf'), exports = ['env'])
requirements.txt (new file, 1 line)

@@ -0,0 +1 @@
+GitPython