Added recipe for boost.
parent: 6302d61d42
commit: abc72895e6
@@ -240,6 +240,7 @@ env = Environment(tools = tools, variables = vars, ENV = os.environ)
env['RECIPES_FOLDERS'] = [Dir('recipes')]
env['SYSTEM_CACHE_DIR'] = os.path.join(_find_system_cache_dir(), 'spp_cache')
env['CLONE_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'cloned')
env['DOWNLOAD_DIR'] = os.path.join(env['SYSTEM_CACHE_DIR'], 'downloaded')
env['UPDATE_REPOSITORIES'] = update_repositories

print(f'Detected system cache directory: {env["SYSTEM_CACHE_DIR"]}')
@@ -312,6 +313,7 @@ if env['UPDATE_REPOSITORIES']:
f.write(str(time.time()))
# create the clone and system cache dirs
os.makedirs(env['CLONE_DIR'], exist_ok=True)
os.makedirs(env['DOWNLOAD_DIR'], exist_ok=True)

# try to detect what compiler we are using
compiler_exe = os.path.basename(env.subst(env['CC']))
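For context, the SYSTEM_CACHE_DIR line above calls _find_system_cache_dir(), which already exists elsewhere in this SConstruct and is not touched by this commit. A minimal hypothetical sketch of how such a per-user cache lookup is commonly written (an assumption, not the repository's actual code):

# Hypothetical sketch only; the real _find_system_cache_dir() is defined elsewhere in the build script.
import os
import sys

def _find_system_cache_dir() -> str:
    if sys.platform == 'win32':
        # Windows keeps per-user caches under %LOCALAPPDATA%.
        return os.environ.get('LOCALAPPDATA', os.path.expanduser('~'))
    if sys.platform == 'darwin':
        return os.path.expanduser('~/Library/Caches')
    # Linux and other POSIX systems: honor XDG_CACHE_HOME, default to ~/.cache.
    return os.environ.get('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))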
recipes/DownloadAndExtract/recipe.py (new file, 72 lines)
@@ -0,0 +1,72 @@

from enum import Enum
import hashlib
import pathlib
import tarfile
import zipfile
import urllib.request
from SCons.Script import *

class ArchiveType(Enum):
    TAR_GZ = 0
    ZIP = 1

def _detect_archive_type(url: str) -> ArchiveType:
    if url.lower().endswith('.tar.gz'):
        return ArchiveType.TAR_GZ
    elif url.lower().endswith('.zip'):
        return ArchiveType.ZIP
    raise Exception('could not detect archive type from URL')

def _archive_type_ext(archive_type: ArchiveType) -> str:
    if archive_type == ArchiveType.TAR_GZ:
        return 'tar.gz'
    elif archive_type == ArchiveType.ZIP:
        return 'zip'
    raise Exception('invalid archive type')

def _download_file(url: str, path: pathlib.Path) -> None:
    if path.exists():
        return
    dl_path = path.with_suffix(f'{path.suffix}.tmp')
    if dl_path.exists():
        dl_path.unlink()
    print(f'Downloading {url} to {dl_path}...')
    urllib.request.urlretrieve(url, dl_path)
    dl_path.rename(path)

def _extract_file(path: pathlib.Path, output_dir: str, archive_type: ArchiveType, skip_folders: int) -> None:
    if archive_type == ArchiveType.TAR_GZ:
        file = tarfile.open(str(path))
        if skip_folders != 0:
            def skip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
                name_parts = member.name.split('/')
                if len(name_parts) <= skip_folders:
                    return None
                return member.replace(name = '/'.join(name_parts[skip_folders:]))
            file.extraction_filter = skip_filter
        file.extractall(output_dir)
        file.close()
    elif archive_type == ArchiveType.ZIP:
        file = zipfile.ZipFile(str(path))
        file.extractall(output_dir)
        file.close()
    else:
        raise Exception('invalid archive type')

def cook(env: Environment, repo_name: str, url: str, skip_folders: int = 0) -> dict:
    archive_type = _detect_archive_type(url)
    ext = _archive_type_ext(archive_type)
    path = pathlib.Path(env['DOWNLOAD_DIR'], f'{hashlib.shake_128(url.encode("utf-8")).hexdigest(6)}.{ext}')
    output_dir = pathlib.Path(env['CLONE_DIR'], 'download', repo_name)
    stamp_file = pathlib.Path(output_dir, '.spp_extracted')

    if not stamp_file.exists():
        _download_file(url, path)
        _extract_file(path, output_dir, archive_type, skip_folders)
        stamp_file.touch()

    return {
        'extracted_root': str(output_dir)
    }
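Two details of the DownloadAndExtract recipe above, restated for reference: the cached archive file is named after a 12-character SHAKE-128 digest of the download URL, and skip_folders rewrites archive member paths so the leading directory components are dropped. A small standalone illustration (the member path is a hypothetical example):

import hashlib

url = 'https://archives.boost.io/release/1.85.0/source/boost_1_85_0.tar.gz'
print(hashlib.shake_128(url.encode('utf-8')).hexdigest(6))  # 12 hex chars: the cache file name stem

# With skip_folders=1 the extraction filter strips the archive's single top-level folder:
member_name = 'boost_1_85_0/boost/version.hpp'   # hypothetical archive member
print('/'.join(member_name.split('/')[1:]))       # -> boost/version.hpp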
recipes/boost/recipe.py (new file, 12 lines)
@@ -0,0 +1,12 @@

import os
from SCons.Script import *

def cook(env: Environment, version: str = "1.85.0") -> dict:
    # TODO: build binaries?
    url = f'https://archives.boost.io/release/{version}/source/boost_{version.replace(".", "_")}.tar.gz'
    repo = env.Cook('DownloadAndExtract', f'boost_{version}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    return {
        'CPPPATH': [checkout_root]
    }
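How the boost recipe is consumed is not shown in this commit; the following is a hypothetical sketch that assumes a project build script invokes recipes through the same env.Cook() call the boost recipe itself uses, and applies the returned flags manually:

# Hypothetical usage sketch, not part of this commit.
boost = env.Cook('boost', version = '1.85.0')   # returns {'CPPPATH': [<extracted boost root>]}
env.Append(CPPPATH = boost['CPPPATH'])          # header-only for now; the recipe builds no binaries yet
env.Program('example', ['example.cpp'])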