Added recipes for curl, libidn2, libpsl and libunistring.
commit c461b5da39
parent b7cb5f7c48
@@ -28,7 +28,11 @@ def _autotools_project(env: Environment, project_root: str, config_args: 'list[s
     env = os.environ.copy()
     env['CFLAGS'] = cflags
 
-    subprocess.run((os.path.join(project_root, configure_script_path), f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
+    config_script = os.path.join(project_root, configure_script_path)
+    if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
+        subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root)
+
+    subprocess.run((config_script, f'--prefix={install_dir}', *config_args), cwd=build_dir, env=env, stdout=sys.stdout, stderr=sys.stderr, check=True)
     subprocess.run(('make', f'-j{jobs}', *build_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
     subprocess.run(('make', 'install', *install_args), cwd=build_dir, stdout=sys.stdout, stderr=sys.stderr, check=True)
 
     pathlib.Path(install_dir, _BUILT_STAMPFILE).touch()
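The added lines let the autotools builder bootstrap a source tree that ships only configure.ac. As a standalone illustration, the same check could look like the sketch below; the function name is mine, and it assumes the configure script is literally named configure (the builder itself takes the name as a parameter):

import os
import subprocess

def ensure_configure(project_root: str) -> str:
    # Return the path to ./configure, generating it with autoreconf when only
    # configure.ac is present. Requires autoconf/automake to be installed.
    config_script = os.path.join(project_root, 'configure')
    if not os.path.exists(config_script) and os.path.exists(f'{config_script}.ac'):
        subprocess.run(('autoreconf', '--install', '--force'), cwd=project_root, check=True)
    return config_script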
recipes/curl/recipe.py (new normal file, 39 lines)
@@ -0,0 +1,39 @@
import os
import re

from SCons.Script import *


def _build_lib_name(env: Environment) -> str:
    # Pick the library name curl's CMake build produces for the current platform
    # and build type.
    if os.name == 'posix':
        return {
            'debug': 'curl-d'
        }.get(env['BUILD_TYPE'], 'curl')
    elif os.name == 'nt':
        raise Exception('TODO')
    else:
        raise Exception('curl is not supported yet on this OS')


def _git_cook(env: Environment, repo: dict) -> dict:
    checkout_root = repo['checkout_root']
    build_result = env.CMakeProject(checkout_root, generate_args=['-DBUILD_CURL_EXE=OFF', '-DBUILD_SHARED_LIBS=OFF',
                                                                  '-DBUILD_STATIC_LIBS=ON', '-DHTTP_ONLY=ON',
                                                                  '-DCURL_USE_LIBSSH2=OFF'])
    lib_name = _build_lib_name(env)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib(lib_name, paths=build_result['LIBPATH'])],
    }


env.GitRecipe(
    globals = globals(),
    repo_name = 'curl',
    repo_url = 'https://github.com/curl/curl.git',
    tag_pattern = re.compile(r'^curl-([0-9]+)_([0-9]+)_([0-9]+)$'),
    tag_fn = lambda version: f'curl-{version[0]}_{version[1]}_{version[2]}',
    cook_fn = _git_cook,
    dependencies = {
        'openssl': {},
        'zlib': {},
        'psl': {}
    }
)
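For context, a sketch of how the dict returned by _git_cook might be consumed downstream. The RequireRecipe lookup is a hypothetical placeholder for however this framework exposes cooked recipes to an SConscript; env.Append and env.Program are standard SCons:

# Hypothetical consumer sketch: merge the cooked flags into the build environment.
result = env.RequireRecipe('curl')   # placeholder name, not a real framework call
env.Append(CPPPATH=result['CPPPATH'], LIBS=result['LIBS'])
env.Program('http_client_demo', ['http_client_demo.c'])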
recipes/idn2/recipe.py (new normal file, 48 lines)
@@ -0,0 +1,48 @@
import json
import os
import re

import requests

from SCons.Script import *


_VERSIONS_URL = 'https://gitlab.com/api/v4/projects/2882658/releases'
_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')


def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libidn2_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libidn2_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)


def dependencies(env: Environment, version) -> 'dict':
    return {
        'unistring': {}
    }


def cook(env: Environment, version) -> dict:
    url = f'https://ftp.gnu.org/gnu/libidn/libidn2-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libidn2_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('idn2', paths=build_result['LIBPATH'])]
    }
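To illustrate what versions() extracts from the GitLab releases endpoint, here is a self-contained run of the same pattern over a fabricated payload; only the 'name' field is relied on by the recipe:

import re

_VERSION_PATTERN = re.compile(r'^([0-9]+)\.([0-9]+)\.([0-9]+)$')

# Fabricated sample shaped like the releases JSON; real responses carry many more fields.
sample = [{'name': '2.3.4'}, {'name': 'not-a-release'}]
parsed = [tuple(int(g) for g in m.groups())
          for m in (_VERSION_PATTERN.match(e['name']) for e in sample) if m]
print(parsed)  # [(2, 3, 4)]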
recipes/psl/recipe.py (new normal file, 70 lines)
@@ -0,0 +1,70 @@
import json
import os
import re

import requests

from SCons.Script import *


_VERSIONS_URL = 'https://api.github.com/repos/rockdaboot/libpsl/releases'
_VERSION_PATTERN = re.compile(r'^Release v([0-9]+)\.([0-9]+)\.([0-9]+)$')


def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libpsl_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        versions_data = json.loads(req.text)
        result = []
        for version_data in versions_data:
            match = _VERSION_PATTERN.match(version_data['name'])
            if not match:
                continue
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libpsl_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)


def dependencies(env: Environment, version) -> 'dict':
    return {
        'idn2': {},
        'unistring': {}
    }


def cook(env: Environment, version) -> dict:
    url = f'https://github.com/rockdaboot/libpsl/releases/download/{version[0]}.{version[1]}.{version[2]}/libpsl-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libpsl_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
    }


#def _git_cook(env: Environment, repo: dict) -> dict:
#    checkout_root = repo['checkout_root']
#    subprocess.run((os.path.join(checkout_root, 'autogen.sh'),), cwd=checkout_root)
#    build_result = env.AutotoolsProject(checkout_root)
#    return {
#        'CPPPATH': build_result['CPPPATH'],
#        'LIBS': [env.FindLib('psl', paths=build_result['LIBPATH'])]
#    }
#
#env.GitRecipe(
#    globals = globals(),
#    repo_name = 'psl',
#    repo_url = 'https://github.com/rockdaboot/libpsl.git',
#    tag_pattern = re.compile(r'^libpsl-([0-9]+)\.([0-9]+)\.([0-9]+)$'),
#    tag_fn = lambda version: f'libpsl-{version[0]}.{version[1]}.{version[2]}',
#    cook_fn = _git_cook,
#    dependencies = {
#        'idn2': {},
#        'unistring': {}
#    }
#)
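Since versions() returns plain (major, minor, patch) integer tuples, picking the newest cached release reduces to Python's built-in tuple ordering; a quick illustration with invented numbers:

# Illustration only; the version numbers below are made up, not real libpsl releases.
cached = [(0, 21, 2), (0, 20, 1), (0, 21, 5)]
newest = max(cached)
print(newest)  # (0, 21, 5)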
recipes/unistring/recipe.py (new normal file, 42 lines)
@@ -0,0 +1,42 @@
import json
import os
import re

import requests

from SCons.Script import *


_VERSIONS_URL = 'https://ftp.gnu.org/gnu/libunistring/?F=0'
_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')


def versions(env: Environment, update: bool = False):
    versions_file = os.path.join(env['DOWNLOAD_DIR'], 'libunistring_versions.json')
    if update or not os.path.exists(versions_file):
        req = requests.get(_VERSIONS_URL)
        result = []
        for match in _VERSION_PATTERN.finditer(req.text):
            result.append((int(match.groups()[0]), int(match.groups()[1]), int(match.groups()[2])))
        with open(versions_file, 'w') as f:
            json.dump(result, f)
        return result
    else:
        try:
            with open(versions_file, 'r') as f:
                return [tuple(v) for v in json.load(f)]
        except:
            print('libunistring_versions.json is empty or broken, redownloading.')
            return versions(env, update=True)


def dependencies(env: Environment, version) -> 'dict':
    return {}


def cook(env: Environment, version) -> dict:
    url = f'https://ftp.gnu.org/gnu/libunistring/libunistring-{version[0]}.{version[1]}.{version[2]}.tar.gz'
    repo = env.DownloadAndExtract(f'libunistring_{version[0]}.{version[1]}.{version[2]}', url = url, skip_folders = 1)
    checkout_root = repo['extracted_root']
    build_result = env.AutotoolsProject(checkout_root)
    return {
        'CPPPATH': build_result['CPPPATH'],
        'LIBS': [env.FindLib('unistring', paths=build_result['LIBPATH'])]
    }
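Unlike the idn2 and psl recipes, this one discovers releases by scraping the ftp.gnu.org directory listing rather than a JSON API. A self-contained run of the same regex over a fabricated fragment of that page:

import re

_VERSION_PATTERN = re.compile(r'href="libunistring-([0-9]+)\.([0-9]+)\.([0-9]+)\.tar\.gz"')

# Fabricated listing fragment; the real index page has one such anchor per tarball.
html = '<a href="libunistring-1.1.0.tar.gz">libunistring-1.1.0.tar.gz</a>'
print([tuple(int(g) for g in m.groups()) for m in _VERSION_PATTERN.finditer(html)])  # [(1, 1, 0)]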