From a78992dd81b4bd1673e4815ff26acd694ff77f68 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 14 Jul 2022 13:12:25 -0700 Subject: interpreter: move handling of module stability to interpreter Thanks to `ModuleInfo`, all modules are just named `foo.py` instead of `unstable_foo.py`, which simplifies the import method a bit. This also allows for accurate FeatureNew/FeatureDeprecated use, as we know when the module was added and if/when it was stabilized. --- mesonbuild/interpreter/interpreter.py | 81 +++-- mesonbuild/modules/cuda.py | 360 +++++++++++++++++++++ mesonbuild/modules/external_project.py | 306 ++++++++++++++++++ mesonbuild/modules/icestorm.py | 131 ++++++++ mesonbuild/modules/rust.py | 235 ++++++++++++++ mesonbuild/modules/simd.py | 87 +++++ mesonbuild/modules/unstable_cuda.py | 360 --------------------- mesonbuild/modules/unstable_external_project.py | 306 ------------------ mesonbuild/modules/unstable_icestorm.py | 131 -------- mesonbuild/modules/unstable_rust.py | 235 -------------- mesonbuild/modules/unstable_simd.py | 87 ----- mesonbuild/modules/unstable_wayland.py | 154 --------- mesonbuild/modules/wayland.py | 154 +++++++++ run_mypy.py | 8 +- test cases/common/253 module warnings/meson.build | 4 + test cases/common/253 module warnings/test.json | 3 + test cases/keyval/1 basic/meson.build | 2 +- test cases/keyval/1 basic/test.json | 2 +- .../warning/7 module without unstable/meson.build | 3 + .../warning/7 module without unstable/test.json | 7 + unittests/allplatformstests.py | 2 +- 21 files changed, 1347 insertions(+), 1311 deletions(-) create mode 100644 mesonbuild/modules/cuda.py create mode 100644 mesonbuild/modules/external_project.py create mode 100644 mesonbuild/modules/icestorm.py create mode 100644 mesonbuild/modules/rust.py create mode 100644 mesonbuild/modules/simd.py delete mode 100644 mesonbuild/modules/unstable_cuda.py delete mode 100644 mesonbuild/modules/unstable_external_project.py delete mode 100644 
mesonbuild/modules/unstable_icestorm.py delete mode 100644 mesonbuild/modules/unstable_rust.py delete mode 100644 mesonbuild/modules/unstable_simd.py delete mode 100644 mesonbuild/modules/unstable_wayland.py create mode 100644 mesonbuild/modules/wayland.py create mode 100644 test cases/warning/7 module without unstable/meson.build create mode 100644 test cases/warning/7 module without unstable/test.json diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py index fef8f4b4d..9cf88d7df 100644 --- a/mesonbuild/interpreter/interpreter.py +++ b/mesonbuild/interpreter/interpreter.py @@ -599,26 +599,6 @@ class Interpreter(InterpreterBase, HoldableObject): dep = df.lookup(kwargs, force_fallback=True) self.build.stdlibs[for_machine][l] = dep - def _import_module(self, modname: str, required: bool, node: mparser.BaseNode) -> NewExtensionModule: - if modname in self.modules: - return self.modules[modname] - try: - module = importlib.import_module('mesonbuild.modules.' 
+ modname) - except ImportError: - if required: - raise InvalidArguments(f'Module "{modname}" does not exist') - ext_module = NotFoundExtensionModule(modname) - else: - ext_module = module.initialize(self) - assert isinstance(ext_module, (ExtensionModule, NewExtensionModule)) - self.build.modules.append(modname) - if ext_module.INFO.added: - FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node) - if ext_module.INFO.deprecated: - FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node) - self.modules[modname] = ext_module - return ext_module - @typed_pos_args('import', str) @typed_kwargs( 'import', @@ -633,17 +613,56 @@ class Interpreter(InterpreterBase, HoldableObject): if disabled: return NotFoundExtensionModule(modname) - if modname.startswith('unstable-'): - plainname = modname.split('-', 1)[1] - try: - # check if stable module exists - mod = self._import_module(plainname, required, node) - mlog.warning(f'Module {modname} is now stable, please use the {plainname} module instead.') - return mod - except InvalidArguments: - mlog.warning(f'Module {modname} has no backwards or forwards compatibility and might not exist in future releases.', location=node) - modname = 'unstable_' + plainname - return self._import_module(modname, required, node) + expect_unstable = False + # Some tests use "unstable_" instead of "unstable-", and that happens to work because + # of implementation details + if modname.startswith(('unstable-', 'unstable_')): + real_modname = modname[len('unstable') + 1:] # + 1 to handle the - or _ + expect_unstable = True + else: + real_modname = modname + + if real_modname in self.modules: + return self.modules[real_modname] + try: + module = importlib.import_module(f'mesonbuild.modules.{real_modname}') + except ImportError: + if required: + raise InvalidArguments(f'Module "{modname}" does not exist') + ext_module = 
NotFoundExtensionModule(real_modname) + else: + ext_module = module.initialize(self) + assert isinstance(ext_module, (ExtensionModule, NewExtensionModule)) + self.build.modules.append(real_modname) + if ext_module.INFO.added: + FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node) + if ext_module.INFO.deprecated: + FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node) + if expect_unstable and not ext_module.INFO.unstable and ext_module.INFO.stabilized is None: + raise InvalidArguments(f'Module {ext_module.INFO.name} has never been unstable, remove "unstable-" prefix.') + if ext_module.INFO.stabilized is not None: + if expect_unstable: + FeatureDeprecated.single_use( + f'module {ext_module.INFO.name} has been stabilized', + ext_module.INFO.stabilized, self.subproject, + 'drop "unstable-" prefix from the module name', + location=node) + else: + FeatureNew.single_use( + f'module {ext_module.INFO.name} as stable module', + ext_module.INFO.stabilized, self.subproject, + f'Consider either adding "unstable-" to the module name, or updating the meson required version to ">= {ext_module.INFO.stabilized}"', + location=node) + elif ext_module.INFO.unstable: + if not expect_unstable: + if required: + raise InvalidArguments(f'Module "{ext_module.INFO.name}" has not been stabilized, and must be imported as unstable-{ext_module.INFO.name}') + ext_module = NotFoundExtensionModule(real_modname) + else: + mlog.warning(f'Module {ext_module.INFO.name} has no backwards or forwards compatibility and might not exist in future releases.', location=node) + + self.modules[real_modname] = ext_module + return ext_module @typed_pos_args('files', varargs=str) @noKwargs diff --git a/mesonbuild/modules/cuda.py b/mesonbuild/modules/cuda.py new file mode 100644 index 000000000..b31459f5b --- /dev/null +++ b/mesonbuild/modules/cuda.py @@ -0,0 +1,360 @@ +# Copyright 2017 The 
Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import typing as T +import re + +from ..mesonlib import version_compare +from ..compilers import CudaCompiler + +from . import NewExtensionModule, ModuleInfo + +from ..interpreterbase import ( + flatten, permittedKwargs, noKwargs, + InvalidArguments +) + +if T.TYPE_CHECKING: + from . import ModuleState + from ..compilers import Compiler + +class CudaModule(NewExtensionModule): + + INFO = ModuleInfo('CUDA', '0.50.0', unstable=True) + + def __init__(self, *args, **kwargs): + super().__init__() + self.methods.update({ + "min_driver_version": self.min_driver_version, + "nvcc_arch_flags": self.nvcc_arch_flags, + "nvcc_arch_readable": self.nvcc_arch_readable, + }) + + @noKwargs + def min_driver_version(self, state: 'ModuleState', + args: T.Tuple[str], + kwargs: T.Dict[str, T.Any]) -> str: + argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' + + 'a CUDA Toolkit version string. 
Beware that, since CUDA 11.0, ' + + 'the CUDA Toolkit\'s components (including NVCC) are versioned ' + + 'independently from each other (and the CUDA Toolkit as a whole).') + + if len(args) != 1 or not isinstance(args[0], str): + raise argerror + + cuda_version = args[0] + driver_version_table = [ + {'cuda_version': '>=11.7.0', 'windows': '516.01', 'linux': '515.43.04'}, + {'cuda_version': '>=11.6.1', 'windows': '511.65', 'linux': '510.47.03'}, + {'cuda_version': '>=11.6.0', 'windows': '511.23', 'linux': '510.39.01'}, + {'cuda_version': '>=11.5.1', 'windows': '496.13', 'linux': '495.29.05'}, + {'cuda_version': '>=11.5.0', 'windows': '496.04', 'linux': '495.29.05'}, + {'cuda_version': '>=11.4.3', 'windows': '472.50', 'linux': '470.82.01'}, + {'cuda_version': '>=11.4.1', 'windows': '471.41', 'linux': '470.57.02'}, + {'cuda_version': '>=11.4.0', 'windows': '471.11', 'linux': '470.42.01'}, + {'cuda_version': '>=11.3.0', 'windows': '465.89', 'linux': '465.19.01'}, + {'cuda_version': '>=11.2.2', 'windows': '461.33', 'linux': '460.32.03'}, + {'cuda_version': '>=11.2.1', 'windows': '461.09', 'linux': '460.32.03'}, + {'cuda_version': '>=11.2.0', 'windows': '460.82', 'linux': '460.27.03'}, + {'cuda_version': '>=11.1.1', 'windows': '456.81', 'linux': '455.32'}, + {'cuda_version': '>=11.1.0', 'windows': '456.38', 'linux': '455.23'}, + {'cuda_version': '>=11.0.3', 'windows': '451.82', 'linux': '450.51.06'}, + {'cuda_version': '>=11.0.2', 'windows': '451.48', 'linux': '450.51.05'}, + {'cuda_version': '>=11.0.1', 'windows': '451.22', 'linux': '450.36.06'}, + {'cuda_version': '>=10.2.89', 'windows': '441.22', 'linux': '440.33'}, + {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'}, + {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'}, + {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'}, + {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'}, + {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': 
'390.46'}, + {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'}, + {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'}, + {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'}, + {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'}, + {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'}, + ] + + driver_version = 'unknown' + for d in driver_version_table: + if version_compare(cuda_version, d['cuda_version']): + driver_version = d.get(state.host_machine.system, d['linux']) + break + + return driver_version + + @permittedKwargs(['detected']) + def nvcc_arch_flags(self, state: 'ModuleState', + args: T.Tuple[T.Union[Compiler, CudaCompiler, str]], + kwargs: T.Dict[str, T.Any]) -> T.List[str]: + nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs) + ret = self._nvcc_arch_flags(*nvcc_arch_args)[0] + return ret + + @permittedKwargs(['detected']) + def nvcc_arch_readable(self, state: 'ModuleState', + args: T.Tuple[T.Union[Compiler, CudaCompiler, str]], + kwargs: T.Dict[str, T.Any]) -> T.List[str]: + nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs) + ret = self._nvcc_arch_flags(*nvcc_arch_args)[1] + return ret + + @staticmethod + def _break_arch_string(s): + s = re.sub('[ \t\r\n,;]+', ';', s) + s = s.strip(';').split(';') + return s + + @staticmethod + def _detected_cc_from_compiler(c): + if isinstance(c, CudaCompiler): + return c.detected_cc + return '' + + @staticmethod + def _version_from_compiler(c): + if isinstance(c, CudaCompiler): + return c.version + if isinstance(c, str): + return c + return 'unknown' + + def _validate_nvcc_arch_args(self, args, kwargs): + argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!') + + if len(args) < 1: + raise argerror + else: + compiler = args[0] + cuda_version = self._version_from_compiler(compiler) + if cuda_version == 'unknown': + raise argerror + + arch_list = [] if len(args) <= 1 
else flatten(args[1:]) + arch_list = [self._break_arch_string(a) for a in arch_list] + arch_list = flatten(arch_list) + if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}): + raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''') + arch_list = arch_list[0] if len(arch_list) == 1 else arch_list + + detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler)) + detected = flatten([detected]) + detected = [self._break_arch_string(a) for a in detected] + detected = flatten(detected) + if not set(detected).isdisjoint({'All', 'Common', 'Auto'}): + raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''') + + return cuda_version, arch_list, detected + + def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None): + """ + Filter CUDA arch list (no codenames) for >= low and < hi architecture + bounds, and deduplicate. + If saturate is provided, architectures >= hi are replaced with saturate. + """ + + filtered_cuda_arch_list = [] + for arch in cuda_arch_list: + if arch: + if lo and version_compare(arch, '<' + lo): + continue + if hi and version_compare(arch, '>=' + hi): + if not saturate: + continue + arch = saturate + if arch not in filtered_cuda_arch_list: + filtered_cuda_arch_list.append(arch) + return filtered_cuda_arch_list + + def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''): + """ + Using the CUDA Toolkit version and the target architectures, compute + the NVCC architecture flags. + """ + + # Replicates much of the logic of + # https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake + # except that a bug with cuda_arch_list="All" is worked around by + # tracking both lower and upper limits on GPU architectures. 
+ + cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell'] # noqa: E221 + cuda_common_gpu_architectures = ['3.0', '3.5', '5.0'] # noqa: E221 + cuda_hi_limit_gpu_architecture = None # noqa: E221 + cuda_lo_limit_gpu_architecture = '2.0' # noqa: E221 + cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0'] # noqa: E221 + + if version_compare(cuda_version, '<7.0'): + cuda_hi_limit_gpu_architecture = '5.2' + + if version_compare(cuda_version, '>=7.0'): + cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra'] # noqa: E221 + cuda_common_gpu_architectures += ['5.2'] # noqa: E221 + + if version_compare(cuda_version, '<8.0'): + cuda_common_gpu_architectures += ['5.2+PTX'] # noqa: E221 + cuda_hi_limit_gpu_architecture = '6.0' # noqa: E221 + + if version_compare(cuda_version, '>=8.0'): + cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra'] # noqa: E221 + cuda_common_gpu_architectures += ['6.0', '6.1'] # noqa: E221 + cuda_all_gpu_architectures += ['6.0', '6.1', '6.2'] # noqa: E221 + + if version_compare(cuda_version, '<9.0'): + cuda_common_gpu_architectures += ['6.1+PTX'] # noqa: E221 + cuda_hi_limit_gpu_architecture = '7.0' # noqa: E221 + + if version_compare(cuda_version, '>=9.0'): + cuda_known_gpu_architectures += ['Volta', 'Xavier'] # noqa: E221 + cuda_common_gpu_architectures += ['7.0'] # noqa: E221 + cuda_all_gpu_architectures += ['7.0', '7.2'] # noqa: E221 + # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features + cuda_lo_limit_gpu_architecture = '3.0' # noqa: E221 + + if version_compare(cuda_version, '<10.0'): + cuda_common_gpu_architectures += ['7.2+PTX'] # noqa: E221 + cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221 + + if version_compare(cuda_version, '>=10.0'): + cuda_known_gpu_architectures += ['Turing'] # noqa: E221 + cuda_common_gpu_architectures += ['7.5'] # noqa: E221 + cuda_all_gpu_architectures += ['7.5'] # noqa: E221 + + if version_compare(cuda_version, '<11.0'): + 
cuda_common_gpu_architectures += ['7.5+PTX'] # noqa: E221 + cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221 + + if version_compare(cuda_version, '>=11.0'): + cuda_known_gpu_architectures += ['Ampere'] # noqa: E221 + cuda_common_gpu_architectures += ['8.0'] # noqa: E221 + cuda_all_gpu_architectures += ['8.0'] # noqa: E221 + # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features + cuda_lo_limit_gpu_architecture = '3.5' # noqa: E221 + + if version_compare(cuda_version, '<11.1'): + cuda_common_gpu_architectures += ['8.0+PTX'] # noqa: E221 + cuda_hi_limit_gpu_architecture = '8.6' # noqa: E221 + + if version_compare(cuda_version, '>=11.1'): + cuda_common_gpu_architectures += ['8.6', '8.6+PTX'] # noqa: E221 + cuda_all_gpu_architectures += ['8.6'] # noqa: E221 + + if version_compare(cuda_version, '<12.0'): + cuda_hi_limit_gpu_architecture = '9.0' # noqa: E221 + + if not cuda_arch_list: + cuda_arch_list = 'Auto' + + if cuda_arch_list == 'All': # noqa: E271 + cuda_arch_list = cuda_known_gpu_architectures + elif cuda_arch_list == 'Common': # noqa: E271 + cuda_arch_list = cuda_common_gpu_architectures + elif cuda_arch_list == 'Auto': # noqa: E271 + if detected: + if isinstance(detected, list): + cuda_arch_list = detected + else: + cuda_arch_list = self._break_arch_string(detected) + cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list, + cuda_lo_limit_gpu_architecture, + cuda_hi_limit_gpu_architecture, + cuda_common_gpu_architectures[-1]) + else: + cuda_arch_list = cuda_common_gpu_architectures + elif isinstance(cuda_arch_list, str): + cuda_arch_list = self._break_arch_string(cuda_arch_list) + + cuda_arch_list = sorted(x for x in set(cuda_arch_list) if x) + + cuda_arch_bin = [] + cuda_arch_ptx = [] + for arch_name in cuda_arch_list: + arch_bin = [] + arch_ptx = [] + add_ptx = arch_name.endswith('+PTX') + if add_ptx: + arch_name = arch_name[:-len('+PTX')] + + if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', 
arch_name): + arch_bin, arch_ptx = [arch_name], [arch_name] + else: + arch_bin, arch_ptx = { + 'Fermi': (['2.0', '2.1(2.0)'], []), + 'Kepler+Tegra': (['3.2'], []), + 'Kepler+Tesla': (['3.7'], []), + 'Kepler': (['3.0', '3.5'], ['3.5']), + 'Maxwell+Tegra': (['5.3'], []), + 'Maxwell': (['5.0', '5.2'], ['5.2']), + 'Pascal': (['6.0', '6.1'], ['6.1']), + 'Pascal+Tegra': (['6.2'], []), + 'Volta': (['7.0'], ['7.0']), + 'Xavier': (['7.2'], []), + 'Turing': (['7.5'], ['7.5']), + 'Ampere': (['8.0'], ['8.0']), + }.get(arch_name, (None, None)) + + if arch_bin is None: + raise InvalidArguments(f'Unknown CUDA Architecture Name {arch_name}!') + + cuda_arch_bin += arch_bin + + if add_ptx: + if not arch_ptx: + arch_ptx = arch_bin + cuda_arch_ptx += arch_ptx + + cuda_arch_bin = sorted(list(set(cuda_arch_bin))) + cuda_arch_ptx = sorted(list(set(cuda_arch_ptx))) + + nvcc_flags = [] + nvcc_archs_readable = [] + + for arch in cuda_arch_bin: + arch, codev = re.fullmatch( + '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups() + + if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture): + continue + if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture): + continue + + if codev: + arch = arch.replace('.', '') + codev = codev.replace('.', '') + nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch] + nvcc_archs_readable += ['sm_' + arch] + else: + arch = arch.replace('.', '') + nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch] + nvcc_archs_readable += ['sm_' + arch] + + for arch in cuda_arch_ptx: + arch, codev = re.fullmatch( + '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups() + + if codev: + arch = codev + + if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture): + continue + if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture): + continue + + arch = arch.replace('.', '') + nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch] + nvcc_archs_readable += ['compute_' + arch] + 
+ return nvcc_flags, nvcc_archs_readable + +def initialize(*args, **kwargs): + return CudaModule(*args, **kwargs) diff --git a/mesonbuild/modules/external_project.py b/mesonbuild/modules/external_project.py new file mode 100644 index 000000000..1fd49112b --- /dev/null +++ b/mesonbuild/modules/external_project.py @@ -0,0 +1,306 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path +import os +import shlex +import subprocess +import typing as T + +from . import ExtensionModule, ModuleReturnValue, NewExtensionModule, ModuleInfo +from .. import mlog, build +from ..compilers.compilers import CFLAGS_MAPPING +from ..envconfig import ENV_VAR_PROG_MAP +from ..dependencies import InternalDependency, PkgConfigDependency +from ..interpreterbase import FeatureNew +from ..interpreter.type_checking import ENV_KW, DEPENDS_KW +from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args +from ..mesonlib import (EnvironmentException, MesonException, Popen_safe, MachineChoice, + get_variable_regex, do_replacement, join_args, OptionKey) + +if T.TYPE_CHECKING: + from typing_extensions import TypedDict + + from . 
import ModuleState + from ..interpreter import Interpreter + from ..interpreterbase import TYPE_var + from ..build import BuildTarget, CustomTarget + + class Dependency(TypedDict): + + subdir: str + + class AddProject(TypedDict): + + configure_options: T.List[str] + cross_configure_options: T.List[str] + verbose: bool + env: build.EnvironmentVariables + depends: T.List[T.Union[BuildTarget, CustomTarget]] + + +class ExternalProject(NewExtensionModule): + def __init__(self, + state: 'ModuleState', + configure_command: str, + configure_options: T.List[str], + cross_configure_options: T.List[str], + env: build.EnvironmentVariables, + verbose: bool, + extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]): + super().__init__() + self.methods.update({'dependency': self.dependency_method, + }) + + self.subdir = Path(state.subdir) + self.project_version = state.project_version + self.subproject = state.subproject + self.env = state.environment + self.build_machine = state.build_machine + self.host_machine = state.host_machine + self.configure_command = configure_command + self.configure_options = configure_options + self.cross_configure_options = cross_configure_options + self.verbose = verbose + self.user_env = env + + self.src_dir = Path(self.env.get_source_dir(), self.subdir) + self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build') + self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist') + _p = self.env.coredata.get_option(OptionKey('prefix')) + assert isinstance(_p, str), 'for mypy' + self.prefix = Path(_p) + _l = self.env.coredata.get_option(OptionKey('libdir')) + assert isinstance(_l, str), 'for mypy' + self.libdir = Path(_l) + _i = self.env.coredata.get_option(OptionKey('includedir')) + assert isinstance(_i, str), 'for mypy' + self.includedir = Path(_i) + self.name = self.src_dir.name + + # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make` + # will install files into "c:/bar/c:/foo" which is an invalid path. 
+ # Work around that issue by removing the drive from prefix. + if self.prefix.drive: + self.prefix = self.prefix.relative_to(self.prefix.drive) + + # self.prefix is an absolute path, so we cannot append it to another path. + self.rel_prefix = self.prefix.relative_to(self.prefix.root) + + self._configure(state) + + self.targets = self._create_targets(extra_depends) + + def _configure(self, state: 'ModuleState') -> None: + if self.configure_command == 'waf': + FeatureNew('Waf external project', '0.60.0').use(self.subproject, state.current_node) + waf = state.find_program('waf') + configure_cmd = waf.get_command() + configure_cmd += ['configure', '-o', str(self.build_dir)] + workdir = self.src_dir + self.make = waf.get_command() + ['build'] + else: + # Assume it's the name of a script in source dir, like 'configure', + # 'autogen.sh', etc). + configure_path = Path(self.src_dir, self.configure_command) + configure_prog = state.find_program(configure_path.as_posix()) + configure_cmd = configure_prog.get_command() + workdir = self.build_dir + self.make = state.find_program('make').get_command() + + d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()), + ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()), + ('INCLUDEDIR', None, self.includedir.as_posix()), + ] + self._validate_configure_options(d, state) + + configure_cmd += self._format_options(self.configure_options, d) + + if self.env.is_cross_build(): + host = '{}-{}-{}'.format(self.host_machine.cpu_family, + self.build_machine.system, + self.host_machine.system) + d = [('HOST', None, host)] + configure_cmd += self._format_options(self.cross_configure_options, d) + + # Set common env variables like CFLAGS, CC, etc. 
+ link_exelist: T.List[str] = [] + link_args: T.List[str] = [] + self.run_env = os.environ.copy() + for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items(): + if any(lang not in i for i in (ENV_VAR_PROG_MAP, CFLAGS_MAPPING)): + continue + cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang) + assert isinstance(cargs, list), 'for mypy' + self.run_env[ENV_VAR_PROG_MAP[lang]] = self._quote_and_join(compiler.get_exelist()) + self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs) + if not link_exelist: + link_exelist = compiler.get_linker_exelist() + _l = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang) + assert isinstance(_l, list), 'for mypy' + link_args = _l + if link_exelist: + # FIXME: Do not pass linker because Meson uses CC as linker wrapper, + # but autotools often expects the real linker (e.h. GNU ld). + # self.run_env['LD'] = self._quote_and_join(link_exelist) + pass + self.run_env['LDFLAGS'] = self._quote_and_join(link_args) + + self.run_env = self.user_env.get_env(self.run_env) + self.run_env = PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST, + uninstalled=True) + + self.build_dir.mkdir(parents=True, exist_ok=True) + self._run('configure', configure_cmd, workdir) + + def _quote_and_join(self, array: T.List[str]) -> str: + return ' '.join([shlex.quote(i) for i in array]) + + def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]], state: 'ModuleState') -> None: + # Ensure the user at least try to pass basic info to the build system, + # like the prefix, libdir, etc. 
+ for key, default, val in variables: + if default is None: + continue + key_format = f'@{key}@' + for option in self.configure_options: + if key_format in option: + break + else: + FeatureNew('Default configure_option', '0.57.0').use(self.subproject, state.current_node) + self.configure_options.append(default) + + def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]: + out: T.List[str] = [] + missing = set() + regex = get_variable_regex('meson') + confdata: T.Dict[str, T.Tuple[str, T.Optional[str]]] = {k: (v, None) for k, _, v in variables} + for o in options: + arg, missing_vars = do_replacement(regex, o, 'meson', confdata) + missing.update(missing_vars) + out.append(arg) + if missing: + var_list = ", ".join(map(repr, sorted(missing))) + raise EnvironmentException( + f"Variables {var_list} in configure options are missing.") + return out + + def _run(self, step: str, command: T.List[str], workdir: Path) -> None: + mlog.log(f'External project {self.name}:', mlog.bold(step)) + m = 'Running command ' + str(command) + ' in directory ' + str(workdir) + '\n' + log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log') + output = None + if not self.verbose: + output = open(log_filename, 'w', encoding='utf-8') + output.write(m + '\n') + output.flush() + else: + mlog.log(m) + p, *_ = Popen_safe(command, cwd=workdir, env=self.run_env, + stderr=subprocess.STDOUT, + stdout=output) + if p.returncode != 0: + m = f'{step} step returned error code {p.returncode}.' 
+ if not self.verbose: + m += '\nSee logs: ' + str(log_filename) + raise MesonException(m) + + def _create_targets(self, extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]) -> T.List['TYPE_var']: + cmd = self.env.get_build_command() + cmd += ['--internal', 'externalproject', + '--name', self.name, + '--srcdir', self.src_dir.as_posix(), + '--builddir', self.build_dir.as_posix(), + '--installdir', self.install_dir.as_posix(), + '--logdir', mlog.log_dir, + '--make', join_args(self.make), + ] + if self.verbose: + cmd.append('--verbose') + + self.target = build.CustomTarget( + self.name, + self.subdir.as_posix(), + self.subproject, + self.env, + cmd + ['@OUTPUT@', '@DEPFILE@'], + [], + [f'{self.name}.stamp'], + depfile=f'{self.name}.d', + console=True, + extra_depends=extra_depends, + ) + + idir = build.InstallDir(self.subdir.as_posix(), + Path('dist', self.rel_prefix).as_posix(), + install_dir='.', + install_mode=None, + exclude=None, + strip_directory=True, + from_source_dir=False, + subproject=self.subproject) + + return [self.target, idir] + + @typed_pos_args('external_project.dependency', str) + @typed_kwargs('external_project.dependency', KwargInfo('subdir', str, default='')) + def dependency_method(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Dependency') -> InternalDependency: + libname = args[0] + + abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir) + if kwargs['subdir']: + abs_includedir = Path(abs_includedir, kwargs['subdir']) + abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir) + + version = self.project_version + compile_args = [f'-I{abs_includedir}'] + link_args = [f'-L{abs_libdir}', f'-l{libname}'] + sources = self.target + dep = InternalDependency(version, [], compile_args, link_args, [], + [], [sources], [], {}, [], []) + return dep + + +class ExternalProjectModule(ExtensionModule): + + INFO = ModuleInfo('External build system', '0.56.0', unstable=True) + + def __init__(self, interpreter: 
'Interpreter'): + super().__init__(interpreter) + self.methods.update({'add_project': self.add_project, + }) + + @typed_pos_args('external_project_mod.add_project', str) + @typed_kwargs( + 'external_project.add_project', + KwargInfo('configure_options', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('cross_configure_options', ContainerTypeInfo(list, str), default=['--host=@HOST@'], listify=True), + KwargInfo('verbose', bool, default=False), + ENV_KW, + DEPENDS_KW.evolve(since='0.63.0'), + ) + def add_project(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'AddProject') -> ModuleReturnValue: + configure_command = args[0] + project = ExternalProject(state, + configure_command, + kwargs['configure_options'], + kwargs['cross_configure_options'], + kwargs['env'], + kwargs['verbose'], + kwargs['depends']) + return ModuleReturnValue(project, project.targets) + + +def initialize(interp: 'Interpreter') -> ExternalProjectModule: + return ExternalProjectModule(interp) diff --git a/mesonbuild/modules/icestorm.py b/mesonbuild/modules/icestorm.py new file mode 100644 index 000000000..c5791487c --- /dev/null +++ b/mesonbuild/modules/icestorm.py @@ -0,0 +1,131 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import itertools +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import build +from .. 
import mesonlib +from ..interpreter.type_checking import CT_INPUT_KW +from ..interpreterbase.decorators import KwargInfo, typed_kwargs, typed_pos_args + +if T.TYPE_CHECKING: + from typing_extensions import TypedDict + + from . import ModuleState + from ..interpreter import Interpreter + from ..programs import ExternalProgram + + class ProjectKwargs(TypedDict): + + sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] + constraint_file: T.Union[mesonlib.FileOrString, build.GeneratedTypes] + +class IceStormModule(ExtensionModule): + + INFO = ModuleInfo('FPGA/Icestorm', '0.45.0', unstable=True) + + def __init__(self, interpreter: Interpreter) -> None: + super().__init__(interpreter) + self.tools: T.Dict[str, ExternalProgram] = {} + self.methods.update({ + 'project': self.project, + }) + + def detect_tools(self, state: ModuleState) -> None: + self.tools['yosys'] = state.find_program('yosys') + self.tools['arachne'] = state.find_program('arachne-pnr') + self.tools['icepack'] = state.find_program('icepack') + self.tools['iceprog'] = state.find_program('iceprog') + self.tools['icetime'] = state.find_program('icetime') + + @typed_pos_args('icestorm.project', str, + varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, + build.GeneratedList)) + @typed_kwargs( + 'icestorm.project', + CT_INPUT_KW.evolve(name='sources'), + KwargInfo( + 'constraint_file', + (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), + required=True, + ) + ) + def project(self, state: ModuleState, + args: T.Tuple[str, T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]], + kwargs: ProjectKwargs) -> ModuleReturnValue: + if not self.tools: + self.detect_tools(state) + proj_name, arg_sources = args + all_sources = self.interpreter.source_strings_to_files( + list(itertools.chain(arg_sources, kwargs['sources']))) + + blif_target = build.CustomTarget( + f'{proj_name}_blif', + state.subdir, + state.subproject, + state.environment, 
+ [self.tools['yosys'], '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@'], + all_sources, + [f'{proj_name}.blif'], + ) + + asc_target = build.CustomTarget( + f'{proj_name}_asc', + state.subdir, + state.subproject, + state.environment, + [self.tools['arachne'], '-q', '-d', '1k', '-p', '@INPUT@', '-o', '@OUTPUT@'], + [kwargs['constraint_file'], blif_target], + [f'{proj_name}.asc'], + ) + + bin_target = build.CustomTarget( + f'{proj_name}_bin', + state.subdir, + state.subproject, + state.environment, + [self.tools['icepack'], '@INPUT@', '@OUTPUT@'], + [asc_target], + [f'{proj_name}.bin'], + build_by_default=True, + ) + + upload_target = build.RunTarget( + f'{proj_name}-upload', + [self.tools['iceprog'], bin_target], + [], + state.subdir, + state.subproject, + state.environment, + ) + + time_target = build.RunTarget( + f'{proj_name}-time', + [self.tools['icetime'], bin_target], + [], + state.subdir, + state.subproject, + state.environment, + ) + + return ModuleReturnValue( + None, + [blif_target, asc_target, bin_target, upload_target, time_target]) + + +def initialize(interp: Interpreter) -> IceStormModule: + return IceStormModule(interp) diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py new file mode 100644 index 000000000..792195e6d --- /dev/null +++ b/mesonbuild/modules/rust.py @@ -0,0 +1,235 @@ +# Copyright © 2020 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import typing as T + +from . 
import ExtensionModule, ModuleReturnValue, ModuleInfo +from .. import mlog +from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList, IncludeDirs, CustomTarget, StructuredSources +from ..dependencies import Dependency, ExternalLibrary +from ..interpreter.interpreter import TEST_KWARGS, OUTPUT_KW +from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs +from ..mesonlib import File + +if T.TYPE_CHECKING: + from . import ModuleState + from ..interpreter import Interpreter + from ..interpreter import kwargs as _kwargs + from ..interpreter.interpreter import SourceInputs, SourceOutputs + from ..programs import ExternalProgram + + from typing_extensions import TypedDict + + class FuncTest(_kwargs.BaseTest): + + dependencies: T.List[T.Union[Dependency, ExternalLibrary]] + is_parallel: bool + + class FuncBindgen(TypedDict): + + args: T.List[str] + c_args: T.List[str] + include_directories: T.List[IncludeDirs] + input: T.List[SourceInputs] + output: str + + +class RustModule(ExtensionModule): + + """A module that holds helper functions for rust.""" + + INFO = ModuleInfo('rust', '0.57.0', unstable=True) + + def __init__(self, interpreter: 'Interpreter') -> None: + super().__init__(interpreter) + self._bindgen_bin: T.Optional['ExternalProgram'] = None + self.methods.update({ + 'test': self.test, + 'bindgen': self.bindgen, + }) + + @typed_pos_args('rust.test', str, BuildTarget) + @typed_kwargs( + 'rust.test', + *TEST_KWARGS, + KwargInfo('is_parallel', bool, default=False), + KwargInfo( + 'dependencies', + ContainerTypeInfo(list, (Dependency, ExternalLibrary)), + listify=True, + default=[]), + ) + def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue: + """Generate a rust test target from a given rust target. 
+ + Rust puts its unit tests inside its main source files, unlike most + languages that put them in external files. This means that normally + you have to define two separate targets with basically the same + arguments to get tests: + + ```meson + rust_lib_sources = [...] + rust_lib = static_library( + 'rust_lib', + rust_lib_sources, + ) + + rust_lib_test = executable( + 'rust_lib_test', + rust_lib_sources, + rust_args : ['--test'], + ) + + test( + 'rust_lib_test', + rust_lib_test, + protocol : 'rust', + ) + ``` + + This is all fine, but not very DRY. This method makes it much easier + to define rust tests: + + ```meson + rust = import('unstable-rust') + + rust_lib = static_library( + 'rust_lib', + [sources], + ) + + rust.test('rust_lib_test', rust_lib) + ``` + """ + name = args[0] + base_target: BuildTarget = args[1] + if not base_target.uses_rust(): + raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target') + extra_args = kwargs['args'] + + # Delete any arguments we don't want passed + if '--test' in extra_args: + mlog.warning('Do not add --test to rustmod.test arguments') + extra_args.remove('--test') + if '--format' in extra_args: + mlog.warning('Do not add --format to rustmod.test arguments') + i = extra_args.index('--format') + # Also delete the argument to --format + del extra_args[i + 1] + del extra_args[i] + for i, a in enumerate(extra_args): + if isinstance(a, str) and a.startswith('--format='): + del extra_args[i] + break + + dependencies = [d for d in kwargs['dependencies']] + + # We need to cast here, as currently these don't have protocol in them, but test itself does.
+ tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy()) + + tkwargs['args'] = extra_args + ['--test', '--format', 'pretty'] + tkwargs['protocol'] = 'rust' + + new_target_kwargs = base_target.kwargs.copy() + # Don't mutate the shallow copied list, instead replace it with a new + # one + new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test'] + new_target_kwargs['install'] = False + new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + dependencies + + new_target = Executable( + name, base_target.subdir, state.subproject, base_target.for_machine, + base_target.sources, base_target.structured_sources, + base_target.objects, base_target.environment, base_target.compilers, + new_target_kwargs + ) + + test = self.interpreter.make_test( + self.interpreter.current_node, (name, new_target), tkwargs) + + return ModuleReturnValue(None, [new_target, test]) + + @noPosargs + @typed_kwargs( + 'rust.bindgen', + KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True), + KwargInfo('include_directories', ContainerTypeInfo(list, IncludeDirs), default=[], listify=True), + KwargInfo( + 'input', + ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, ExtractedObjects, CustomTargetIndex, CustomTarget, str), allow_empty=False), + default=[], + listify=True, + required=True, + ), + OUTPUT_KW, + ) + def bindgen(self, state: 'ModuleState', args: T.List, kwargs: 'FuncBindgen') -> ModuleReturnValue: + """Wrapper around bindgen to simplify its use. + + The main thing this simplifies is the use of `include_directory` + objects, instead of having to pass a plethora of `-I` arguments.
+ """ + header, *_deps = self.interpreter.source_strings_to_files(kwargs['input']) + + # Split File and Target dependencies to add pass to CustomTarget + depends: T.List['SourceOutputs'] = [] + depend_files: T.List[File] = [] + for d in _deps: + if isinstance(d, File): + depend_files.append(d) + else: + depends.append(d) + + inc_strs: T.List[str] = [] + for i in kwargs['include_directories']: + # bindgen always uses clang, so it's safe to hardcode -I here + inc_strs.extend([f'-I{x}' for x in i.to_string_list( + state.environment.get_source_dir(), state.environment.get_build_dir())]) + + if self._bindgen_bin is None: + self._bindgen_bin = state.find_program('bindgen') + + name: str + if isinstance(header, File): + name = header.fname + elif isinstance(header, (BuildTarget, BothLibraries, ExtractedObjects, StructuredSources)): + raise InterpreterException('bindgen source file must be a C header, not an object or build target') + else: + name = header.get_outputs()[0] + + target = CustomTarget( + f'rustmod-bindgen-{name}'.replace('/', '_'), + state.subdir, + state.subproject, + state.environment, + self._bindgen_bin.get_command() + [ + '@INPUT@', '--output', + os.path.join(state.environment.build_dir, '@OUTPUT@')] + + kwargs['args'] + ['--'] + kwargs['c_args'] + inc_strs + + ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'], + [header], + [kwargs['output']], + depfile='@PLAINNAME@.d', + extra_depends=depends, + depend_files=depend_files, + backend=state.backend, + ) + + return ModuleReturnValue([target], [target]) + + +def initialize(interp: 'Interpreter') -> RustModule: + return RustModule(interp) diff --git a/mesonbuild/modules/simd.py b/mesonbuild/modules/simd.py new file mode 100644 index 000000000..a33022d04 --- /dev/null +++ b/mesonbuild/modules/simd.py @@ -0,0 +1,87 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import mesonlib, compilers, mlog +from .. import build + +from . import ExtensionModule, ModuleInfo + +class SimdModule(ExtensionModule): + + INFO = ModuleInfo('SIMD', '0.42.0', unstable=True) + + def __init__(self, interpreter): + super().__init__(interpreter) + # FIXME add Altivec and AVX512. + self.isets = ('mmx', + 'sse', + 'sse2', + 'sse3', + 'ssse3', + 'sse41', + 'sse42', + 'avx', + 'avx2', + 'neon', + ) + self.methods.update({ + 'check': self.check, + }) + + def check(self, state, args, kwargs): + result = [] + if len(args) != 1: + raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.') + prefix = args[0] + if not isinstance(prefix, str): + raise mesonlib.MesonException('Argument must be a string.') + if 'compiler' not in kwargs: + raise mesonlib.MesonException('Must specify compiler keyword') + if 'sources' in kwargs: + raise mesonlib.MesonException('SIMD module does not support the "sources" keyword') + basic_kwargs = {} + for key, value in kwargs.items(): + if key not in self.isets and key != 'compiler': + basic_kwargs[key] = value + compiler = kwargs['compiler'] + if not isinstance(compiler, compilers.compilers.Compiler): + raise mesonlib.MesonException('Compiler argument must be a compiler object.') + conf = build.ConfigurationData() + for iset in self.isets: + if iset not in kwargs: + continue + iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate. 
+ args = compiler.get_instruction_set_args(iset) + if args is None: + mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) + continue + if args: + if not compiler.has_multi_arguments(args, state.environment)[0]: + mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) + continue + mlog.log('Compiler supports %s:' % iset, mlog.green('YES')) + conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset) + libname = prefix + '_' + iset + lib_kwargs = {'sources': iset_fname, + } + lib_kwargs.update(basic_kwargs) + langarg_key = compiler.get_language() + '_args' + old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key) + all_lang_args = old_lang_args + args + lib_kwargs[langarg_key] = all_lang_args + result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs)) + return [result, conf] + +def initialize(*args, **kwargs): + return SimdModule(*args, **kwargs) diff --git a/mesonbuild/modules/unstable_cuda.py b/mesonbuild/modules/unstable_cuda.py deleted file mode 100644 index b31459f5b..000000000 --- a/mesonbuild/modules/unstable_cuda.py +++ /dev/null @@ -1,360 +0,0 @@ -# Copyright 2017 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import annotations - -import typing as T -import re - -from ..mesonlib import version_compare -from ..compilers import CudaCompiler - -from . 
import NewExtensionModule, ModuleInfo - -from ..interpreterbase import ( - flatten, permittedKwargs, noKwargs, - InvalidArguments -) - -if T.TYPE_CHECKING: - from . import ModuleState - from ..compilers import Compiler - -class CudaModule(NewExtensionModule): - - INFO = ModuleInfo('CUDA', '0.50.0', unstable=True) - - def __init__(self, *args, **kwargs): - super().__init__() - self.methods.update({ - "min_driver_version": self.min_driver_version, - "nvcc_arch_flags": self.nvcc_arch_flags, - "nvcc_arch_readable": self.nvcc_arch_readable, - }) - - @noKwargs - def min_driver_version(self, state: 'ModuleState', - args: T.Tuple[str], - kwargs: T.Dict[str, T.Any]) -> str: - argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' + - 'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' + - 'the CUDA Toolkit\'s components (including NVCC) are versioned ' + - 'independently from each other (and the CUDA Toolkit as a whole).') - - if len(args) != 1 or not isinstance(args[0], str): - raise argerror - - cuda_version = args[0] - driver_version_table = [ - {'cuda_version': '>=11.7.0', 'windows': '516.01', 'linux': '515.43.04'}, - {'cuda_version': '>=11.6.1', 'windows': '511.65', 'linux': '510.47.03'}, - {'cuda_version': '>=11.6.0', 'windows': '511.23', 'linux': '510.39.01'}, - {'cuda_version': '>=11.5.1', 'windows': '496.13', 'linux': '495.29.05'}, - {'cuda_version': '>=11.5.0', 'windows': '496.04', 'linux': '495.29.05'}, - {'cuda_version': '>=11.4.3', 'windows': '472.50', 'linux': '470.82.01'}, - {'cuda_version': '>=11.4.1', 'windows': '471.41', 'linux': '470.57.02'}, - {'cuda_version': '>=11.4.0', 'windows': '471.11', 'linux': '470.42.01'}, - {'cuda_version': '>=11.3.0', 'windows': '465.89', 'linux': '465.19.01'}, - {'cuda_version': '>=11.2.2', 'windows': '461.33', 'linux': '460.32.03'}, - {'cuda_version': '>=11.2.1', 'windows': '461.09', 'linux': '460.32.03'}, - {'cuda_version': '>=11.2.0', 'windows': '460.82', 'linux': 
'460.27.03'}, - {'cuda_version': '>=11.1.1', 'windows': '456.81', 'linux': '455.32'}, - {'cuda_version': '>=11.1.0', 'windows': '456.38', 'linux': '455.23'}, - {'cuda_version': '>=11.0.3', 'windows': '451.82', 'linux': '450.51.06'}, - {'cuda_version': '>=11.0.2', 'windows': '451.48', 'linux': '450.51.05'}, - {'cuda_version': '>=11.0.1', 'windows': '451.22', 'linux': '450.36.06'}, - {'cuda_version': '>=10.2.89', 'windows': '441.22', 'linux': '440.33'}, - {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'}, - {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'}, - {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'}, - {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'}, - {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': '390.46'}, - {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'}, - {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'}, - {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'}, - {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'}, - {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'}, - ] - - driver_version = 'unknown' - for d in driver_version_table: - if version_compare(cuda_version, d['cuda_version']): - driver_version = d.get(state.host_machine.system, d['linux']) - break - - return driver_version - - @permittedKwargs(['detected']) - def nvcc_arch_flags(self, state: 'ModuleState', - args: T.Tuple[T.Union[Compiler, CudaCompiler, str]], - kwargs: T.Dict[str, T.Any]) -> T.List[str]: - nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs) - ret = self._nvcc_arch_flags(*nvcc_arch_args)[0] - return ret - - @permittedKwargs(['detected']) - def nvcc_arch_readable(self, state: 'ModuleState', - args: T.Tuple[T.Union[Compiler, CudaCompiler, str]], - kwargs: T.Dict[str, T.Any]) -> T.List[str]: - nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs) - ret = 
self._nvcc_arch_flags(*nvcc_arch_args)[1] - return ret - - @staticmethod - def _break_arch_string(s): - s = re.sub('[ \t\r\n,;]+', ';', s) - s = s.strip(';').split(';') - return s - - @staticmethod - def _detected_cc_from_compiler(c): - if isinstance(c, CudaCompiler): - return c.detected_cc - return '' - - @staticmethod - def _version_from_compiler(c): - if isinstance(c, CudaCompiler): - return c.version - if isinstance(c, str): - return c - return 'unknown' - - def _validate_nvcc_arch_args(self, args, kwargs): - argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!') - - if len(args) < 1: - raise argerror - else: - compiler = args[0] - cuda_version = self._version_from_compiler(compiler) - if cuda_version == 'unknown': - raise argerror - - arch_list = [] if len(args) <= 1 else flatten(args[1:]) - arch_list = [self._break_arch_string(a) for a in arch_list] - arch_list = flatten(arch_list) - if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}): - raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''') - arch_list = arch_list[0] if len(arch_list) == 1 else arch_list - - detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler)) - detected = flatten([detected]) - detected = [self._break_arch_string(a) for a in detected] - detected = flatten(detected) - if not set(detected).isdisjoint({'All', 'Common', 'Auto'}): - raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''') - - return cuda_version, arch_list, detected - - def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None): - """ - Filter CUDA arch list (no codenames) for >= low and < hi architecture - bounds, and deduplicate. - If saturate is provided, architectures >= hi are replaced with saturate. 
- """ - - filtered_cuda_arch_list = [] - for arch in cuda_arch_list: - if arch: - if lo and version_compare(arch, '<' + lo): - continue - if hi and version_compare(arch, '>=' + hi): - if not saturate: - continue - arch = saturate - if arch not in filtered_cuda_arch_list: - filtered_cuda_arch_list.append(arch) - return filtered_cuda_arch_list - - def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''): - """ - Using the CUDA Toolkit version and the target architectures, compute - the NVCC architecture flags. - """ - - # Replicates much of the logic of - # https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake - # except that a bug with cuda_arch_list="All" is worked around by - # tracking both lower and upper limits on GPU architectures. - - cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell'] # noqa: E221 - cuda_common_gpu_architectures = ['3.0', '3.5', '5.0'] # noqa: E221 - cuda_hi_limit_gpu_architecture = None # noqa: E221 - cuda_lo_limit_gpu_architecture = '2.0' # noqa: E221 - cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0'] # noqa: E221 - - if version_compare(cuda_version, '<7.0'): - cuda_hi_limit_gpu_architecture = '5.2' - - if version_compare(cuda_version, '>=7.0'): - cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra'] # noqa: E221 - cuda_common_gpu_architectures += ['5.2'] # noqa: E221 - - if version_compare(cuda_version, '<8.0'): - cuda_common_gpu_architectures += ['5.2+PTX'] # noqa: E221 - cuda_hi_limit_gpu_architecture = '6.0' # noqa: E221 - - if version_compare(cuda_version, '>=8.0'): - cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra'] # noqa: E221 - cuda_common_gpu_architectures += ['6.0', '6.1'] # noqa: E221 - cuda_all_gpu_architectures += ['6.0', '6.1', '6.2'] # noqa: E221 - - if version_compare(cuda_version, '<9.0'): - cuda_common_gpu_architectures += ['6.1+PTX'] # noqa: E221 - cuda_hi_limit_gpu_architecture = '7.0' # noqa: E221 - - if 
version_compare(cuda_version, '>=9.0'): - cuda_known_gpu_architectures += ['Volta', 'Xavier'] # noqa: E221 - cuda_common_gpu_architectures += ['7.0'] # noqa: E221 - cuda_all_gpu_architectures += ['7.0', '7.2'] # noqa: E221 - # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features - cuda_lo_limit_gpu_architecture = '3.0' # noqa: E221 - - if version_compare(cuda_version, '<10.0'): - cuda_common_gpu_architectures += ['7.2+PTX'] # noqa: E221 - cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221 - - if version_compare(cuda_version, '>=10.0'): - cuda_known_gpu_architectures += ['Turing'] # noqa: E221 - cuda_common_gpu_architectures += ['7.5'] # noqa: E221 - cuda_all_gpu_architectures += ['7.5'] # noqa: E221 - - if version_compare(cuda_version, '<11.0'): - cuda_common_gpu_architectures += ['7.5+PTX'] # noqa: E221 - cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221 - - if version_compare(cuda_version, '>=11.0'): - cuda_known_gpu_architectures += ['Ampere'] # noqa: E221 - cuda_common_gpu_architectures += ['8.0'] # noqa: E221 - cuda_all_gpu_architectures += ['8.0'] # noqa: E221 - # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features - cuda_lo_limit_gpu_architecture = '3.5' # noqa: E221 - - if version_compare(cuda_version, '<11.1'): - cuda_common_gpu_architectures += ['8.0+PTX'] # noqa: E221 - cuda_hi_limit_gpu_architecture = '8.6' # noqa: E221 - - if version_compare(cuda_version, '>=11.1'): - cuda_common_gpu_architectures += ['8.6', '8.6+PTX'] # noqa: E221 - cuda_all_gpu_architectures += ['8.6'] # noqa: E221 - - if version_compare(cuda_version, '<12.0'): - cuda_hi_limit_gpu_architecture = '9.0' # noqa: E221 - - if not cuda_arch_list: - cuda_arch_list = 'Auto' - - if cuda_arch_list == 'All': # noqa: E271 - cuda_arch_list = cuda_known_gpu_architectures - elif cuda_arch_list == 'Common': # noqa: E271 - cuda_arch_list = cuda_common_gpu_architectures - elif cuda_arch_list == 'Auto': # 
noqa: E271 - if detected: - if isinstance(detected, list): - cuda_arch_list = detected - else: - cuda_arch_list = self._break_arch_string(detected) - cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list, - cuda_lo_limit_gpu_architecture, - cuda_hi_limit_gpu_architecture, - cuda_common_gpu_architectures[-1]) - else: - cuda_arch_list = cuda_common_gpu_architectures - elif isinstance(cuda_arch_list, str): - cuda_arch_list = self._break_arch_string(cuda_arch_list) - - cuda_arch_list = sorted(x for x in set(cuda_arch_list) if x) - - cuda_arch_bin = [] - cuda_arch_ptx = [] - for arch_name in cuda_arch_list: - arch_bin = [] - arch_ptx = [] - add_ptx = arch_name.endswith('+PTX') - if add_ptx: - arch_name = arch_name[:-len('+PTX')] - - if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name): - arch_bin, arch_ptx = [arch_name], [arch_name] - else: - arch_bin, arch_ptx = { - 'Fermi': (['2.0', '2.1(2.0)'], []), - 'Kepler+Tegra': (['3.2'], []), - 'Kepler+Tesla': (['3.7'], []), - 'Kepler': (['3.0', '3.5'], ['3.5']), - 'Maxwell+Tegra': (['5.3'], []), - 'Maxwell': (['5.0', '5.2'], ['5.2']), - 'Pascal': (['6.0', '6.1'], ['6.1']), - 'Pascal+Tegra': (['6.2'], []), - 'Volta': (['7.0'], ['7.0']), - 'Xavier': (['7.2'], []), - 'Turing': (['7.5'], ['7.5']), - 'Ampere': (['8.0'], ['8.0']), - }.get(arch_name, (None, None)) - - if arch_bin is None: - raise InvalidArguments(f'Unknown CUDA Architecture Name {arch_name}!') - - cuda_arch_bin += arch_bin - - if add_ptx: - if not arch_ptx: - arch_ptx = arch_bin - cuda_arch_ptx += arch_ptx - - cuda_arch_bin = sorted(list(set(cuda_arch_bin))) - cuda_arch_ptx = sorted(list(set(cuda_arch_ptx))) - - nvcc_flags = [] - nvcc_archs_readable = [] - - for arch in cuda_arch_bin: - arch, codev = re.fullmatch( - '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups() - - if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture): - continue - if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture): - continue - - if codev: - 
arch = arch.replace('.', '') - codev = codev.replace('.', '') - nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch] - nvcc_archs_readable += ['sm_' + arch] - else: - arch = arch.replace('.', '') - nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch] - nvcc_archs_readable += ['sm_' + arch] - - for arch in cuda_arch_ptx: - arch, codev = re.fullmatch( - '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups() - - if codev: - arch = codev - - if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture): - continue - if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture): - continue - - arch = arch.replace('.', '') - nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch] - nvcc_archs_readable += ['compute_' + arch] - - return nvcc_flags, nvcc_archs_readable - -def initialize(*args, **kwargs): - return CudaModule(*args, **kwargs) diff --git a/mesonbuild/modules/unstable_external_project.py b/mesonbuild/modules/unstable_external_project.py deleted file mode 100644 index 1fd49112b..000000000 --- a/mesonbuild/modules/unstable_external_project.py +++ /dev/null @@ -1,306 +0,0 @@ -# Copyright 2020 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pathlib import Path -import os -import shlex -import subprocess -import typing as T - -from . import ExtensionModule, ModuleReturnValue, NewExtensionModule, ModuleInfo -from .. 
import mlog, build -from ..compilers.compilers import CFLAGS_MAPPING -from ..envconfig import ENV_VAR_PROG_MAP -from ..dependencies import InternalDependency, PkgConfigDependency -from ..interpreterbase import FeatureNew -from ..interpreter.type_checking import ENV_KW, DEPENDS_KW -from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args -from ..mesonlib import (EnvironmentException, MesonException, Popen_safe, MachineChoice, - get_variable_regex, do_replacement, join_args, OptionKey) - -if T.TYPE_CHECKING: - from typing_extensions import TypedDict - - from . import ModuleState - from ..interpreter import Interpreter - from ..interpreterbase import TYPE_var - from ..build import BuildTarget, CustomTarget - - class Dependency(TypedDict): - - subdir: str - - class AddProject(TypedDict): - - configure_options: T.List[str] - cross_configure_options: T.List[str] - verbose: bool - env: build.EnvironmentVariables - depends: T.List[T.Union[BuildTarget, CustomTarget]] - - -class ExternalProject(NewExtensionModule): - def __init__(self, - state: 'ModuleState', - configure_command: str, - configure_options: T.List[str], - cross_configure_options: T.List[str], - env: build.EnvironmentVariables, - verbose: bool, - extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]): - super().__init__() - self.methods.update({'dependency': self.dependency_method, - }) - - self.subdir = Path(state.subdir) - self.project_version = state.project_version - self.subproject = state.subproject - self.env = state.environment - self.build_machine = state.build_machine - self.host_machine = state.host_machine - self.configure_command = configure_command - self.configure_options = configure_options - self.cross_configure_options = cross_configure_options - self.verbose = verbose - self.user_env = env - - self.src_dir = Path(self.env.get_source_dir(), self.subdir) - self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build') - self.install_dir 
= Path(self.env.get_build_dir(), self.subdir, 'dist') - _p = self.env.coredata.get_option(OptionKey('prefix')) - assert isinstance(_p, str), 'for mypy' - self.prefix = Path(_p) - _l = self.env.coredata.get_option(OptionKey('libdir')) - assert isinstance(_l, str), 'for mypy' - self.libdir = Path(_l) - _i = self.env.coredata.get_option(OptionKey('includedir')) - assert isinstance(_i, str), 'for mypy' - self.includedir = Path(_i) - self.name = self.src_dir.name - - # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make` - # will install files into "c:/bar/c:/foo" which is an invalid path. - # Work around that issue by removing the drive from prefix. - if self.prefix.drive: - self.prefix = self.prefix.relative_to(self.prefix.drive) - - # self.prefix is an absolute path, so we cannot append it to another path. - self.rel_prefix = self.prefix.relative_to(self.prefix.root) - - self._configure(state) - - self.targets = self._create_targets(extra_depends) - - def _configure(self, state: 'ModuleState') -> None: - if self.configure_command == 'waf': - FeatureNew('Waf external project', '0.60.0').use(self.subproject, state.current_node) - waf = state.find_program('waf') - configure_cmd = waf.get_command() - configure_cmd += ['configure', '-o', str(self.build_dir)] - workdir = self.src_dir - self.make = waf.get_command() + ['build'] - else: - # Assume it's the name of a script in source dir, like 'configure', - # 'autogen.sh', etc). 
- configure_path = Path(self.src_dir, self.configure_command) - configure_prog = state.find_program(configure_path.as_posix()) - configure_cmd = configure_prog.get_command() - workdir = self.build_dir - self.make = state.find_program('make').get_command() - - d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()), - ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()), - ('INCLUDEDIR', None, self.includedir.as_posix()), - ] - self._validate_configure_options(d, state) - - configure_cmd += self._format_options(self.configure_options, d) - - if self.env.is_cross_build(): - host = '{}-{}-{}'.format(self.host_machine.cpu_family, - self.build_machine.system, - self.host_machine.system) - d = [('HOST', None, host)] - configure_cmd += self._format_options(self.cross_configure_options, d) - - # Set common env variables like CFLAGS, CC, etc. - link_exelist: T.List[str] = [] - link_args: T.List[str] = [] - self.run_env = os.environ.copy() - for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items(): - if any(lang not in i for i in (ENV_VAR_PROG_MAP, CFLAGS_MAPPING)): - continue - cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang) - assert isinstance(cargs, list), 'for mypy' - self.run_env[ENV_VAR_PROG_MAP[lang]] = self._quote_and_join(compiler.get_exelist()) - self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs) - if not link_exelist: - link_exelist = compiler.get_linker_exelist() - _l = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang) - assert isinstance(_l, list), 'for mypy' - link_args = _l - if link_exelist: - # FIXME: Do not pass linker because Meson uses CC as linker wrapper, - # but autotools often expects the real linker (e.h. GNU ld). 
- # self.run_env['LD'] = self._quote_and_join(link_exelist) - pass - self.run_env['LDFLAGS'] = self._quote_and_join(link_args) - - self.run_env = self.user_env.get_env(self.run_env) - self.run_env = PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST, - uninstalled=True) - - self.build_dir.mkdir(parents=True, exist_ok=True) - self._run('configure', configure_cmd, workdir) - - def _quote_and_join(self, array: T.List[str]) -> str: - return ' '.join([shlex.quote(i) for i in array]) - - def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]], state: 'ModuleState') -> None: - # Ensure the user at least try to pass basic info to the build system, - # like the prefix, libdir, etc. - for key, default, val in variables: - if default is None: - continue - key_format = f'@{key}@' - for option in self.configure_options: - if key_format in option: - break - else: - FeatureNew('Default configure_option', '0.57.0').use(self.subproject, state.current_node) - self.configure_options.append(default) - - def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]: - out: T.List[str] = [] - missing = set() - regex = get_variable_regex('meson') - confdata: T.Dict[str, T.Tuple[str, T.Optional[str]]] = {k: (v, None) for k, _, v in variables} - for o in options: - arg, missing_vars = do_replacement(regex, o, 'meson', confdata) - missing.update(missing_vars) - out.append(arg) - if missing: - var_list = ", ".join(map(repr, sorted(missing))) - raise EnvironmentException( - f"Variables {var_list} in configure options are missing.") - return out - - def _run(self, step: str, command: T.List[str], workdir: Path) -> None: - mlog.log(f'External project {self.name}:', mlog.bold(step)) - m = 'Running command ' + str(command) + ' in directory ' + str(workdir) + '\n' - log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log') - output = None - if not self.verbose: - output = open(log_filename, 'w', 
encoding='utf-8') - output.write(m + '\n') - output.flush() - else: - mlog.log(m) - p, *_ = Popen_safe(command, cwd=workdir, env=self.run_env, - stderr=subprocess.STDOUT, - stdout=output) - if p.returncode != 0: - m = f'{step} step returned error code {p.returncode}.' - if not self.verbose: - m += '\nSee logs: ' + str(log_filename) - raise MesonException(m) - - def _create_targets(self, extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]) -> T.List['TYPE_var']: - cmd = self.env.get_build_command() - cmd += ['--internal', 'externalproject', - '--name', self.name, - '--srcdir', self.src_dir.as_posix(), - '--builddir', self.build_dir.as_posix(), - '--installdir', self.install_dir.as_posix(), - '--logdir', mlog.log_dir, - '--make', join_args(self.make), - ] - if self.verbose: - cmd.append('--verbose') - - self.target = build.CustomTarget( - self.name, - self.subdir.as_posix(), - self.subproject, - self.env, - cmd + ['@OUTPUT@', '@DEPFILE@'], - [], - [f'{self.name}.stamp'], - depfile=f'{self.name}.d', - console=True, - extra_depends=extra_depends, - ) - - idir = build.InstallDir(self.subdir.as_posix(), - Path('dist', self.rel_prefix).as_posix(), - install_dir='.', - install_mode=None, - exclude=None, - strip_directory=True, - from_source_dir=False, - subproject=self.subproject) - - return [self.target, idir] - - @typed_pos_args('external_project.dependency', str) - @typed_kwargs('external_project.dependency', KwargInfo('subdir', str, default='')) - def dependency_method(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Dependency') -> InternalDependency: - libname = args[0] - - abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir) - if kwargs['subdir']: - abs_includedir = Path(abs_includedir, kwargs['subdir']) - abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir) - - version = self.project_version - compile_args = [f'-I{abs_includedir}'] - link_args = [f'-L{abs_libdir}', f'-l{libname}'] - sources = self.target - dep = 
InternalDependency(version, [], compile_args, link_args, [], - [], [sources], [], {}, [], []) - return dep - - -class ExternalProjectModule(ExtensionModule): - - INFO = ModuleInfo('External build system', '0.56.0', unstable=True) - - def __init__(self, interpreter: 'Interpreter'): - super().__init__(interpreter) - self.methods.update({'add_project': self.add_project, - }) - - @typed_pos_args('external_project_mod.add_project', str) - @typed_kwargs( - 'external_project.add_project', - KwargInfo('configure_options', ContainerTypeInfo(list, str), default=[], listify=True), - KwargInfo('cross_configure_options', ContainerTypeInfo(list, str), default=['--host=@HOST@'], listify=True), - KwargInfo('verbose', bool, default=False), - ENV_KW, - DEPENDS_KW.evolve(since='0.63.0'), - ) - def add_project(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'AddProject') -> ModuleReturnValue: - configure_command = args[0] - project = ExternalProject(state, - configure_command, - kwargs['configure_options'], - kwargs['cross_configure_options'], - kwargs['env'], - kwargs['verbose'], - kwargs['depends']) - return ModuleReturnValue(project, project.targets) - - -def initialize(interp: 'Interpreter') -> ExternalProjectModule: - return ExternalProjectModule(interp) diff --git a/mesonbuild/modules/unstable_icestorm.py b/mesonbuild/modules/unstable_icestorm.py deleted file mode 100644 index c5791487c..000000000 --- a/mesonbuild/modules/unstable_icestorm.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright 2017 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations -import itertools -import typing as T - -from . import ExtensionModule, ModuleReturnValue, ModuleInfo -from .. import build -from .. import mesonlib -from ..interpreter.type_checking import CT_INPUT_KW -from ..interpreterbase.decorators import KwargInfo, typed_kwargs, typed_pos_args - -if T.TYPE_CHECKING: - from typing_extensions import TypedDict - - from . import ModuleState - from ..interpreter import Interpreter - from ..programs import ExternalProgram - - class ProjectKwargs(TypedDict): - - sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] - constraint_file: T.Union[mesonlib.FileOrString, build.GeneratedTypes] - -class IceStormModule(ExtensionModule): - - INFO = ModuleInfo('FPGA/Icestorm', '0.45.0', unstable=True) - - def __init__(self, interpreter: Interpreter) -> None: - super().__init__(interpreter) - self.tools: T.Dict[str, ExternalProgram] = {} - self.methods.update({ - 'project': self.project, - }) - - def detect_tools(self, state: ModuleState) -> None: - self.tools['yosys'] = state.find_program('yosys') - self.tools['arachne'] = state.find_program('arachne-pnr') - self.tools['icepack'] = state.find_program('icepack') - self.tools['iceprog'] = state.find_program('iceprog') - self.tools['icetime'] = state.find_program('icetime') - - @typed_pos_args('icestorm.project', str, - varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, - build.GeneratedList)) - @typed_kwargs( - 'icestorm.project', - CT_INPUT_KW.evolve(name='sources'), - KwargInfo( - 'constraint_file', - (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), - required=True, - ) - ) - def project(self, state: ModuleState, - args: T.Tuple[str, T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]], - kwargs: ProjectKwargs) -> ModuleReturnValue: - if not self.tools: - 
self.detect_tools(state) - proj_name, arg_sources = args - all_sources = self.interpreter.source_strings_to_files( - list(itertools.chain(arg_sources, kwargs['sources']))) - - blif_target = build.CustomTarget( - f'{proj_name}_blif', - state.subdir, - state.subproject, - state.environment, - [self.tools['yosys'], '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@'], - all_sources, - [f'{proj_name}.blif'], - ) - - asc_target = build.CustomTarget( - f'{proj_name}_asc', - state.subdir, - state.subproject, - state.environment, - [self.tools['arachne'], '-q', '-d', '1k', '-p', '@INPUT@', '-o', '@OUTPUT@'], - [kwargs['constraint_file'], blif_target], - [f'{proj_name}.asc'], - ) - - bin_target = build.CustomTarget( - f'{proj_name}_bin', - state.subdir, - state.subproject, - state.environment, - [self.tools['icepack'], '@INPUT@', '@OUTPUT@'], - [asc_target], - [f'{proj_name}.bin'], - build_by_default=True, - ) - - upload_target = build.RunTarget( - f'{proj_name}-upload', - [self.tools['iceprog'], bin_target], - [], - state.subdir, - state.subproject, - state.environment, - ) - - time_target = build.RunTarget( - f'{proj_name}-time', - [self.tools['icetime'], bin_target], - [], - state.subdir, - state.subproject, - state.environment, - ) - - return ModuleReturnValue( - None, - [blif_target, asc_target, bin_target, upload_target, time_target]) - - -def initialize(interp: Interpreter) -> IceStormModule: - return IceStormModule(interp) diff --git a/mesonbuild/modules/unstable_rust.py b/mesonbuild/modules/unstable_rust.py deleted file mode 100644 index 792195e6d..000000000 --- a/mesonbuild/modules/unstable_rust.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright © 2020 Intel Corporation - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import typing as T - -from . import ExtensionModule, ModuleReturnValue, ModuleInfo -from .. import mlog -from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList, IncludeDirs, CustomTarget, StructuredSources -from ..dependencies import Dependency, ExternalLibrary -from ..interpreter.interpreter import TEST_KWARGS, OUTPUT_KW -from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs -from ..mesonlib import File - -if T.TYPE_CHECKING: - from . import ModuleState - from ..interpreter import Interpreter - from ..interpreter import kwargs as _kwargs - from ..interpreter.interpreter import SourceInputs, SourceOutputs - from ..programs import ExternalProgram - - from typing_extensions import TypedDict - - class FuncTest(_kwargs.BaseTest): - - dependencies: T.List[T.Union[Dependency, ExternalLibrary]] - is_parallel: bool - - class FuncBindgen(TypedDict): - - args: T.List[str] - c_args: T.List[str] - include_directories: T.List[IncludeDirs] - input: T.List[SourceInputs] - output: str - - -class RustModule(ExtensionModule): - - """A module that holds helper functions for rust.""" - - INFO = ModuleInfo('rust', '0.57.0', unstable=True) - - def __init__(self, interpreter: 'Interpreter') -> None: - super().__init__(interpreter) - self._bindgen_bin: T.Optional['ExternalProgram'] = None - self.methods.update({ - 'test': self.test, - 'bindgen': self.bindgen, - }) - - @typed_pos_args('rust.test', str, BuildTarget) - @typed_kwargs( - 'rust.test', - 
*TEST_KWARGS, - KwargInfo('is_parallel', bool, default=False), - KwargInfo( - 'dependencies', - ContainerTypeInfo(list, (Dependency, ExternalLibrary)), - listify=True, - default=[]), - ) - def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue: - """Generate a rust test target from a given rust target. - - Rust puts it's unitests inside it's main source files, unlike most - languages that put them in external files. This means that normally - you have to define two separate targets with basically the same - arguments to get tests: - - ```meson - rust_lib_sources = [...] - rust_lib = static_library( - 'rust_lib', - rust_lib_sources, - ) - - rust_lib_test = executable( - 'rust_lib_test', - rust_lib_sources, - rust_args : ['--test'], - ) - - test( - 'rust_lib_test', - rust_lib_test, - protocol : 'rust', - ) - ``` - - This is all fine, but not very DRY. This method makes it much easier - to define rust tests: - - ```meson - rust = import('unstable-rust') - - rust_lib = static_library( - 'rust_lib', - [sources], - ) - - rust.test('rust_lib_test', rust_lib) - ``` - """ - name = args[0] - base_target: BuildTarget = args[1] - if not base_target.uses_rust(): - raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target') - extra_args = kwargs['args'] - - # Delete any arguments we don't want passed - if '--test' in extra_args: - mlog.warning('Do not add --test to rustmod.test arguments') - extra_args.remove('--test') - if '--format' in extra_args: - mlog.warning('Do not add --format to rustmod.test arguments') - i = extra_args.index('--format') - # Also delete the argument to --format - del extra_args[i + 1] - del extra_args[i] - for i, a in enumerate(extra_args): - if isinstance(a, str) and a.startswith('--format='): - del extra_args[i] - break - - dependencies = [d for d in kwargs['dependencies']] - - # We need to cast here, as currently these don't have protocol in them, but 
test itself does. - tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy()) - - tkwargs['args'] = extra_args + ['--test', '--format', 'pretty'] - tkwargs['protocol'] = 'rust' - - new_target_kwargs = base_target.kwargs.copy() - # Don't mutate the shallow copied list, instead replace it with a new - # one - new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test'] - new_target_kwargs['install'] = False - new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + dependencies - - new_target = Executable( - name, base_target.subdir, state.subproject, base_target.for_machine, - base_target.sources, base_target.structured_sources, - base_target.objects, base_target.environment, base_target.compilers, - new_target_kwargs - ) - - test = self.interpreter.make_test( - self.interpreter.current_node, (name, new_target), tkwargs) - - return ModuleReturnValue(None, [new_target, test]) - - @noPosargs - @typed_kwargs( - 'rust.bindgen', - KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True), - KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True), - KwargInfo('include_directories', ContainerTypeInfo(list, IncludeDirs), default=[], listify=True), - KwargInfo( - 'input', - ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, ExtractedObjects, CustomTargetIndex, CustomTarget, str), allow_empty=False), - default=[], - listify=True, - required=True, - ), - OUTPUT_KW, - ) - def bindgen(self, state: 'ModuleState', args: T.List, kwargs: 'FuncBindgen') -> ModuleReturnValue: - """Wrapper around bindgen to simplify it's use. - - The main thing this simplifies is the use of `include_directory` - objects, instead of having to pass a plethora of `-I` arguments. 
- """ - header, *_deps = self.interpreter.source_strings_to_files(kwargs['input']) - - # Split File and Target dependencies to add pass to CustomTarget - depends: T.List['SourceOutputs'] = [] - depend_files: T.List[File] = [] - for d in _deps: - if isinstance(d, File): - depend_files.append(d) - else: - depends.append(d) - - inc_strs: T.List[str] = [] - for i in kwargs['include_directories']: - # bindgen always uses clang, so it's safe to hardcode -I here - inc_strs.extend([f'-I{x}' for x in i.to_string_list( - state.environment.get_source_dir(), state.environment.get_build_dir())]) - - if self._bindgen_bin is None: - self._bindgen_bin = state.find_program('bindgen') - - name: str - if isinstance(header, File): - name = header.fname - elif isinstance(header, (BuildTarget, BothLibraries, ExtractedObjects, StructuredSources)): - raise InterpreterException('bindgen source file must be a C header, not an object or build target') - else: - name = header.get_outputs()[0] - - target = CustomTarget( - f'rustmod-bindgen-{name}'.replace('/', '_'), - state.subdir, - state.subproject, - state.environment, - self._bindgen_bin.get_command() + [ - '@INPUT@', '--output', - os.path.join(state.environment.build_dir, '@OUTPUT@')] + - kwargs['args'] + ['--'] + kwargs['c_args'] + inc_strs + - ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'], - [header], - [kwargs['output']], - depfile='@PLAINNAME@.d', - extra_depends=depends, - depend_files=depend_files, - backend=state.backend, - ) - - return ModuleReturnValue([target], [target]) - - -def initialize(interp: 'Interpreter') -> RustModule: - return RustModule(interp) diff --git a/mesonbuild/modules/unstable_simd.py b/mesonbuild/modules/unstable_simd.py deleted file mode 100644 index a33022d04..000000000 --- a/mesonbuild/modules/unstable_simd.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2017 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from .. import mesonlib, compilers, mlog -from .. import build - -from . import ExtensionModule, ModuleInfo - -class SimdModule(ExtensionModule): - - INFO = ModuleInfo('SIMD', '0.42.0', unstable=True) - - def __init__(self, interpreter): - super().__init__(interpreter) - # FIXME add Altivec and AVX512. - self.isets = ('mmx', - 'sse', - 'sse2', - 'sse3', - 'ssse3', - 'sse41', - 'sse42', - 'avx', - 'avx2', - 'neon', - ) - self.methods.update({ - 'check': self.check, - }) - - def check(self, state, args, kwargs): - result = [] - if len(args) != 1: - raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.') - prefix = args[0] - if not isinstance(prefix, str): - raise mesonlib.MesonException('Argument must be a string.') - if 'compiler' not in kwargs: - raise mesonlib.MesonException('Must specify compiler keyword') - if 'sources' in kwargs: - raise mesonlib.MesonException('SIMD module does not support the "sources" keyword') - basic_kwargs = {} - for key, value in kwargs.items(): - if key not in self.isets and key != 'compiler': - basic_kwargs[key] = value - compiler = kwargs['compiler'] - if not isinstance(compiler, compilers.compilers.Compiler): - raise mesonlib.MesonException('Compiler argument must be a compiler object.') - conf = build.ConfigurationData() - for iset in self.isets: - if iset not in kwargs: - continue - iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate. 
- args = compiler.get_instruction_set_args(iset) - if args is None: - mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) - continue - if args: - if not compiler.has_multi_arguments(args, state.environment)[0]: - mlog.log('Compiler supports %s:' % iset, mlog.red('NO')) - continue - mlog.log('Compiler supports %s:' % iset, mlog.green('YES')) - conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset) - libname = prefix + '_' + iset - lib_kwargs = {'sources': iset_fname, - } - lib_kwargs.update(basic_kwargs) - langarg_key = compiler.get_language() + '_args' - old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key) - all_lang_args = old_lang_args + args - lib_kwargs[langarg_key] = all_lang_args - result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs)) - return [result, conf] - -def initialize(*args, **kwargs): - return SimdModule(*args, **kwargs) diff --git a/mesonbuild/modules/unstable_wayland.py b/mesonbuild/modules/unstable_wayland.py deleted file mode 100644 index aab07d4d5..000000000 --- a/mesonbuild/modules/unstable_wayland.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2022 Mark Bolhuis - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations -import os -import typing as T - -from . 
import ExtensionModule, ModuleReturnValue, ModuleInfo -from ..build import CustomTarget -from ..interpreter.type_checking import NoneType, in_set_validator -from ..interpreterbase import typed_pos_args, typed_kwargs, KwargInfo -from ..mesonlib import File, MesonException - -if T.TYPE_CHECKING: - from typing_extensions import Literal,TypedDict - - from . import ModuleState - from ..build import Executable - from ..dependencies import Dependency - from ..interpreter import Interpreter - from ..programs import ExternalProgram - from ..mesonlib import FileOrString - - class ScanXML(TypedDict): - - public: bool - client: bool - server: bool - - class FindProtocol(TypedDict): - - state: Literal['stable', 'staging', 'unstable'] - version: T.Optional[int] - -class WaylandModule(ExtensionModule): - - INFO = ModuleInfo('wayland', '0.62.0', unstable=True) - - def __init__(self, interpreter: Interpreter) -> None: - super().__init__(interpreter) - - self.protocols_dep: T.Optional[Dependency] = None - self.pkgdatadir: T.Optional[str] = None - self.scanner_bin: T.Optional[T.Union[ExternalProgram, Executable]] = None - - self.methods.update({ - 'scan_xml': self.scan_xml, - 'find_protocol': self.find_protocol, - }) - - @typed_pos_args('wayland.scan_xml', varargs=(str, File), min_varargs=1) - @typed_kwargs( - 'wayland.scan_xml', - KwargInfo('public', bool, default=False), - KwargInfo('client', bool, default=True), - KwargInfo('server', bool, default=False), - ) - def scan_xml(self, state: ModuleState, args: T.Tuple[T.List[FileOrString]], kwargs: ScanXML) -> ModuleReturnValue: - if self.scanner_bin is None: - # wayland-scanner from BUILD machine must have same version as wayland - # libraries from HOST machine. 
- dep = state.dependency('wayland-client') - self.scanner_bin = state.find_tool('wayland-scanner', 'wayland-scanner', 'wayland_scanner', - wanted=dep.version) - - scope = 'public' if kwargs['public'] else 'private' - # We have to cast because mypy can't deduce these are literals - sides = [i for i in T.cast("T.List[Literal['client', 'server']]", ['client', 'server']) if kwargs[i]] - if not sides: - raise MesonException('At least one of client or server keyword argument must be set to true.') - - xml_files = self.interpreter.source_strings_to_files(args[0]) - targets: T.List[CustomTarget] = [] - for xml_file in xml_files: - name = os.path.splitext(os.path.basename(xml_file.fname))[0] - - code = CustomTarget( - f'{name}-protocol', - state.subdir, - state.subproject, - state.environment, - [self.scanner_bin, f'{scope}-code', '@INPUT@', '@OUTPUT@'], - [xml_file], - [f'{name}-protocol.c'], - backend=state.backend, - ) - targets.append(code) - - for side in sides: - header = CustomTarget( - f'{name}-{side}-protocol', - state.subdir, - state.subproject, - state.environment, - [self.scanner_bin, f'{side}-header', '@INPUT@', '@OUTPUT@'], - [xml_file], - [f'{name}-{side}-protocol.h'], - backend=state.backend, - ) - targets.append(header) - - return ModuleReturnValue(targets, targets) - - @typed_pos_args('wayland.find_protocol', str) - @typed_kwargs( - 'wayland.find_protocol', - KwargInfo('state', str, default='stable', validator=in_set_validator({'stable', 'staging', 'unstable'})), - KwargInfo('version', (int, NoneType)), - ) - def find_protocol(self, state: ModuleState, args: T.Tuple[str], kwargs: FindProtocol) -> File: - base_name = args[0] - xml_state = kwargs['state'] - version = kwargs['version'] - - if xml_state != 'stable' and version is None: - raise MesonException(f'{xml_state} protocols require a version number.') - - if xml_state == 'stable' and version is not None: - raise MesonException('stable protocols do not require a version number.') - - if 
self.protocols_dep is None: - self.protocols_dep = state.dependency('wayland-protocols') - - if self.pkgdatadir is None: - self.pkgdatadir = self.protocols_dep.get_variable(pkgconfig='pkgdatadir', internal='pkgdatadir') - - if xml_state == 'stable': - xml_name = f'{base_name}.xml' - elif xml_state == 'staging': - xml_name = f'{base_name}-v{version}.xml' - else: - xml_name = f'{base_name}-unstable-v{version}.xml' - - path = os.path.join(self.pkgdatadir, xml_state, base_name, xml_name) - - if not os.path.exists(path): - raise MesonException(f'The file {path} does not exist.') - - return File.from_absolute_file(path) - - -def initialize(interpreter: Interpreter) -> WaylandModule: - return WaylandModule(interpreter) diff --git a/mesonbuild/modules/wayland.py b/mesonbuild/modules/wayland.py new file mode 100644 index 000000000..aab07d4d5 --- /dev/null +++ b/mesonbuild/modules/wayland.py @@ -0,0 +1,154 @@ +# Copyright 2022 Mark Bolhuis + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import os +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleInfo +from ..build import CustomTarget +from ..interpreter.type_checking import NoneType, in_set_validator +from ..interpreterbase import typed_pos_args, typed_kwargs, KwargInfo +from ..mesonlib import File, MesonException + +if T.TYPE_CHECKING: + from typing_extensions import Literal,TypedDict + + from . 
import ModuleState + from ..build import Executable + from ..dependencies import Dependency + from ..interpreter import Interpreter + from ..programs import ExternalProgram + from ..mesonlib import FileOrString + + class ScanXML(TypedDict): + + public: bool + client: bool + server: bool + + class FindProtocol(TypedDict): + + state: Literal['stable', 'staging', 'unstable'] + version: T.Optional[int] + +class WaylandModule(ExtensionModule): + + INFO = ModuleInfo('wayland', '0.62.0', unstable=True) + + def __init__(self, interpreter: Interpreter) -> None: + super().__init__(interpreter) + + self.protocols_dep: T.Optional[Dependency] = None + self.pkgdatadir: T.Optional[str] = None + self.scanner_bin: T.Optional[T.Union[ExternalProgram, Executable]] = None + + self.methods.update({ + 'scan_xml': self.scan_xml, + 'find_protocol': self.find_protocol, + }) + + @typed_pos_args('wayland.scan_xml', varargs=(str, File), min_varargs=1) + @typed_kwargs( + 'wayland.scan_xml', + KwargInfo('public', bool, default=False), + KwargInfo('client', bool, default=True), + KwargInfo('server', bool, default=False), + ) + def scan_xml(self, state: ModuleState, args: T.Tuple[T.List[FileOrString]], kwargs: ScanXML) -> ModuleReturnValue: + if self.scanner_bin is None: + # wayland-scanner from BUILD machine must have same version as wayland + # libraries from HOST machine. 
+ dep = state.dependency('wayland-client') + self.scanner_bin = state.find_tool('wayland-scanner', 'wayland-scanner', 'wayland_scanner', + wanted=dep.version) + + scope = 'public' if kwargs['public'] else 'private' + # We have to cast because mypy can't deduce these are literals + sides = [i for i in T.cast("T.List[Literal['client', 'server']]", ['client', 'server']) if kwargs[i]] + if not sides: + raise MesonException('At least one of client or server keyword argument must be set to true.') + + xml_files = self.interpreter.source_strings_to_files(args[0]) + targets: T.List[CustomTarget] = [] + for xml_file in xml_files: + name = os.path.splitext(os.path.basename(xml_file.fname))[0] + + code = CustomTarget( + f'{name}-protocol', + state.subdir, + state.subproject, + state.environment, + [self.scanner_bin, f'{scope}-code', '@INPUT@', '@OUTPUT@'], + [xml_file], + [f'{name}-protocol.c'], + backend=state.backend, + ) + targets.append(code) + + for side in sides: + header = CustomTarget( + f'{name}-{side}-protocol', + state.subdir, + state.subproject, + state.environment, + [self.scanner_bin, f'{side}-header', '@INPUT@', '@OUTPUT@'], + [xml_file], + [f'{name}-{side}-protocol.h'], + backend=state.backend, + ) + targets.append(header) + + return ModuleReturnValue(targets, targets) + + @typed_pos_args('wayland.find_protocol', str) + @typed_kwargs( + 'wayland.find_protocol', + KwargInfo('state', str, default='stable', validator=in_set_validator({'stable', 'staging', 'unstable'})), + KwargInfo('version', (int, NoneType)), + ) + def find_protocol(self, state: ModuleState, args: T.Tuple[str], kwargs: FindProtocol) -> File: + base_name = args[0] + xml_state = kwargs['state'] + version = kwargs['version'] + + if xml_state != 'stable' and version is None: + raise MesonException(f'{xml_state} protocols require a version number.') + + if xml_state == 'stable' and version is not None: + raise MesonException('stable protocols do not require a version number.') + + if 
self.protocols_dep is None: + self.protocols_dep = state.dependency('wayland-protocols') + + if self.pkgdatadir is None: + self.pkgdatadir = self.protocols_dep.get_variable(pkgconfig='pkgdatadir', internal='pkgdatadir') + + if xml_state == 'stable': + xml_name = f'{base_name}.xml' + elif xml_state == 'staging': + xml_name = f'{base_name}-v{version}.xml' + else: + xml_name = f'{base_name}-unstable-v{version}.xml' + + path = os.path.join(self.pkgdatadir, xml_state, base_name, xml_name) + + if not os.path.exists(path): + raise MesonException(f'The file {path} does not exist.') + + return File.from_absolute_file(path) + + +def initialize(interpreter: Interpreter) -> WaylandModule: + return WaylandModule(interpreter) diff --git a/run_mypy.py b/run_mypy.py index f5f83010c..a5fc944ab 100755 --- a/run_mypy.py +++ b/run_mypy.py @@ -40,18 +40,18 @@ modules = [ 'mesonbuild/mintro.py', 'mesonbuild/mlog.py', 'mesonbuild/msubprojects.py', + 'mesonbuild/modules/external_project.py', 'mesonbuild/modules/fs.py', 'mesonbuild/modules/gnome.py', 'mesonbuild/modules/i18n.py', + 'mesonbuild/modules/icestorm.py', 'mesonbuild/modules/java.py', 'mesonbuild/modules/keyval.py', 'mesonbuild/modules/modtest.py', 'mesonbuild/modules/qt.py', + 'mesonbuild/modules/rust.py', 'mesonbuild/modules/sourceset.py', - 'mesonbuild/modules/unstable_external_project.py', - 'mesonbuild/modules/unstable_icestorm.py', - 'mesonbuild/modules/unstable_rust.py', - 'mesonbuild/modules/unstable_wayland.py', + 'mesonbuild/modules/wayland.py', 'mesonbuild/modules/windows.py', 'mesonbuild/mparser.py', 'mesonbuild/msetup.py', diff --git a/test cases/common/253 module warnings/meson.build b/test cases/common/253 module warnings/meson.build index 543a700dc..8397930ae 100644 --- a/test cases/common/253 module warnings/meson.build +++ b/test cases/common/253 module warnings/meson.build @@ -2,3 +2,7 @@ project('module warnings', meson_version : '>= 0.56') import('python3') # deprecated module import('java') # new module 
+import('unstable-keyval') # module that has been stabilized, import with unstable- + +ice = import('icestorm', required : false) +assert(not ice.found(), 'unstable-icestorm module should not be importable as `icestorm`') diff --git a/test cases/common/253 module warnings/test.json b/test cases/common/253 module warnings/test.json index 8833da2dc..ec861ccc6 100644 --- a/test cases/common/253 module warnings/test.json +++ b/test cases/common/253 module warnings/test.json @@ -5,6 +5,9 @@ }, { "line": "test cases/common/253 module warnings/meson.build:4: WARNING: Project targets '>= 0.56' but uses feature introduced in '0.60.0': module java." + }, + { + "line": "test cases/common/253 module warnings/meson.build:5: WARNING: Project targets '>= 0.56' but uses feature deprecated since '0.56.0': module keyval has been stabilized. drop \"unstable-\" prefix from the module name" } ] } diff --git a/test cases/keyval/1 basic/meson.build b/test cases/keyval/1 basic/meson.build index 4207b8e2d..a6e023ed4 100644 --- a/test cases/keyval/1 basic/meson.build +++ b/test cases/keyval/1 basic/meson.build @@ -1,4 +1,4 @@ -project('keyval basic test') +project('keyval basic test', meson_version : '>= 0.55') k = import('keyval') conf = k.load('.config') diff --git a/test cases/keyval/1 basic/test.json b/test cases/keyval/1 basic/test.json index dbdc5af41..1f8fd9b06 100644 --- a/test cases/keyval/1 basic/test.json +++ b/test cases/keyval/1 basic/test.json @@ -1,7 +1,7 @@ { "stdout": [ { - "line": "WARNING: Module unstable-keyval is now stable, please use the keyval module instead." + "line": "test cases/keyval/1 basic/meson.build:3: WARNING: Project targets '>= 0.55' but uses feature introduced in '0.56.0': module keyval as stable module. 
Consider either adding \"unstable-\" to the module name, or updating the meson required version to \">= 0.56.0\"" } ] } diff --git a/test cases/warning/7 module without unstable/meson.build b/test cases/warning/7 module without unstable/meson.build new file mode 100644 index 000000000..409a23618 --- /dev/null +++ b/test cases/warning/7 module without unstable/meson.build @@ -0,0 +1,3 @@ +project('module import without unstable', meson_version : '>= 0.55') + +import('keyval') diff --git a/test cases/warning/7 module without unstable/test.json b/test cases/warning/7 module without unstable/test.json new file mode 100644 index 000000000..62b8aa1c6 --- /dev/null +++ b/test cases/warning/7 module without unstable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/warning/7 module without unstable/meson.build:3: WARNING: Project targets '>= 0.55' but uses feature introduced in '0.56.0': module keyval as stable module. Consider either adding \"unstable-\" to the module name, or updating the meson required version to \">= 0.56.0\"" + } + ] +} diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 745e67bba..3238a39a7 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -1929,7 +1929,7 @@ class AllPlatformTests(BasePlatformTests): r'sub' + os.path.sep + r'meson.build:3: WARNING: Keyword argument "link_with" defined multiple times.', r'meson.build:6: WARNING: a warning of some sort', r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning', - r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.', + r'meson.build:7: WARNING: Module SIMD has no backwards or forwards compatibility and might not exist in future releases.', r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.", ]: with self.subTest(expected): -- cgit v1.2.3