From 072d637caa05136dc066d0fe6249a06c0e721735 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 27 Aug 2024 08:53:45 -0700 Subject: [PATCH 01/13] environment: build_dir is allowed to be None in the initializer But we really expect it to be a string once the initializer is done. Therefore, let's set it to `''` just like we do with the scratchdir in the case that it's set to None in the initializer. --- mesonbuild/environment.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 71a2f3afcb6a..94db0e85ebcb 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2012-2020 The Meson development team -# Copyright © 2023 Intel Corporation +# Copyright © 2023-2024 Intel Corporation from __future__ import annotations @@ -552,12 +552,12 @@ class Environment: log_dir = 'meson-logs' info_dir = 'meson-info' - def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.SharedCMDOptions) -> None: + def __init__(self, source_dir: str, build_dir: T.Optional[str], cmd_options: coredata.SharedCMDOptions) -> None: self.source_dir = source_dir - self.build_dir = build_dir # Do not try to create build directories when build_dir is none. 
# This reduced mode is used by the --buildoptions introspector if build_dir is not None: + self.build_dir = build_dir self.scratch_dir = os.path.join(build_dir, Environment.private_dir) self.log_dir = os.path.join(build_dir, Environment.log_dir) self.info_dir = os.path.join(build_dir, Environment.info_dir) @@ -586,6 +586,7 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared raise MesonException(f'{str(e)} Try regenerating using "meson setup --wipe".') else: # Just create a fresh coredata in this case + self.build_dir = '' self.scratch_dir = '' self.create_new_coredata(cmd_options) From 0eba09d0fdd6f67dcf852ae6bdb790f6ee6eec70 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 2 Jan 2024 15:00:53 -0800 Subject: [PATCH 02/13] tests: demonstrate that our scanner cannot handle cross target modules --- test cases/fortran/8 module names/lib.f90 | 9 +++++++++ test cases/fortran/8 module names/meson.build | 13 +++++++++++-- .../fortran/8 module names/meson_options.txt | 1 + test cases/fortran/8 module names/mod2.f90 | 8 ++++++++ test cases/fortran/8 module names/test.f90 | 3 +-- unittests/allplatformstests.py | 18 +++++++++++++++++- unittests/baseplatformtests.py | 1 + 7 files changed, 48 insertions(+), 5 deletions(-) create mode 100644 test cases/fortran/8 module names/lib.f90 create mode 100644 test cases/fortran/8 module names/meson_options.txt diff --git a/test cases/fortran/8 module names/lib.f90 b/test cases/fortran/8 module names/lib.f90 new file mode 100644 index 000000000000..f8a8bfdacbb6 --- /dev/null +++ b/test cases/fortran/8 module names/lib.f90 @@ -0,0 +1,9 @@ +program lib +use MyMod1 +use MyMod2 ! 
test inline comment + +implicit none + +call showvalues() + +end program diff --git a/test cases/fortran/8 module names/meson.build b/test cases/fortran/8 module names/meson.build index 632c597889bf..9340c79d7789 100644 --- a/test cases/fortran/8 module names/meson.build +++ b/test cases/fortran/8 module names/meson.build @@ -1,6 +1,15 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2024 Intel Corporation + project('mod_name_case', 'fortran') sources = ['test.f90', 'mod1.f90', 'mod2.f90'] -exe = executable('mod_name_case', sources) -test('mod_name_case', exe) +l = static_library('s1', 'mod1.f90') +l2 = static_library('s2', 'mod2.f90', link_whole : l) +if get_option('unittest') + sh = static_library('library', 'lib.f90', link_with : l2) +else + exe = executable('mod_name_case', 'test.f90', link_with : l2) + test('mod_name_case', exe) +endif diff --git a/test cases/fortran/8 module names/meson_options.txt b/test cases/fortran/8 module names/meson_options.txt new file mode 100644 index 000000000000..b5b7ee9a2394 --- /dev/null +++ b/test cases/fortran/8 module names/meson_options.txt @@ -0,0 +1 @@ +option('unittest', type : 'boolean', value : false) diff --git a/test cases/fortran/8 module names/mod2.f90 b/test cases/fortran/8 module names/mod2.f90 index 2087750debfc..3061c211ffab 100644 --- a/test cases/fortran/8 module names/mod2.f90 +++ b/test cases/fortran/8 module names/mod2.f90 @@ -1,6 +1,14 @@ module mymod2 +use mymod1 implicit none integer, parameter :: myModVal2 = 2 +contains + subroutine showvalues() + print*, "myModVal1 = ", myModVal1 + print*, "myModVal2 = ", myModVal2 + end subroutine showvalues + + end module mymod2 diff --git a/test cases/fortran/8 module names/test.f90 b/test cases/fortran/8 module names/test.f90 index 60ff16e9034c..fcfc23f919f8 100644 --- a/test cases/fortran/8 module names/test.f90 +++ b/test cases/fortran/8 module names/test.f90 @@ -1,9 +1,8 @@ program main -use mymod1 use MyMod2 ! 
test inline comment implicit none -integer, parameter :: testVar = myModVal1 + myModVal2 +call showvalues() end program diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index ca4b194e6180..74e7c2fce22f 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -12,7 +12,7 @@ import pickle import zipfile, tarfile import sys -from unittest import mock, SkipTest, skipIf, skipUnless +from unittest import mock, SkipTest, skipIf, skipUnless, expectedFailure from contextlib import contextmanager from glob import glob from pathlib import (PurePath, Path) @@ -5042,3 +5042,19 @@ def test_rsp_support(self): 'link', 'lld-link', 'mwldarm', 'mwldeppc', 'optlink', 'xilink', } self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp) + + @expectedFailure + @skip_if_not_language('fortran') + def test_fortran_cross_target_module_dep(self) -> None: + if self.backend is not Backend.ninja: + raise SkipTest('Test is only relavent on the ninja backend') + testdir = os.path.join(self.fortran_test_dir, '8 module names') + self.init(testdir, extra_args=['-Dunittest=true']) + + # Find the correct output to compile, regardless of what compiler is being used + comp = self.get_compdb() + entry = first(comp, lambda e: e['file'].endswith('lib.f90')) + assert entry is not None, 'for mypy' + output = entry['output'] + + self.build(output, extra_args=['-j1']) diff --git a/unittests/baseplatformtests.py b/unittests/baseplatformtests.py index 3770321925fa..6e8d2cf28440 100644 --- a/unittests/baseplatformtests.py +++ b/unittests/baseplatformtests.py @@ -79,6 +79,7 @@ def setUpClass(cls) -> None: cls.objc_test_dir = os.path.join(src_root, 'test cases/objc') cls.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp') cls.darwin_test_dir = os.path.join(src_root, 'test cases/darwin') + cls.fortran_test_dir = os.path.join(src_root, 'test cases/fortran') # Misc stuff if cls.backend is Backend.ninja: From 1a791a3645eee93175b1693e425aedb46fe7fad5 Mon Sep 
17 00:00:00 2001 From: Dylan Baker Date: Thu, 7 Mar 2024 15:48:51 -0800 Subject: [PATCH 03/13] tests: our fortran order deps are wrong if a new module is introduced Because we don't set the appropriate dependencies --- unittests/allplatformstests.py | 42 +++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 74e7c2fce22f..772642e90221 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -32,7 +32,7 @@ is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, quote_arg, relpath, is_linux, git, search_version, do_conf_file, do_conf_str, default_prefix, MesonException, EnvironmentException, - windows_proof_rm + windows_proof_rm, first ) from mesonbuild.options import OptionKey from mesonbuild.programs import ExternalProgram @@ -5058,3 +5058,43 @@ def test_fortran_cross_target_module_dep(self) -> None: output = entry['output'] self.build(output, extra_args=['-j1']) + + @expectedFailure + @skip_if_not_language('fortran') + def test_fortran_new_module_in_dep(self) -> None: + if self.backend is not Backend.ninja: + raise SkipTest('Test is only relavent on the ninja backend') + testdir = self.copy_srcdir(os.path.join(self.fortran_test_dir, '8 module names')) + self.init(testdir, extra_args=['-Dunittest=true']) + self.build() + + with open(os.path.join(testdir, 'mod1.f90'), 'a', encoding='utf-8') as f: + f.write(textwrap.dedent("""\ + module MyMod3 + implicit none + + integer, parameter :: myModVal3 =1 + + end module MyMod3 + """)) + + with open(os.path.join(testdir, 'test.f90'), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent("""\ + program main + use MyMod2 + use MyMod3 + implicit none + + call showvalues() + print*, "MyModValu3 = ", myModVal3 + + end program + """)) + + # Find the correct output to compile, regardless of what compiler is being used + comp = self.get_compdb() + entry = first(comp, lambda e: 
e['file'].endswith('lib.f90')) + assert entry is not None, 'for mypy' + output = entry['output'] + + self.build(output, extra_args=['-j1']) From 810ae00150f02ad235abf98457fe52b0193356bd Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 2 Jan 2024 16:40:08 -0800 Subject: [PATCH 04/13] backend/ninja: fortran must fully depend on all linked targets Because we use this dependency to ensure that any binary module files are generated, we must have a full dependency. Otherwise, if a new module is added to a dependent target, and used in our target, we race the generation of the binary module definition. --- mesonbuild/backend/ninjabackend.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 6eb1f1d0191d..673b37788dea 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -3069,8 +3069,8 @@ def generate_single_compile(self, target: build.BuildTarget, src, d = os.path.join(self.get_target_private_dir(target), d) element.add_orderdep(d) element.add_dep(pch_dep) - for i in self.get_fortran_orderdeps(target, compiler): - element.add_orderdep(i) + for i in self.get_fortran_module_deps(target, compiler): + element.add_dep(i) if dep_file: element.add_item('DEPFILE', dep_file) if compiler.get_language() == 'cuda': @@ -3137,10 +3137,12 @@ def has_dir_part(self, fname: FileOrString) -> bool: # Fortran is a bit weird (again). When you link against a library, just compiling a source file # requires the mod files that are output when single files are built. To do this right we would need to # scan all inputs and write out explicit deps for each file. That is too slow and too much effort so - # instead just have an ordered dependency on the library. This ensures all required mod files are created. + # instead just have a full dependency on the library. This ensures all required mod files are created. 
# The real deps are then detected via dep file generation from the compiler. This breaks on compilers that - # produce incorrect dep files but such is life. - def get_fortran_orderdeps(self, target, compiler): + # produce incorrect dep files but such is life. A full dependency is + # required to ensure that if a new module is added to an existing file that + # we correctly rebuild. + def get_fortran_module_deps(self, target, compiler) -> T.List[str]: if compiler.language != 'fortran': return [] return [ From aa5f8bf46158cd4c4c743d5b8336cf0accd1919a Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 7 Mar 2024 14:05:48 -0800 Subject: [PATCH 05/13] build: annotate all `get_all_link_deps` the same And fix some internal annotations --- mesonbuild/build.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 460ed549be92..e552c6144fcd 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1085,7 +1085,7 @@ def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: @lru_cache(maxsize=None) def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: - result: T.List[Target] = [] + result: T.List[BuildTargetTypes] = [] for i in self.link_targets: result += i.get_all_link_deps() return result @@ -2424,7 +2424,7 @@ def get_debug_filename(self) -> T.Optional[str]: """ return self.debug_filename - def get_all_link_deps(self): + def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: return [self] + self.get_transitive_link_deps() def get_aliases(self) -> T.List[T.Tuple[str, str, str]]: @@ -2758,7 +2758,7 @@ def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: def get_link_dep_subdirs(self) -> T.AbstractSet[str]: return OrderedSet() - def get_all_link_deps(self): + def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: return [] def is_internal(self) -> bool: @@ -3016,7 +3016,7 @@ def get_filename(self) -> str: def 
get_id(self) -> str: return self.target.get_id() - def get_all_link_deps(self): + def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: return self.target.get_all_link_deps() def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: From cd7acdcae2a0b3caf502f261e41c70b88fc7cd5e Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 7 Mar 2024 14:07:45 -0800 Subject: [PATCH 06/13] build: include each target in get_transitive_link_deps Otherwise only shared libraries get installed, which isn't correct. --- mesonbuild/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index e552c6144fcd..5e49a3a9dff6 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1087,7 +1087,8 @@ def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: result: T.List[BuildTargetTypes] = [] for i in self.link_targets: - result += i.get_all_link_deps() + result.append(i) + result.extend(i.get_all_link_deps()) return result def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: From 3db12092b80365d3a32a5c620e0dd6b3bf905d4b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 7 Mar 2024 14:08:31 -0800 Subject: [PATCH 07/13] build: add link_whole_targets to get_transitive_link_deps Otherwise we again miss out on half of the targets we need --- mesonbuild/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 5e49a3a9dff6..268a8cb01284 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1086,7 +1086,7 @@ def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: @lru_cache(maxsize=None) def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: result: T.List[BuildTargetTypes] = [] - for i in self.link_targets: + for i in itertools.chain(self.link_targets, 
self.link_whole_targets): result.append(i) result.extend(i.get_all_link_deps()) return result From de2b237a6573abdf44e12250c43ef1c35674f396 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 7 Mar 2024 11:54:22 -0800 Subject: [PATCH 08/13] backend/ninja: Fortran targets need to -I transitive deps private dirs Otherwise they won't be able to find their module outputs. --- mesonbuild/backend/ninjabackend.py | 6 ++++++ unittests/allplatformstests.py | 1 - 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 673b37788dea..b21aa9993b18 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1019,6 +1019,12 @@ def generate_target(self, target) -> None: fortran_inc_args = mesonlib.listify([target.compilers['fortran'].get_include_args( self.get_target_private_dir(t), is_system=False) for t in obj_targets]) + # add the private directories of all transitive dependencies, which + # are needed for their mod files + for t in target.get_all_link_deps(): + fortran_inc_args.extend(target.compilers['fortran'].get_include_args( + self.get_target_private_dir(t), False)) + # Generate compilation targets for sources generated by transpilers. 
# # Do not try to unity-build the generated source files, as these diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 772642e90221..486ec35606a3 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -5043,7 +5043,6 @@ def test_rsp_support(self): } self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp) - @expectedFailure @skip_if_not_language('fortran') def test_fortran_cross_target_module_dep(self) -> None: if self.backend is not Backend.ninja: From 346b2527a989b874f111ab4191882553e28d2cca Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 11 Mar 2024 12:43:49 -0700 Subject: [PATCH 09/13] backend/ninja: depfile generation needs a full dependency on all scanned sources It is not sufficient to have an order dependency on generated sources. If any of those sources change we need to rescan, otherwise we may not notice changes to modules. --- mesonbuild/backend/ninjabackend.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b21aa9993b18..b41d33292539 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1083,7 +1083,7 @@ def generate_target(self, target) -> None: else: final_obj_list = obj_list elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args) - self.generate_dependency_scan_target(target, compiled_sources, source2object, generated_source_files, fortran_order_deps) + self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps) self.add_build(elem) #In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library #object and create the build element. 
@@ -1117,7 +1117,6 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: def generate_dependency_scan_target(self, target: build.BuildTarget, compiled_sources: T.List[str], source2object: T.Dict[str, str], - generated_source_files: T.List[mesonlib.File], object_deps: T.List[FileOrString]) -> None: if not self.should_use_dyndeps_for_target(target): return @@ -1143,19 +1142,22 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, pickle.dump(scaninfo, p) elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, pickle_file) - # Add any generated outputs to the order deps of the scan target, so - # that those sources are present - for g in generated_source_files: - elem.orderdeps.add(g.relative_name()) + # A full dependency is required on all scanned sources, if any of them + # are updated we need to rescan, as they may have changed the modules + # they use or export. + for s in scan_sources: + elem.deps.add(s[0]) elem.orderdeps.update(object_deps) self.add_build(elem) - def select_sources_to_scan(self, compiled_sources: T.List[str] + def select_sources_to_scan(self, compiled_sources: T.List[str], ) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]: # in practice pick up C++ and Fortran files. If some other language # requires scanning (possibly Java to deal with inner class files) # then add them here. for source in compiled_sources: + if isinstance(source, mesonlib.File): + source = source.rel_to_builddir(self.build_to_src) ext = os.path.splitext(source)[1][1:] if ext.lower() in compilers.lang_suffixes['cpp'] or ext == 'C': yield source, 'cpp' From 8d9bb1ece3220435e74ee159dcaab14391b514e8 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 11 Mar 2024 12:20:00 -0700 Subject: [PATCH 10/13] backend/ninja: fix cross module dependencies This requires that every Fortran target that uses modules have a full dependency on the scan target of it's dependencies. 
This means that for a three step target `A -> B -> C`, we cannot start compiling any of B until all of A is linked, and cannot start compiling any of C until all of A and B is linked. This fixes various kinds of races, but it serializes the build and makes it slow. This is the best we can do though, since we don't have any sort of portable format for telling C what is in A and B, so C can't know what sources to wait on for its modules to be fulfilled. --- mesonbuild/backend/ninjabackend.py | 7 +++++++ unittests/allplatformstests.py | 1 - 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b41d33292539..29ea4a971621 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1147,6 +1147,13 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, # they use or export. for s in scan_sources: elem.deps.add(s[0]) + # We need a full dependency on the output depfiles of other targets.
If + # they change we need to completely + for t in target.get_transitive_link_deps(): + if self.should_use_dyndeps_for_target(t): + elem.deps.add(os.path.join(self.get_target_dir(t), t.get_filename())) + elem.deps.update({os.path.join(self.get_target_dir(t), t.get_filename()) + for t in self.flatten_object_list(target)[1]}) elem.orderdeps.update(object_deps) self.add_build(elem) diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 486ec35606a3..107745113382 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -5058,7 +5058,6 @@ def test_fortran_cross_target_module_dep(self) -> None: self.build(output, extra_args=['-j1']) - @expectedFailure @skip_if_not_language('fortran') def test_fortran_new_module_in_dep(self) -> None: if self.backend is not Backend.ninja: From 86bfada4d8acfae7d34e0cca4f9a8552f515feca Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 11 Mar 2024 12:35:25 -0700 Subject: [PATCH 11/13] backend/ninja: use a two step process for dependency scanning This splits the scanner into two discrete steps, one that scans the source files, and one that that reads in the dependency information and produces a dyndep. The scanner uses the JSON format from https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2022/p1689r5.html, which is the same format the MSVC and Clang use for C++ modules scanning. This will allow us to more easily move to using MSVC and clang-scan-deps when possible. As an added bonus, this correctly tracks dependencies across TU and Target boundaries, unlike the previous implementation, which assumed that if it couldn't find a provider that everything was good, but could run into issues. Because of that limitation Fortran code had to fully depend on all of it's dependencies, transitive or not. Now, when using the dep scanner, we can remove that restriction, allowing more parallelism. 
--- mesonbuild/backend/ninjabackend.py | 59 ++++++++---- mesonbuild/scripts/depaccumulate.py | 129 +++++++++++++++++++++++++++ mesonbuild/scripts/depscan.py | 133 ++++++++++++++++++---------- 3 files changed, 254 insertions(+), 67 deletions(-) create mode 100644 mesonbuild/scripts/depaccumulate.py diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 29ea4a971621..156523dddbff 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1013,7 +1013,12 @@ def generate_target(self, target) -> None: obj_targets = [t for t in od if t.uses_fortran()] obj_list.extend(o) - fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets] + # We don't need this order dep if we're using dyndeps, as the + # depscanner will handle this for us, which produces a better dependency + # graph + fortran_order_deps: T.List[File] = [] + if not self.use_dyndeps_for_fortran(): + fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets] fortran_inc_args: T.List[str] = [] if target.uses_fortran(): fortran_inc_args = mesonlib.listify([target.compilers['fortran'].get_include_args( @@ -1121,7 +1126,7 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, if not self.should_use_dyndeps_for_target(target): return self._uses_dyndeps = True - depscan_file = self.get_dep_scan_file_for(target) + json_file, depscan_file = self.get_dep_scan_file_for(target) pickle_base = target.name + '.dat' pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/') pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/') @@ -1141,20 +1146,25 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, with open(pickle_abs, 'wb') as p: pickle.dump(scaninfo, p) - elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, pickle_file) + elem = 
NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file) # A full dependency is required on all scanned sources, if any of them # are updated we need to rescan, as they may have changed the modules # they use or export. for s in scan_sources: elem.deps.add(s[0]) - # We need a full dependency on the output depfiles of other targets. If - # they change we need to completely + elem.orderdeps.update(object_deps) + elem.add_item('name', target.name) + self.add_build(elem) + + infiles: T.Set[str] = set() for t in target.get_transitive_link_deps(): if self.should_use_dyndeps_for_target(t): - elem.deps.add(os.path.join(self.get_target_dir(t), t.get_filename())) - elem.deps.update({os.path.join(self.get_target_dir(t), t.get_filename()) - for t in self.flatten_object_list(target)[1]}) - elem.orderdeps.update(object_deps) + infiles.add(self.get_dep_scan_file_for(t)[0]) + _, od = self.flatten_object_list(target) + infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()}) + + elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles)) + elem.add_item('name', target.name) self.add_build(elem) def select_sources_to_scan(self, compiled_sources: T.List[str], @@ -2569,10 +2579,19 @@ def generate_scanner_rules(self) -> None: if rulename in self.ruledict: # Scanning command is the same for native and cross compilation. return + command = self.environment.get_build_command() + \ ['--internal', 'depscan'] args = ['$picklefile', '$out', '$in'] - description = 'Module scanner.' 
+ description = 'Scanning target $name for modules' + rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + + rulename = 'depaccumulate' + command = self.environment.get_build_command() + \ + ['--internal', 'depaccumulate'] + args = ['$out', '$in'] + description = 'Generating dynamic dependency information for target $name' rule = NinjaRule(rulename, command, args, description) self.add_rule(rule) @@ -3084,8 +3103,9 @@ def generate_single_compile(self, target: build.BuildTarget, src, d = os.path.join(self.get_target_private_dir(target), d) element.add_orderdep(d) element.add_dep(pch_dep) - for i in self.get_fortran_module_deps(target, compiler): - element.add_dep(i) + if not self.use_dyndeps_for_fortran(): + for i in self.get_fortran_module_deps(target, compiler): + element.add_dep(i) if dep_file: element.add_item('DEPFILE', dep_file) if compiler.get_language() == 'cuda': @@ -3128,12 +3148,13 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c extension = extension.lower() if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']): return - dep_scan_file = self.get_dep_scan_file_for(target) + dep_scan_file = self.get_dep_scan_file_for(target)[1] element.add_item('dyndep', dep_scan_file) element.add_orderdep(dep_scan_file) - def get_dep_scan_file_for(self, target: build.BuildTarget) -> str: - return os.path.join(self.get_target_private_dir(target), 'depscan.dd') + def get_dep_scan_file_for(self, target: build.BuildTarget) -> T.Tuple[str, str]: + priv = self.get_target_private_dir(target) + return os.path.join(priv, 'depscan.json'), os.path.join(priv, 'depscan.dd') def add_header_deps(self, target, ninja_element, header_deps): for d in header_deps: @@ -3156,9 +3177,11 @@ def has_dir_part(self, fname: FileOrString) -> bool: # The real deps are then detected via dep file generation from the compiler. 
This breaks on compilers that # produce incorrect dep files but such is life. A full dependency is # required to ensure that if a new module is added to an existing file that - # we correctly rebuild. - def get_fortran_module_deps(self, target, compiler) -> T.List[str]: - if compiler.language != 'fortran': + # we correctly rebuild + def get_fortran_module_deps(self, target: build.BuildTarget, compiler: Compiler) -> T.List[str]: + # If we have dyndeps then we don't need this, since the depscanner will + # do all of things described above. + if compiler.language != 'fortran' or self.use_dyndeps_for_fortran(): return [] return [ os.path.join(self.get_target_dir(lt), lt.get_filename()) diff --git a/mesonbuild/scripts/depaccumulate.py b/mesonbuild/scripts/depaccumulate.py new file mode 100644 index 000000000000..7576390d4380 --- /dev/null +++ b/mesonbuild/scripts/depaccumulate.py @@ -0,0 +1,129 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2021-2024 Intel Corporation + +"""Accumulator for p1689r5 module dependencies. + +See: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2022/p1689r5.html +""" + +from __future__ import annotations +import json +import re +import textwrap +import typing as T + +if T.TYPE_CHECKING: + from .depscan import Description, Rule + +# The quoting logic has been copied from the ninjabackend to avoid having to +# import half of Meson just to quote outputs, which is a performance problem +_QUOTE_PAT = re.compile(r'[$ :\n]') + + +def quote(text: str) -> str: + # Fast path for when no quoting is necessary + if not _QUOTE_PAT.search(text): + return text + if '\n' in text: + errmsg = textwrap.dedent(f'''\ + Ninja does not support newlines in rules. 
The content was: + + {text} + + Please report this error with a test case to the Meson bug tracker.''') + raise RuntimeError(errmsg) + return _QUOTE_PAT.sub(r'$\g<0>', text) + + +_PROVIDER_CACHE: T.Dict[str, str] = {} + + +def get_provider(rules: T.List[Rule], name: str) -> T.Optional[str]: + """Get the object that a module from another Target provides + + We must rely on the object file here instead of the module itself, because + the object rule is part of the generated build.ninja, while the module is + only declared inside a dyndep. This creates for the dyndep generator to + depend on previous dyndeps as order deps. Since the module + interface file will be generated when the object is generated we can rely on + that in proxy and simplify generation. + + :param rules: The list of rules to check + :param name: The logical-name to look for + :raises RuntimeError: If no provider can be found + :return: The object file of the rule providing the module + """ + # Cache the result for performance reasons + if name in _PROVIDER_CACHE: + return _PROVIDER_CACHE[name] + + for r in rules: + for p in r.get('provides', []): + if p['logical-name'] == name: + obj = r['primary-output'] + _PROVIDER_CACHE[name] = obj + return obj + return None + + +def process_rules(rules: T.List[Rule], + extra_rules: T.List[Rule], + ) -> T.Iterable[T.Tuple[str, T.Optional[T.List[str]], T.List[str]]]: + """Process the rules for this Target + + :param rules: the rules for this target + :param extra_rules: the rules for all of the targets this one links with, to use their provides + :yield: A tuple of the output, the exported modules, and the consumed modules + """ + for rule in rules: + prov: T.Optional[T.List[str]] = None + req: T.List[str] = [] + if 'provides' in rule: + prov = [p['compiled-module-path'] for p in rule['provides']] + if 'requires' in rule: + for p in rule['requires']: + modfile = p.get('compiled-module-path') + if modfile is not None: + req.append(modfile) + else: + # We can't 
error if this is not found because of compiler + # provided modules + found = get_provider(extra_rules, p['logical-name']) + if found: + req.append(found) + yield rule['primary-output'], prov, req + + +def formatter(files: T.Optional[T.List[str]]) -> str: + if files: + fmt = ' '.join(quote(f) for f in files) + return f'| {fmt}' + return '' + + +def gen(outfile: str, desc: Description, extra_rules: T.List[Rule]) -> int: + with open(outfile, 'w', encoding='utf-8') as f: + f.write('ninja_dyndep_version = 1\n\n') + + for obj, provides, requires in process_rules(desc['rules'], extra_rules): + ins = formatter(requires) + out = formatter(provides) + f.write(f'build {quote(obj)} {out}: dyndep {ins}\n\n') + + return 0 + + +def run(args: T.List[str]) -> int: + assert len(args) >= 2, 'got wrong number of arguments!' + outfile, jsonfile, *jsondeps = args + with open(jsonfile, 'r', encoding='utf-8') as f: + desc: Description = json.load(f) + + # All rules, necessary for fulfilling across TU and target boundaries + rules = desc['rules'].copy() + for dep in jsondeps: + with open(dep, encoding='utf-8') as f: + d: Description = json.load(f) + rules.extend(d['rules']) + + return gen(outfile, desc, rules) diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py index 44e805447713..6bd5cde9aac0 100644 --- a/mesonbuild/scripts/depscan.py +++ b/mesonbuild/scripts/depscan.py @@ -1,22 +1,60 @@ # SPDX-License-Identifier: Apache-2.0 # Copyright 2020 The Meson development team -# Copyright © 2023 Intel Corporation +# Copyright © 2023-2024 Intel Corporation from __future__ import annotations import collections +import json import os import pathlib import pickle import re import typing as T -from ..backend.ninjabackend import ninja_quote - if T.TYPE_CHECKING: - from typing_extensions import Literal + from typing_extensions import Literal, TypedDict, NotRequired from ..backend.ninjabackend import TargetDependencyScannerInfo + Require = TypedDict( + 'Require', + { + 
'logical-name': str, + 'compiled-module-path': NotRequired[str], + 'source-path': NotRequired[str], + 'unique-on-source-path': NotRequired[bool], + 'lookup-method': NotRequired[Literal['by-name', 'include-angle', 'include-quote']] + }, + ) + + Provide = TypedDict( + 'Provide', + { + 'logical-name': str, + 'compiled-module-path': NotRequired[str], + 'source-path': NotRequired[str], + 'unique-on-source-path': NotRequired[bool], + 'is-interface': NotRequired[bool], + }, + ) + + Rule = TypedDict( + 'Rule', + { + 'primary-output': NotRequired[str], + 'outputs': NotRequired[T.List[str]], + 'provides': NotRequired[T.List[Provide]], + 'requires': NotRequired[T.List[Require]], + } + ) + + class Description(TypedDict): + + version: int + revision: int + rules: T.List[Rule] + + CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);') CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);') @@ -37,7 +75,7 @@ def __init__(self, pickle_file: str, outfile: str): self.sources = self.target_data.sources self.provided_by: T.Dict[str, str] = {} self.exports: T.Dict[str, str] = {} - self.needs: collections.defaultdict[str, T.List[str]] = collections.defaultdict(list) + self.imports: collections.defaultdict[str, T.List[str]] = collections.defaultdict(list) self.sources_with_exports: T.List[str] = [] def scan_file(self, fname: str, lang: Literal['cpp', 'fortran']) -> None: @@ -58,7 +96,7 @@ def scan_fortran_file(self, fname: str) -> None: # In Fortran you have an using declaration also for the module # you define in the same file. Prevent circular dependencies. if needed not in modules_in_this_file: - self.needs[fname].append(needed) + self.imports[fname].append(needed) if export_match: exported_module = export_match.group(1).lower() assert exported_module not in modules_in_this_file @@ -89,7 +127,7 @@ def scan_fortran_file(self, fname: str) -> None: # submodule (a1:a2) a3 <- requires a1@a2.smod # # a3 does not depend on the a1 parent module directly, only transitively. 
- self.needs[fname].append(parent_module_name_full) + self.imports[fname].append(parent_module_name_full) def scan_cpp_file(self, fname: str) -> None: fpath = pathlib.Path(fname) @@ -98,7 +136,7 @@ def scan_cpp_file(self, fname: str) -> None: export_match = CPP_EXPORT_RE.match(line) if import_match: needed = import_match.group(1) - self.needs[fname].append(needed) + self.imports[fname].append(needed) if export_match: exported_module = export_match.group(1) if exported_module in self.provided_by: @@ -123,47 +161,44 @@ def module_name_for(self, src: str, lang: Literal['cpp', 'fortran']) -> str: def scan(self) -> int: for s, lang in self.sources: self.scan_file(s, lang) - with open(self.outfile, 'w', encoding='utf-8') as ofile: - ofile.write('ninja_dyndep_version = 1\n') - for src, lang in self.sources: - objfilename = self.target_data.source2object[src] - mods_and_submods_needed = [] - module_files_generated = [] - module_files_needed = [] - if src in self.sources_with_exports: - module_files_generated.append(self.module_name_for(src, lang)) - if src in self.needs: - for modname in self.needs[src]: - if modname not in self.provided_by: - # Nothing provides this module, we assume that it - # comes from a dependency library somewhere and is - # already built by the time this compilation starts. 
- pass - else: - mods_and_submods_needed.append(modname) - - for modname in mods_and_submods_needed: - provider_src = self.provided_by[modname] - provider_modfile = self.module_name_for(provider_src, lang) - # Prune self-dependencies - if provider_src != src: - module_files_needed.append(provider_modfile) - - quoted_objfilename = ninja_quote(objfilename, True) - quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated] - quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed] - if quoted_module_files_generated: - mod_gen = '| ' + ' '.join(quoted_module_files_generated) - else: - mod_gen = '' - if quoted_module_files_needed: - mod_dep = '| ' + ' '.join(quoted_module_files_needed) - else: - mod_dep = '' - build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename, - mod_gen, - mod_dep) - ofile.write(build_line + '\n') + description: Description = { + 'version': 1, + 'revision': 0, + 'rules': [], + } + for src, lang in self.sources: + rule: Rule = { + 'primary-output': self.target_data.source2object[src], + 'requires': [], + 'provides': [], + } + if src in self.sources_with_exports: + rule['outputs'] = [self.module_name_for(src, lang)] + if src in self.imports: + for modname in self.imports[src]: + provider_src = self.provided_by.get(modname) + if provider_src == src: + continue + rule['requires'].append({ + 'logical-name': modname, + }) + if provider_src: + rule['requires'][-1].update({ + 'source-path': provider_src, + 'compiled-module-path': self.module_name_for(provider_src, lang), + }) + if src in self.exports: + modname = self.exports[src] + rule['provides'].append({ + 'logical-name': modname, + 'source-path': src, + 'compiled-module-path': self.module_name_for(src, lang), + }) + description['rules'].append(rule) + + with open(self.outfile, 'w', encoding='utf-8') as f: + json.dump(description, f) + return 0 def run(args: T.List[str]) -> int: From 680c117fe2fa106cd0c75bf06814e70ced1e8eb0 Mon Sep 17 
00:00:00 2001 From: Dylan Baker Date: Mon, 1 Apr 2024 09:54:18 -0700 Subject: [PATCH 12/13] tests/fortran: also test using a generator() We already test for custom_targets and configure_file, but we should test a generator too --- test cases/fortran/7 generated/gen.py | 45 ++++++++++++++++++++++ test cases/fortran/7 generated/meson.build | 21 +++++++++- 2 files changed, 64 insertions(+), 2 deletions(-) create mode 100755 test cases/fortran/7 generated/gen.py diff --git a/test cases/fortran/7 generated/gen.py b/test cases/fortran/7 generated/gen.py new file mode 100755 index 000000000000..86d9bf712261 --- /dev/null +++ b/test cases/fortran/7 generated/gen.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2024 Intel Corporation + +from __future__ import annotations +import argparse +import typing as T + +if T.TYPE_CHECKING: + class Arguments(T.Protocol): + + input: str + output: str + replacements: T.List[T.Tuple[str, str]] + + +def process(txt: str, replacements: T.List[T.Tuple[str, str]]) -> str: + for k, v in replacements: + txt = txt.replace(k, v) + return txt + + +def split_arg(arg: str) -> T.Tuple[str, str]: + args = arg.split('=', maxsplit=1) + assert len(args) == 2, 'Did not get the right number of args?' 
+ return T.cast('T.Tuple[str, str]', tuple(args)) + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('input') + parser.add_argument('output') + parser.add_argument('--replace', action='append', required=True, dest='replacements', type=split_arg) + args = T.cast('Arguments', parser.parse_args()) + + with open(args.input, 'r', encoding='utf-8') as f: + content = f.read() + + content = process(content, args.replacements) + + with open(args.output, 'w', encoding='utf-8') as f: + f.write(content) + +if __name__ == "__main__": + main() diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build index f021309d1c75..8f3b3d45d4d2 100644 --- a/test cases/fortran/7 generated/meson.build +++ b/test cases/fortran/7 generated/meson.build @@ -35,5 +35,22 @@ foreach template_basename : templates_basenames endforeach sources = ['prog.f90'] + generated_sources -exe = executable('generated', sources, link_with: three) -test('generated', exe) +exe = executable('configure_file', sources, link_with: three) +test('configure_file', exe) + +gen = generator( + find_program('gen.py'), + arguments : [ + '@INPUT@', '@OUTPUT@', + '--replace', '@ONE@=1', + '--replace', '@TWO@=2', + ], + output : ['@BASENAME@.f90'], +) + +exe = executable( + 'generator', + 'prog.f90', gen.process('mod1.fpp', 'mod2.fpp'), + link_with: three, +) +test('generator', exe) From 0ce68bee5703c44722e6d9161644235f1f87d759 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 1 Apr 2024 09:55:48 -0700 Subject: [PATCH 13/13] tests/fortran: use fs.copyfile Since the comment saying we need a generic way to do this is a little outdated. 
--- test cases/fortran/7 generated/meson.build | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build index 8f3b3d45d4d2..257ea1e2207f 100644 --- a/test cases/fortran/7 generated/meson.build +++ b/test cases/fortran/7 generated/meson.build @@ -8,20 +8,7 @@ conf_data = configuration_data() conf_data.set('ONE', 1) conf_data.set('TWO', 2) -mod3_f = custom_target( - 'mod3.f', - input : 'mod3.f90', - output : 'mod3.f90', - # We need a platform agnostic way to do a copy a file, using a custom_target - # and we need to use the @OUTDIR@, not @OUTPUT@ in order to exercise - # https://github.com/mesonbuild/meson/issues/9258 - command : [ - find_program('python', 'python3'), '-c', - 'import sys, shutil; shutil.copy(sys.argv[1], sys.argv[2])', - '@INPUT@', '@OUTDIR@', - ], -) - +mod3_f = import('fs').copyfile('mod3.f90', 'mod3.f90') three = library('mod3', mod3_f) templates_basenames = ['mod2', 'mod1']