Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use a two step Fortran and C++ dependency scanner #12539

Open
wants to merge 13 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 60 additions & 20 deletions mesonbuild/backend/ninjabackend.py
Original file line number Diff line number Diff line change
Expand Up @@ -1013,12 +1013,23 @@ def generate_target(self, target) -> None:
obj_targets = [t for t in od if t.uses_fortran()]
obj_list.extend(o)

fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets]
# We don't need this order dep if we're using dyndeps, as the
# depscanner will handle this for us, which produces a better dependency
# graph
fortran_order_deps: T.List[File] = []
if not self.use_dyndeps_for_fortran():
fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets]
fortran_inc_args: T.List[str] = []
if target.uses_fortran():
fortran_inc_args = mesonlib.listify([target.compilers['fortran'].get_include_args(
self.get_target_private_dir(t), is_system=False) for t in obj_targets])

# add the private directories of all transitive dependencies, which
# are needed for their mod files
for t in target.get_all_link_deps():
fortran_inc_args.extend(target.compilers['fortran'].get_include_args(
self.get_target_private_dir(t), False))

# Generate compilation targets for sources generated by transpilers.
#
# Do not try to unity-build the generated source files, as these
Expand Down Expand Up @@ -1077,7 +1088,7 @@ def generate_target(self, target) -> None:
else:
final_obj_list = obj_list
elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
self.generate_dependency_scan_target(target, compiled_sources, source2object, generated_source_files, fortran_order_deps)
self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)
self.add_build(elem)
#In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library
#object and create the build element.
Expand Down Expand Up @@ -1111,12 +1122,11 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
def generate_dependency_scan_target(self, target: build.BuildTarget,
compiled_sources: T.List[str],
source2object: T.Dict[str, str],
generated_source_files: T.List[mesonlib.File],
object_deps: T.List[FileOrString]) -> None:
if not self.should_use_dyndeps_for_target(target):
return
self._uses_dyndeps = True
depscan_file = self.get_dep_scan_file_for(target)
json_file, depscan_file = self.get_dep_scan_file_for(target)
pickle_base = target.name + '.dat'
pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
Expand All @@ -1136,20 +1146,35 @@ def generate_dependency_scan_target(self, target: build.BuildTarget,
with open(pickle_abs, 'wb') as p:
pickle.dump(scaninfo, p)

elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, pickle_file)
# Add any generated outputs to the order deps of the scan target, so
# that those sources are present
for g in generated_source_files:
elem.orderdeps.add(g.relative_name())
elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file)
# A full dependency is required on all scanned sources, if any of them
# are updated we need to rescan, as they may have changed the modules
# they use or export.
for s in scan_sources:
elem.deps.add(s[0])
elem.orderdeps.update(object_deps)
elem.add_item('name', target.name)
self.add_build(elem)

def select_sources_to_scan(self, compiled_sources: T.List[str]
infiles: T.Set[str] = set()
for t in target.get_transitive_link_deps():
if self.should_use_dyndeps_for_target(t):
infiles.add(self.get_dep_scan_file_for(t)[0])
_, od = self.flatten_object_list(target)
infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()})

elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles))
elem.add_item('name', target.name)
self.add_build(elem)

def select_sources_to_scan(self, compiled_sources: T.List[str],
) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]:
# in practice pick up C++ and Fortran files. If some other language
# requires scanning (possibly Java to deal with inner class files)
# then add them here.
for source in compiled_sources:
if isinstance(source, mesonlib.File):
source = source.rel_to_builddir(self.build_to_src)
ext = os.path.splitext(source)[1][1:]
if ext.lower() in compilers.lang_suffixes['cpp'] or ext == 'C':
yield source, 'cpp'
Expand Down Expand Up @@ -2554,10 +2579,19 @@ def generate_scanner_rules(self) -> None:
if rulename in self.ruledict:
# Scanning command is the same for native and cross compilation.
return

command = self.environment.get_build_command() + \
['--internal', 'depscan']
args = ['$picklefile', '$out', '$in']
description = 'Module scanner.'
description = 'Scanning target $name for modules'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)

rulename = 'depaccumulate'
command = self.environment.get_build_command() + \
['--internal', 'depaccumulate']
args = ['$out', '$in']
description = 'Generating dynamic dependency information for target $name'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)

Expand Down Expand Up @@ -3069,8 +3103,9 @@ def generate_single_compile(self, target: build.BuildTarget, src,
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
element.add_dep(pch_dep)
for i in self.get_fortran_orderdeps(target, compiler):
element.add_orderdep(i)
if not self.use_dyndeps_for_fortran():
for i in self.get_fortran_module_deps(target, compiler):
element.add_dep(i)
if dep_file:
element.add_item('DEPFILE', dep_file)
if compiler.get_language() == 'cuda':
Expand Down Expand Up @@ -3113,12 +3148,13 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c
extension = extension.lower()
if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']):
return
dep_scan_file = self.get_dep_scan_file_for(target)
dep_scan_file = self.get_dep_scan_file_for(target)[1]
element.add_item('dyndep', dep_scan_file)
element.add_orderdep(dep_scan_file)

def get_dep_scan_file_for(self, target: build.BuildTarget) -> str:
return os.path.join(self.get_target_private_dir(target), 'depscan.dd')
def get_dep_scan_file_for(self, target: build.BuildTarget) -> T.Tuple[str, str]:
    """Return the two dependency-scan file paths for *target*.

    The first path is the json file holding the raw scan results, the
    second is the ninja dyndep (.dd) file accumulated from it.  Both live
    in the target's private directory.
    """
    private_dir = self.get_target_private_dir(target)
    json_path = os.path.join(private_dir, 'depscan.json')
    dyndep_path = os.path.join(private_dir, 'depscan.dd')
    return json_path, dyndep_path

def add_header_deps(self, target, ninja_element, header_deps):
for d in header_deps:
Expand All @@ -3137,11 +3173,15 @@ def has_dir_part(self, fname: FileOrString) -> bool:
# Fortran is a bit weird (again). When you link against a library, just compiling a source file
# requires the mod files that are output when single files are built. To do this right we would need to
# scan all inputs and write out explicit deps for each file. That is too slow and too much effort so
# instead just have an ordered dependency on the library. This ensures all required mod files are created.
# instead just have a full dependency on the library. This ensures all required mod files are created.
# The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
# produce incorrect dep files but such is life.
def get_fortran_orderdeps(self, target, compiler):
if compiler.language != 'fortran':
# produce incorrect dep files but such is life. A full dependency is
# required to ensure that if a new module is added to an existing file that
# we correctly rebuild
def get_fortran_module_deps(self, target: build.BuildTarget, compiler: Compiler) -> T.List[str]:
# If we have dyndeps then we don't need this, since the depscanner will
# do all of things described above.
if compiler.language != 'fortran' or self.use_dyndeps_for_fortran():
return []
return [
os.path.join(self.get_target_dir(lt), lt.get_filename())
Expand Down
13 changes: 7 additions & 6 deletions mesonbuild/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -1085,9 +1085,10 @@ def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:

@lru_cache(maxsize=None)
def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
    """Return every target linked into this one, directly or transitively.

    Each direct link (and link_whole) target is listed, immediately
    followed by its own full set of link dependencies.
    """
    # NOTE(review): lru_cache on a method keeps the instances alive for the
    # lifetime of the cache — presumably fine here since targets live for
    # the whole build; confirm if target lifetimes ever change.
    deps: T.List[BuildTargetTypes] = []
    for linked in itertools.chain(self.link_targets, self.link_whole_targets):
        deps += [linked, *linked.get_all_link_deps()]
    return deps

def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]:
Expand Down Expand Up @@ -2424,7 +2425,7 @@ def get_debug_filename(self) -> T.Optional[str]:
"""
return self.debug_filename

def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
    """Return this target followed by all of its transitive link deps."""
    return [self, *self.get_transitive_link_deps()]

def get_aliases(self) -> T.List[T.Tuple[str, str, str]]:
Expand Down Expand Up @@ -2758,7 +2759,7 @@ def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]:
def get_link_dep_subdirs(self) -> T.AbstractSet[str]:
return OrderedSet()

def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
    """Return this target's link dependencies: always empty here."""
    return []

def is_internal(self) -> bool:
Expand Down Expand Up @@ -3016,7 +3017,7 @@ def get_filename(self) -> str:
def get_id(self) -> str:
return self.target.get_id()

def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
    """Delegate to the wrapped target's link dependencies."""
    return self.target.get_all_link_deps()

def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]:
Expand Down
7 changes: 4 additions & 3 deletions mesonbuild/environment.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2012-2020 The Meson development team
# Copyright © 2023 Intel Corporation
# Copyright © 2023-2024 Intel Corporation

from __future__ import annotations

Expand Down Expand Up @@ -552,12 +552,12 @@ class Environment:
log_dir = 'meson-logs'
info_dir = 'meson-info'

def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.SharedCMDOptions) -> None:
def __init__(self, source_dir: str, build_dir: T.Optional[str], cmd_options: coredata.SharedCMDOptions) -> None:
self.source_dir = source_dir
self.build_dir = build_dir
# Do not try to create build directories when build_dir is none.
# This reduced mode is used by the --buildoptions introspector
if build_dir is not None:
self.build_dir = build_dir
self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
self.log_dir = os.path.join(build_dir, Environment.log_dir)
self.info_dir = os.path.join(build_dir, Environment.info_dir)
Expand Down Expand Up @@ -586,6 +586,7 @@ def __init__(self, source_dir: str, build_dir: str, cmd_options: coredata.Shared
raise MesonException(f'{str(e)} Try regenerating using "meson setup --wipe".')
else:
# Just create a fresh coredata in this case
self.build_dir = ''
self.scratch_dir = ''
self.create_new_coredata(cmd_options)

Expand Down
129 changes: 129 additions & 0 deletions mesonbuild/scripts/depaccumulate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright © 2021-2024 Intel Corporation

"""Accumulator for p1689r5 module dependencies.

See: https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2022/p1689r5.html
"""

from __future__ import annotations
import json
import re
import textwrap
import typing as T

if T.TYPE_CHECKING:
from .depscan import Description, Rule

# Quoting is duplicated from ninjabackend so that this script does not have
# to import most of Meson just to escape output paths (a performance concern).
_QUOTE_PAT = re.compile(r'[$ :\n]')


def quote(text: str) -> str:
    """Escape *text* for use in a ninja build file.

    Spaces, colons and dollar signs are prefixed with ``$``.  Embedded
    newlines cannot be represented in ninja rules and raise
    :class:`RuntimeError`.
    """
    # Common case: nothing needs escaping, return the input untouched.
    if _QUOTE_PAT.search(text) is None:
        return text
    if '\n' in text:
        raise RuntimeError(textwrap.dedent(f'''\
            Ninja does not support newlines in rules. The content was:

            {text}

            Please report this error with a test case to the Meson bug tracker.'''))
    return _QUOTE_PAT.sub(r'$\g<0>', text)


# Memoizes get_provider() lookups; keyed by logical-name, values are the
# providing rule's primary-output.
_PROVIDER_CACHE: T.Dict[str, str] = {}


def get_provider(rules: T.List[Rule], name: str) -> T.Optional[str]:
    """Get the object file of the rule (from another Target) providing *name*.

    We rely on the object file rather than the module file itself because
    object rules are part of the generated build.ninja, while the module is
    only declared inside a dyndep file.  Since the module interface file is
    generated alongside the object, the object works as a proxy and
    simplifies generation.

    Results are memoized in the module-level ``_PROVIDER_CACHE`` for
    performance.

    :param rules: The list of rules to check
    :param name: The logical-name to look for
    :return: The primary output (object file) of the rule providing the
        module, or None when no rule provides it.  A None result is not
        necessarily an error: the module may be supplied by the compiler
        itself (callers must handle None).
    """
    cached = _PROVIDER_CACHE.get(name)
    if cached is not None:
        return cached

    for rule in rules:
        for provided in rule.get('provides', []):
            if provided['logical-name'] == name:
                obj = rule['primary-output']
                _PROVIDER_CACHE[name] = obj
                return obj
    return None


def process_rules(rules: T.List[Rule],
                  extra_rules: T.List[Rule],
                  ) -> T.Iterable[T.Tuple[str, T.Optional[T.List[str]], T.List[str]]]:
    """Process the rules for this Target.

    :param rules: the rules for this target
    :param extra_rules: the rules for all of the targets this one links
        with, consulted for the modules they provide
    :yield: (primary output, exported module files or None, consumed
        module files)
    """
    for rule in rules:
        provides: T.Optional[T.List[str]] = None
        if 'provides' in rule:
            provides = [entry['compiled-module-path'] for entry in rule['provides']]

        requires: T.List[str] = []
        for entry in rule.get('requires', []):
            module_path = entry.get('compiled-module-path')
            if module_path is not None:
                requires.append(module_path)
            else:
                # A missing provider is not an error: the module may be
                # supplied by the compiler itself.
                provider = get_provider(extra_rules, entry['logical-name'])
                if provider:
                    requires.append(provider)

        yield rule['primary-output'], provides, requires


def formatter(files: T.Optional[T.List[str]]) -> str:
    """Render *files* as a quoted ninja dependency list ('| a b').

    Returns the empty string when *files* is None or empty.
    """
    if not files:
        return ''
    quoted = [quote(f) for f in files]
    return '| ' + ' '.join(quoted)


def gen(outfile: str, desc: Description, extra_rules: T.List[Rule]) -> int:
    """Write the accumulated ninja dyndep file for one target.

    :param outfile: path of the dyndep file to create
    :param desc: the module-dependency description for this target
    :param extra_rules: rules from all linked targets, used to resolve
        modules that cross target boundaries
    :return: 0 on success
    """
    with open(outfile, 'w', encoding='utf-8') as out:
        out.write('ninja_dyndep_version = 1\n\n')
        for obj, provides, requires in process_rules(desc['rules'], extra_rules):
            exported = formatter(provides)
            consumed = formatter(requires)
            out.write(f'build {quote(obj)} {exported}: dyndep {consumed}\n\n')
    return 0


def run(args: T.List[str]) -> int:
    """Script entry point: accumulate json dep files into one dyndep file.

    *args* is ``[outfile, jsonfile, *jsondeps]``: the dyndep file to write,
    the dependency json for this target, and the json files of every
    linked target.
    """
    assert len(args) >= 2, 'got wrong number of arguments!'
    outfile, jsonfile, *jsondeps = args

    with open(jsonfile, 'r', encoding='utf-8') as f:
        desc: Description = json.load(f)

    # Collect every known rule, necessary for fulfilling requirements
    # across TU and target boundaries.
    all_rules = desc['rules'].copy()
    for dep_path in jsondeps:
        with open(dep_path, encoding='utf-8') as f:
            dep_desc: Description = json.load(f)
        all_rules.extend(dep_desc['rules'])

    return gen(outfile, desc, all_rules)
Loading
Loading