import contextlib
import logging
import os
import os.path
import platform
import re
import sys

from c_common.fsutil import match_glob as _match_glob
from c_common.tables import parse_table as _parse_table
from ..source import (
    resolve as _resolve_source,
    good_file as _good_file,
)
from . import errors as _errors
from . import (
    pure as _pure,
    gcc as _gcc,
)


logger = logging.getLogger(__name__)


# Supported "source":
# * filename (string)
# * lines (iterable)
# * text (string)
# Supported return values:
# * iterator of SourceLine
# * sequence of SourceLine
# * text (string)
# * something that combines all those
# XXX Add the missing support from above.
# XXX Add more low-level functions to handle permutations?
def preprocess(source, *,
               incldirs=None,
               includes=None,
               macros=None,
               samefiles=None,
               filename=None,
               cwd=None,
               tool=True,
               ):
    """...

    CWD should be the project root and "source" should be relative.
    """
    if tool:
        if not cwd:
            cwd = os.getcwd()
        logger.debug(f'CWD: {cwd!r}')
        logger.debug(f'incldirs: {incldirs!r}')
        logger.debug(f'includes: {includes!r}')
        logger.debug(f'macros: {macros!r}')
        logger.debug(f'samefiles: {samefiles!r}')
        _preprocess = _get_preprocessor(tool)
        with _good_file(source, filename) as source:
            return _preprocess(
                source,
                incldirs,
                includes,
                macros,
                samefiles,
                cwd,
            ) or ()
    else:
        source, filename = _resolve_source(source, filename)
        # We ignore "includes", "macros", etc.
        return _pure.preprocess(source, filename, cwd)

# if _run() returns just the lines:
#     text = _run(source)
#     lines = [line + os.linesep for line in text.splitlines()]
#     lines[-1] = lines[-1].splitlines()[0]
#
#     conditions = None
#     for lno, line in enumerate(lines, 1):
#         kind = 'source'
#         directive = None
#         data = line
#         yield lno, kind, data, conditions
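
# A minimal usage sketch of preprocess() (the paths and options here are
# hypothetical):
#
#     lines = preprocess(
#         'Modules/_abc.c',
#         incldirs=['Include', 'Include/internal'],
#         cwd='/path/to/cpython',
#     )
#     for line in lines:
#         ...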


def get_preprocessor(*,
                     file_macros=None,
                     file_includes=None,
                     file_incldirs=None,
                     file_same=None,
                     ignore_exc=False,
                     log_err=None,
                     ):
    _preprocess = preprocess
    if file_macros:
        file_macros = tuple(_parse_macros(file_macros))
    if file_includes:
        file_includes = tuple(_parse_includes(file_includes))
    if file_incldirs:
        file_incldirs = tuple(_parse_incldirs(file_incldirs))
    if file_same:
        file_same = dict(file_same or ())
    if not callable(ignore_exc):
        ignore_exc = (lambda exc, _ig=ignore_exc: _ig)

    def get_file_preprocessor(filename):
        filename = filename.strip()
        if file_macros:
            macros = list(_resolve_file_values(filename, file_macros))
        if file_includes:
            # There's a small chance we could need to filter out any
            # includes that import "filename".  It isn't clear that it's
            # a problem any longer.  If we do end up filtering then
            # it may make sense to use c_common.fsutil.match_path_tail().
            includes = [i for i, in _resolve_file_values(filename, file_includes)]
        if file_incldirs:
            incldirs = [v for v, in _resolve_file_values(filename, file_incldirs)]
        if file_same:
            samefiles = _resolve_samefiles(filename, file_same)

        def preprocess(**kwargs):
            if file_macros and 'macros' not in kwargs:
                kwargs['macros'] = macros
            if file_includes and 'includes' not in kwargs:
                kwargs['includes'] = includes
            if file_incldirs and 'incldirs' not in kwargs:
                kwargs['incldirs'] = incldirs
            if file_same and 'samefiles' not in kwargs:
                kwargs['samefiles'] = samefiles
            kwargs.setdefault('filename', filename)
            with handling_errors(ignore_exc, log_err=log_err):
                return _preprocess(filename, **kwargs)
        return preprocess
    return get_file_preprocessor
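
# A sketch of the two-level factory above (paths are hypothetical; the
# "file_*" arguments are tab-separated tables handled by the _parse_*()
# helpers below):
#
#     get_file_preprocessor = get_preprocessor(ignore_exc=True,
#                                              log_err=logger.warning)
#     preprocess_file = get_file_preprocessor('/path/to/cpython/Modules/_abc.c')
#     lines = preprocess_file(cwd='/path/to/cpython')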


def _resolve_file_values(filename, file_values):
    # We expect the filename and all patterns to be absolute paths.
    for pattern, *value in file_values or ():
        if _match_glob(filename, pattern):
            yield value


def _parse_macros(macros):
    for row, srcfile in _parse_table(macros, '\t', 'glob\tname\tvalue', rawsep='=', default=None):
        yield row


def _parse_includes(includes):
    for row, srcfile in _parse_table(includes, '\t', 'glob\tinclude', default=None):
        yield row


def _parse_incldirs(incldirs):
    for row, srcfile in _parse_table(incldirs, '\t', 'glob\tdirname', default=None):
        glob, dirname = row
        if dirname is None:
            # Match all files.
            dirname = glob
            row = ('*', dirname.strip())
        yield row
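
# Each of the _parse_*() tables above is tab-separated, one row per line,
# with the columns named in the header string passed to _parse_table().
# For example (hypothetical values):
#
#     file_macros rows:    <glob> \t <name> \t <value>
#     file_includes rows:  <glob> \t <include>
#     file_incldirs rows:  <glob> \t <dirname>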


def _resolve_samefiles(filename, file_same):
    assert '*' not in filename, (filename,)
    assert os.path.normpath(filename) == filename, (filename,)
    _, suffix = os.path.splitext(filename)
    samefiles = []
    for patterns, in _resolve_file_values(filename, file_same.items()):
        for pattern in patterns:
            same = _resolve_samefile(filename, pattern, suffix)
            if not same:
                continue
            samefiles.append(same)
    return samefiles


def _resolve_samefile(filename, pattern, suffix):
    if pattern == filename:
        return None
    if pattern.endswith(os.path.sep):
        pattern += f'*{suffix}'
    assert os.path.normpath(pattern) == pattern, (pattern,)
    if '*' in os.path.dirname(pattern):
        raise NotImplementedError((filename, pattern))
    if '*' not in os.path.basename(pattern):
        return pattern

    common = os.path.commonpath([filename, pattern])
    relpattern = pattern[len(common) + len(os.path.sep):]
    relpatterndir = os.path.dirname(relpattern)
    relfile = filename[len(common) + len(os.path.sep):]
    if os.path.basename(pattern) == '*':
        return os.path.join(common, relpatterndir, relfile)
    elif os.path.basename(relpattern) == '*' + suffix:
        return os.path.join(common, relpatterndir, relfile)
    else:
        raise NotImplementedError((filename, pattern))
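
# For example (hypothetical paths, POSIX separators), a "samefiles" pattern
# that names a directory resolves to the same basename in that directory:
#
#     _resolve_samefile('/repo/Modules/posixmodule.c',
#                       '/repo/Modules/clinic/', '.c')
#     # -> '/repo/Modules/clinic/posixmodule.c'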


@contextlib.contextmanager
def handling_errors(ignore_exc=None, *, log_err=None):
    try:
        yield
    except _errors.OSMismatchError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(f'<OS mismatch (expected {" or ".join(exc.expected)})>')
        return None
    except _errors.MissingDependenciesError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(f'<missing dependency {exc.missing}>')
        return None
    except _errors.ErrorDirectiveError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(exc)
        return None
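
# A sketch of using handling_errors() directly (the handler values here are
# hypothetical; get_preprocessor() wires them up from its arguments):
#
#     with handling_errors(ignore_exc=lambda exc: True, log_err=print):
#         ...  # call a tool-specific preprocessor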


##################################
# tools

_COMPILERS = {
    # matching distutils.ccompiler.compiler_class:
    'unix': _gcc.preprocess,
    'msvc': None,
    'cygwin': None,
    'mingw32': None,
    'bcpp': None,
    # aliases/extras:
    'gcc': _gcc.preprocess,
    'clang': None,
}


def _get_default_compiler():
    if re.match('cygwin.*', sys.platform) is not None:
        return 'unix'
    if os.name == 'nt':
        return 'msvc'
    if sys.platform == 'darwin' and 'clang' in platform.python_compiler():
        return 'clang'
    return 'unix'


def _get_preprocessor(tool):
    if tool is True:
        tool = _get_default_compiler()
    preprocess = _COMPILERS.get(tool)
    if preprocess is None:
        raise ValueError(f'unsupported tool {tool}')
    return preprocess
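
# The "tool" argument to preprocess() maps into _COMPILERS above, e.g.:
#
#     _get_preprocessor('gcc')   # -> _gcc.preprocess
#     _get_preprocessor(True)    # default for the current platform
#     _get_preprocessor('msvc')  # raises ValueError (no backend wired up yet)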


##################################
# aliases

from .errors import (
    PreprocessorError,
    PreprocessorFailure,
    ErrorDirectiveError,
    MissingDependenciesError,
    OSMismatchError,
)
from .common import FileInfo, SourceLine