Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
allendowney
GitHub Repository: allendowney/cpython
Path: blob/main/Tools/c-analyzer/c_parser/preprocessor/__init__.py
12 views
1
import contextlib
2
import logging
3
import os
4
import os.path
5
import re
6
import sys
7
8
from c_common.fsutil import match_glob as _match_glob
9
from c_common.tables import parse_table as _parse_table
10
from ..source import (
11
resolve as _resolve_source,
12
good_file as _good_file,
13
)
14
from . import errors as _errors
15
from . import (
16
pure as _pure,
17
gcc as _gcc,
18
)
19
20
21
logger = logging.getLogger(__name__)
22
23
24
# Supported "source":
25
# * filename (string)
26
# * lines (iterable)
27
# * text (string)
28
# Supported return values:
29
# * iterator of SourceLine
30
# * sequence of SourceLine
31
# * text (string)
32
# * something that combines all those
33
# XXX Add the missing support from above.
34
# XXX Add more low-level functions to handle permutations?
35
36
def preprocess(source, *,
               incldirs=None,
               includes=None,
               macros=None,
               samefiles=None,
               filename=None,
               cwd=None,
               tool=True,
               ):
    """Preprocess "source" and return the result (possibly empty).

    "source" may be a filename, text, or an iterable of lines.
    CWD should be the project root and "source" should be relative.

    With a false "tool" the pure-Python preprocessor is used and the
    include/macro-related arguments are ignored; otherwise "tool"
    selects a compiler front end (True means the platform default).
    """
    if not tool:
        source, filename = _resolve_source(source, filename)
        # We ignore "includes", "macros", etc.
        return _pure.preprocess(source, filename, cwd)

    if not cwd:
        # Default to the current directory as the project root.
        cwd = os.getcwd()
    logger.debug(f'CWD: {cwd!r}')
    logger.debug(f'incldirs: {incldirs!r}')
    logger.debug(f'includes: {includes!r}')
    logger.debug(f'macros: {macros!r}')
    logger.debug(f'samefiles: {samefiles!r}')
    run = _get_preprocessor(tool)
    with _good_file(source, filename) as source:
        # A preprocessor may return a false value; normalize to ().
        return run(
            source,
            incldirs,
            includes,
            macros,
            samefiles,
            cwd,
        ) or ()
71
72
# if _run() returns just the lines:
73
# text = _run(source)
74
# lines = [line + os.linesep for line in text.splitlines()]
75
# lines[-1] = lines[-1].splitlines()[0]
76
#
77
# conditions = None
78
# for lno, line in enumerate(lines, 1):
79
# kind = 'source'
80
# directive = None
81
# data = line
82
# yield lno, kind, data, conditions
83
84
85
def get_preprocessor(*,
                     file_macros=None,
                     file_includes=None,
                     file_incldirs=None,
                     file_same=None,
                     ignore_exc=False,
                     log_err=None,
                     ):
    """Return a factory that builds a per-file preprocess() function.

    Each "file_*" argument is a table of (glob, ...) rows; the rows whose
    glob matches a given filename supply that file's default macros,
    includes, include dirs, and "same file" patterns.

    "ignore_exc" may be a bool or a callable taking the caught exception;
    "log_err", if given, receives a description of each swallowed error.
    """
    _preprocess = preprocess
    # Parse each provided table once, up front, so every per-file
    # preprocessor shares the same parsed rows.
    if file_macros:
        file_macros = tuple(_parse_macros(file_macros))
    if file_includes:
        file_includes = tuple(_parse_includes(file_includes))
    if file_incldirs:
        file_incldirs = tuple(_parse_incldirs(file_incldirs))
    if file_same:
        file_same = dict(file_same or ())
    if not callable(ignore_exc):
        # Normalize a plain bool into a predicate that always
        # returns that bool.
        ignore_exc = (lambda exc, _ig=ignore_exc: _ig)

    def get_file_preprocessor(filename):
        # Resolve the file-specific defaults from the tables.
        # NOTE: each of these locals is only bound when the matching
        # table was provided; the closure below guards on the same
        # conditions before reading them.
        filename = filename.strip()
        if file_macros:
            macros = list(_resolve_file_values(filename, file_macros))
        if file_includes:
            # There's a small chance we could need to filter out any
            # includes that import "filename". It isn't clear that it's
            # a problem any longer. If we do end up filtering then
            # it may make sense to use c_common.fsutil.match_path_tail().
            includes = [i for i, in _resolve_file_values(filename, file_includes)]
        if file_incldirs:
            incldirs = [v for v, in _resolve_file_values(filename, file_incldirs)]
        if file_same:
            samefiles = _resolve_samefiles(filename, file_same)

        def preprocess(**kwargs):
            # Fill in the file-specific defaults without clobbering
            # anything the caller passed explicitly.
            if file_macros and 'macros' not in kwargs:
                kwargs['macros'] = macros
            if file_includes and 'includes' not in kwargs:
                kwargs['includes'] = includes
            if file_incldirs and 'incldirs' not in kwargs:
                kwargs['incldirs'] = incldirs
            if file_same and 'samefiles' not in kwargs:
                kwargs['samefiles'] = samefiles
            kwargs.setdefault('filename', filename)
            with handling_errors(ignore_exc, log_err=log_err):
                return _preprocess(filename, **kwargs)
        return preprocess
    return get_file_preprocessor
134
135
136
def _resolve_file_values(filename, file_values):
    """Yield the value columns of each row whose glob matches *filename*.

    We expect the filename and all patterns to be absolute paths.
    """
    for entry in file_values or ():
        pattern = entry[0]
        if _match_glob(filename, pattern):
            yield list(entry[1:])
141
142
143
def _parse_macros(macros):
    """Yield (glob, name, value) rows parsed from the macros table."""
    parsed = _parse_table(macros, '\t', 'glob\tname\tvalue', rawsep='=', default=None)
    yield from (row for row, _srcfile in parsed)
146
147
148
def _parse_includes(includes):
    """Yield (glob, include) rows parsed from the includes table."""
    parsed = _parse_table(includes, '\t', 'glob\tinclude', default=None)
    yield from (row for row, _srcfile in parsed)
151
152
153
def _parse_incldirs(incldirs):
    """Yield (glob, dirname) rows parsed from the include-dirs table."""
    for entry, _srcfile in _parse_table(incldirs, '\t', 'glob\tdirname', default=None):
        pattern, dirname = entry
        if dirname is not None:
            yield entry
        else:
            # A row with only one column gives the directory; it then
            # applies to every file.  (Match all files.)
            yield ('*', pattern.strip())
161
162
163
def _resolve_samefiles(filename, file_same):
    """Return the files to treat as "the same" as *filename*.

    *file_same* maps filename globs to lists of "same file" patterns;
    each matching pattern is resolved against *filename*.
    """
    assert '*' not in filename, (filename,)
    assert os.path.normpath(filename) == filename, (filename,)
    _, suffix = os.path.splitext(filename)
    candidates = (
        _resolve_samefile(filename, pattern, suffix)
        for patterns, in _resolve_file_values(filename, file_same.items())
        for pattern in patterns
    )
    # Drop patterns that did not resolve to anything.
    return [resolved for resolved in candidates if resolved]
175
176
177
def _resolve_samefile(filename, pattern, suffix):
178
if pattern == filename:
179
return None
180
if pattern.endswith(os.path.sep):
181
pattern += f'*{suffix}'
182
assert os.path.normpath(pattern) == pattern, (pattern,)
183
if '*' in os.path.dirname(pattern):
184
raise NotImplementedError((filename, pattern))
185
if '*' not in os.path.basename(pattern):
186
return pattern
187
188
common = os.path.commonpath([filename, pattern])
189
relpattern = pattern[len(common) + len(os.path.sep):]
190
relpatterndir = os.path.dirname(relpattern)
191
relfile = filename[len(common) + len(os.path.sep):]
192
if os.path.basename(pattern) == '*':
193
return os.path.join(common, relpatterndir, relfile)
194
elif os.path.basename(relpattern) == '*' + suffix:
195
return os.path.join(common, relpatterndir, relfile)
196
else:
197
raise NotImplementedError((filename, pattern))
198
199
200
@contextlib.contextmanager
def handling_errors(ignore_exc=None, *, log_err=None):
    """Context manager that optionally swallows known preprocessor errors.

    *ignore_exc* is a predicate called with the caught exception; a
    false result re-raises it.  When an exception is swallowed and
    *log_err* is provided, a short description is passed to log_err.
    """
    try:
        yield
    except _errors.OSMismatchError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(f'<OS mismatch (expected {" or ".join(exc.expected)})>')
        return None
    except _errors.MissingDependenciesError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            # Fixed: the closing ">" was missing, leaving the message
            # inconsistent with the "<OS mismatch ...>" format above.
            log_err(f'<missing dependency {exc.missing}>')
        return None
    except _errors.ErrorDirectiveError as exc:
        if not ignore_exc(exc):
            raise  # re-raise
        if log_err is not None:
            log_err(exc)
        return None
222
223
224
##################################
225
# tools
226
227
# Map of tool name -> preprocess function.  None means the tool is
# recognized but not supported yet (see _get_preprocessor()).
_COMPILERS = {
    # matching distutils.ccompiler.compiler_class:
    'unix': _gcc.preprocess,
    'msvc': None,
    'cygwin': None,
    'mingw32': None,
    'bcpp': None,
    # aliases/extras:
    'gcc': _gcc.preprocess,
    'clang': None,
}
238
239
240
def _get_default_compiler():
241
if re.match('cygwin.*', sys.platform) is not None:
242
return 'unix'
243
if os.name == 'nt':
244
return 'msvc'
245
return 'unix'
246
247
248
def _get_preprocessor(tool):
    """Resolve *tool* to its preprocess function.

    True means "use the platform default".  Raises ValueError for a
    tool that is unknown or not supported yet.
    """
    if tool is True:
        tool = _get_default_compiler()
    resolved = _COMPILERS.get(tool)
    if resolved is None:
        raise ValueError(f'unsupported tool {tool}')
    return resolved
255
256
257
##################################
258
# aliases
259
260
from .errors import (
261
PreprocessorError,
262
PreprocessorFailure,
263
ErrorDirectiveError,
264
MissingDependenciesError,
265
OSMismatchError,
266
)
267
from .common import FileInfo, SourceLine
268
269