Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
keewenaw
GitHub Repository: keewenaw/ethereum-wallet-cracker
Path: blob/main/test/lib/python3.9/site-packages/pkg_resources/__init__.py
4808 views
1
"""
2
Package resource API
3
--------------------
4
5
A resource is a logical file contained within a package, or a logical
6
subdirectory thereof. The package resource API expects resource names
7
to have their path parts separated with ``/``, *not* whatever the local
8
path separator is. Do not use os.path operations to manipulate resource
9
names being passed into the API.
10
11
The package resource API is designed to work with normal filesystem packages,
12
.egg files, and unpacked .egg files. It can also work in a limited way with
13
.zip files and with custom PEP 302 loaders that support the ``get_data()``
14
method.
15
"""
16
17
import sys
18
import os
19
import io
20
import time
21
import re
22
import types
23
import zipfile
24
import zipimport
25
import warnings
26
import stat
27
import functools
28
import pkgutil
29
import operator
30
import platform
31
import collections
32
import plistlib
33
import email.parser
34
import errno
35
import tempfile
36
import textwrap
37
import itertools
38
import inspect
39
import ntpath
40
import posixpath
41
import importlib
42
from pkgutil import get_importer
43
44
# --- Runtime compatibility shims -------------------------------------------

try:
    import _imp
except ImportError:
    # Python 3.2 compatibility
    import imp as _imp

try:
    FileExistsError
except NameError:
    # Pre-3.3 interpreters lack FileExistsError; fall back to its base.
    FileExistsError = OSError

# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
    WRITE_SUPPORT = True
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False

from os import open as os_open
from os.path import isdir, split

try:
    import importlib.machinery as importlib_machinery
    # access attribute to force import under delayed import mechanisms.
    importlib_machinery.__name__
except ImportError:
    importlib_machinery = None
73
74
from pkg_resources.extern.jaraco.text import (
    yield_lines,
    drop_comment,
    join_continuation,
)

from pkg_resources.extern import appdirs
from pkg_resources.extern import packaging
# Eagerly import the vendored packaging submodules so that attribute access
# (e.g. ``packaging.version``) works regardless of import order elsewhere.
__import__('pkg_resources.extern.packaging.version')
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')
__import__('pkg_resources.extern.packaging.utils')

if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")
90
91
# declare some globals that will be defined later to
92
# satisfy the linters.
93
require = None
94
working_set = None
95
add_activation_listener = None
96
resources_stream = None
97
cleanup_resources = None
98
resource_dir = None
99
resource_stream = None
100
set_extraction_path = None
101
resource_isdir = None
102
resource_string = None
103
iter_entry_points = None
104
resource_listdir = None
105
resource_filename = None
106
resource_exists = None
107
_distribution_finders = None
108
_namespace_handlers = None
109
_namespace_packages = None
110
111
112
class PEP440Warning(RuntimeWarning):
    """
    Emitted when a version or specifier string does not comply with PEP 440.
    """
117
118
119
def parse_version(v):
    """Parse version string `v` into a comparable version object.

    Returns a PEP 440 ``packaging.version.Version`` when `v` conforms;
    otherwise emits a deprecation warning and falls back to the
    legacy (non-PEP-440) version type.
    """
    try:
        return packaging.version.Version(v)
    except packaging.version.InvalidVersion:
        warnings.warn(
            f"{v} is an invalid version and will not be supported in "
            "a future release",
            PkgResourcesDeprecationWarning,
        )
        return packaging.version.LegacyVersion(v)
129
130
131
# name -> state-variable type ('dict', 'object', 'none'); consumed by the
# module-level __getstate__/__setstate__ helpers.
_state_vars = {}


def _declare_state(vartype, **kw):
    """Publish each keyword as a module global and record its state type."""
    globals().update(kw)
    for name in kw:
        _state_vars[name] = vartype
137
138
139
def __getstate__():
    # Snapshot every registered state variable, dispatching on the type tag
    # stored in _state_vars ('dict'/'object'/'none' -> _sget_dict/...).
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_' + v](g[k])
    return state


def __setstate__(state):
    # Restore each state variable *in place* via its typed setter, so that
    # existing references to the objects keep observing the new contents.
    g = globals()
    for k, v in state.items():
        g['_sset_' + _state_vars[k]](k, g[k], v)
    return state
152
153
154
def _sget_dict(val):
    """Snapshot a dict state variable (shallow copy suffices)."""
    return val.copy()


def _sset_dict(key, ob, state):
    """Restore a dict state variable in place."""
    ob.clear()
    ob.update(state)


def _sget_object(val):
    """Snapshot an object state variable via its own __getstate__."""
    return val.__getstate__()


def _sset_object(key, ob, state):
    """Restore an object state variable via its own __setstate__."""
    ob.__setstate__(state)


def _sget_none(*args):
    # 'none'-typed state variables are neither captured nor restored.
    return None


def _sset_none(*args):
    return None
172
173
174
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of macOS that we are *running*. To allow usage of packages that
    explicitly require a newer version of macOS, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # Substitute the *running* macOS version, keeping the arch tag.
            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
        except ValueError:
            # not macOS
            pass
    return plat
196
197
198
# Public names re-exported by ``from pkg_resources import *``.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Warnings
    'PkgResourcesDeprecationWarning',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
246
247
248
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # Mirror the constructor call, e.g. ResolutionError('why',).
        return '{}{!r}'.format(type(self).__name__, self.args)
253
254
255
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # The installed Distribution (first constructor argument).
        return self.args[0]

    @property
    def req(self):
        # The requested Requirement (second constructor argument).
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if not required_by:
            return self
        return ContextualVersionConflict(*self.args, required_by)
285
286
287
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict carrying a third argument: the set of requirements
    that required the installed Distribution.
    """

    # Extend the parent's message with the dependents that demanded it.
    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # Set of requirer names (third constructor argument).
        return self.args[2]
298
299
300
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # The unmet Requirement (first constructor argument).
        return self.args[0]

    @property
    def requirers(self):
        # Who required it (second constructor argument), possibly None.
        return self.args[1]

    @property
    def requirers_str(self):
        requirers = self.requirers
        return ', '.join(requirers) if requirers else 'the application'

    def report(self):
        """Render a human-readable description of the missing distribution."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
325
326
327
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""


# loader_type -> provider_factory mapping, populated by register_loader_type().
_provider_factories = {}

# 'major.minor' of the running interpreter, e.g. '3.9'.
PY_MAJOR = '{}.{}'.format(*sys.version_info)
# Distribution "precedence" constants: higher values are preferred when
# several distributions for the same project are available.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
339
340
341
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
349
350
351
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, the provider is the matching (activated if
        # necessary) Distribution.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # Not imported yet -- import it so its __loader__ is available.
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
362
363
364
def _macos_vers(_cache=[]):
    """Return the running macOS version as a list of string components.

    The mutable default argument is deliberate: it memoizes the result
    across calls for the lifetime of the process.
    """
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts, where platform.mac_ver() returns ''
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                # plistlib.readPlist was removed in Python 3.9, so the
                # historical ``hasattr(plistlib, 'readPlist')`` guard made
                # this fallback dead code there; use plistlib.load instead.
                with open(plist, 'rb') as fh:
                    plist_content = plistlib.load(fh)
                if 'ProductVersion' in plist_content:
                    version = plist_content['ProductVersion']

        _cache.append(version.split('.'))
    return _cache[0]
378
379
380
def _macos_arch(machine):
    """Normalize historical PowerPC machine names to the 'ppc' arch tag."""
    aliases = {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}
    return aliases.get(machine, machine)
382
383
384
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macos_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (
                int(version[0]), int(version[1]),
                _macos_arch(machine),
            )
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
406
407
408
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
409
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
410
# XXX backward compat
411
get_platform = get_build_platform
412
413
414
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    # macOS special cases
    provMac = macosVersionString.match(provided)
    if not provMac:
        # Backwards compatibility for packages built before setuptools 0.6,
        # which used the darwin-* designation instead of macosx-*.
        provDarwin = darwinVersionString.match(provided)
        if provDarwin:
            dversion = int(provDarwin.group(1))
            macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
            if (dversion == 7 and macosversion >= "10.3") or \
                    (dversion == 8 and macosversion >= "10.4"):
                return True
        # egg isn't macOS or legacy darwin
        return False

    # are they the same major version and machine type?
    if provMac.group(1) != reqMac.group(1) or \
            provMac.group(3) != reqMac.group(3):
        return False

    # is the required OS major update >= the provided one?
    if int(provMac.group(2)) > int(reqMac.group(2)):
        return False

    return True
458
459
460
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script inside the *caller's* global namespace, scrubbed so the
    # script executes as if it were the __main__ module.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script
471
472
473
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize progressively: str -> Requirement -> Distribution.
    if isinstance(dist, str):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
482
483
484
# Convenience wrappers: resolve `dist` then delegate to the Distribution.

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)


def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)


def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
497
498
499
class IMetadataProvider:
    # Interface specification only: methods are deliberately declared without
    # ``self`` and have empty bodies; the class documents the protocol and is
    # not meant to be instantiated.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
520
521
522
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Interface specification only, like IMetadataProvider: methods are
    # declared without ``self`` and carry no implementation.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
549
550
class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries, in order (may contain duplicates, mirroring sys.path)
        self.entries = []
        # path entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> active Distribution
        self.by_key = {}
        # canonicalized name -> key actually used in by_key
        self.normalized_to_canonical_keys = {}
        # callables notified whenever a new distribution is added
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is cannot satisfy __requires__; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)

        if dist is None:
            # retry under the canonicalized spelling of the project name
            canonical_key = self.normalized_to_canonical_keys.get(req.key)

            if canonical_key is not None:
                req.key = canonical_key
                dist = self.by_key.get(canonical_key)

        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Execute in the caller's globals, scrubbed to mimic __main__.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        normalized_name = packaging.utils.canonicalize_name(dist.key)
        self.normalized_to_canonical_keys[normalized_name] = dist.key
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers of the newly activated distribution
        self._added_new(dist)

    # FIXME: 'WorkingSet.resolve' is too complex (11)
    def resolve(self, requirements, env=None, installer=None,  # noqa: C901
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Fan out a newly added distribution to all subscribers.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Copy all mutable containers so the pickled state is a snapshot.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.normalized_to_canonical_keys.copy(), self.callbacks[:]
        )

    def __setstate__(self, e_k_b_n_c):
        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
        self.callbacks = callbacks[:]
947
948
949
class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        if not req.marker:
            return True
        candidates = self.get(req, ()) + (extras or (None,))
        return any(
            req.marker.evaluate({'extra': extra})
            for extra in candidates
        )
967
968
969
class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.6'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE: the `platform` default is evaluated once, when this method is
        # defined — intentional; it captures the importing interpreter's
        # platform.
        # Maps distribution key (lowercased project name) -> list of
        # Distribution objects, kept newest-first by add().
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A dist with no recorded py_version is treated as compatible with
        # any Python.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError/ValueError if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep newest-first order so __getitem__/best_match see the
                # preferred version first.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            # Caller asked us to look for a replacement instead of failing.
            dist = None
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists have been emptied by
            # remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unrestricted environment (no platform/python filter)
        # so nothing from either operand is silently dropped.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
1113
1114
1115
# XXX backward compatibility
1116
AvailableDistributions = Environment
1117
1118
1119
class ExtractionError(RuntimeError):
    """Raised when a resource cannot be extracted to the egg cache.

    Instances carry the following attributes, attached by the resource
    manager that raised the error:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1133
1134
1135
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means use get_default_cache().
    extraction_path = None

    def __init__(self):
        # Paths handed out by get_cache_path(), tracked so
        # cleanup_resources() could remove them later.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Wrap whatever exception is currently being handled in an
        # ExtractionError that explains how to fix the cache location.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            # _bypass_ensure_directory avoids the setuptools sandbox hooks.
            _bypass_ensure_directory(target_path)
        except Exception:
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "Extraction path is writable by group/others "
                "and vulnerable to attack when "
                "used with get_resource_filename ({path}). "
                "Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)."
            ).format(**locals())
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX  -- not implemented upstream; extracted files are left in place.
1314
1315
1316
def get_default_cache():
    """Return the directory used for caching extracted eggs.

    The ``PYTHON_EGG_CACHE`` environment variable takes precedence;
    otherwise fall back to a platform-appropriate per-user cache
    directory for an app named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')
1326
1327
1328
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Splitting on the disallowed runs and rejoining with '-' is equivalent
    # to re.sub(pattern, '-', name).
    return '-'.join(re.split(r'[^A-Za-z0-9.]+', name))
1334
1335
1336
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string

    Try PEP 440 normalization first; if the string is not a valid version,
    fall back to a lossy character-level cleanup.
    """
    try:
        parsed = packaging.version.Version(version)
    except packaging.version.InvalidVersion:
        fallback = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', fallback)
    return str(parsed)
1346
1347
1348
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    sanitized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return sanitized.lower()
1355
1356
1357
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1363
1364
1365
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
        return False
    except SyntaxError as exc:
        # Strip location info: the marker did not come from a real file.
        exc.filename = None
        exc.lineno = None
        return exc
1377
1378
1379
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    Parsing and evaluation are delegated to the vendored ``packaging``
    library; its InvalidMarker is converted to SyntaxError for callers.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as err:
        raise SyntaxError(err) from err
1392
1393
1394
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Subclasses (EggProvider and friends) fill these in when the package
    # lives inside an egg; for a plain loader they stay None.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Generic fallback: load the whole resource and wrap it in BytesIO.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # Historical quirk: returns the falsy egg_info itself rather
            # than False.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Execute an egg's embedded script in `namespace`; normalizes line
        # endings and, when no real file exists, seeds linecache so
        # tracebacks can still show source lines.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            with open(script_filename) as fid:
                source = fid.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir/_get primitives are the loader-specific hooks
    # that concrete providers override.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Map a '/'-separated resource name onto the local filesystem path.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1583
1584
1585
# Fallback: any loader type without a more specific registration gets
# NullProvider (which raises NotImplementedError for most operations).
register_loader_type(object, NullProvider)
1586
1587
1588
def _parents(path):
1589
"""
1590
yield all parents of path including path
1591
"""
1592
last = None
1593
while path != last:
1594
yield path
1595
last = path
1596
path, _ = os.path.split(path)
1597
1598
1599
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        super().__init__(module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may be nested inside a "basket" of multiple eggs, so walk
        # up from module_path (rather than the loader's .archive) looking
        # for the first enclosing egg directory.
        for candidate in _parents(self.module_path):
            if _is_egg_path(candidate):
                self._set_egg(candidate)
                break

    def _set_egg(self, path):
        # Record the egg's identity and where its EGG-INFO metadata lives.
        self.egg_root = path
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
1617
1618
1619
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Override NullProvider's BytesIO round-trip: stream straight from
        # the file on disk.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register this provider for modules imported by the standard
        # filesystem loaders.  A loader name missing from
        # importlib.machinery maps to type(None), which never matches a
        # real loader instance.
        loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
        for name in loader_names:
            loader_cls = getattr(importlib_machinery, name, type(None))
            register_loader_type(loader_cls, cls)
1644
1645
1646
# Hook DefaultProvider up to the standard filesystem import loaders.
DefaultProvider._register()
1647
1648
1649
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # No backing module or package exists on the filesystem.
    module_path = None

    _isdir = _has = lambda self, path: False

    def _get(self, path):
        # Return bytes, not str: the provider `_get()` contract is raw
        # bytes (DefaultProvider reads files in 'rb' mode, and
        # NullProvider.get_metadata() calls .decode('utf-8') on the
        # result).  Returning '' here would break that contract — this
        # matches the fix later adopted upstream in setuptools.
        return b''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass
1664
1665
1666
# Shared singleton used wherever an "empty" provider is needed.
empty_provider = EmptyProvider()
1667
1668
1669
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as archive:
            return {
                member.replace('/', os.sep): archive.getinfo(member)
                for member in archive.namelist()
            }

    load = build
1694
1695
1696
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    # Cache entry: the parsed manifest plus the archive mtime it reflects.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        key = os.path.normpath(path)
        current_mtime = os.stat(key).st_mtime

        entry = self.get(key)
        if entry is None or entry.mtime != current_mtime:
            # Cache miss, or the archive changed on disk: rebuild.
            entry = self.manifest_mod(self.build(key), current_mtime)
            self[key] = entry

        return entry.manifest
1714
1715
1716
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily-loaded list of resources extracted eagerly alongside any one
    # of them (see _get_eager_resources).
    eagers = None
    # Class-level cache shared by all instances; keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        super().__init__(module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        # Resources in a zip have no real filesystem path, so extract them
        # into the egg cache and return the extracted location.
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    def _extract_resource(self, manager, zip_path):  # noqa: C901
        # Extract `zip_path` (a file or directory) to the egg cache,
        # returning the extracted path.  Uses a temp file + rename so
        # concurrent extractions never see a partially-written file.

        if zip_path in self._index():
            # Directory entry: extract every child, return the directory.
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if self._is_current(real_path, zip_path):
                return real_path

            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Match the archive timestamp so _is_current() can compare.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Names listed in native_libs.txt / eager_resources.txt must all be
        # extracted together (e.g. a shared library and its data files).
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once) a directory index: parent subpath -> list of child
        # names, derived from the flat zip manifest.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1887
1888
1889
# Modules loaded by zipimport get resources via ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)
1890
1891
1892
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name maps to the single backing file.
        return self.path

    def has_metadata(self, name):
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as stream:
            contents = stream.read()
        self._warn_on_replacement(contents)
        return contents

    def _warn_on_replacement(self, metadata):
        # '\ufffd' is what errors="replace" substitutes for undecodable
        # bytes, so its presence means the file was not valid UTF-8.
        if '�' not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        warnings.warn(tmpl.format(**locals()))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1931
1932
1933
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # Skip EggProvider.__init__ entirely: both locations are given
        # explicitly, so no prefix discovery is needed.
        self.egg_info = egg_info
        self.module_path = path
1956
1957
1958
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Bypass ZipProvider.__init__ (which expects a module object) and
        # derive everything from the importer directly.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()
1971
1972
1973
# Module-level registry mapping importer types to distribution-finder
# callables, managed as re-initializable module state via _declare_state.
_declare_state('dict', _distribution_finders={})
1974
1975
1976
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
1984
1985
1986
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Dispatch to whichever registered finder matches this path item's
    # importer type.
    importer = get_importer(path_item)
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)
1991
1992
1993
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the archive itself is an egg distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # scan the archive's top level for nested eggs and metadata directories
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            # recurse with a fresh zipimporter rooted at the nested egg
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
2018
2019
2020
# Zip archives (including .egg zip files) are scanned for egg distributions.
register_finder(zipimport.zipimporter, find_eggs_in_zip)
2021
2022
2023
def find_nothing(importer, path_item, only=False):
    """Finder that yields no distributions, regardless of the path item."""
    return ()
2025
2026
2027
# Fallback: importer types with no specific finder yield no distributions.
register_finder(object, find_nothing)
2028
2029
2030
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def try_parse(name):
        """
        Attempt to parse as a version or return a null version.
        """
        try:
            return packaging.version.Version(name)
        except Exception:
            # Unparseable components (e.g. the project name) sort as
            # version "0".
            return packaging.version.Version('0')

    def _by_version(name):
        """
        Parse each component of the filename
        """
        # Split on '-' so name, version, pyver, and platform each become
        # a sortable Version; the extension is compared last.
        name, ext = os.path.splitext(name)
        parts = itertools.chain(name.split('-'), [ext])
        return [try_parse(part) for part in parts]

    return sorted(names, key=_by_version, reverse=True)
2063
2064
2065
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg; yield it directly.
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = (
        os.path.join(path_item, child)
        for child in safe_listdir(path_item)
    )

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        # NOTE(review): `entry` is already a joined path from `entries`
        # above; os.path.join returns it unchanged when it is absolute
        # (path_item is normalized/absolute here) — confirm for relative
        # path items.
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
2098
2099
2100
def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    is_egg_info = lower.endswith('.egg-info')
    is_dist_info = (
        lower.endswith('.dist-info')
        and os.path.isdir(os.path.join(path_item, entry))
    )
    # Metadata directories/files always produce a distribution.
    if is_egg_info or is_dist_info:
        return distributions_from_metadata
    # Eggs and egg-links are skipped when `only` restricts the scan to
    # the path item itself.
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # Falsy, callable placeholder: entry yields no distributions.
    return NoDists()
2118
2119
2120
class NoDists:
    """
    Falsy, callable stand-in for entries that yield no distributions.

    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __bool__(self):
        # Always falsy, so dist_factory screening can discard the entry.
        return False

    def __call__(self, fullpath):
        # An empty iterator, whatever path is supplied.
        return iter(())
2133
2134
2135
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    try:
        return os.listdir(path)
    except OSError as e:
        # Ignore the directory if it does not exist, is not a directory,
        # or permission is denied; re-raise anything else.
        ignorable = (
            isinstance(e, (PermissionError, NotADirectoryError))
            or e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
        )
        if not ignorable:
            raise
    return ()
2149
2150
2151
def distributions_from_metadata(path):
    # Yield a single develop-precedence distribution for the metadata
    # directory (.egg-info/.dist-info dir) or metadata file at `path`.
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
2164
2165
2166
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        # Strip whitespace from every line and drop the empty results.
        yield from filter(None, map(str.strip, f))
2175
2176
2177
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.

    Returns the distributions found at the first referenced path, or an
    empty tuple when the .egg-link references nothing.
    """
    # Materialize the lines eagerly: non_empty_lines is a generator holding
    # an open file handle, and `next(dist_groups, ())` below only advances
    # it partially, which previously left the handle open until garbage
    # collection. Reading the whole (tiny) file up front closes it
    # deterministically.
    referenced_paths = list(non_empty_lines(path))
    resolved_paths = (
        # References are relative to the directory containing the .egg-link.
        os.path.join(os.path.dirname(path), ref)
        for ref in referenced_paths
    )
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())
2189
2190
2191
# Classic filesystem importers scan sys.path directories for distributions.
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package support: a handler per importer type,
# and the declared namespace packages (parent name -> child package names).
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2198
2199
2200
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # A later registration for the same importer type replaces the earlier one.
    _namespace_handlers[importer_type] = namespace_handler
2216
2217
2218
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        spec = importer.find_spec(packageName)
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)
    else:
        loader = spec.loader if spec else None

    if loader is None:
        # This path item cannot provide the package at all.
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module and attach it to its parent.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    # Ask the importer-specific handler for a subpath to add (None if the
    # equivalent subpath is already present).
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        importlib.import_module(packageName)
        # Keep __path__ ordered consistently with sys.path.
        _rebuild_mod_path(path, packageName, module)
    return subpath
2253
2254
2255
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort after all that are.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Strip the trailing package components (one per dot-separated
        # part of the package name) to recover the sys.path entry.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    # Mutate an existing list in place; replace other path types wholesale.
    if isinstance(module.__path__, list):
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path
2287
2288
2289
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Serialize against concurrent imports while mutating namespace state.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Recursively declare and import the parent, then search the
            # parent's __path__ rather than sys.path.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2321
2322
2323
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # Serialize against concurrent imports while updating namespace modules.
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # Recurse into child namespace packages under the new subpath.
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
2333
2334
2335
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    # Candidate subpath: the path item joined with the package's last
    # name component.
    last_component = packageName.rpartition('.')[2]
    subpath = os.path.join(path_item, last_component)
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
    return None
2346
2347
2348
# Filesystem and zip importers share the same namespace-subpath logic.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2353
2354
2355
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that contributes no subpath (the fallback)."""
    return None
2357
2358
2359
# Fallback: importer types with no specific handler contribute no subpaths.
register_namespace_handler(object, null_ns_handler)
2360
2361
2362
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks and case differences so equivalent paths compare equal.
    patched = _cygwin_patch(filename)
    return os.path.normcase(os.path.realpath(os.path.normpath(patched)))
2366
2367
2368
def _cygwin_patch(filename): # pragma: nocover
2369
"""
2370
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
2371
symlink components. Using
2372
os.path.abspath() works around this limitation. A fix in os.getcwd()
2373
would probably better, in Cygwin even more so, except
2374
that this seems to be by design...
2375
"""
2376
return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
2377
2378
2379
def _normalize_cached(filename, _cache={}):
    # Intentional mutable default: a module-lifetime memoization cache
    # for normalize_path results.
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2385
2386
2387
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    # Either a zipped .egg file or an unpacked .egg directory qualifies.
    if _is_zip_egg(path):
        return True
    return _is_unpacked_egg(path)
2392
2393
2394
def _is_zip_egg(path):
2395
return (
2396
path.lower().endswith('.egg') and
2397
os.path.isfile(path) and
2398
zipfile.is_zipfile(path)
2399
)
2400
2401
2402
def _is_unpacked_egg(path):
2403
"""
2404
Determine if given path appears to be an unpacked egg.
2405
"""
2406
return (
2407
path.lower().endswith('.egg') and
2408
os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
2409
)
2410
2411
2412
def _set_parent_ns(packageName):
2413
parts = packageName.split('.')
2414
name = parts.pop()
2415
if parts:
2416
parent = '.'.join(parts)
2417
setattr(sys.modules[parent], name, sys.modules[packageName])
2418
2419
2420
# Matches a dotted module/group name, e.g. "foo.bar.baz".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg basenames of the form name[-version[-pyX.Y[-platform]]];
# whitespace inside the pattern is ignored under re.VERBOSE.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2433
2434
2435
class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render back to the "name = module:attrs [extras]" source form.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            # Any non-default usage of load() is deprecated in favor of
            # the explicit resolve()/require() pair.
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attrs chain starting from the module.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc)) from exc

    def require(self, env=None, installer=None):
        # Ensure the entry point's distribution requirements (including
        # extras) are present in the working set.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Regex for one "name = module:attrs [extras]" entry-point line.
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Reuse the Requirement parser by prefixing a dummy project name;
        # reject anything with version specifiers.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Content before any [section] header is only allowed
                # when empty.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2565
2566
2567
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    # Take the first "Version:" header (case-insensitive); fall back to an
    # empty string so the extraction below yields an empty value.
    matched = next(
        (line for line in lines if line.lower().startswith('version:')),
        '',
    )
    value = matched.partition(':')[2]
    return safe_version(value.strip()) or None
2578
2579
2580
class Distribution:
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the file holding the distribution's core metadata;
    # dist-info subclasses override this.
    PKG_INFO = 'PKG-INFO'

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        # _version stays unset when no version is given; the `version`
        # property then falls back to reading the metadata lazily.
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        # Pick a more specific Distribution subclass by extension
        # (.egg/.egg-info/.dist-info) and parse the egg-style basename.
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # Hook for subclasses to re-derive the version from metadata.
        return self

    @property
    def hashcmp(self):
        # Tuple used for hashing and all ordering comparisons below.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            self.location,
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        # Cached lowercase project name, used as the comparison key.
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError as e:
            # Fall back to reading the Version header from metadata.
            version = self._get_version()
            if version is None:
                path = self._get_metadata_path_for_display(self.PKG_INFO)
                msg = (
                    "Missing 'Version:' header and/or {} file at path: {}"
                ).format(self.PKG_INFO, path)
                raise ValueError(msg, self) from e

            return version

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            # Name-mangled attribute (_Distribution__dep_map) caches the map.
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # Section names may carry markers, e.g. "extra:python_version<'3'".
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm

    def _build_dep_map(self):
        # Aggregate requirements per extra from the egg-style metadata files.
        dm = {}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError as e:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                ) from e
        return deps

    def _get_metadata_path_for_display(self, name):
        """
        Return the path to the given metadata file, if available.
        """
        try:
            # We need to access _get_metadata_path() on the provider object
            # directly rather than through this class's __getattr__()
            # since _get_metadata_path() is marked private.
            path = self._provider._get_metadata_path(name)

        # Handle exceptions e.g. in case the distribution's metadata
        # provider doesn't support _get_metadata_path().
        except Exception:
            return '[could not detect]'

        return path

    def _get_metadata(self, name):
        # Yield the metadata file's lines, or nothing if it is absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def _get_version(self):
        lines = self._get_metadata(self.PKG_INFO)
        version = _version_from_file(lines)

        return version

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Declared namespace packages must absorb this location too.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    def __dir__(self):
        # Merge this object's attributes with the provider's public ones,
        # mirroring the delegation in __getattr__.
        return list(
            set(super(Distribution, self).__dir__())
            | set(
                attr for attr in self._provider.__dir__()
                if not attr.startswith('_')
            )
        )

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        # Legacy (non PEP 440) versions need the arbitrary-equality "===".
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # Parse and cache entry_points.txt on first access.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    # FIXME: 'Distribution.insert_on' is too complex (13)
    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized shadow of `path` for comparisons; '' entries kept as-is.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        # Warn when a top-level module of this distribution was already
        # imported from somewhere else.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        # Extras are the non-None keys of the dependency map.
        return [dep for dep in self._dep_map if dep]
2995
2996
2997
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy)
        use an older safe_version, so their version numbers can
        get mangled when converted to filenames (e.g.,
        1.11.0.dev0+2329eae becomes 1.11.0.dev0_2329eae) and then
        fail to parse properly downstream by Distribution and
        safe_version. Take the extra step of reading the version
        from the metadata file itself instead of the filename.
        """
        version_from_metadata = self._get_version()
        if version_from_metadata:
            self._version = version_from_metadata
        return self
3014
3015
3016
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA uses email-header syntax; parse it once and cache.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Name-mangled to _DistInfoDistribution__dep_map, distinct from
        # the base class's cache attribute.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Yield requirements whose marker matches the given extra
            # (markerless requirements always apply).
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Per-extra lists exclude requirements already unconditional.
            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]

        return dm
3064
3065
3066
# Map a metadata path suffix to the Distribution subclass used to
# represent distributions found with that layout.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
3071
3072
3073
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module.

    Walks the call stack past every frame whose globals are this module's,
    so the warning points at user code rather than pkg_resources itself.
    """
    module_globals = globals()
    depth = 1
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(depth).f_globals is module_globals:
            depth += 1
    except ValueError:
        # Ran off the top of the stack; use the deepest valid level.
        pass
    warnings.warn(*args, stacklevel=depth + 1, **kw)
3084
3085
3086
def parse_requirements(strs):
    """
    Yield ``Requirement`` objects for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    lines = yield_lines(strs)
    uncommented = map(drop_comment, lines)
    logical_lines = join_continuation(uncommented)
    return map(Requirement, logical_lines)
3093
3094
3095
class RequirementParseError(packaging.requirements.InvalidRequirement):
    """Compatibility wrapper for InvalidRequirement"""
3097
3098
3099
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        # Keep the name exactly as written, then derive the normalized
        # project name and its lowercase lookup key.
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # Legacy-style (operator, version) pairs derived from the specifier.
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Everything that participates in equality/hashing, computed once.
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        # Requirements are equal iff their precomputed comparison keys match.
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # Accept either a Distribution (matched by key, then its version)
        # or a plain version value.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # Exactly one requirement must be present; tuple unpacking enforces
        # that, raising ValueError otherwise.
        req, = parse_requirements(s)
        return req
3149
3150
3151
def _always_object(classes):
3152
"""
3153
Ensure object appears in the mro even
3154
for old-style classes.
3155
"""
3156
if object not in classes:
3157
return classes + (object,)
3158
return classes
3159
3160
3161
def _find_adapter(registry, ob):
3162
"""Return an adapter factory for `ob` from `registry`"""
3163
types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
3164
for t in types:
3165
if t in registry:
3166
return registry[t]
3167
3168
3169
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    # exist_ok makes this a no-op when the directory is already present.
    os.makedirs(parent, exist_ok=True)
3173
3174
3175
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    parent, leaf = split(path)
    # Nothing to do for a bare filename, a root, or an existing directory.
    if not (parent and leaf) or isdir(parent):
        return
    # Create ancestors first, then this level.
    _bypass_ensure_directory(parent)
    try:
        mkdir(parent, 0o755)
    except FileExistsError:
        # Raced with another creator — the directory now exists, which is
        # all we wanted.
        pass
3186
3187
3188
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # Flush the previous segment before starting the new section.
        if current_section or current_content:
            yield current_section, current_content
        current_section = line[1:-1].strip()
        current_content = []
    # wrap up last segment
    yield current_section, current_content
3212
3213
3214
def _mkstemp(*args, **kw):
3215
old_open = os.open
3216
try:
3217
# temporarily bypass sandboxing
3218
os.open = os_open
3219
return tempfile.mkstemp(*args, **kw)
3220
finally:
3221
# and then put it back
3222
os.open = old_open
3223
3224
3225
# Silence the PEP440Warning by default, so that end users don't get hit by it
3226
# randomly just because they use pkg_resources. We want to append the rule
3227
# because we want earlier uses of filterwarnings to take precedence over this
3228
# one.
3229
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3230
3231
3232
# from jaraco.functools 1.3
3233
def _call_aside(f, *args, **kwargs):
3234
f(*args, **kwargs)
3235
return f
3236
3237
3238
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level API.
    public_api = {
        name: getattr(manager, name)
        for name in dir(manager)
        if not name.startswith('_')
    }
    g.update(public_api)
3248
3249
3250
class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``.

    Deliberately not derived from ``DeprecationWarning``, so these
    warnings remain visible by default.
    """
3257
3258
3259
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # Bound methods that become module-level API via the
    # ``globals().update(locals())`` at the end of this function.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish every local defined above (require, run_script, ...) as a
    # module-level global.
    globals().update(locals())
3297
3298