1# coding: utf-8
2"""
3Package resource API
4--------------------
5
6A resource is a logical file contained within a package, or a logical
7subdirectory thereof.  The package resource API expects resource names
8to have their path parts separated with ``/``, *not* whatever the local
9path separator is.  Do not use os.path operations to manipulate resource
10names being passed into the API.
11
12The package resource API is designed to work with normal filesystem packages,
13.egg files, and unpacked .egg files.  It can also work in a limited way with
14.zip files and with custom PEP 302 loaders that support the ``get_data()``
15method.
16"""
17
18from __future__ import absolute_import
19
20import sys
21import os
22import io
23import time
24import re
25import types
26import zipfile
27import zipimport
28import warnings
29import stat
30import functools
31import pkgutil
32import operator
33import platform
34import collections
35import plistlib
36import email.parser
37import errno
38import tempfile
39import textwrap
40import itertools
41import inspect
42from pkgutil import get_importer
43
44try:
45    import _imp
46except ImportError:
47    # Python 3.2 compatibility
48    import imp as _imp
49
50from pkg_resources.extern import six
51from pkg_resources.extern.six.moves import urllib, map, filter
52
53# capture these to bypass sandboxing
54from os import utime
55try:
56    from os import mkdir, rename, unlink
57    WRITE_SUPPORT = True
58except ImportError:
59    # no write support, probably under GAE
60    WRITE_SUPPORT = False
61
62from os import open as os_open
63from os.path import isdir, split
64
65try:
66    import importlib.machinery as importlib_machinery
67    # access attribute to force import under delayed import mechanisms.
68    importlib_machinery.__name__
69except ImportError:
70    importlib_machinery = None
71
72from . import py31compat
73from pkg_resources.extern import appdirs
74from pkg_resources.extern import packaging
75__import__('pkg_resources.extern.packaging.version')
76__import__('pkg_resources.extern.packaging.specifiers')
77__import__('pkg_resources.extern.packaging.requirements')
78__import__('pkg_resources.extern.packaging.markers')
79
80
# Reject the unsupported early Python 3 releases; Python 2 and
# Python >= 3.3 are the interpreters this module targets.
if (3, 0) < sys.version_info < (3, 3):
    raise RuntimeError("Python 3.3 or later is required")

if six.PY2:
    # Those builtin exceptions are only defined in Python 3
    PermissionError = None
    NotADirectoryError = None

# declare some globals that will be defined later to
# satisfy the linters.  The real values are bound at the bottom of the
# module (via _declare_state and the API-forwarding setup).
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None
108
109
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.  Emitted as a warning (not an error) so legacy versions keep
    working.
    """
115
116
def parse_version(v):
    """Parse version string `v` into a PEP 440 version object.

    Strings that do not conform to PEP 440 are wrapped in a
    ``LegacyVersion`` instead of raising.
    """
    try:
        parsed = packaging.version.Version(v)
    except packaging.version.InvalidVersion:
        parsed = packaging.version.LegacyVersion(v)
    return parsed
122
123
124_state_vars = {}
125
126
def _declare_state(vartype, **kw):
    """Register module-level state variables of type `vartype`.

    Each keyword becomes a module global, and its name is recorded in
    ``_state_vars`` so ``__getstate__``/``__setstate__`` know which
    snapshot/restore helper to apply to it.
    """
    globals().update(kw)
    for name in kw:
        _state_vars[name] = vartype
130
131
def __getstate__():
    """Snapshot all registered module state.

    Each variable in ``_state_vars`` is captured with the ``_sget_*``
    helper matching its registered vartype.
    """
    g = globals()
    return {
        name: g['_sget_' + kind](g[name])
        for name, kind in _state_vars.items()
    }
138
139
def __setstate__(state):
    """Restore module state previously captured by ``__getstate__``.

    Each entry is handed to the ``_sset_*`` helper matching its
    registered vartype; the state mapping is returned unchanged.
    """
    g = globals()
    for name, value in state.items():
        setter = g['_sset_' + _state_vars[name]]
        setter(name, g[name], value)
    return state
145
146
147def _sget_dict(val):
148    return val.copy()
149
150
def _sset_dict(key, ob, state):
    """Restore a dict-typed state variable in place, replacing its contents."""
    ob.clear()
    ob.update(state)
154
155
def _sget_object(val):
    """Snapshot an object-typed state variable via its own ``__getstate__``."""
    return val.__getstate__()
158
159
def _sset_object(key, ob, state):
    """Restore an object-typed state variable in place via ``__setstate__``."""
    ob.__setstate__(state)
162
163
164_sget_none = _sset_none = lambda *args: None
165
166
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    if sys.platform != "darwin":
        return plat
    m = macosVersionString.match(plat)
    if m is None:
        return plat
    try:
        running = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (running, m.group(3))
    except ValueError:
        # not Mac OS X after all; keep the build platform string
        pass
    return plat
189
190
# Public API of this module, grouped by purpose.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
236
237
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. DistributionNotFound('foo', None) -- class name + args tuple
        return '%s%r' % (self.__class__.__name__, self.args)
243
244
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # installed Distribution (first constructor argument)
        return self.args[0]

    @property
    def req(self):
        # requested Requirement (second constructor argument)
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self
275
276
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # set of requirement names that demanded the conflicting dist
        return self.args[2]
288
289
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the Requirement that could not be satisfied
        return self.args[0]

    @property
    def requirers(self):
        # who asked for it (iterable of names), or None/empty
        return self.args[1]

    @property
    def requirers_str(self):
        if self.requirers:
            return ', '.join(self.requirers)
        return 'the application'

    def report(self):
        """Render a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
315
316
class UnknownExtra(ResolutionError):
    """
    Distribution doesn't have an "extra feature" of the given name.
    """
319
320
321_provider_factories = {}
322
323PY_MAJOR = sys.version[:3]
324EGG_DIST = 3
325BINARY_DIST = 2
326SOURCE_DIST = 1
327CHECKOUT_DIST = 0
328DEVELOP_DIST = -1
329
330
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # recorded for lookup by get_provider() via _find_adapter
    _provider_factories[loader_type] = provider_factory
339
340
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # resolve the requirement to its active distribution
        dist = working_set.find(moduleOrReq)
        return dist or require(str(moduleOrReq))[0]
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)
352
353
354def _macosx_vers(_cache=[]):
355    if not _cache:
356        version = platform.mac_ver()[0]
357        # fallback for MacPorts
358        if version == '':
359            plist = '/System/Library/CoreServices/SystemVersion.plist'
360            if os.path.exists(plist):
361                if hasattr(plistlib, 'readPlist'):
362                    plist_content = plistlib.readPlist(plist)
363                    if 'ProductVersion' in plist_content:
364                        version = plist_content['ProductVersion']
365
366        _cache.append(version.split('.'))
367    return _cache[0]
368
369
370def _macosx_arch(machine):
371    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
372
373
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        plat = "macosx-%d.%d-%s" % (
            int(version[0]),
            int(version[1]),
            _macosx_arch(machine),
        )
    except ValueError:
        # non-Mac darwin system: fall back to the default platform string
        pass
    return plat
400
401
# Patterns for platform strings such as "macosx-10.12-x86_64" and the
# legacy "darwin-8.11.1-i386" form; used by compatible_platforms() and
# get_supported_platform().
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
406
407
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # easy cases: unspecified platform or exact match
    if provided is None or required is None:
        return True
    if provided == required:
        return True

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    provMac = macosVersionString.match(provided)
    if not provMac:
        # backwards compatibility for packages built before setuptools
        # 0.6, which used the "darwin-x.y.z-arch" designation
        provDarwin = darwinVersionString.match(provided)
        if not provDarwin:
            # egg isn't macosx or legacy darwin
            return False
        dversion = int(provDarwin.group(1))
        macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
        if dversion == 7 and macosversion >= "10.3":
            return True
        if dversion == 8 and macosversion >= "10.4":
            return True
        return False

    # must be the same major version and machine type...
    if provMac.group(1) != reqMac.group(1):
        return False
    if provMac.group(3) != reqMac.group(3):
        return False

    # ...and the required OS minor update must be >= the provided one
    return int(provMac.group(2)) <= int(reqMac.group(2))
452
453
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script in the *caller's* global namespace, emptied of
    # everything except __name__, so the script sees a pristine module
    # namespace (as if it were being run as __main__).
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script
465
466
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # progressively coerce: string -> Requirement -> Distribution
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)
476
477
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
481
482
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
486
487
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
491
492
class IMetadataProvider:
    """Interface for objects that provide access to distribution metadata."""
    # NOTE: these are interface *specifications*, not callable methods;
    # the missing ``self`` parameter is deliberate.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
514
515
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # NOTE: interface specifications only (no ``self``), like the base class.
    # Resource names always use '/' as the path separator, regardless of OS.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
542
543
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # entries mirrors the path list; entry_keys maps each path entry to
        # the project keys found on it; by_key maps a project key to its
        # single active Distribution; callbacks are notified on add().
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path hides the needed versions; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry has just been appended above
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Empty the caller's global namespace (keeping only __name__) so
        # the script runs as if it were the __main__ module.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # index under both the given entry and the dist's own location,
        # which may differ (e.g. an egg nested inside a directory entry)
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if any requirements are found on the path that have the correct name
        but the wrong version.  Otherwise, if an `installer` is supplied it
        will be invoked to obtain the correct version of the requirement and
        activate it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement.  Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        that contains all currently-available distributions.  If `full_env`
        is not supplied, one is created automatically from the ``WorkingSet``
        this method is called on, which will typically mean that every
        directory on ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # resolve against a shadow copy so this working set isn't mutated
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # notify every subscriber that `dist` was activated
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # pickle support: snapshot all mutable state as shallow copies
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        # pickle support: restore from the tuple built by __getstate__
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
929
930
931class _ReqExtras(dict):
932    """
933    Map each requirement to the extras that demanded it.
934    """
935
936    def markers_pass(self, req, extras=None):
937        """
938        Evaluate markers for req against each extra that
939        demanded it.
940
941        Return False if the req has a marker and fails
942        evaluation. Otherwise, return True.
943        """
944        extra_evals = (
945            req.marker.evaluate({'extra': extra})
946            for extra in self.get(req, ()) + (extras or (None,))
947        )
948        return not req.marker or any(extra_evals)
949
950
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the
        environment.  `search_path` should be a sequence of ``sys.path``
        items; when omitted, ``sys.path`` itself is scanned.

        `platform` optionally names the platform that platform-specific
        distributions must be compatible with; it defaults to the current
        platform.  `python` optionally names the desired Python version
        (e.g. ``'3.3'``) and defaults to the running version.

        Pass ``None`` for `platform` and/or `python` to map *all*
        distributions regardless of platform or Python version.
        """
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        Returns False unless the distribution matches the platform and
        python version requirements this environment was created with.
        """
        if (self.python is not None
                and dist.py_version is not None
                and dist.py_version != self.python):
            return False
        return compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Each distribution found is added via ``add()``, so only those
        conforming to the platform/python version defined at initialization
        actually end up in the environment.  `search_path` defaults to
        ``sys.path``.
        """
        if search_path is None:
            search_path = sys.path
        for entry in search_path:
            for dist in find_distributions(entry):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Comparison is case-insensitive: all of a project's distributions
        are assumed to be keyed by the project name lowercased.
        """
        return self._distmap.get(project_name.lower(), [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if not (self.can_add(dist) and dist.has_version()):
            return
        dists = self._distmap.setdefault(dist.key, [])
        if dist not in dists:
            dists.append(dist)
            # newest first
            dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        First asks `working_set` via ``find(req)`` whether a suitable
        distribution is already active (which may raise ``VersionConflict``
        unless `replace_conflicting` is true).  Failing that, returns the
        newest distribution in this environment satisfying `req`, and as a
        last resort the result of ``obtain(req, installer)``.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        for candidate in self[req.key]:
            if candidate in req:
                return candidate
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        The base class simply delegates to ``installer(requirement)`` when
        an installer is given, and returns None otherwise.  Subclasses may
        override this hook to try other acquisition strategies before
        falling back to the `installer` argument.
        """
        if installer is None:
            return None
        return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap:
            # skip keys whose distribution lists have been emptied
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        new = self.__class__([], platform=None, python=None)
        new += self
        new += other
        return new
1095
1096
# XXX backward compatibility
# Pre-Environment name for this class; kept as an alias for old callers.
AvailableDistributions = Environment
1099
1100
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Instances of this exception expose the following attributes:

    manager
        The ResourceManager whose extraction attempt failed

    cache_path
        The base directory resources were being extracted into

    original_error
        The underlying exception that caused extraction to fail
    """
1115
1116
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Tracks every path handed out by get_cache_path(), for cleanup.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        provider = get_provider(package_or_requirement)
        return provider.has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        provider = get_provider(package_or_requirement)
        return provider.resource_isdir(resource_name)

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        provider = get_provider(package_or_requirement)
        return provider.get_resource_filename(self, resource_name)

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        provider = get_provider(package_or_requirement)
        return provider.get_resource_stream(self, resource_name)

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        provider = get_provider(package_or_requirement)
        return provider.get_resource_string(self, resource_name)

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        provider = get_provider(package_or_requirement)
        return provider.resource_listdir(resource_name)

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(
            tmpl.format(old_exc=old_exc, cache_path=cache_path))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path is created if it does not
        already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        Only resource providers that need an extraction location should call
        this, and only for names they intend to extract: generated paths are
        tracked for possible later cleanup.
        """
        base = self.extraction_path or get_default_cache()
        target_path = os.path.join(base, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            self.extraction_error()

        self._warn_unsafe_extraction_path(base)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & (stat.S_IWOTH | stat.S_IWGRP):
            msg = (
                "%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path
            )
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms
        don't have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and
        `filename` is the name it will be renamed to by the caller after this
        routine returns.
        """
        if os.name != 'posix':
            return
        # Make the resource executable
        mode = (os.stat(tempname).st_mode | 0o555) & 0o7777
        os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If this is never called, the path defaults to the return value of
        ``get_default_cache()`` (based on the ``PYTHON_EGG_CACHE`` environment
        variable, with platform-specific fallbacks — see that routine's
        documentation for details).

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to
        a temporary directory, but then you must call ``cleanup_resources()``
        to delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )
        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully
        removed.  This function does not have any concurrency protection, so
        it should generally only be called when the extraction path is a
        temporary directory exclusive to a single process.  This method is
        not automatically called; you must call it explicitly or register it
        as an ``atexit`` function if you wish to ensure cleanup of a
        temporary directory used for extractions.
        """
        # XXX
1295
1296
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    cache = os.environ.get('PYTHON_EGG_CACHE')
    if cache:
        return cache
    return appdirs.user_cache_dir(appname='Python-Eggs')
1307
1308
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
1315
1316
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # prefer the PEP 440-normalized form when the version parses
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # fall back to a lossy sanitization of the original text
        cleaned = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', cleaned)
1327
1328
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    disallowed = re.compile('[^A-Za-z0-9.-]+')
    return disallowed.sub('_', extra).lower()
1336
1337
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1344
1345
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # scrub location info that is meaningless for marker strings
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
1358
1359
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        # present marker problems uniformly as SyntaxError to callers
        raise SyntaxError(e)
1373
1374
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by EggProvider subclasses when an enclosing .egg is found;
    # remain None for plain filesystem packages with no egg metadata.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # The PEP 302 loader (if any) that imported `module`, plus the
        # directory holding the module's source; resource paths resolve
        # relative to the latter.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes in a file-like object.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # Falsy (None/'') when there is no egg_info directory at all.
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # Metadata is stored as UTF-8 bytes; decode to text on Python 3.
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the named ``scripts/`` metadata entry in `namespace`.

        Raises ResolutionError if the script is not present in this
        provider's metadata.
        """
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # normalize all line endings to '\n' before compiling
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Prefer the real file so tracebacks point at it; use a context
            # manager so the handle is closed promptly instead of leaking
            # until garbage collection.
            with open(script_filename) as source_file:
                source = source_file.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still show source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Resource names use '/' separators; convert to native path parts.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1473
1474
# Fall back to NullProvider for any loader type without a more specific
# provider registered.
register_loader_type(object, NullProvider)
1476
1477
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Walk upward from the module path looking for an enclosing egg;
        # we use module_path rather than .archive because our metadata may
        # be nested inside a "basket" of multiple eggs.
        path = self.module_path
        previous = None
        while path != previous:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                return
            previous = path
            path, _tail = os.path.split(path)
1498
1499
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream straight from the filesystem; no extraction needed.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Cover the stock importlib file loaders (source and bytecode-only).
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)
1525
1526
# Hook DefaultProvider up to the standard importlib file loaders.
DefaultProvider._register()
1528
1529
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _listdir(self, path):
        return []

    def _get(self, path):
        return ''
1545
1546
# Shared singleton used wherever an always-empty provider is needed.
empty_provider = EmptyProvider()
1548
1549
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            manifest = {}
            for name in zfile.namelist():
                manifest[name.replace('/', os.sep)] = zfile.getinfo(name)
            return manifest

    load = build
1575
1576
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    # (manifest dict, archive mtime at the moment it was built)
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            # first sighting, or the archive changed on disk: rebuild
            self[path] = self.manifest_mod(self.build(path), mtime)

        return self[path].manifest
1595
1596
1597class ZipProvider(EggProvider):
1598    """Resource support for zips and eggs"""
1599
1600    eagers = None
1601    _zip_manifests = MemoizedZipManifests()
1602
1603    def __init__(self, module):
1604        EggProvider.__init__(self, module)
1605        self.zip_pre = self.loader.archive + os.sep
1606
1607    def _zipinfo_name(self, fspath):
1608        # Convert a virtual filename (full path to file) into a zipfile subpath
1609        # usable with the zipimport directory cache for our target archive
1610        fspath = fspath.rstrip(os.sep)
1611        if fspath == self.loader.archive:
1612            return ''
1613        if fspath.startswith(self.zip_pre):
1614            return fspath[len(self.zip_pre):]
1615        raise AssertionError(
1616            "%s is not a subpath of %s" % (fspath, self.zip_pre)
1617        )
1618
1619    def _parts(self, zip_path):
1620        # Convert a zipfile subpath into an egg-relative path part list.
1621        # pseudo-fs path
1622        fspath = self.zip_pre + zip_path
1623        if fspath.startswith(self.egg_root + os.sep):
1624            return fspath[len(self.egg_root) + 1:].split(os.sep)
1625        raise AssertionError(
1626            "%s is not a subpath of %s" % (fspath, self.egg_root)
1627        )
1628
1629    @property
1630    def zipinfo(self):
1631        return self._zip_manifests.load(self.loader.archive)
1632
1633    def get_resource_filename(self, manager, resource_name):
1634        if not self.egg_name:
1635            raise NotImplementedError(
1636                "resource_filename() only supported for .egg, not .zip"
1637            )
1638        # no need to lock for extraction, since we use temp names
1639        zip_path = self._resource_to_zip(resource_name)
1640        eagers = self._get_eager_resources()
1641        if '/'.join(self._parts(zip_path)) in eagers:
1642            for name in eagers:
1643                self._extract_resource(manager, self._eager_to_zip(name))
1644        return self._extract_resource(manager, zip_path)
1645
1646    @staticmethod
1647    def _get_date_and_size(zip_stat):
1648        size = zip_stat.file_size
1649        # ymdhms+wday, yday, dst
1650        date_time = zip_stat.date_time + (0, 0, -1)
1651        # 1980 offset already done
1652        timestamp = time.mktime(date_time)
1653        return timestamp, size
1654
1655    def _extract_resource(self, manager, zip_path):
1656
1657        if zip_path in self._index():
1658            for name in self._index()[zip_path]:
1659                last = self._extract_resource(
1660                    manager, os.path.join(zip_path, name)
1661                )
1662            # return the extracted directory name
1663            return os.path.dirname(last)
1664
1665        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1666
1667        if not WRITE_SUPPORT:
1668            raise IOError('"os.rename" and "os.unlink" are not supported '
1669                          'on this platform')
1670        try:
1671
1672            real_path = manager.get_cache_path(
1673                self.egg_name, self._parts(zip_path)
1674            )
1675
1676            if self._is_current(real_path, zip_path):
1677                return real_path
1678
1679            outf, tmpnam = _mkstemp(
1680                ".$extract",
1681                dir=os.path.dirname(real_path),
1682            )
1683            os.write(outf, self.loader.get_data(zip_path))
1684            os.close(outf)
1685            utime(tmpnam, (timestamp, timestamp))
1686            manager.postprocess(tmpnam, real_path)
1687
1688            try:
1689                rename(tmpnam, real_path)
1690
1691            except os.error:
1692                if os.path.isfile(real_path):
1693                    if self._is_current(real_path, zip_path):
1694                        # the file became current since it was checked above,
1695                        #  so proceed.
1696                        return real_path
1697                    # Windows, del old file and retry
1698                    elif os.name == 'nt':
1699                        unlink(real_path)
1700                        rename(tmpnam, real_path)
1701                        return real_path
1702                raise
1703
1704        except os.error:
1705            # report a user-friendly error
1706            manager.extraction_error()
1707
1708        return real_path
1709
1710    def _is_current(self, file_path, zip_path):
1711        """
1712        Return True if the file_path is current for this zip_path
1713        """
1714        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1715        if not os.path.isfile(file_path):
1716            return False
1717        stat = os.stat(file_path)
1718        if stat.st_size != size or stat.st_mtime != timestamp:
1719            return False
1720        # check that the contents match
1721        zip_contents = self.loader.get_data(zip_path)
1722        with open(file_path, 'rb') as f:
1723            file_contents = f.read()
1724        return zip_contents == file_contents
1725
1726    def _get_eager_resources(self):
1727        if self.eagers is None:
1728            eagers = []
1729            for name in ('native_libs.txt', 'eager_resources.txt'):
1730                if self.has_metadata(name):
1731                    eagers.extend(self.get_metadata_lines(name))
1732            self.eagers = eagers
1733        return self.eagers
1734
    def _index(self):
        """
        Return a mapping of directory path -> list of child names for
        every directory implied by the zip's file entries, building and
        caching it on first use.
        """
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        # Parent already indexed, so all its ancestors are
                        # too; record this child and stop walking upward.
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind
1751
1752    def _has(self, fspath):
1753        zip_path = self._zipinfo_name(fspath)
1754        return zip_path in self.zipinfo or zip_path in self._index()
1755
1756    def _isdir(self, fspath):
1757        return self._zipinfo_name(fspath) in self._index()
1758
1759    def _listdir(self, fspath):
1760        return list(self._index().get(self._zipinfo_name(fspath), ()))
1761
1762    def _eager_to_zip(self, resource_name):
1763        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
1764
1765    def _resource_to_zip(self, resource_name):
1766        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1767
1768
1769register_loader_type(zipimport.zipimporter, ZipProvider)
1770
1771
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Filesystem path of the standalone PKG-INFO file.
        self.path = path

    def has_metadata(self, name):
        # Only PKG-INFO is served, and only when the file really exists.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def _warn_on_replacement(self, metadata):
        # U+FFFD spelled as UTF-8 bytes for Python 2.7 source compatibility.
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        msg = tmpl.format(**locals())
        warnings.warn(msg)
1809
1810
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # ``path`` is the directory containing the importable code;
        # ``egg_info`` is the directory holding the metadata files.
        self.module_path = path
        self.egg_info = egg_info
1834
1835
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # When the importer points inside the archive, anchor the module
        # path at that sub-directory; otherwise at the archive itself.
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()
1849
1850
1851_declare_state('dict', _distribution_finders={})
1852
1853
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    # Looked up by _find_adapter in find_distributions.
    _distribution_finders[importer_type] = distribution_finder
1862
1863
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type.
    distribution_finder = _find_adapter(_distribution_finders, importer)
    return distribution_finder(importer, path_item, only)
1869
1870
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The archive itself is an egg distribution.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            # Recurse into eggs nested inside this archive.
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
1896
1897
1898register_finder(zipimport.zipimporter, find_eggs_in_zip)
1899
1900
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()
1903
1904
1905register_finder(object, find_nothing)
1906
1907
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def version_key(name):
        """
        Parse each dash-separated component of the filename
        (plus its extension) as a version.
        """
        base, ext = os.path.splitext(name)
        components = itertools.chain(base.split('-'), [ext])
        return [packaging.version.parse(component) for component in components]

    return sorted(names, key=version_key, reverse=True)
1932
1933
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg; it is the only
        # distribution it can contain.
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
1964
1965
def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    if lower.endswith(('.egg-info', '.dist-info')):
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # Falsy sentinel that yields no distributions when called.
    return NoDists()
1981
1982
class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __call__(self, fullpath):
        """Yield no distributions, whatever the path."""
        return iter(())

    def __bool__(self):
        # Falsy so callers can use an instance as a "no factory" flag.
        return False
    if six.PY2:
        __nonzero__ = __bool__
1998
1999
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    # Missing path, non-directory, and permission problems are ignorable.
    ignorable_errnos = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        pass
    except OSError as exc:
        # Python 2 on Windows reports "not a directory" as winerror 267.
        win_not_dir = getattr(exc, "winerror", None) == 267
        if exc.errno not in ignorable_errnos and not win_not_dir:
            raise
    return ()
2019
2020
def distributions_from_metadata(path):
    """Yield a develop-precedence Distribution for metadata at *path*."""
    root, entry = os.path.split(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
2034
2035
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        for raw in f:
            stripped = raw.strip()
            if stripped:
                yield stripped
2045
2046
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    # Each non-empty line of the .egg-link names a path relative to it.
    resolved_paths = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    dist_groups = map(find_distributions, resolved_paths)
    # Only the first referenced path contributes distributions.
    return next(dist_groups, ())
2059
2060
register_finder(pkgutil.ImpImporter, find_on_path)

# importlib_machinery is None when importlib.machinery failed to import,
# in which case hasattr() is simply False.
if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package support, populated via
# register_namespace_handler and declare_namespace below.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2068
2069
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Looked up by _find_adapter in _handle_ns.
    _namespace_handlers[importer_type] = namespace_handler
2086
2087
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # This path item does not contain the package at all.
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module with an empty __path__ and
        # attach it to its parent package.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # The handler contributed a new subpath: record it, load the
        # package from it, and re-order __path__ to match sys.path.
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
2112
2113
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not found on sys.path sort last.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        # Strip one trailing component per package level to recover the
        # sys.path entry the package was found under.
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return

    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
2145
2146
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Hold the import lock while mutating namespace-package state.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # Recursively declare the parent namespace, importing it if
            # it was not itself declared, then search on its __path__.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2178
2179
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # Recurse so nested namespace packages also pick up the
                # newly-contributed subpath.
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
2190
2191
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
2203
2204
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

# importlib_machinery is None when importlib.machinery failed to import.
if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2210
2211
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contribute no subpath."""
    return None
2214
2215
2216register_namespace_handler(object, null_ns_handler)
2217
2218
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks first, then apply the platform's case folding.
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
2222
2223
def _normalize_cached(filename, _cache={}):
    # The mutable default argument is deliberate: it memoizes
    # normalize_path results for the life of the process.
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2230
2231
2232def _is_egg_path(path):
2233    """
2234    Determine if given path appears to be an egg.
2235    """
2236    return path.lower().endswith('.egg')
2237
2238
def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    if not _is_egg_path(path):
        return False
    # An unpacked egg is a directory containing EGG-INFO/PKG-INFO.
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    return os.path.isfile(pkg_info)
2247
2248
2249def _set_parent_ns(packageName):
2250    parts = packageName.split('.')
2251    name = parts.pop()
2252    if parts:
2253        parent = '.'.join(parts)
2254        setattr(sys.modules[parent], name, sys.modules[packageName])
2255
2256
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # A sequence of strings: flatten recursively.
        for item in strs:
            for line in yield_lines(item):
                yield line
2269
2270
# Matches a dotted module/group name such as "a.b.c".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg basenames of the form name[-version[-pyVER[-platform]]].
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2284
2285
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # Attribute path within the module, e.g. ('Class', 'method').
        self.attrs = tuple(attrs)
        # Extras that must be required before loading the entry point.
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render in the same "name = module:attrs [extras]" syntax that
        # parse() accepts.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated.  Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the attribute path down from the imported module.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Matches "name = module:attr.path [extra1,extra2]".
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # Reuse the requirement parser by prefixing a dummy project name;
        # a version specifier in the extras is invalid.
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Lines before any [group] header are only legal if blank.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2416
2417
2418def _remove_md5_fragment(location):
2419    if not location:
2420        return ''
2421    parsed = urllib.parse.urlparse(location)
2422    if parsed[-1].startswith('md5='):
2423        return urllib.parse.urlunparse(parsed[:-1] + ('',))
2424    return location
2425
2426
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (
        line for line in lines
        if line.lower().startswith('version:')
    )
    # Only the first Version: header counts; missing -> empty string.
    line = next(version_lines, '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
2438
2439
2440class Distribution(object):
2441    """Wrap an actual or potential sys.path entry w/metadata"""
2442    PKG_INFO = 'PKG-INFO'
2443
    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        # Fall back to 'Unknown' so a Distribution can exist before its
        # metadata has been read.
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # _version is only set when given explicitly; otherwise the
            # `version` property reads it lazily from PKG-INFO.
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Without a metadata provider, fall back to the empty provider.
        self._provider = metadata or empty_provider
2456
    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution, choosing the subclass by file extension."""
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            # Extract name/version/pyver/platform from the egg filename.
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()
2473
    def _reload_version(self):
        # Hook invoked at the end of from_location; the base
        # implementation is a no-op returning self.
        return self
2476
    @property
    def hashcmp(self):
        # Tuple used consistently for ordering, equality, and hashing.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )
2487
    def __hash__(self):
        # Hash on the same tuple used by __eq__, keeping them consistent.
        return hash(self.hashcmp)
2490
    def __lt__(self, other):
        # Ordering delegates to the hashcmp tuple.
        return self.hashcmp < other.hashcmp
2493
    def __le__(self, other):
        # Ordering delegates to the hashcmp tuple.
        return self.hashcmp <= other.hashcmp
2496
    def __gt__(self, other):
        # Ordering delegates to the hashcmp tuple.
        return self.hashcmp > other.hashcmp
2499
    def __ge__(self, other):
        # Ordering delegates to the hashcmp tuple.
        return self.hashcmp >= other.hashcmp
2502
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        # Same class: compare the full hashcmp tuple.
        return self.hashcmp == other.hashcmp
2508
    def __ne__(self, other):
        # Explicit __ne__ needed for Python 2 compatibility.
        return not self == other
2511
2512    # These properties have to be lazy so that we don't have to load any
2513    # metadata until/unless it's actually needed.  (i.e., some distributions
2514    # may not know their name or version without loading PKG-INFO)
2515
    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            # Cache the lowercased project name on first access.
            self._key = key = self.project_name.lower()
            return key
2523
    @property
    def parsed_version(self):
        # Parse lazily and cache; self.version may itself be read lazily
        # from the distribution's metadata.
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version
2530
    def _warn_legacy_version(self):
        """Emit a PEP440Warning when the parsed version is a LegacyVersion."""
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
2556
    @property
    def version(self):
        """The distribution's version string (lazy-loaded from metadata)."""
        try:
            return self._version
        except AttributeError:
            # Not provided at construction time; fall back to the
            # Version header in PKG-INFO.
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version
2567
    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            return self.__dep_map
        except AttributeError:
            # Build once and cache on the instance.
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map
2579
    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        # Iterate over a snapshot of the non-null extras, since each
        # entry is popped and re-inserted under its normalized name.
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # Split an "extra_name:marker" key into its two parts.
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                # Marker invalid or unsatisfied: drop these requirements.
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm
2601
    def _build_dep_map(self):
        # Merge requirements from both historical metadata file names.
        dm = {}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm
2608
2609    def requires(self, extras=()):
2610        """List of Requirements needed for this distro if `extras` are used"""
2611        dm = self._dep_map
2612        deps = []
2613        deps.extend(dm.get(None, ()))
2614        for ext in extras:
2615            try:
2616                deps.extend(dm[safe_extra(ext)])
2617            except KeyError:
2618                raise UnknownExtra(
2619                    "%s has no such extra feature %r" % (self, ext)
2620                )
2621        return deps
2622
2623    def _get_metadata(self, name):
2624        if self.has_metadata(name):
2625            for line in self.get_metadata_lines(name):
2626                yield line
2627
2628    def activate(self, path=None, replace=False):
2629        """Ensure distribution is importable on `path` (default=sys.path)"""
2630        if path is None:
2631            path = sys.path
2632        self.insert_on(path, replace=replace)
2633        if path is sys.path:
2634            fixup_namespace_packages(self.location)
2635            for pkg in self._get_metadata('namespace_packages.txt'):
2636                if pkg in sys.modules:
2637                    declare_namespace(pkg)
2638
2639    def egg_name(self):
2640        """Return what this distribution's standard .egg filename should be"""
2641        filename = "%s-%s-py%s" % (
2642            to_filename(self.project_name), to_filename(self.version),
2643            self.py_version or PY_MAJOR
2644        )
2645
2646        if self.platform:
2647            filename += '-' + self.platform
2648        return filename
2649
2650    def __repr__(self):
2651        if self.location:
2652            return "%s (%s)" % (self, self.location)
2653        else:
2654            return str(self)
2655
2656    def __str__(self):
2657        try:
2658            version = getattr(self, 'version', None)
2659        except ValueError:
2660            version = None
2661        version = version or "[unknown version]"
2662        return "%s %s" % (self.project_name, version)
2663
2664    def __getattr__(self, attr):
2665        """Delegate all unrecognized public attributes to .metadata provider"""
2666        if attr.startswith('_'):
2667            raise AttributeError(attr)
2668        return getattr(self._provider, attr)
2669
2670    @classmethod
2671    def from_filename(cls, filename, metadata=None, **kw):
2672        return cls.from_location(
2673            _normalize_cached(filename), os.path.basename(filename), metadata,
2674            **kw
2675        )
2676
2677    def as_requirement(self):
2678        """Return a ``Requirement`` that matches this distribution exactly"""
2679        if isinstance(self.parsed_version, packaging.version.Version):
2680            spec = "%s==%s" % (self.project_name, self.parsed_version)
2681        else:
2682            spec = "%s===%s" % (self.project_name, self.parsed_version)
2683
2684        return Requirement.parse(spec)
2685
2686    def load_entry_point(self, group, name):
2687        """Return the `name` entry point of `group` or raise ImportError"""
2688        ep = self.get_entry_info(group, name)
2689        if ep is None:
2690            raise ImportError("Entry point %r not found" % ((group, name),))
2691        return ep.load()
2692
2693    def get_entry_map(self, group=None):
2694        """Return the entry point map for `group`, or the full entry map"""
2695        try:
2696            ep_map = self._ep_map
2697        except AttributeError:
2698            ep_map = self._ep_map = EntryPoint.parse_map(
2699                self._get_metadata('entry_points.txt'), self
2700            )
2701        if group is not None:
2702            return ep_map.get(group, {})
2703        return ep_map
2704
2705    def get_entry_info(self, group, name):
2706        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2707        return self.get_entry_map(group).get(name)
2708
    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        # nothing to insert (e.g. a metadata-only distribution)
        if not loc:
            return

        # compare normalized forms, but insert the caller-supplied `loc`
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                # keep `path` and the normalized shadow list in lockstep
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # loc was not found and no parent-egg insertion happened
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                # delete the later duplicate from both lists, then continue
                # scanning from that position
                del npath[np], path[np]
                # ha!
                p = np

        return
2776
2777    def check_version_conflict(self):
2778        if self.key == 'setuptools':
2779            # ignore the inevitable setuptools self-conflicts  :(
2780            return
2781
2782        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2783        loc = normalize_path(self.location)
2784        for modname in self._get_metadata('top_level.txt'):
2785            if (modname not in sys.modules or modname in nsp
2786                    or modname in _namespace_packages):
2787                continue
2788            if modname in ('pkg_resources', 'setuptools', 'site'):
2789                continue
2790            fn = getattr(sys.modules[modname], '__file__', None)
2791            if fn and (normalize_path(fn).startswith(loc) or
2792                       fn.startswith(self.location)):
2793                continue
2794            issue_warning(
2795                "Module %s was already imported from %s, but %s is being added"
2796                " to sys.path" % (modname, fn, self.location),
2797            )
2798
2799    def has_version(self):
2800        try:
2801            self.version
2802        except ValueError:
2803            issue_warning("Unbuilt egg for " + repr(self))
2804            return False
2805        return True
2806
2807    def clone(self, **kw):
2808        """Copy this distribution, substituting in any changed keyword args"""
2809        names = 'project_name version py_version platform location precedence'
2810        for attr in names.split():
2811            kw.setdefault(attr, getattr(self, attr, None))
2812        kw.setdefault('metadata', self._provider)
2813        return self.__class__(**kw)
2814
2815    @property
2816    def extras(self):
2817        return [dep for dep in self._dep_map if dep]
2818
2819
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        metadata_version = _version_from_file(
            self._get_metadata(self.PKG_INFO))
        if metadata_version:
            self._version = metadata_version
        return self
2837
2838
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # parse the METADATA file (email-header format) once
            raw = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(raw)
            return self._pkg_info

    @property
    def _dep_map(self):
        """Lazily computed map of extra -> requirements."""
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dep_map = self.__dep_map = {None: []}

        all_reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            all_reqs.extend(parse_requirements(req))

        def _reqs_for_extra(extra):
            # yield requirements applicable when `extra` is requested
            for req in all_reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(_reqs_for_extra(None))
        dep_map[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # each extra records only its additional (non-common) reqs
            dep_map[s_extra] = list(
                frozenset(_reqs_for_extra(extra)) - common)

        return dep_map
2887
2888
# Map metadata directory/file extension to the Distribution subclass
# that knows how to read that metadata format.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
2894
2895
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    stacklevel = 1
    own_globals = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(stacklevel).f_globals is own_globals:
            stacklevel += 1
    except ValueError:
        # ran off the top of the stack; attribute to the outermost frame
        pass
    warnings.warn(stacklevel=stacklevel + 1, *args, **kw)
2907
2908
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        # present all constructor args as one space-separated message
        message = ' '.join(self.args)
        return message
2912
2913
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # Strip only the single trailing backslash.  The previous code
            # sliced off two characters, which silently ate the last
            # character of the requirement whenever the backslash was not
            # preceded by whitespace.
            line = line[:-1].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
2934
2935
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        # the key is the canonical, case-insensitive lookup form
        self.key = project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # everything that participates in equality/hashing, precomputed
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        """Parse exactly one requirement specification from `s`."""
        (req,) = parse_requirements(s)
        return req
2987        return req
2988
2989
def _always_object(classes):
    """
    Ensure object appears in the mro even
    for old-style classes.
    """
    return classes if object in classes else classes + (object,)
2998
2999
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    # first matching class in MRO order wins; None when nothing matches
    return next((registry[t] for t in mro if t in registry), None)
3006
3007
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    # py31compat.makedirs tolerates pre-existing directories
    py31compat.makedirs(parent, exist_ok=True)
3012
3013
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    parent, leaf = split(path)
    if parent and leaf and not isdir(parent):
        # build ancestors first, then the immediate parent
        _bypass_ensure_directory(parent)
        mkdir(parent, 0o755)
3022
3023
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # emit the previous segment, unless it is the empty leading one
        if current_section or current_content:
            yield current_section, current_content
        current_section = line[1:-1].strip()
        current_content = []

    # wrap up last segment
    yield current_section, current_content
3048
3049
def _mkstemp(*args, **kw):
    """tempfile.mkstemp that bypasses a sandboxed ``os.open``."""
    sandboxed_open = os.open
    # temporarily restore the real os.open captured at import time
    os.open = os_open
    try:
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put the sandboxed version back
        os.open = sandboxed_open
3059
3060
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
# NOTE: applications can still surface the warning by installing their own
# filter afterwards.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3066
3067
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    """Call `f` once with the given arguments and return `f` itself.

    Used below as a decorator to execute a function at module-definition
    time while leaving the name bound to the original function.
    """
    f(*args, **kwargs)
    return f
3072
3073
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export every public ResourceManager method as a module-level API
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)
3084
3085
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # NOTE: these local names are deliberate -- the final
    # globals().update(locals()) call exports each of them (require,
    # iter_entry_points, add_activation_listener, run_script, run_main,
    # working_set) as module-level globals.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # publish the locals defined above as the module's public API
    globals().update(locals())
3124