1 """Package resource API
4 A resource is a logical file contained within a package, or a logical
5 subdirectory thereof. The package resource API expects resource names
6 to have their path parts separated with ``/``, *not* whatever the local
7 path separator is. Do not use os.path operations to manipulate resource
8 names being passed into the API.
10 The package resource API is designed to work with normal filesystem packages,
11 .egg files, and unpacked .egg files. It can also work in a limited way with
12 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 import sys, os, zipimport, time, re, imp
21 from sets import ImmutableSet as frozenset
23 # capture these to bypass sandboxing
24 from os import utime, rename, unlink, mkdir
25 from os import open as os_open
26 from os.path import isdir, split
28 from distutils import log
def _bypass_ensure_directory(name, mode=0777):
    # Sandbox-bypassing version of ensure_directory()
    # Uses the os/os.path functions captured at import time (utime, rename,
    # mkdir, isdir, split above) so that sandboxed setup scripts can still
    # create the extraction directories they need.
    dirname, filename = split(name)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        # NOTE(review): upstream pkg_resources calls mkdir(dirname, mode)
        # here; that line appears to be missing from this copy -- verify
        # against the setuptools source.
def _declare_state(vartype, **kw):
    """Register module-level state variables (name -> kind tag) so that the
    module-level __getstate__/__setstate__ helpers can snapshot and restore
    them via the matching _sget_<kind>/_sset_<kind> functions."""
    # NOTE(review): upstream also binds each value into globals(); those
    # lines appear to be missing from this copy.
    for name, val in kw.iteritems():
        _state_vars[name] = vartype
55 for k, v in _state_vars.iteritems():
56 state[k] = g['_sget_'+v](g[k])
def __setstate__(state):
    """Restore previously captured module-level state, dispatching each
    variable to the _sset_<kind> helper registered in _state_vars."""
    # NOTE(review): upstream binds g = globals() before the loop and returns
    # `state` at the end; those lines appear to be missing from this copy.
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)
def _sset_dict(key, ob, state):
    """Restore a dict-valued module state variable (for __setstate__)."""
    # NOTE(review): the body (upstream: ob.clear(); ob.update(state)) appears
    # to be missing from this copy -- verify against the setuptools source.
72 def _sget_object(val):
73 return val.__getstate__()
75 def _sset_object(key, ob, state):
76 ob.__setstate__(state)
78 _sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform(); m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        # On a real Mac, report the *running* OS version, not the build one.
        # NOTE(review): upstream wraps this in try/except ValueError and the
        # function ends with `return plat`; those lines appear to be missing
        # from this copy.
        plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
127 # Basic resource access and distribution/entry point discovery
128 'require', 'run_script', 'get_provider', 'get_distribution',
129 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
130 'resource_string', 'resource_stream', 'resource_filename',
131 'resource_listdir', 'resource_exists', 'resource_isdir',
133 # Environmental control
134 'declare_namespace', 'working_set', 'add_activation_listener',
135 'find_distributions', 'set_extraction_path', 'cleanup_resources',
138 # Primary implementation classes
139 'Environment', 'WorkingSet', 'ResourceManager',
140 'Distribution', 'Requirement', 'EntryPoint',
143 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
146 # Parsing functions and string utilities
147 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
148 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
149 'safe_extra', 'to_filename',
151 # filesystem utilities
152 'ensure_directory', 'normalize_path',
154 # Distribution "precedence" constants
155 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
157 # "Provider" interfaces, implementations, and registration/lookup APIs
158 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
159 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
160 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
161 'register_finder', 'register_namespace_handler', 'register_loader_type',
162 'fixup_namespace_packages', 'get_importer',
164 # Deprecated/backward compatibility only
165 'run_main', 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    # NOTE(review): the `def __repr__(self):` header for the line below
    # appears to be missing from this copy -- verify against the
    # setuptools source.
        return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
    """Raised when the version of an already-installed distribution clashes
    with the version a requirement asks for."""
class DistributionNotFound(ResolutionError):
    """Raised when no installed distribution satisfies a requested
    requirement."""
class UnknownExtra(ResolutionError):
    """Raised when a distribution does not define the requested
    "extra feature" name."""
181 _provider_factories = {}
182 PY_MAJOR = sys.version[:3]
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Defect fixed: the docstring above was unterminated in this copy, which
    # swallowed the assignment below into the string literal.
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        # A Requirement: resolve it to an active distribution (installing
        # via require() on a miss) and use that distribution as provider.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # NOTE(review): upstream wraps the first sys.modules lookup in
    # try/except KeyError and only imports the module on a miss; the
    # try/except lines appear to be missing from this copy.
    module = sys.modules[moduleOrReq]
    __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    # Find a provider factory registered for this module's loader type.
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Memoized lookup of the running Mac OS X version as a list of string
    # components (e.g. ['10', '5', '8']).  The mutable default argument is
    # deliberate: it serves as a cross-call cache.
    # NOTE(review): upstream guards the append with `if not _cache:` and
    # ends with `return _cache[0]`; those lines appear to be missing from
    # this copy.
    from platform import mac_ver
    _cache.append(mac_ver()[0].split('.'))
216 def _macosx_arch(machine):
217 return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Older Pythons report e.g. 'darwin-...' here; rewrite that into the
        # modern 'macosx-<major>.<minor>-<arch>' form.
        # NOTE(review): upstream wraps this in try/except ValueError and the
        # function ends with `return plat`; those lines appear to be missing
        # from this copy.
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
            _macosx_arch(machine))
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
# Patterns for parsing platform tags: the modern 'macosx-10.4-ppc' form and
# the legacy 'darwin-8.0.0-Power_Macintosh' form emitted before setuptools 0.6.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    # NOTE(review): several guard lines (e.g. `if reqMac:`, the early
    # returns and if/else framing) appear to be missing from this copy;
    # only the statements that are present are kept below.  Verify the
    # full control flow against the setuptools source.
    provMac = macosVersionString.match(provided)

    # is this a Mac package?
    # this is backwards compatibility for packages built before
    # setuptools 0.6. All packages built after this point will
    # use the new macosx designation.
    provDarwin = darwinVersionString.match(provided)
    dversion = int(provDarwin.group(1))
    macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    if dversion == 7 and macosversion >= "10.3" or \
        dversion == 8 and macosversion >= "10.4":
        #warnings.warn("Mac eggs should be rebuilt to "
        #    "use the macosx designation instead of darwin.",
        #    category=DeprecationWarning)
    return False # egg isn't macosx or legacy darwin

    # are they the same major version and machine type?
    if provMac.group(1) != reqMac.group(1) or \
        provMac.group(3) != reqMac.group(3):

    # is the required OS major update >= the provided one?
    if int(provMac.group(2)) > int(reqMac.group(2)):

    # XXX Linux and other platforms' special cases should go here
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # The script runs in the *caller's* global namespace so that it behaves
    # as if it were executed as __main__.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    # NOTE(review): upstream calls ns.clear() here before reinstating
    # __name__; that line appears to be missing from this copy.
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script # backward compatibility
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize progressively: string -> Requirement -> Distribution.
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    # NOTE(review): upstream ends with `return dist`; that line appears to
    # be missing from this copy.
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
class IMetadataProvider:
    """Interface for objects exposing a distribution's metadata files."""

    def has_metadata(name):
        """Return true if the package's distribution ships metadata `name`"""

    def get_metadata(name):
        """Return the contents of metadata resource `name` as a string"""

    def get_metadata_lines(name):
        """Yield the non-blank, non-comment lines of metadata resource `name`

        Each yielded line has leading and trailing whitespace removed;
        lines whose first non-blank character is ``#`` are skipped."""

    def metadata_isdir(name):
        """Return whether metadata `name` is a directory
        (``os.path.isdir()`` analogue)"""

    def metadata_listdir(name):
        """Return the metadata names inside directory `name`
        (``os.listdir()`` analogue)"""

    def run_script(script_name, namespace):
        """Execute script `script_name` in the dictionary `namespace`"""
class IResourceProvider(IMetadataProvider):
    """Interface for objects that expose a package's resource files"""

    def get_resource_filename(manager, resource_name):
        """Return a real filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return an open, readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return the full contents of `resource_name` as a string

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Return whether the package ships a resource named `resource_name`"""

    def resource_isdir(resource_name):
        """Return whether resource `resource_name` is a directory
        (``os.path.isdir()`` analogue)"""

    def resource_listdir(resource_name):
        """Return the resource names inside directory `resource_name`
        (``os.listdir()`` analogue)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""
    # NOTE(review): large parts of this class appear to be missing from this
    # copy (attribute initializations, loop/branch headers, try/except frames
    # and several return statements).  The code below preserves only the
    # statements that are present; verify behavior against the setuptools
    # source before relying on it.

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # NOTE(review): the initializations of self.entries, self.entry_keys,
        # self.by_key and self.callbacks, plus the `entries = sys.path`
        # default, appear to be missing here.
        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        # An entry may legitimately appear more than once, mirroring sys.path.
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    # NOTE(review): the `def find(self, req):` header for the following
    # method body appears to be missing from this copy.
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req) # XXX add more info
        # NOTE(review): upstream ends with `return dist`; missing here.

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        # NOTE(review): the enclosing `for dist in self:` loop, the
        # `if name is None:` branch and the yield statements appear to be
        # missing from this copy.
        entries = dist.get_entry_map(group)
        for ep in entries.values():
        elif name in entries:

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Execute in the *caller's* globals so the script sees a
        # __main__-like namespace.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        # NOTE(review): upstream calls ns.clear() here; missing in this copy.
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    # NOTE(review): the `def __iter__(self):` header for the following
    # method body appears to be missing from this copy.
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        for item in self.entries:
            for key in self.entry_keys[item]:
                # NOTE(review): upstream de-duplicates with a `seen` dict
                # wrapped around the yield below.
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        # NOTE(review): the `if insert:` and `if entry is None:` guards and
        # the try: opening of the try/except below appear to be missing
        # from this copy.
        dist.insert_on(self.entries, entry)
        entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return # ignore hidden distros

        # If we have a __requires__ then we can already tell if this
        # dist is unsatisfactory, in which case we won't add it.
        if __requires__ is not None:
            for thisreqstr in __requires__:
                for thisreq in parse_requirements(thisreqstr):
                    if thisreq.key == dist.key:
                        if dist not in thisreq:
                except ValueError, e:
                    e.args = tuple(e.args + ({'thisreqstr': thisreqstr},))

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers of the newly activated distribution.
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution``
        or ``None``.
        """
        # Requirements are processed breadth-first off a stack (reversed so
        # pop(0)/extend keep the original ordering).
        requirements = list(requirements)[::-1] # set up the stack
        processed = {} # set of processed requirements
        best = {} # key -> dist
        # NOTE(review): the `to_activate = []` setup, the
        # `while requirements:` loop header and most if/else framing appear
        # to be missing from this copy; only the statements below remain.
        req = requirements.pop(0) # process dependencies breadth-first
        # Ignore cyclic or redundant dependencies
        log.info("\nAlready processed %s", req)
        log.info("\nNeed %s", req)
        dist = best.get(req.key)
        # Find the best distribution and add it to the map
        dist = self.by_key.get(req.key)
        env = Environment(self.entries)
        dist = best[req.key] = env.best_match(req, self, installer)
        raise DistributionNotFound(req) # XXX put more info here
        log.info(" found %r", dist)
        to_activate.append(dist)
        # Oops, the "best" so far conflicts with a dependency
        raise VersionConflict(dist,req) # XXX put more info here
        to_add = dist.requires(req.extras)[::-1]
        log.info(" no subdependencies to add")
        elif len(to_add) == 1:
            log.info(" adding subdependency %s", "; ".join(map(str, to_add)))
        log.info(" adding subdependencies %s", "; ".join(map(str, to_add)))
        requirements.extend(to_add)
        processed[req] = True

        return to_activate # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
        # NOTE(review): the closing `):` of this signature appears to be
        # missing from this copy.
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions) # add plugins+libs to sys.path
            print "Couldn't load", errors # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        plugin_projects.sort() # scan project names in alphabetic order

        # NOTE(review): the error_info/distributions dict setup, the
        # `if full_env is None:`/`else:` framing and the try: opening of the
        # try/except below appear to be missing from this copy.
        env = Environment(self.entries)
        env = full_env + plugin_env
        shadow_set = self.__class__([])
        map(shadow_set.add, self) # put all our entries in shadow_set

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError,v:
                    error_info[dist] = v # save error info
                    continue # try the next older version of project
                    break # give up on this project, keep going
                map(shadow_set.add, resolvees)
                distributions.update(dict.fromkeys(resolvees))
                # success, no need to try any more versions of this project

        distributions = list(distributions)
        # NOTE(review): upstream sorts `distributions` before returning.
        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))
        # NOTE(review): upstream activates each needed dist via self.add()
        # and ends with `return needed`; those lines appear to be missing.

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        # NOTE(review): the early `return` under the membership test and the
        # loop invoking `callback` on already-active dists appear to be
        # missing from this copy.
        if callback in self.callbacks:
        self.callbacks.append(callback)

    def _added_new(self, dist):
        # Notify every subscribed callback about a newly activated dist.
        # NOTE(review): the loop body (`callback(dist)`) appears to be
        # missing from this copy.
        for callback in self.callbacks:

    def __getstate__(self):
        # Snapshot of mutable state (entries, key maps, callbacks) for
        # pickling/cloning.
        # NOTE(review): the surrounding `return (...)` and the callbacks
        # element appear to be missing from this copy.
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),

    def __setstate__(self, (entries, keys, by_key, callbacks)):
        # Python 2 tuple-parameter unpacking (not valid Python 3 syntax).
        # Restores the snapshot produced by __getstate__, copying each
        # container so the restored set does not alias the snapshot.
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""
    # NOTE(review): several statements of this class appear to be missing
    # from this copy; only the lines that are present are kept below.
    # Verify behavior against the setuptools source.

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # Note: the platform/python defaults above are evaluated once, at
        # class definition time (they are default argument values).
        # NOTE(review): the _distmap/_cache initializations and
        # `self.python = python` appear to be missing from this copy.
        self.platform = platform
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                # NOTE(review): upstream calls self.add(dist) here; that
                # line appears to be missing from this copy.

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        # NOTE(review): the try/except KeyError framing, the KeyError raise
        # for unknown projects and the _sort_dists() call appear to be
        # missing from this copy.
        return self._cache[project_name]
        project_name = project_name.lower()
        if project_name not in self._distmap:
        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
        return self._cache[project_name]

    # NOTE(review): the `def add(self, dist):` header for the following
    # method body appears to be missing from this copy.
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                # NOTE(review): upstream appends `dist` to `dists` here.
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)

        If a suitable distribution isn't active, this method returns the
        newest platform-dependent distribution in the environment that meets
        the ``Requirement`` in `req`. If no suitable platform-dependent
        distribution is found, then the newest platform-independent
        distribution that meets the requirement is returned. (A platform-
        dependent distribution will typically have code compiled or
        specialized for that platform.)

        Otherwise, if `installer` is supplied, then the result of calling the
        environment's ``obtain(req, installer)`` method will be returned.
        """
        dist = working_set.find(req)
        # NOTE(review): the early `return dist` statements of the branches
        # below appear to be missing from this copy.

        # first try to find a platform-dependent dist
        for dist in self[req.key]:
            if dist in req and dist.platform is not None:

        # then try any other dist
        for dist in self[req.key]:

        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    # NOTE(review): the `def __iter__(self):` header for the following
    # method body appears to be missing from this copy.
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        # NOTE(review): the self.add(...) calls, the `else:` branch framing
        # and the final `return self` appear to be missing from this copy.
        if isinstance(other,Distribution):
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
        raise TypeError("Can't add %r to environment" % (other,))

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Build a fresh, unconstrained (platform/python = None) environment
        # and fold both operands into it.
        # NOTE(review): the loop body (`new += env`) and the final
        # `return new` appear to be missing from this copy.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:

AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
class ResourceManager:
    """Manage resource extraction and packages"""
    # NOTE(review): several statements of this class appear to be missing
    # from this copy -- notably the `def __init__(self):` header, some
    # raise statements, try/except frames and the continuation lines of the
    # get_provider(...) delegation calls.  Verify against the setuptools
    # source before relying on this code.

    # Base path for extraction; None means "use get_default_cache()".
    extraction_path = None

    # NOTE(review): the line below belongs to the missing
    # `def __init__(self):` -- it maps extracted target paths -> 1 so they
    # can be cleaned up later.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            # NOTE(review): the `resource_name` argument and closing paren
            # of this call appear to be missing from this copy.

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            # NOTE(review): continuation lines missing, as above.

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            # NOTE(review): continuation lines missing, as above.

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            # NOTE(review): continuation lines missing, as above.

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            # NOTE(review): continuation lines missing, as above.

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        # NOTE(review): parts of the message template below (including its
        # %s placeholders), the closing paren of the ExtractionError(...)
        # call, `err.manager = self` and the final `raise err` appear to be
        # missing from this copy.
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
The Python egg cache directory is currently set to:
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        err.cache_path = cache_path
        err.original_error = old_exc

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        # NOTE(review): upstream wraps the directory creation in try/except
        # (calling self.extraction_error() only on failure) and ends with
        # `return target_path`; that framing appears to be missing here.
        _bypass_ensure_directory(target_path)
        self.extraction_error()
        self.cached_files[target_path] = 1

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            # NOTE(review): upstream wraps the message below in
            # `raise ValueError(...)`; the raise framing appears to be
            # missing, leaving a no-op string expression.
            "Can't change extraction path, files already extracted"

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # NOTE(review): the method body appears to be missing from this copy
        # (upstream leaves this unimplemented as well).
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    # NOTE(review): this copy appears to be missing the try/except around
    # the environment lookup, the `os.name` branch framing, the
    # `app_homes = [` / `]` assignment around the tuples below, the inner
    # `for key in keys:` loop and the final `raise RuntimeError(...)`.
    # Verify the full control flow against the setuptools source.
    return os.environ['PYTHON_EGG_CACHE']
    return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data' # XXX this may be locale-specific!
    (('APPDATA',), None), # best option, should be locale-safe
    (('USERPROFILE',), app_data),
    (('HOMEDRIVE','HOMEPATH'), app_data),
    (('HOMEPATH',), app_data),
    (('WINDIR',), app_data), # 95/98/ME

    for keys, subdir in app_homes:
        if key in os.environ:
            dirname = os.path.join(dirname, os.environ[key])
        dirname = os.path.join(dirname,subdir)
        return os.path.join(dirname, 'Python-Eggs')

    # NOTE(review): part of a missing `raise RuntimeError(...)`.  Also note
    # the typo "enviroment" in the message (present in upstream too).
    "Please set the PYTHON_EGG_CACHE enviroment variable"
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Defect fixed: the docstring above was unterminated in this copy, which
    # swallowed the return statement into the string literal.
    return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    # Defect fixed: the docstring above was unterminated in this copy.
    version = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name.

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return normalized.lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1189 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    def __init__(self, module):
        # Capture the module's PEP 302 loader (if any) and the directory
        # containing the module's source/compiled file.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    def get_resource_filename(self, manager, resource_name):
        # Resolve a '/'-separated resource name to a filesystem path.
        return self._fn(self.module_path, resource_name)
    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw resource contents in a file-like StringIO object.
        return StringIO(self.get_resource_string(manager, resource_name))
    def get_resource_string(self, manager, resource_name):
        # Fetch the resource's raw contents via the loader hook.
        return self._get(self._fn(self.module_path, resource_name))
    def has_resource(self, resource_name):
        # True if the named resource exists under module_path.
        return self._has(self._fn(self.module_path, resource_name))
    def has_metadata(self, name):
        # Metadata can only exist when an egg-info location is known.
        return self.egg_info and self._has(self._fn(self.egg_info,name))
    def get_metadata(self, name):
        if not self.egg_info:
            # NOTE(review): the early-exit value for "no egg_info" is not
            # visible in this view — upstream returns "" here; confirm.
        return self._get(self._fn(self.egg_info,name))
    def get_metadata_lines(self, name):
        # Split metadata into non-blank, non-comment lines.
        return yield_lines(self.get_metadata(name))
    def resource_isdir(self,resource_name):
        # True if the named resource is a subdirectory of the package.
        return self._isdir(self._fn(self.module_path, resource_name))
    def metadata_isdir(self,name):
        # True if the named metadata entry is a directory under egg_info.
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
    def resource_listdir(self,resource_name):
        # List names immediately under the given resource directory.
        return self._listdir(self._fn(self.module_path,resource_name))
    def metadata_listdir(self,name):
        # NOTE(review): an `if self.egg_info:` guard (with an `[]` fallback)
        # is not visible in this view — confirm against upstream.
        return self._listdir(self._fn(self.egg_info,name))
    def run_script(self,script_name,namespace):
        # Execute a script stored under EGG-INFO/scripts/ inside `namespace`.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize line endings so compile() accepts the text.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Real file on disk: run it directly so tracebacks show the file.
            execfile(script_filename, namespace, namespace)
        # NOTE(review): an `else:` introducing the in-memory path below, and
        # the closing paren of the cache tuple, are not visible in this view.
        # Seed linecache so tracebacks can show source for the zipped script.
        from linecache import cache
        cache[script_filename] = (
            len(script_text), 0, script_text.split('\n'), script_filename
        script_code = compile(script_text,script_filename,'exec')
        exec script_code in namespace, namespace
    def _has(self, path):
        # Abstract hook: concrete providers override this.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _isdir(self, path):
        # Abstract hook: concrete providers override this.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _listdir(self, path):
        # Abstract hook: concrete providers override this.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
1270 def _fn(self, base, resource_name):
1272 return os.path.join(base, *resource_name.split('/'))
    def _get(self, path):
        # Delegate to the PEP 302 loader's optional get_data() extension.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
1282 register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self,module):
        NullProvider.__init__(self,module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        # NOTE(review): the enclosing loop that walks `path` upward (and its
        # break) is not visible in this view — confirm against upstream.
        if path.lower().endswith('.egg'):
            self.egg_name = os.path.basename(path)
            self.egg_info = os.path.join(path, 'EGG-INFO')
            self.egg_root = path
        path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self,path):
        return os.path.isdir(path)

    def _listdir(self,path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Binary mode so callers get raw bytes unmodified.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        stream = open(path, 'rb')
        # NOTE(review): the try/finally that closes `stream` is not fully
        # visible in this view.
        return stream.read()
1333 register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # Stub out the filesystem hooks so every query reports "nothing here".
    _isdir = _has = lambda self,path: False
    _get = lambda self,path: ''
    _listdir = lambda self,path: []
1347 empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""
    # NOTE(review): several structural lines of this class (the `eagers`
    # class attribute, `date_time = (`, try/except scaffolding, returns)
    # are not visible in this view — confirm against upstream before use.

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # Reuse zipimport's per-archive directory cache rather than
        # re-reading the zip's central directory.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources (e.g. native libs) are all extracted together.
            self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Directories are extracted recursively, one member at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
            return os.path.dirname(last) # return the extracted directory name
        # Zip directory entry: indexes 5/6 hold DOS-packed time/date, 3 size.
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
        (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        timestamp = time.mktime(date_time)
        real_path = manager.get_cache_path(
            self.egg_name, self._parts(zip_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, don't bother extracting
        # Extract to a temp name, then atomically rename into place.
        outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
        os.write(outf, self.loader.get_data(zip_path))
        utime(tmpnam, (timestamp,timestamp))
        manager.postprocess(tmpnam, real_path)
        rename(tmpnam, real_path)
        # Rename race: another process may have extracted the same file.
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, somebody did it just ahead of
                elif os.name=='nt': # Windows, del old file and retry
                rename(tmpnam, real_path)
        manager.extraction_error() # report a user-friendly error

    def _get_eager_resources(self):
        if self.eagers is None:
            # NOTE(review): the `eagers = []` initialization and the final
            # `return self.eagers` are not visible in this view.
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers

    # NOTE(review): the `def _index(self):` header, `try:` and the `ind = {}`
    # initialization are not visible in this view; the body below builds a
    # parent-dir -> child-names map from the zip directory cache.
            return self._dirindex
        except AttributeError:
            for path in self.zipinfo:
                parts = path.split(os.sep)
                parent = os.sep.join(parts[:-1])
                ind[parent].append(parts[-1])
                ind[parent] = [parts.pop()]
            self._dirindex = ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1491 register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self,path):
        # NOTE(review): the `self.path = path` assignment is not visible in
        # this view — confirm against upstream.

    def has_metadata(self,name):
        return name=='PKG-INFO'

    def get_metadata(self,name):
        if name=='PKG-INFO':
            # 'rU' = universal-newlines text mode (Python 2).
            return open(self.path,'rU').read()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self,name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the importable sys.path entry; `egg_info` is the
        # directory holding the distribution's metadata files.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        # NOTE(review): an `if importer.prefix:` / `else:` around the two
        # module_path assignments below is not visible in this view.
        self.module_path = os.path.join(importer.archive, importer.prefix)
        self.module_path = importer.archive
        self._setup_prefix()
1599 """PEP 302 Importer that wraps Python's "normal" import algorithm"""
    def __init__(self, path=None):
        # NOTE(review): the body (`self.path = path` upstream) is not
        # visible in this view.
    def find_module(self, fullname, path=None):
        # Handle only the final dotted component: a path-bound ImpWrapper
        # serves submodules; the unbound one serves top-level names.
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
        # NOTE(review): the early `return None` branches, the path setup and
        # the try/except around imp.find_module are not visible in this view.
        if self.path is None:
        file, filename, etc = imp.find_module(subname, path)
        return ImpLoader(file, filename, etc)
1620 """PEP 302 Loader that wraps Python's "normal" import algorithm"""
    def __init__(self, file, filename, etc):
        self.filename = filename
        # NOTE(review): upstream also stores `self.file` and `self.etc`;
        # those assignments are not visible in this view.
    def load_module(self, fullname):
        mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        # Close the file handle opened by find_module once loading is done.
        if self.file: self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        # NOTE(review): the try/finally and the `return mod` are not visible
        # in this view.
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery. The returned importer is only cached if it was created by a
    path hook.
    """
    importer = sys.path_importer_cache[path_item]
    for hook in sys.path_hooks:
        importer = hook(path_item)
        sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
        importer = ImpWrapper(path_item)
    # NOTE(review): the try/except KeyError around the cache lookup, the
    # per-hook ImportError handling, and the final `return importer` are not
    # visible in this view.
1680 _declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Keyed by importer type; _find_adapter walks the MRO to pick a finder.
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Dispatch to whichever registered finder matches this importer's type.
    imp_obj = get_importer(path_item)
    find = _find_adapter(_distribution_finders, imp_obj)
    return find(imp_obj, path_item, only)
def find_in_zip(importer, path_item, only=False):
    # Finder for zipped eggs: yield the egg itself, then any eggs nested
    # inside a "basket" zip.
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    # NOTE(review): an `if only:` guard before this return is not visible
    # in this view.
    return # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                # NOTE(review): the `yield dist` in this loop is not visible
                # in this view.
1710 register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand.

    On first call, replaces this module-level name with the fastest
    available StringIO implementation (cStringIO when present), then
    delegates to it.  Python 2 only.
    """
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
def find_nothing(importer, path_item, only=False):
    """Default finder: no distributions can exist on this kind of path item.

    Registered for plain ``object`` so unrecognized importer types yield
    nothing instead of failing.
    """
    return ()
1723 register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    # NOTE(review): several structural lines (the `else:` separating the
    # .egg-directory case from the directory scan, `yield dist`/`yield item`
    # in the nested loops, closing parens) are not visible in this view.
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # The path entry itself is an unpacked egg directory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
        # scan for .egg and .egg-info in directory
        for entry in os.listdir(path_item):
            lower = entry.lower()
            if lower.endswith('.egg-info'):
                fullpath = os.path.join(path_item, entry)
                if os.path.isdir(fullpath):
                    # egg-info directory, allow getting metadata
                    metadata = PathMetadata(path_item, fullpath)
                metadata = FileMetadata(fullpath)
                yield Distribution.from_location(
                    path_item,entry,metadata,precedence=DEVELOP_DIST
            elif not only and lower.endswith('.egg'):
                for dist in find_distributions(os.path.join(path_item, entry)):
            elif not only and lower.endswith('.egg-link'):
                # .egg-link files contain paths to the actual distributions.
                for line in file(os.path.join(path_item, entry)):
                    if not line.strip(): continue
                    for item in find_distributions(os.path.join(path_item,line.rstrip())):
1760 register_finder(ImpWrapper, find_on_path)
1762 _declare_state('dict', _namespace_handlers = {})
1763 _declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    # NOTE(review): several early `return None` branches (no importer, no
    # loader), the `if module is None:` guard, and the final `return subpath`
    # are not visible in this view.
    importer = get_importer(path_item)
    if importer is None:
    loader = importer.find_module(packageName)
    module = sys.modules.get(packageName)
    # Create a fresh namespace module when one doesn't exist yet.
    module = sys.modules[packageName] = imp.new_module(packageName)
    module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # Preserve __path__ across load_module, which may replace it.
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    # NOTE(review): the imp.acquire_lock()/release_lock() try/finally, the
    # early return for already-declared packages, and the `try:` before the
    # parent __path__ lookup are not visible in this view.
    if packageName in _namespace_packages:
    path, parent = sys.path, None
    if '.' in packageName:
        # Recursively declare every ancestor package as a namespace too.
        parent = '.'.join(packageName.split('.')[:-1])
        declare_namespace(parent)
        path = sys.modules[parent].__path__
    except AttributeError:
        raise TypeError("Not a package:", parent)
    # Track what packages are namespaces, so when new path items are added,
    # they can be updated
    _namespace_packages.setdefault(parent,[]).append(packageName)
    _namespace_packages.setdefault(packageName,[])
    for path_item in path:
        # Ensure all the parent's path items are reflected in the child,
        _handle_ns(packageName, path_item)
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # NOTE(review): the surrounding imp lock acquire/release try/finally is
    # not visible in this view.
    for package in _namespace_packages.get(parent,()):
        subpath = _handle_ns(package, path_item)
        # Recurse so nested namespace packages pick up the new entry too.
        if subpath: fixup_namespace_packages(subpath,package)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            # Only return the path if it's not already there
            # NOTE(review): the `break`/`else: return subpath` structure of
            # this loop is not visible in this view.
1856 register_namespace_handler(ImpWrapper,file_ns_handler)
1857 register_namespace_handler(zipimport.zipimporter,file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importers with no subpath support.

    Always returns None, i.e. contributes no new entry to the namespace
    package's __path__.  Registered as the fallback for plain ``object``.
    """
    return None
1863 register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks first, then fold case per the host platform's rules.
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path().

    The shared mutable default dict is deliberate: it serves as the
    process-wide cache of already-normalized paths.
    """
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
def _set_parent_ns(packageName):
    # Bind the child module as an attribute of its parent package so that
    # `parent.child` resolves after namespace-package manipulation.
    parts = packageName.split('.')
    # NOTE(review): a `name = parts.pop()` and an `if parts:` guard are not
    # visible in this view; as written, `parent` would equal packageName.
    parent = '.'.join(parts)
    setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence.

    Strings are split into stripped lines; blank lines and lines starting
    with '#' are skipped.  Non-string iterables are flattened recursively.
    """
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'):     # skip blank lines/comments
                yield s
    else:
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# Parsing primitives for requirement strings; each is a bound .match method
# so callers can invoke it directly with (text, pos).
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg filenames into name/ver/pyver/plat groups.
# NOTE(review): the first alternative of this pattern (the project-name
# group) and the closing `).match` are not visible in this view.
EGG_NAME = re.compile(
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE

# Version-component tokenizer and canonical tag replacements used by
# parse_version(): pre-release tags map to 'c', 'dev' sorts lowest ('@').
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
    """Tokenize version string `s` into comparable part strings.

    Numeric parts are zero-padded to 8 digits so they compare numerically
    as strings; alphabetic parts are prefixed with '*' so pre-release tags
    sort before the trailing '*final' marker.
    """
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
        else:
            yield '*'+part

    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings. Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings. Dots are
    dropped, but dashes are retained. Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level". So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    # NOTE(review): the `parts = []` initialization, the `parts.append(part)`
    # accumulation, and the final `return tuple(parts)` are not visible in
    # this view.
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
class EntryPoint(object):
    """Object representing an advertised importable object"""
    # NOTE(review): several structural lines of this class (method headers
    # for __str__/__repr__, `self.name`/`self.dist` assignments, try blocks,
    # returns, and loop guards) are not visible in this view.

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate the extras list by parsing it as a dummy requirement.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras

    # __str__ body (its `def` line is not visible in this view):
        s = "%s = %s" % (self.name, self.module_name)
        s += ':' + '.'.join(self.attrs)
        s += ' [%s]' % ','.join(self.extras)

        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        # Optionally resolve/activate our distribution, then import the
        # target module and walk the attribute path.
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            entry = getattr(entry,attr)
        except AttributeError:
            raise ImportError("%r has no %r attribute" % (entry,attr))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # Add this entry point's distribution requirements to the working set.
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        name,value = src.split('=',1)
        value,extras = value.split('[',1)
        req = Requirement.parse("x["+extras)
        if req.specs: raise ValueError
        value,attrs = value.split(':',1)
        if not MODULE(attrs.rstrip()):
        attrs = attrs.rstrip().split('.')
        "EntryPoint must be in 'name=module:attrs [extras]' format",
        return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            raise ValueError("Duplicate entry point", group, ep.name)

    parse_group = classmethod(parse_group)

    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # Accept either a dict of {group: lines} or a flat INI-style string.
        if isinstance(data,dict):
        data = split_sections(data)
        for group, lines in data:
            raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)

    parse_map = classmethod(parse_map)
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # NOTE(review): many structural lines of this class (the __init__ header,
    # property/method headers, try blocks, returns, and closing parens) are
    # not visible in this view — confirm each member against upstream.

    # __init__ parameter list (its `def __init__(self,` line is not visible):
            location=None, metadata=None, project_name=None, version=None,
            py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # All metadata access is delegated to this provider.
        self._provider = metadata or empty_provider

    def from_location(cls,location,basename,metadata=None,**kw):
        # Build a Distribution from a path entry and basename, parsing
        # name/version/pyver/platform out of an .egg/.egg-info filename.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            project_name, version, py_version, platform = match.group(
                'name','ver','pyver','plat'
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw

    from_location = classmethod(from_location)

    # hashcmp property body (its `def` line is not visible in this view);
    # this tuple orders distributions by version, precedence, then location.
        getattr(self,'parsed_version',()), self.precedence, self.key,
        -len(self.location or ''), self.location, self.py_version,

    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

        except AttributeError:
            self._key = key = self.project_name.lower()

    def parsed_version(self):
        # Cached parse_version() of self.version, for ordering comparisons.
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)

    parsed_version = property(parsed_version)

        return self._version
        except AttributeError:
            # Fall back to the 'Version:' header in PKG-INFO.
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
                "Missing 'Version:' header and/or PKG-INFO file", self

    version = property(version)

    # Lazily-built map of extra name (or None) -> list of Requirements.
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))

    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        deps.extend(dm.get(None,()))
        deps.extend(dm[safe_extra(ext)])
        "%s has no such extra feature %r" % (self, ext)

    def _get_metadata(self,name):
        # Generator: yields metadata lines only when the file exists.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        filename += '-'+self.platform

        return "%s (%s)" % (self,self.location)

        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,

    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        raise ImportError("Entry point %r not found" % ((group,name),))

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
        if group is not None:
            return ep_map.get(group,{})

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Compare normalized forms so case/symlink differences don't matter.
        npath= [(p and _normalize_cached(p) or p) for p in path]
        for p, item in enumerate(npath):
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                npath.insert(p, nloc)
        if path is sys.path:
            self.check_version_conflict()
        # p is the spot where we found or inserted loc; now remove duplicates
            np = npath.index(nloc, p+1)
            del npath[np], path[np]

    def check_version_conflict(self):
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(
        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            # Warn when a top-level module was already imported from a
            # different location than the one being added.
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),

    def has_version(self):
        # Accessing self.version may raise ValueError for unbuilt eggs.
        issue_warning("Unbuilt egg for "+repr(self))

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        'project_name', 'version', 'py_version', 'platform', 'location',
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    # extras property: names of all declared extras (excluding the None key).
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
def issue_warning(*args,**kw):
    # Emit a warning attributed to the first caller outside this module.
    # NOTE(review): the `level = 1` / `g = globals()` setup, the try/except
    # around the frame walk, and the `level += 1` increment are not visible
    # in this view.
    # find the first stack frame that is *not* code in
    # the pkg_resources module, to use for the warning
    while sys._getframe(level).f_globals is g:
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # NOTE(review): several structural lines (the outer `for line in lines:`
    # loop, try blocks, `items = []`, raises, and `p = match.end()` steps)
    # are not visible in this view.
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Parse a comma-separated list (extras or version specs), consuming
        # backslash-continuation lines as needed; returns (line, pos, items).
        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                line = lines.next(); p = 0
                except StopIteration:
                    "\\ must not appear on the last nonblank line"
            match = ITEM(line,p)
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
            items.append(match.group(*groups))
            match = COMMA(line,p)
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                "Expected ',' or end-of-list in",line,"at",line[p:]
        match = TERMINATOR(line,p)
        if match: p = match.end() # skip the terminator, if any
        return line, p, items

        match = DISTRO(line)
            raise ValueError("Missing distribution spec", line, strs)
        project_name = match.group(1)
        match = OBRACKET(line,p)
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
2480 def _sort_dists(dists):
2481 tmp = [(dist.hashcmp,dist) for dist in dists]
2483 dists[::-1] = [d for hc,d in tmp]
def __init__(self, project_name, specs, extras):
    """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
    # Keep both the verbatim name and the canonicalized one; the
    # lower-cased key is what indexes/comparisons use.
    self.unsafe_name, project_name = project_name, safe_name(project_name)
    self.project_name, self.key = project_name, project_name.lower()
    # Decorate each (op, version) spec with its parsed version and the
    # comparison state-machine row for that operator.
    index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
    self.specs = [(op,ver) for parsed,trans,op,ver in index]
    self.index, self.extras = index, tuple(map(safe_extra,extras))
    # hashCmp is the identity tuple used by __eq__/__hash__.
    # NOTE(review): the ``self.hashCmp = (`` opening and its closing
    # paren line are not visible in this view of the file.
        self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
        frozenset(self.extras)
    self.__hash = hash(self.hashCmp)
    # __str__ body: render as "name[extra1,extra2]op1ver1,op2ver2".
    # NOTE(review): the ``def __str__(self):`` line is not visible in
    # this view of the file.
    specs = ','.join([''.join(s) for s in self.specs])
    extras = ','.join(self.extras)
    if extras: extras = '[%s]' % extras
    return '%s%s%s' % (self.project_name, extras, specs)
def __eq__(self,other):
    """Requirements are equal iff their ``hashCmp`` identity tuples match."""
    if not isinstance(other, Requirement):
        return False
    return self.hashCmp == other.hashCmp
def __contains__(self,item):
    # Membership test: does `item` (a Distribution, or a version string)
    # satisfy this requirement's version specifiers?
    if isinstance(item,Distribution):
        if item.key != self.key: return False
        if self.index: item = item.parsed_version # only get if we need it
    elif isinstance(item,basestring):
        item = parse_version(item)
    # NOTE(review): the ``last = None`` initialization, the ``else:``
    # raising TypeError for unsupported items, and the final
    # ``return last`` are not visible in this view of the file.
    # Run the version through each specifier's comparison state machine:
    # the row is indexed by cmp() result (-1/0/1 -> positions -1/0/1).
    for parsed,trans,op,ver in self.index:
        action = trans[cmp(item,parsed)]
        if action=='F': return False   # definite fail
        elif action=='T': return True  # definite pass
        elif action=='+': last = True  # tentative pass
        elif action=='-' or last is None: last = False # tentative fail
    if last is None: last = True # no rules encountered
def __repr__(self):
    """Round-trippable repr: evaluating it re-parses this requirement."""
    text = str(self)
    return "Requirement.parse(%r)" % text
    # parse() body: parse exactly one requirement spec from `s`.
    # NOTE(review): the ``def parse(s):`` line and the ``if reqs:`` /
    # length-check lines are not visible in this view of the file.
    reqs = list(parse_requirements(s))
        raise ValueError("Expected only one requirement", s)
    raise ValueError("No requirements found", s)

parse = staticmethod(parse)
2570 """Get an mro for a type or classic class"""
2571 if not isinstance(cls,type):
2572 class cls(cls,object): pass
2573 return cls.__mro__[1:]
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO of ob's class, most-specific type first.
    # NOTE(review): the registry lookup / return lines inside this loop
    # are not visible in this view of the file.
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
def ensure_directory(path):
    """Make sure the directory that should contain `path` exists on disk."""
    parent = os.path.dirname(path)
    if not os.path.isdir(parent):
        os.makedirs(parent)
def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    # NOTE(review): the ``section``/``content`` initialization and the
    # ``else:`` branches of the conditionals below are not visible in
    # this view of the file.
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # starting a new section: flush the previous one first
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                # a "[" line without a closing "]" is malformed
                raise ValueError("Invalid section heading", line)
            # ordinary line: accumulate into the current section
            content.append(line)

    # wrap up last segment
    yield section, content
def _mkstemp(*args,**kw):
    # Run tempfile.mkstemp with the *real* os.open, bypassing any
    # sandboxing hook that may have replaced it.
    from tempfile import mkstemp
    # NOTE(review): the save of the previous ``os.open`` into
    # ``old_open`` and the try/finally scaffolding are not visible in
    # this view of the file.
    os.open = os_open # temporarily bypass sandboxing
    return mkstemp(*args,**kw)
    os.open = old_open # and then put it back
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
# Re-export every public ResourceManager method as a module-level API
# function (resource_string, resource_filename, ...).
# NOTE(review): the enclosing ``def _initialize(g=globals()):`` line is
# not visible in this view of the file.
for name in dir(_manager):
    if not name.startswith('_'):
        g[name] = getattr(_manager, name)
_initialize(globals())

# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())

# Does the main program list any requirements?
# NOTE(review): the try/except scaffolding around this import and the
# require() call below is not visible in this view of the file.
from __main__ import __requires__
pass # No: just use the default working set based on sys.path
# Yes: ensure the requirements are met, by prefixing sys.path if necessary
working_set.require(__requires__)
except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path
    working_set = WorkingSet([]) # by starting with an empty path
    # resolve the main program's requirements against the environment
    for dist in working_set.resolve(
        parse_requirements(__requires__), Environment()
        working_set.add(dist)
    except DistributionNotFound:
    for entry in sys.path: # add any missing entries from sys.path
        if entry not in working_set.entries:
            working_set.add_entry(entry)
    sys.path[:] = working_set.entries # then copy back to sys.path

# Bind the working set's bound methods as the module-level public API.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order