1 """Package resource API
4 A resource is a logical file contained within a package, or a logical
5 subdirectory thereof. The package resource API expects resource names
6 to have their path parts separated with ``/``, *not* whatever the local
7 path separator is. Do not use os.path operations to manipulate resource
8 names being passed into the API.
10 The package resource API is designed to work with normal filesystem packages,
11 .egg files, and unpacked .egg files. It can also work in a limited way with
12 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 import sys, os, zipimport, time, re, imp
21 from sets import ImmutableSet as frozenset
23 # capture these to bypass sandboxing
24 from os import utime, rename, unlink, mkdir
25 from os import open as os_open
26 from os.path import isdir, split
28 from distutils import log
30 def _bypass_ensure_directory(name, mode=0777):
31 # Sandbox-bypassing version of ensure_directory()
32 dirname, filename = split(name)
33 if dirname and filename and not isdir(dirname):
34 _bypass_ensure_directory(dirname)
# NOTE(review): the module state-snapshot helpers below are fragmentary in
# this copy -- the ``g = globals()`` bindings, the ``__getstate__`` header,
# and the body of ``_sset_dict`` are not visible.

def _declare_state(vartype, **kw):
    # Register module-level state variables of kind `vartype` (e.g. 'dict',
    # 'object') so the __getstate__/__setstate__ helpers below know how to
    # save and restore them.
    for name, val in kw.iteritems():
        _state_vars[name] = vartype

# (fragment of __getstate__: snapshot each registered global through its
# type-specific ``_sget_<type>`` helper)
for k, v in _state_vars.iteritems():
    state[k] = g['_sget_'+v](g[k])

def __setstate__(state):
    # restore each registered global through its type-specific helper
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)

def _sset_dict(key, ob, state):
72 def _sget_object(val):
73 return val.__getstate__()
75 def _sset_object(key, ob, state):
76 ob.__setstate__(state)
78 _sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            # non-numeric OS version: keep the build platform string
            pass
    # the visible original never returned; callers need the platform string
    return plat
# NOTE(review): fragment of the module's ``__all__`` export list -- the
# opening ``__all__ = [`` and closing ``]`` are not visible in this copy.

# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',

# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',

# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',

# Exceptions
'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',

# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename',

# filesystem utilities
'ensure_directory', 'normalize_path',

# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',

# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # the ``def __repr__`` header was missing from the visible copy,
        # leaving this return statement orphaned in the class body
        return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
    # raised by WorkingSet.find()/resolve() when the active distribution
    # does not satisfy the requirement

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
    # raised by WorkingSet.resolve() when no suitable distribution exists

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
181 _provider_factories = {}
182 PY_MAJOR = sys.version[:3]
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # get_provider() looks this mapping up via _find_adapter()
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        # a Requirement: use the active distribution, activating if needed
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # not imported yet -- import it, then look it up again; the visible
        # original performed the lookup and then unconditionally re-imported
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
210 def _macosx_vers(_cache=[]):
212 from platform import mac_ver
213 _cache.append(mac_ver()[0].split('.'))
216 def _macosx_arch(machine):
217 return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    # the visible original had no return at all
    return plat
# Patterns for the two Mac platform-string formats: the modern
# "macosx-<major>.<minor>-<arch>" form and the legacy (pre-setuptools 0.6)
# "darwin-<x>.<y>.<z>-<arch>" form handled by compatible_platforms().
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform  # XXX backward compat
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True     # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if dversion == 7 and macosversion >= "10.3" or \
                        dversion == 8 and macosversion >= "10.4":
                    # legacy darwin egg is compatible with this macosx
                    #warnings.warn("Mac eggs should be rebuilt to "
                    #    "use the macosx designation instead of darwin.",
                    #    category=DeprecationWarning)
                    return True
            return False    # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
                provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    # reset the caller's namespace so the script runs as a fresh __main__;
    # without the clear() (absent in the visible original) the save/restore
    # of __name__ would be a no-op
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script   # backward compatibility
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    # the visible original fell off the end; the coerced Distribution
    # must be handed back to the caller
    return dist
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
class IMetadataProvider:
    # Abstract interface: read access to a distribution's metadata files.
    # Methods are declared without ``self`` in the interface-by-convention
    # style; concrete providers implement them as normal methods.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Declared in the same interface-by-convention style as
    # IMetadataProvider above.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # NOTE(review): the attribute initialisation (``self.entries``,
        # ``self.entry_keys``, ``self.by_key``, ``self.callbacks``) and the
        # ``entries=sys.path`` default handling are missing from this copy;
        # only the final scan loop survived.
        for entry in entries:
            self.add_entry(entry)
430 def add_entry(self, entry):
431 """Add a path item to ``.entries``, finding any distributions on it
433 ``find_distributions(entry, True)`` is used to find distributions
434 corresponding to the path entry, and they are added. `entry` is
435 always appended to ``.entries``, even if it is already present.
436 (This is because ``sys.path`` can contain the same value more than
437 once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
440 self.entry_keys.setdefault(entry, [])
441 self.entries.append(entry)
442 for dist in find_distributions(entry, True):
443 self.add(dist, entry, False)
446 def __contains__(self,dist):
447 """True if `dist` is the active distribution for its project"""
448 return self.by_key.get(dist.key) == dist
455 """Find a distribution matching requirement `req`
457 If there is an active distribution for the requested project, this
458 returns it as long as it meets the version requirement specified by
459 `req`. But, if there is an active distribution for the project and it
460 does *not* meet the `req` requirement, ``VersionConflict`` is raised.
461 If there is no active distribution for the requested project, ``None``
464 dist = self.by_key.get(req.key)
465 if dist is not None and dist not in req:
466 raise VersionConflict(dist,req) # XXX add more info
470 def iter_entry_points(self, group, name=None):
471 """Yield entry point objects from `group` matching `name`
473 If `name` is None, yields all entry points in `group` from all
474 distributions in the working set, otherwise only ones matching
475 both `group` and `name` are yielded (in distribution order).
478 entries = dist.get_entry_map(group)
480 for ep in entries.values():
482 elif name in entries:
485 def run_script(self, requires, script_name):
486 """Locate distribution for `requires` and run `script_name` script"""
487 ns = sys._getframe(1).f_globals
488 name = ns['__name__']
490 ns['__name__'] = name
491 self.require(requires)[0].run_script(script_name, ns)
496 """Yield distributions for non-duplicate projects in the working set
498 The yield order is the order in which the items' path entries were
499 added to the working set.
502 for item in self.entries:
503 for key in self.entry_keys[item]:
506 yield self.by_key[key]
    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set.  If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        # NOTE(review): several lines of this method (the ``if insert:`` /
        # ``if entry is None:`` guards, the ``else`` branches, and the
        # try/raise plumbing around the __requires__ check) are missing from
        # this copy; the surviving statements are kept verbatim below.
        dist.insert_on(self.entries, entry)
        entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return  # ignore hidden distros

        # If we have a __requires__ then we can already tell if this
        # dist is unsatisfactory, in which case we won't add it.
        if __requires__ is not None:
            if isinstance(__requires__, basestring):
                array_of__requires__ = [__requires__]
                array_of__requires__ = __requires__
            for thisreqstr in array_of__requires__:
                for thisreq in parse_requirements(thisreqstr):
                    if thisreq.key == dist.key:
                        if dist not in thisreq:
                            except ValueError, e:
                                e.args = tuple(e.args + ({'thisreqstr': thisreqstr, '__requires__': __requires__},))

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribe() callbacks about the newly activated dist
        self._added_new(dist)
    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution``
        or ``None``.
        """
        # NOTE(review): the surrounding control flow of this method (the
        # ``while`` loop over the requirement stack, the ``to_activate``
        # initialisation, and several ``if`` guards) is missing from this
        # copy; the surviving statements are kept verbatim below.
        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}  # key -> dist

        req = requirements.pop(0)  # process dependencies breadth-first
        # Ignore cyclic or redundant dependencies
        log.info("\nAlready processed %s", req)
        log.info("\nNeed %s", req)
        dist = best.get(req.key)
        # Find the best distribution and add it to the map
        dist = self.by_key.get(req.key)
        env = Environment(self.entries)
        dist = best[req.key] = env.best_match(req, self, installer)
        raise DistributionNotFound(req)  # XXX put more info here
        log.info(" found %r", dist)
        to_activate.append(dist)
        # Oops, the "best" so far conflicts with a dependency
        raise VersionConflict(dist,req)  # XXX put more info here
        to_add = dist.requires(req.extras)[::-1]
        log.info(" no subdependencies to add")
        elif len(to_add) == 1:
            log.info(" adding subdependency %s", "; ".join(map(str, to_add)))
        log.info(" adding subdependencies %s", "; ".join(map(str, to_add)))
        requirements.extend(to_add)
        processed[req] = True

        return to_activate  # return list of distros to activate
    def find_plugins(self,
            plugin_env, full_env=None, installer=None, fallback=True
            ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print "Couldn't load", errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories.  The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method.  The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred.  Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        # NOTE(review): the scaffolding around the statements below (the
        # ``error_info``/``distributions`` initialisation, the ``if full_env
        # is None:`` branch selecting between the two ``env = ...`` lines,
        # and the try/except around resolve()) is missing from this copy;
        # the surviving statements are kept verbatim.
        env = Environment(self.entries)
        env = full_env + plugin_env
        shadow_set = self.__class__([])
        map(shadow_set.add, self)  # put all our entries in shadow_set

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError,v:
                    error_info[dist] = v  # save error info
                    continue  # try the next older version of project
                break  # give up on this project, keep going
                map(shadow_set.add, resolvees)
                distributions.update(dict.fromkeys(resolvees))
                # success, no need to try any more versions of this project

        distributions = list(distributions)

        return distributions, error_info
687 def require(self, *requirements):
688 """Ensure that distributions matching `requirements` are activated
690 `requirements` must be a string or a (possibly-nested) sequence
691 thereof, specifying the distributions and versions required. The
692 return value is a sequence of the distributions that needed to be
693 activated to fulfill the requirements; all relevant distributions are
694 included, even if they were already activated in this working set.
696 needed = self.resolve(parse_requirements(requirements))
703 def subscribe(self, callback):
704 """Invoke `callback` for all distributions (including existing ones)"""
705 if callback in self.callbacks:
707 self.callbacks.append(callback)
711 def _added_new(self, dist):
712 for callback in self.callbacks:
715 def __getstate__(self):
717 self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
721 def __setstate__(self, (entries, keys, by_key, callbacks)):
722 self.entries = entries[:]
723 self.entry_keys = keys.copy()
724 self.by_key = by_key.copy()
725 self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""
    # Maps project keys to lists of available Distribution objects,
    # filtered by platform/python compatibility (see can_add()).
731 def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
732 """Snapshot distributions available on a search path
734 Any distributions found on `search_path` are added to the environment.
735 `search_path` should be a sequence of ``sys.path`` items. If not
736 supplied, ``sys.path`` is used.
738 `platform` is an optional string specifying the name of the platform
739 that platform-specific distributions must be compatible with. If
740 unspecified, it defaults to the current platform. `python` is an
741 optional string naming the desired version of Python (e.g. ``'2.4'``);
742 it defaults to the current version.
744 You may explicitly set `platform` (and/or `python`) to ``None`` if you
745 wish to map *all* distributions, not just those compatible with the
746 running platform or Python version.
750 self.platform = platform
752 self.scan(search_path)
754 def can_add(self, dist):
755 """Is distribution `dist` acceptable for this environment?
757 The distribution must match the platform and python version
758 requirements specified when this environment was created, or False
761 return (self.python is None or dist.py_version is None
762 or dist.py_version==self.python) \
763 and compatible_platforms(dist.platform,self.platform)
765 def remove(self, dist):
766 """Remove `dist` from the environment"""
767 self._distmap[dist.key].remove(dist)
769 def scan(self, search_path=None):
770 """Scan `search_path` for distributions usable in this environment
772 Any distributions found are added to the environment.
773 `search_path` should be a sequence of ``sys.path`` items. If not
774 supplied, ``sys.path`` is used. Only distributions conforming to
775 the platform/python version defined at initialization are added.
777 if search_path is None:
778 search_path = sys.path
780 for item in search_path:
781 for dist in find_distributions(item):
    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        # NOTE(review): the try/except KeyError scaffolding and the
        # ``return []`` miss-path of this method are not visible in this
        # copy; the surviving statements are kept verbatim below.
        return self._cache[project_name]

        project_name = project_name.lower()
        if project_name not in self._distmap:

        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]

        return self._cache[project_name]
801 """Add `dist` if we ``can_add()`` it and it isn't already added"""
802 if self.can_add(dist) and dist.has_version():
803 dists = self._distmap.setdefault(dist.key,[])
804 if dist not in dists:
806 if dist.key in self._cache:
807 _sort_dists(self._cache[dist.key])
810 def best_match(self, req, working_set, installer=None):
811 """Find distribution best matching `req` and usable on `working_set`
813 This calls the ``find(req)`` method of the `working_set` to see if a
814 suitable distribution is already active. (This may raise
815 ``VersionConflict`` if an unsuitable version of the project is already
816 active in the specified `working_set`.)
818 If a suitable distribution isn't active, this method returns the
819 newest platform-dependent distribution in the environment that meets
820 the ``Requirement`` in `req`. If no suitable platform-dependent
821 distribution is found, then the newest platform-independent
822 distribution that meets the requirement is returned. (A platform-
823 dependent distribution will typically have code compiled or
824 specialized for that platform.)
826 Otherwise, if `installer` is supplied, then the result of calling the
827 environment's ``obtain(req, installer)`` method will be returned.
829 dist = working_set.find(req)
833 # first try to find a platform-dependent dist
834 for dist in self[req.key]:
835 if dist in req and dist.platform is not None:
838 # then try any other dist
839 for dist in self[req.key]:
843 return self.obtain(req, installer) # try and download/install
845 def obtain(self, requirement, installer=None):
846 """Obtain a distribution matching `requirement` (e.g. via download)
848 Obtain a distro that matches requirement (e.g. via download). In the
849 base ``Environment`` class, this routine just returns
850 ``installer(requirement)``, unless `installer` is None, in which case
851 None is returned instead. This method is a hook that allows subclasses
852 to attempt other ways of obtaining a distribution before falling back
853 to the `installer` argument."""
854 if installer is not None:
855 return installer(requirement)
858 """Yield the unique project names of the available distributions"""
859 for key in self._distmap.keys():
860 if self[key]: yield key
865 def __iadd__(self, other):
866 """In-place addition of a distribution or environment"""
867 if isinstance(other,Distribution):
869 elif isinstance(other,Environment):
870 for project in other:
871 for dist in other[project]:
874 raise TypeError("Can't add %r to environment" % (other,))
877 def __add__(self, other):
878 """Add an environment or distribution to an environment"""
879 new = self.__class__([], platform=None, python=None)
880 for env in self, other:
885 AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

        The resource manager that raised this exception
        (presumably ``manager``; the assigning line is not visible here)

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
    # ``cache_path`` and ``original_error`` are set by
    # ResourceManager.extraction_error() before the exception is raised.
class ResourceManager:
    """Manage resource extraction and packages"""

    # class-level default; set_extraction_path() overrides it per instance
    extraction_path = None

    def __init__(self):
        # NOTE(review): the ``def __init__`` header was missing from the
        # visible copy, leaving this assignment orphaned in the class body.
        # Maps extraction target paths to 1; consulted by cleanup code.
        self.cached_files = {}
913 def resource_exists(self, package_or_requirement, resource_name):
914 """Does the named resource exist?"""
915 return get_provider(package_or_requirement).has_resource(resource_name)
917 def resource_isdir(self, package_or_requirement, resource_name):
918 """Is the named resource an existing directory?"""
919 return get_provider(package_or_requirement).resource_isdir(
923 def resource_filename(self, package_or_requirement, resource_name):
924 """Return a true filesystem path for specified resource"""
925 return get_provider(package_or_requirement).get_resource_filename(
929 def resource_stream(self, package_or_requirement, resource_name):
930 """Return a readable file-like object for specified resource"""
931 return get_provider(package_or_requirement).get_resource_stream(
935 def resource_string(self, package_or_requirement, resource_name):
936 """Return specified resource as a string"""
937 return get_provider(package_or_requirement).get_resource_string(
941 def resource_listdir(self, package_or_requirement, resource_name):
942 """List the contents of the named resource directory"""
943 return get_provider(package_or_requirement).resource_listdir(
    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        # NOTE(review): parts of this method are missing from this copy --
        # interior lines of the message template, the closing paren of the
        # ExtractionError(...) call, and the final ``raise``.  The surviving
        # text is kept verbatim; the template is filled from
        # (old_exc, cache_path).
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
The Python egg cache directory is currently set to:
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        err.cache_path = cache_path
        err.original_error = old_exc
988 def get_cache_path(self, archive_name, names=()):
989 """Return absolute location in cache for `archive_name` and `names`
991 The parent directory of the resulting path will be created if it does
992 not already exist. `archive_name` should be the base filename of the
993 enclosing egg (which may not be the name of the enclosing zipfile!),
994 including its ".egg" extension. `names`, if provided, should be a
995 sequence of path name parts "under" the egg's extraction location.
997 This method should only be called by resource providers that need to
998 obtain an extraction location, and only for names they intend to
999 extract, as it tracks the generated names for possible cleanup later.
1001 extract_path = self.extraction_path or get_default_cache()
1002 target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
1004 _bypass_ensure_directory(target_path)
1006 self.extraction_error()
1008 self.cached_files[target_path] = 1
    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)
1070 def set_extraction_path(self, path):
1071 """Set the base path where resources will be extracted to, if needed.
1073 If you do not call this routine before any extractions take place, the
1074 path defaults to the return value of ``get_default_cache()``. (Which
1075 is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1076 platform-specific fallbacks. See that routine's documentation for more
1079 Resources are extracted to subdirectories of this path based upon
1080 information given by the ``IResourceProvider``. You may set this to a
1081 temporary directory, but then you must call ``cleanup_resources()`` to
1082 delete the extracted files when done. There is no guarantee that
1083 ``cleanup_resources()`` will be able to remove all extracted files.
1085 (Note: you may not change the extraction path for a given resource
1086 manager once resources have been extracted, unless you first call
1087 ``cleanup_resources()``.)
1089 if self.cached_files:
1091 "Can't change extraction path, files already extracted"
1094 self.extraction_path = path
    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # NOTE(review): no implementation is visible in this copy of the
        # file; the body appears to have been elided.
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'   # XXX this may be locale-specific!
    app_homes = [
        (('APPDATA',), None),           # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('WINDIR',), app_data),        # 95/98/ME
    ]

    for keys, subdir in app_homes:
        # join together all the parts of this candidate home location;
        # a missing environment variable disqualifies the candidate
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        # typo fixed: "enviroment" -> "environment"
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
1152 def safe_name(name):
1153 """Convert an arbitrary string to a standard distribution name
1155 Any runs of non-alphanumeric/. characters are replaced with a single '-'.
1157 return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
    """Normalize an arbitrary string into a standard version string.

    Spaces become dots; every remaining run of characters outside
    ``[A-Za-z0-9.]`` collapses into a single dash.
    """
    dotted = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
def safe_extra(extra):
    """Normalize an arbitrary string into a standard 'extra' name.

    Each run of non-alphanumeric, non-dot characters becomes a single
    ``'_'`` and the result is lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return underscored.lower()
def to_filename(name):
    """Escape a project or version name into its filename form.

    Every ``'-'`` becomes ``'_'`` so the value can appear safely inside an
    egg filename (where ``'-'`` is the field separator).
    """
    return '_'.join(name.split('-'))
1194 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    def __init__(self, module):
        # Capture the module's PEP 302 loader (None for plain filesystem
        # modules) and the directory containing the module's __file__.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    def get_resource_filename(self, manager, resource_name):
        # Map the '/'-separated resource name onto the module's directory.
        return self._fn(self.module_path, resource_name)
1207 def get_resource_stream(self, manager, resource_name):
1208 return StringIO(self.get_resource_string(manager, resource_name))
    def get_resource_string(self, manager, resource_name):
        # Delegate to the loader-specific _get() on the mapped path.
        return self._get(self._fn(self.module_path, resource_name))
    def has_resource(self, resource_name):
        # True if the loader reports the mapped path as existing.
        return self._has(self._fn(self.module_path, resource_name))
    def has_metadata(self, name):
        # False when there is no egg_info directory at all.
        return self.egg_info and self._has(self._fn(self.egg_info,name))
1219 def get_metadata(self, name):
1220 if not self.egg_info:
1222 return self._get(self._fn(self.egg_info,name))
    def get_metadata_lines(self, name):
        # Yield the non-blank, non-comment lines of the metadata resource.
        return yield_lines(self.get_metadata(name))
    def resource_isdir(self,resource_name):
        # Directory test via the loader-specific _isdir() hook.
        return self._isdir(self._fn(self.module_path, resource_name))
    def metadata_isdir(self,name):
        # False when there is no egg_info directory at all.
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
    def resource_listdir(self,resource_name):
        # Listing via the loader-specific _listdir() hook.
        return self._listdir(self._fn(self.module_path,resource_name))
1237 def metadata_listdir(self,name):
1239 return self._listdir(self._fn(self.egg_info,name))
1242 def run_script(self,script_name,namespace):
1243 script = 'scripts/'+script_name
1244 if not self.has_metadata(script):
1245 raise ResolutionError("No script named %r" % script_name)
1246 script_text = self.get_metadata(script).replace('\r\n','\n')
1247 script_text = script_text.replace('\r','\n')
1248 script_filename = self._fn(self.egg_info,script)
1249 namespace['__file__'] = script_filename
1250 if os.path.exists(script_filename):
1251 execfile(script_filename, namespace, namespace)
1253 from linecache import cache
1254 cache[script_filename] = (
1255 len(script_text), 0, script_text.split('\n'), script_filename
1257 script_code = compile(script_text,script_filename,'exec')
1258 exec script_code in namespace, namespace
1260 def _has(self, path):
1261 raise NotImplementedError(
1262 "Can't perform this operation for unregistered loader type"
1265 def _isdir(self, path):
1266 raise NotImplementedError(
1267 "Can't perform this operation for unregistered loader type"
1270 def _listdir(self, path):
1271 raise NotImplementedError(
1272 "Can't perform this operation for unregistered loader type"
1275 def _fn(self, base, resource_name):
1277 return os.path.join(base, *resource_name.split('/'))
1280 def _get(self, path):
1281 if hasattr(self.loader, 'get_data'):
1282 return self.loader.get_data(path)
1283 raise NotImplementedError(
1284 "Can't perform this operation for loaders without 'get_data()'"
1287 register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        # walk upward until the path stops changing, looking for a *.egg
        while path != old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # resources are plain files: stream straight from the filesystem
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        # read the whole file, ensuring the handle is closed even on error
        stream = open(path, 'rb')
        try:
            return stream.read()
        finally:
            stream.close()
1338 register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self, path: False
    _get = lambda self, path: ''
    _listdir = lambda self, path: []
    module_path = None

    def __init__(self):
        # no module to wrap; everything above is a constant
        pass

# Shared do-nothing provider, used as the default metadata source.
empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""
    # NOTE(review): several original source lines (closing parens, try/except
    # scaffolding, a few statements) are missing from this chunk; the
    # remaining code is kept byte-for-byte.

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # reuse zipimport's already-parsed directory for this archive
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # eager resources are all extracted together
            self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # directories: extract every child, then return the directory name
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
            return os.path.dirname(last) # return the extracted directory name

        zip_stat = self.zipinfo[zip_path]
        # decode the DOS-packed date/time fields of the zip directory entry
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
            (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        timestamp = time.mktime(date_time)

        real_path = manager.get_cache_path(
            self.egg_name, self._parts(zip_path)

        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, don't bother extracting

        # extract to a temp name, then atomically rename into place
        outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
        os.write(outf, self.loader.get_data(zip_path))
        utime(tmpnam, (timestamp,timestamp))
        manager.postprocess(tmpnam, real_path)
            rename(tmpnam, real_path)
            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, somebody did it just ahead of
                elif os.name=='nt': # Windows, del old file and retry
                    rename(tmpnam, real_path)
            manager.extraction_error() # report a user-friendly error

    def _get_eager_resources(self):
        # lazily collect names listed in the eager-resource metadata files
        if self.eagers is None:
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers

    # lazily-built index mapping each zip directory to its child names
        return self._dirindex
        except AttributeError:
        for path in self.zipinfo:
            parts = path.split(os.sep)
                parent = os.sep.join(parts[:-1])
                ind[parent].append(parts[-1])
                ind[parent] = [parts.pop()]
        self._dirindex = ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))

register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        return name=='PKG-INFO'

    def get_metadata(self, name):
        # guard clause: only PKG-INFO is available
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        return open(self.path,'rU').read()

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the importable location; `egg_info` the metadata dir.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # mirror ZipProvider.__init__, but start from a zipimporter rather
        # than an imported module
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
1604 """PEP 302 Importer that wraps Python's "normal" import algorithm"""
1606 def __init__(self, path=None):
1609 def find_module(self, fullname, path=None):
1610 subname = fullname.split(".")[-1]
1611 if subname != fullname and self.path is None:
1613 if self.path is None:
1618 file, filename, etc = imp.find_module(subname, path)
1621 return ImpLoader(file, filename, etc)
1625 """PEP 302 Loader that wraps Python's "normal" import algorithm"""
1627 def __init__(self, file, filename, etc):
1629 self.filename = filename
1632 def load_module(self, fullname):
1634 mod = imp.load_module(fullname, self.file, self.filename, self.etc)
1636 if self.file: self.file.close()
1637 # Note: we don't set __loader__ because we want the module to look
1638 # normal; i.e. this is just a wrapper for standard import machinery
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery. The returned importer is only cached if it was created by a
    path hook.
    """
    # NOTE(review): the try/except scaffolding around these lines is missing
    # from this chunk; the remaining code is kept byte-for-byte.
        importer = sys.path_importer_cache[path_item]
        # cache miss: ask each registered path hook in turn
        for hook in sys.path_hooks:
                importer = hook(path_item)
        sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
            # fall back to wrapping the builtin import machinery
            importer = ImpWrapper(path_item)
1685 _declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # dispatch to whichever finder is registered for this importer's type
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found in the zipfile/egg at `path_item`."""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the archive itself is a distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        return # don't yield nested distros
    # look for eggs nested inside the archive (an egg "basket")
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # rebind the module-level name so subsequent calls skip this thunk
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
1726 def find_nothing(importer, path_item, only=False):
1728 register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg directory: it *is* the distribution
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # each non-blank line of an .egg-link names a location
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
                            yield item
1765 register_finder(ImpWrapper, find_on_path)
# Namespace-package registries, declared via _declare_state so they take part
# in module state save/restore.
_declare_state('dict', _namespace_handlers = {})
_declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create a fresh namespace module and wire it into its parent
        module = sys.modules[packageName] = imp.new_module(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # preserve __path__ across the load_module() call
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # declare and import the parent first; the child lives on the
            # parent's __path__
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)
    finally:
        imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        # recursively propagate the new path item to every declared child
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                fixup_namespace_packages(subpath, package)
    finally:
        imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
# Filesystem and zipfile importers share the same ns-subpath computation.
register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1865 def null_ns_handler(importer, path_item, packageName, module):
1868 register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    real = os.path.realpath(filename)
    return os.path.normcase(real)
def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path(); the shared default dict is the cache."""
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
def _set_parent_ns(packageName):
    """Bind the named module as an attribute of its parent package."""
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines and comment-only lines
            if s and not s.startswith('#'):
                yield s
    else:
        # nested iterable: recurse into each element
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# Pre-bound .match functions used by the requirement/entry-point tokenizer.
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# NOTE(review): the first pattern line of EGG_NAME (the project-name group)
# and its closing ").match" are missing from this chunk.
EGG_NAME = re.compile(
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE

# Version-string tokenizer and token canonicalization used by parse_version().
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
    """Tokenize a version string into sortable part strings."""
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
        else:
            yield '*'+part

    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings. Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings. Dots are
    dropped, but dashes are retained. Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level". So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
class EntryPoint(object):
    """Object representing an advertised importable object"""
    # NOTE(review): some original lines (def headers for __str__/__repr__,
    # try/except scaffolding, a few statements) are missing from this chunk;
    # the remaining code is kept byte-for-byte.

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # validate/normalize extras by round-tripping through Requirement
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras

        # (string form: "name = module:attr.attr [extra1,extra2]")
        s = "%s = %s" % (self.name, self.module_name)
        s += ':' + '.'.join(self.attrs)
        s += ' [%s]' % ','.join(self.extras)

        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        # walk the attribute chain from the imported module
        for attr in self.attrs:
            entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        name,value = src.split('=',1)
        value,extras = value.split('[',1)
        req = Requirement.parse("x["+extras)
        if req.specs: raise ValueError
        value,attrs = value.split(':',1)
        if not MODULE(attrs.rstrip()):
        attrs = attrs.rstrip().split('.')
            "EntryPoint must be in 'name=module:attrs [extras]' format",
        return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
                raise ValueError("Duplicate entry point", group, ep.name)

    parse_group = classmethod(parse_group)

    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            # dict input: treat items as (section, lines) pairs
            data = split_sections(data)
        for group, lines in data:
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)

    parse_map = classmethod(parse_map)
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # NOTE(review): several original lines (some ``def`` headers, ``try:``/
    # ``return`` lines, closing parens) are missing from this chunk; the
    # remaining code is kept byte-for-byte.

        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    # Alternate constructor: derive name/version/pyver/platform by parsing
    # an egg or egg-info basename with EGG_NAME.
    def from_location(cls,location,basename,metadata=None,**kw):
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            project_name, version, py_version, platform = match.group(
                'name','ver','pyver','plat'
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
    from_location = classmethod(from_location)

    # hashcmp tuple: the sort key used by __cmp__/__hash__ below
        getattr(self,'parsed_version',()), self.precedence, self.key,
        -len(self.location or ''), self.location, self.py_version,

    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

        except AttributeError:
            self._key = key = self.project_name.lower()

    def parsed_version(self):
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)

    parsed_version = property(parsed_version)

            return self._version
        except AttributeError:
            # fall back to scanning PKG-INFO for a "Version:" header
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
                "Missing 'Version:' header and/or PKG-INFO file", self
    version = property(version)

        # dependency map: None key holds unconditional requirements,
        # extra-name keys hold the extras' additional requirements
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        deps.extend(dm.get(None,()))
            deps.extend(dm[safe_extra(ext)])
                "%s has no such extra feature %r" % (self, ext)

    def _get_metadata(self,name):
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
            filename += '-'+self.platform

        return "%s (%s)" % (self,self.location)

        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
            raise ImportError("Entry point %r not found" % ((group,name),))

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
            ep_map = self._ep_map
        except AttributeError:
            # parse entry_points.txt lazily, once
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
        if group is not None:
            return ep_map.get(group,{})

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath= [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                npath.insert(p, nloc)
            if path is sys.path:
                self.check_version_conflict()

        # p is the spot where we found or inserted loc; now remove duplicates
            np = npath.index(nloc, p+1)
            del npath[np], path[np]

    def check_version_conflict(self):
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),

    def has_version(self):
        issue_warning("Unbuilt egg for "+repr(self))

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        'project_name', 'version', 'py_version', 'platform', 'location',
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

        # extras: every non-None key of the dependency map
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
def issue_warning(*args,**kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        pass
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
2424 def parse_requirements(strs):
2425 """Yield ``Requirement`` objects for each specification in `strs`
2427 `strs` must be an instance of ``basestring``, or a (possibly-nested)
2430 # create a steppable iterator, so we can handle \-continuations
2431 lines = iter(yield_lines(strs))
2433 def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
2437 while not TERMINATOR(line,p):
2438 if CONTINUE(line,p):
2440 line = lines.next(); p = 0
2441 except StopIteration:
2443 "\\ must not appear on the last nonblank line"
2446 match = ITEM(line,p)
2448 raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
2450 items.append(match.group(*groups))
2453 match = COMMA(line,p)
2455 p = match.end() # skip the comma
2456 elif not TERMINATOR(line,p):
2458 "Expected ',' or end-of-list in",line,"at",line[p:]
2461 match = TERMINATOR(line,p)
2462 if match: p = match.end() # skip the terminator, if any
2463 return line, p, items
2466 match = DISTRO(line)
2468 raise ValueError("Missing distribution spec", line, strs)
2469 project_name = match.group(1)
2473 match = OBRACKET(line,p)
2476 line, p, extras = scan_list(
2477 DISTRO, CBRACKET, line, p, (1,), "'extra' name"
2480 line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
2481 specs = [(op,safe_version(val)) for op,val in specs]
2482 yield Requirement(project_name, specs, extras)
2485 def _sort_dists(dists):
2486 tmp = [(dist.hashcmp,dist) for dist in dists]
2488 dists[::-1] = [d for hc,d in tmp]
2507 def __init__(self, project_name, specs, extras):
2508 """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
2509 self.unsafe_name, project_name = project_name, safe_name(project_name)
2510 self.project_name, self.key = project_name, project_name.lower()
2511 index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
2513 self.specs = [(op,ver) for parsed,trans,op,ver in index]
2514 self.index, self.extras = index, tuple(map(safe_extra,extras))
2516 self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2517 frozenset(self.extras)
2519 self.__hash = hash(self.hashCmp)
2522 specs = ','.join([''.join(s) for s in self.specs])
2523 extras = ','.join(self.extras)
2524 if extras: extras = '[%s]' % extras
2525 return '%s%s%s' % (self.project_name, extras, specs)
2527 def __eq__(self,other):
2528 return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
2530 def __contains__(self,item):
2531 if isinstance(item,Distribution):
2532 if item.key != self.key: return False
2533 if self.index: item = item.parsed_version # only get if we need it
2534 elif isinstance(item,basestring):
2535 item = parse_version(item)
2537 for parsed,trans,op,ver in self.index:
2538 action = trans[cmp(item,parsed)]
2539 if action=='F': return False
2540 elif action=='T': return True
2541 elif action=='+': last = True
2542 elif action=='-' or last is None: last = False
2543 if last is None: last = True # no rules encountered
2550 def __repr__(self): return "Requirement.parse(%r)" % str(self)
2554 reqs = list(parse_requirements(s))
2558 raise ValueError("Expected only one requirement", s)
2559 raise ValueError("No requirements found", s)
2561 parse = staticmethod(parse)
2575 """Get an mro for a type or classic class"""
2576 if not isinstance(cls,type):
2577 class cls(cls,object): pass
2578 return cls.__mro__[1:]
2581 def _find_adapter(registry, ob):
2582 """Return an adapter factory for `ob` from `registry`"""
2583 for t in _get_mro(getattr(ob, '__class__', type(ob))):
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Creates any missing intermediate directories via ``os.makedirs()``.
    `path` itself is treated as a file name; only ``dirname(path)`` is
    created.
    """
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # A concurrent process may have created the directory between
            # the isdir() check and the makedirs() call; only propagate
            # the error if the directory still does not exist.
            if not os.path.isdir(dirname):
                raise
2594 def split_sections(s):
2595 """Split a string or iterable thereof into (section,content) pairs
2597 Each ``section`` is a stripped version of the section header ("[section]")
2598 and each ``content`` is a list of stripped lines excluding blank lines and
2599 comment-only lines. If there are any such lines before the first section
2600 header, they're returned in a first ``section`` of ``None``.
2604 for line in yield_lines(s):
2605 if line.startswith("["):
2606 if line.endswith("]"):
2607 if section or content:
2608 yield section, content
2609 section = line[1:-1].strip()
2612 raise ValueError("Invalid section heading", line)
2614 content.append(line)
2616 # wrap up last segment
2617 yield section, content
2619 def _mkstemp(*args,**kw):
2620 from tempfile import mkstemp
2623 os.open = os_open # temporarily bypass sandboxing
2624 return mkstemp(*args,**kw)
2626 os.open = old_open # and then put it back
2629 # Set up global resource manager (deliberately not state-saved)
2630 _manager = ResourceManager()
2632 for name in dir(_manager):
2633 if not name.startswith('_'):
2634 g[name] = getattr(_manager, name)
2635 _initialize(globals())
2637 # Prepare the master working set and make the ``require()`` API available
2639 _declare_state('object', working_set = WorkingSet())
2641 # Does the main program list any requirements?
2642 from __main__ import __requires__
2644 pass # No: just use the default working set based on sys.path
2646 # Yes: ensure the requirements are met, by prefixing sys.path if necessary
2648 working_set.require(__requires__)
2649 except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path
2650 working_set = WorkingSet([]) # by starting with an empty path
2652 for dist in working_set.resolve(
2653 parse_requirements(__requires__), Environment()
2655 working_set.add(dist)
2656 except DistributionNotFound:
2658 for entry in sys.path: # add any missing entries from sys.path
2659 if entry not in working_set.entries:
2660 working_set.add_entry(entry)
2661 sys.path[:] = working_set.entries # then copy back to sys.path
2663 require = working_set.require
2664 iter_entry_points = working_set.iter_entry_points
2665 add_activation_listener = working_set.subscribe
2666 run_script = working_set.run_script
2667 run_main = run_script # backward compatibility
2668 # Activate all distributions already on sys.path, and ensure that
2669 # all distributions added to the working set in the future (e.g. by
2670 # calling ``require()``) will get activated as well.
2671 add_activation_listener(lambda dist: dist.activate())
2672 working_set.entries=[]; map(working_set.add_entry,sys.path) # match order