1 """Package resource API
4 A resource is a logical file contained within a package, or a logical
5 subdirectory thereof. The package resource API expects resource names
6 to have their path parts separated with ``/``, *not* whatever the local
7 path separator is. Do not use os.path operations to manipulate resource
8 names being passed into the API.
10 The package resource API is designed to work with normal filesystem packages,
11 .egg files, and unpacked .egg files. It can also work in a limited way with
12 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 import sys, os, zipimport, time, re, imp
21 from sets import ImmutableSet as frozenset
23 # capture these to bypass sandboxing
24 from os import utime, rename, unlink, mkdir
25 from os import open as os_open
26 from os.path import isdir, split
def _bypass_ensure_directory(name, mode=0777):
    # Sandbox-bypassing version of ensure_directory()
    # Recursively ensures that the parent directories of `name` exist, using
    # the os functions captured at import time so sandboxed replacements are
    # not invoked.
    dirname, filename = split(name)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        # NOTE(review): no mkdir() call is visible here -- the statement that
        # actually creates `dirname` (and consumes `mode`) appears to be
        # elided from this view.
def _declare_state(vartype, **kw):
    # Register module-level state variables (name -> kind tag) so the
    # module-state snapshot helpers below know how to save/restore them.
    # NOTE(review): additional bookkeeping lines (e.g. binding the values
    # into globals) appear to be elided from this view.
    for name, val in kw.iteritems():
        _state_vars[name] = vartype
# NOTE(review): body fragment of a module-state __getstate__ helper; its
# `def` line and the initialization of `state`/`g` are elided from this view.
for k, v in _state_vars.iteritems():
    state[k] = g['_sget_'+v](g[k])
def __setstate__(state):
    # Restore module-level state captured by the matching __getstate__ helper,
    # dispatching each key through its registered _sset_* function.
    # NOTE(review): the binding of `g` (presumably globals()) is elided.
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)
def _sset_dict(key, ob, state):
    # NOTE(review): body elided from this view; by analogy with the other
    # _sset_* helpers it should restore dict `ob` in place from `state`.
70 def _sget_object(val):
71 return val.__getstate__()
73 def _sset_object(key, ob, state):
74 ob.__setstate__(state)
76 _sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform(); m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        # Rebuild the platform string from the *running* OS version.
        # NOTE(review): the original wraps this in try/except and ends with
        # `return plat`; those lines are elided from this view.
        plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
# NOTE(review): fragment of the module's public-API export list; the
# `__all__ = [` opener and the closing `]` are elided from this view.

# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',

# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',

# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',

# Exceptions
'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',

# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename',

# filesystem utilities
'ensure_directory', 'normalize_path',

# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',

# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    # NOTE(review): a method header (presumably `def __repr__(self):`) is
    # elided here; the return below formats the class name plus its args.
    return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
    """Raised when the already-installed version of a project conflicts
    with the version a requirement asks for."""
class DistributionNotFound(ResolutionError):
    """Raised when no installed distribution satisfies a requested
    requirement."""
class UnknownExtra(ResolutionError):
    """Raised when a requirement names an "extra feature" that the
    distribution does not define."""
# Registry mapping PEP 302 loader types to IResourceProvider factories;
# populated via register_loader_type().
_provider_factories = {}

# The running interpreter's "major.minor" version string (e.g. "2.4").
# Built from sys.version_info instead of slicing sys.version, because the
# slice silently truncates two-digit minor versions ("3.10" -> "3.1").
PY_MAJOR = '%s.%s' % sys.version_info[:2]
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        # Requirements are resolved through the global working set.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # NOTE(review): the original looks the module up in sys.modules inside a
    # try/except and imports it only on KeyError; those control-flow lines
    # are elided from this view, leaving the fragments below.
        module = sys.modules[moduleOrReq]
    __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    # Dispatch on the loader type to pick the right provider implementation.
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Memoized Mac OS X version lookup; the mutable default argument acts as
    # the cache (deliberate use of the shared-default idiom).
    # NOTE(review): the `if not _cache:` guard and the final
    # `return _cache[0]` appear to be elided from this view.
    from platform import mac_ver
    _cache.append(mac_ver()[0].split('.'))
214 def _macosx_arch(machine):
215 return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Older Pythons report "darwin"-style strings; synthesize the modern
        # "macosx-<major>.<minor>-<machine>" form instead.
        # NOTE(review): the original guards this with try/except so non-Mac
        # darwin systems fall through; those lines are elided from this view.
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
            _macosx_arch(machine))
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
    # NOTE(review): the final `return plat` is elided from this view.
# Patterns that pick apart platform strings such as "macosx-10.4-ppc" and the
# legacy "darwin-8.11.1-i386" form into (major, minor[, patch], machine).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    # NOTE(review): the original nests the remainder of this function under
    # an `if reqMac:` guard and contains several return statements; a number
    # of those control-flow lines are elided from this view, leaving the
    # fragments below.
    provMac = macosVersionString.match(provided)

    # is this a Mac package?
    # this is backwards compatibility for packages built before
    # setuptools 0.6. All packages built after this point will
    # use the new macosx designation.
    provDarwin = darwinVersionString.match(provided)
    dversion = int(provDarwin.group(1))
    macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    if dversion == 7 and macosversion >= "10.3" or \
        dversion == 8 and macosversion >= "10.4":
        #warnings.warn("Mac eggs should be rebuilt to "
        #    "use the macosx designation instead of darwin.",
        #    category=DeprecationWarning)
    return False # egg isn't macosx or legacy darwin

    # are they the same major version and machine type?
    if provMac.group(1) != reqMac.group(1) or \
        provMac.group(3) != reqMac.group(3):

    # is the required OS major update >= the provided one?
    if int(provMac.group(2)) > int(reqMac.group(2)):

    # XXX Linux and other platforms' special cases should go here
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script in the *caller's* global namespace, as __main__ would.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    # NOTE(review): re-assigning __name__ right after reading it suggests an
    # intervening `ns.clear()` was elided from this view -- confirm upstream.
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script # backward compatibility
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize in stages: string -> Requirement -> Distribution.
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    # NOTE(review): the final `return dist` is elided from this view.
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
class IMetadataProvider:
    # Interface declaration: these methods only document the metadata-access
    # contract that concrete providers implement.  They are declared without
    # `self`, interface-style, and have no bodies.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Interface declaration: extends the metadata contract with resource
    # access.  Methods are declared without `self` and have no bodies.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    # NOTE(review): several method bodies below are missing elided lines
    # (loop headers, try/except wrappers, returns); fragments are annotated
    # but otherwise left untouched.

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # NOTE(review): initialization of the attributes used below
        # (.entries, .entry_keys, .by_key, .callbacks) and the sys.path
        # default for `entries` are elided from this view.
        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    # NOTE(review): the `def find(self, req):` header for the following body
    # is elided from this view.
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req) # XXX add more info
        # NOTE(review): the final `return dist` is elided from this view.

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        # NOTE(review): the enclosing `for dist in self:` loop, the `if`
        # branch paired with the `elif` below, and the yield statements are
        # elided, leaving these fragments.
        entries = dist.get_entry_map(group)
        for ep in entries.values():
        elif name in entries:

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Execute in the caller's globals, as if run as a main script.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        # NOTE(review): an intervening `ns.clear()` appears to be elided.
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    # NOTE(review): the `def __iter__(self):` header for the following body
    # is elided from this view.
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        # NOTE(review): the `seen` bookkeeping that keeps the yields unique
        # is elided from this view.
        for item in self.entries:
            for key in self.entry_keys[item]:
                yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        # NOTE(review): the `if insert:` and `if entry is None:` guards for
        # the next two statements are elided from this view.
        dist.insert_on(self.entries, entry)
        entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return # ignore hidden distros

        # If we have a __requires__ then we can already tell if this
        # dist is unsatisfactory, in which case we won't add it.
        if __requires__ is not None:
            for thisreqstr in __requires__:
                # NOTE(review): the `try:` wrapper for the parse below and
                # the early return taken when `dist` fails the requirement
                # are elided from this view.
                for thisreq in parse_requirements(thisreqstr):
                    if thisreq.key == dist.key:
                        if dist not in thisreq:
                except ValueError, e:
                    e.args = tuple(e.args + ({'thisreqstr': thisreqstr},))

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers of the newly activated distribution.
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """
        requirements = list(requirements)[::-1] # set up the stack
        processed = {} # set of processed requirements
        best = {} # key -> dist
        # NOTE(review): the `to_activate = []` initialization, the
        # `while requirements:` loop header and several guard `if` lines in
        # the body are elided from this view, leaving the fragments below.
        req = requirements.pop(0) # process dependencies breadth-first

        # Ignore cyclic or redundant dependencies
        dist = best.get(req.key)

        # Find the best distribution and add it to the map
        dist = self.by_key.get(req.key)
        env = Environment(self.entries)
        dist = best[req.key] = env.best_match(req, self, installer)
        raise DistributionNotFound(req) # XXX put more info here
        to_activate.append(dist)

        # Oops, the "best" so far conflicts with a dependency
        raise VersionConflict(dist,req) # XXX put more info here
        requirements.extend(dist.requires(req.extras)[::-1])
        processed[req] = True

        return to_activate # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions) # add plugins+libs to sys.path
            print "Couldn't load", errors # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        # NOTE(review): the closing `):` of the signature above, the
        # initialization of `error_info`/`distributions`, and the
        # `if full_env is None:` branching are elided from this view.
        plugin_projects = list(plugin_env)
        plugin_projects.sort() # scan project names in alphabetic order

        env = Environment(self.entries)
        env = full_env + plugin_env

        shadow_set = self.__class__([])
        map(shadow_set.add, self) # put all our entries in shadow_set

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                # NOTE(review): the `try:` wrapper around resolve() and the
                # `if fallback:` branching are elided from this view.
                resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError,v:
                    error_info[dist] = v # save error info
                    continue # try the next older version of project
                    break # give up on this project, keep going
                map(shadow_set.add, resolvees)
                distributions.update(dict.fromkeys(resolvees))

                # success, no need to try any more versions of this project

        distributions = list(distributions)
        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))
        # NOTE(review): the loop activating `needed` and the final
        # `return needed` are elided from this view.

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        # NOTE(review): the early return under this guard and the loop that
        # replays existing distributions to `callback` are elided.
        if callback in self.callbacks:
        self.callbacks.append(callback)

    def _added_new(self, dist):
        # Notify every subscribed callback of a newly added distribution.
        # NOTE(review): the `callback(dist)` loop body is elided.
        for callback in self.callbacks:

    def __getstate__(self):
        # Snapshot entries/keys/by_key (and callbacks) as copies.
        # NOTE(review): the `return (` opener and the trailing
        # `self.callbacks[:])` element of the state tuple are elided.
        self.entries[:], self.entry_keys.copy(), self.by_key.copy(),

    def __setstate__(self, (entries, keys, by_key, callbacks)):
        # Restore a snapshot produced by __getstate__; copies, not aliases.
        # (Python 2-only tuple-unpacking parameter syntax.)
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE(review): several method bodies below are missing elided lines
    # (guards, returns); fragments are annotated but otherwise untouched.

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE(review): assignments of .python, ._distmap and ._cache are
        # elided from this view.
        self.platform = platform
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                # NOTE(review): the `self.add(dist)` call is elided.

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        # NOTE(review): this body is fragmentary -- the try/except around the
        # cache lookup, the `return []` for unknown projects, and the sort
        # call are elided from this view.
        return self._cache[project_name]

        project_name = project_name.lower()
        if project_name not in self._distmap:

        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
        return self._cache[project_name]

    # NOTE(review): the `def add(self, dist):` header for this body is elided.
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                # NOTE(review): the `dists.append(dist)` call is elided.
            if dist.key in self._cache:
                _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)

        If a suitable distribution isn't active, this method returns the
        newest platform-dependent distribution in the environment that meets
        the ``Requirement`` in `req`. If no suitable platform-dependent
        distribution is found, then the newest platform-independent
        distribution that meets the requirement is returned. (A platform-
        dependent distribution will typically have code compiled or
        specialized for that platform.)

        Otherwise, if `installer` is supplied, then the result of calling the
        environment's ``obtain(req, installer)`` method will be returned.
        """
        dist = working_set.find(req)
        # NOTE(review): the `if dist is not None: return dist` shortcut and
        # the `return dist` statements inside the two loops below are elided.

        # first try to find a platform-dependent dist
        for dist in self[req.key]:
            if dist in req and dist.platform is not None:

        # then try any other dist
        for dist in self[req.key]:

        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    # NOTE(review): the `def __iter__(self):` header for this body is elided.
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            # NOTE(review): the `self.add(other)` call is elided.
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    # NOTE(review): the `self.add(dist)` call and the `else:`
                    # header for the raise below are elided from this view.
            raise TypeError("Can't add %r to environment" % (other,))
        # NOTE(review): the final `return self` is elided from this view.

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            # NOTE(review): the `new += env` loop body and the final
            # `return new` are elided from this view.

AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
    # NOTE(review): the attribute labels above were reconstructed;
    # `cache_path` and `original_error` match the assignments made in
    # ResourceManager.extraction_error(), but `manager` is not directly
    # visible in this view -- confirm against upstream.
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    # NOTE(review): the `def __init__(self):` header for this line is elided.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        # NOTE(review): the `resource_name` argument line and closing paren
        # of this call are elided from this view.
        return get_provider(package_or_requirement).resource_isdir(

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        # NOTE(review): call continuation elided, as above.
        return get_provider(package_or_requirement).get_resource_filename(

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        # NOTE(review): call continuation elided, as above.
        return get_provider(package_or_requirement).get_resource_stream(

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        # NOTE(review): call continuation elided, as above.
        return get_provider(package_or_requirement).get_resource_string(

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        # NOTE(review): call continuation elided, as above.
        return get_provider(package_or_requirement).resource_listdir(

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        # NOTE(review): interior lines of the message template below
        # (including its %s placeholders) and the call's closing paren are
        # elided from this view; the visible string fragments are preserved
        # byte-for-byte.
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
The Python egg cache directory is currently set to:
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        # Attach context so callers can report where extraction was attempted.
        err.cache_path = cache_path
        err.original_error = old_exc
        # NOTE(review): `err.manager = self` and the final `raise err` are
        # presumably elided -- confirm against upstream.

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        # NOTE(review): the try/except wrapper around the directory creation
        # and the final `return target_path` are elided from this view.
        _bypass_ensure_directory(target_path)
        self.extraction_error()

        self.cached_files[target_path] = 1

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            # NOTE(review): the `raise ValueError(` opener and closing paren
            # around this message are elided from this view.
            "Can't change extraction path, files already extracted"

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # NOTE(review): the method body is elided from this view.
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    # NOTE(review): this body is fragmentary -- the try/except around the
    # environment lookup, the Windows platform branch, the `app_homes = [`
    # wrapper around the tuples below, and several loop/guard lines are
    # elided from this view.
    return os.environ['PYTHON_EGG_CACHE']

    return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data' # XXX this may be locale-specific!
    # Candidate (environment-variable combo, subdir) pairs, best first.
    (('APPDATA',), None), # best option, should be locale-safe
    (('USERPROFILE',), app_data),
    (('HOMEDRIVE','HOMEPATH'), app_data),
    (('HOMEPATH',), app_data),
    (('WINDIR',), app_data), # 95/98/ME

    for keys, subdir in app_homes:
        if key in os.environ:
            dirname = os.path.join(dirname, os.environ[key])
        dirname = os.path.join(dirname,subdir)
        return os.path.join(dirname, 'Python-Eggs')
    # NOTE(review): the RuntimeError wrapper around the message below is
    # elided; "enviroment" is a typo in the runtime string, left untouched
    # here because changing it would alter program output.
    "Please set the PYTHON_EGG_CACHE enviroment variable"
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    version = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
def to_filename(name):
    """Return the filename-escaped form of a project or version name.

    Currently this just substitutes ``'_'`` for every ``'-'``.
    """
    return '_'.join(name.split('-'))
1177 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    def __init__(self, module):
        # Remember the module's PEP 302 loader (if any) and the directory
        # containing its __file__, which anchors all resource lookups.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    def get_resource_filename(self, manager, resource_name):
        # Resolve the '/'-separated resource name to a path under module_path.
        return self._fn(self.module_path, resource_name)
    def get_resource_stream(self, manager, resource_name):
        # Wrap the resource's contents in an in-memory file-like object.
        return StringIO(self.get_resource_string(manager, resource_name))
    def get_resource_string(self, manager, resource_name):
        # Read the resource contents via the loader's get_data() hook.
        return self._get(self._fn(self.module_path, resource_name))
    def has_resource(self, resource_name):
        # True if the named resource exists under module_path.
        return self._has(self._fn(self.module_path, resource_name))
    def has_metadata(self, name):
        # False when there is no egg-info area at all.
        return self.egg_info and self._has(self._fn(self.egg_info,name))
    def get_metadata(self, name):
        # Return the text of metadata file `name` from the egg-info area.
        if not self.egg_info:
            # NOTE(review): the guard's body (presumably an empty-string
            # return) appears elided in this view.
        return self._get(self._fn(self.egg_info,name))
    def get_metadata_lines(self, name):
        # Yield the non-blank, non-comment lines of a metadata file.
        return yield_lines(self.get_metadata(name))
    def resource_isdir(self,resource_name):
        # True if the named resource is a (virtual) subdirectory.
        return self._isdir(self._fn(self.module_path, resource_name))
    def metadata_isdir(self,name):
        # False when there is no egg-info area at all.
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
    def resource_listdir(self,resource_name):
        # List entries of a (virtual) resource subdirectory.
        return self._listdir(self._fn(self.module_path,resource_name))
    def metadata_listdir(self,name):
        # List entries of a metadata subdirectory.
        # NOTE(review): an egg_info guard line appears elided in this view.
        return self._listdir(self._fn(self.egg_info,name))
    def run_script(self,script_name,namespace):
        # Execute an installed script in `namespace`, preferring the real
        # on-disk file when it exists so tracebacks point at actual lines.
        # (Python 2 code: uses execfile and the exec statement.)
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n'.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            execfile(script_filename, namespace, namespace)
        # Otherwise run from the in-archive text; prime linecache so
        # tracebacks can still display source lines.
        from linecache import cache
        cache[script_filename] = (
            len(script_text), 0, script_text.split('\n'), script_filename
        script_code = compile(script_text,script_filename,'exec')
        exec script_code in namespace, namespace
    def _has(self, path):
        # Existence probe; subclasses must override for their storage type.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _isdir(self, path):
        # Directory probe; subclasses must override for their storage type.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _listdir(self, path):
        # Directory listing; subclasses must override for their storage type.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _fn(self, base, resource_name):
        # Join `base` with the '/'-separated resource name using the local
        # OS path separator.
        return os.path.join(base, *resource_name.split('/'))
    def _get(self, path):
        # Delegate to the loader's optional PEP 302 get_data() extension.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
1270 register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self,module):
        NullProvider.__init__(self,module)
        # Locate the enclosing .egg (if any) for this module.
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        # NOTE(review): the walk-up loop around this block appears elided
        # in this view.
        if path.lower().endswith('.egg'):
            # Found the egg: record its name, metadata dir, and root.
            self.egg_name = os.path.basename(path)
            self.egg_info = os.path.join(path, 'EGG-INFO')
            self.egg_root = path
        path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self,path):
        return os.path.isdir(path)

    def _listdir(self,path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Real file on disk: hand back a binary file object directly.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        stream = open(path, 'rb')
        # NOTE(review): the try/finally that closes `stream` appears
        # elided in this view.
        return stream.read()
1321 register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # Every probe fails, every read is empty, every listing is [].
    _isdir = _has = lambda self,path: False
    _get = lambda self,path: ''
    _listdir = lambda self,path: []
1335 empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # NOTE(review): several lines of this class appear elided in this
    # view (class attributes, some control-flow and closing parens);
    # the code below reproduces only the visible statements.

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # Zip directory info for this archive, keyed by in-zip subpath.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)

    def get_resource_filename(self, manager, resource_name):
        # Extract the resource into the filesystem cache and return the
        # extracted path; only meaningful for eggs, not plain zips.
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources are all extracted whenever one is requested.
            self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Directories extract recursively, one member at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
            return os.path.dirname(last) # return the extracted directory name
        zip_stat = self.zipinfo[zip_path]
        # Decode DOS-format date/time and size from the zip directory entry.
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
        (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        timestamp = time.mktime(date_time)
        real_path = manager.get_cache_path(
            self.egg_name, self._parts(zip_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, don't bother extracting
        # Write to a temp name, then rename into place so readers never
        # see a partially-written file.
        outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
        os.write(outf, self.loader.get_data(zip_path))
        utime(tmpnam, (timestamp,timestamp))
        manager.postprocess(tmpnam, real_path)
        rename(tmpnam, real_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, somebody did it just ahead of us
            elif os.name=='nt': # Windows, del old file and retry
                rename(tmpnam, real_path)
        manager.extraction_error() # report a user-friendly error

    def _get_eager_resources(self):
        # Lazily read native_libs.txt / eager_resources.txt, which list
        # resources that must all be extracted together.
        if self.eagers is None:
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers

        # NOTE(review): the _index() method header and try: appear elided;
        # this builds a parent-dir -> child-names map of the zip contents.
        return self._dirindex
    except AttributeError:
        for path in self.zipinfo:
            parts = path.split(os.sep)
            parent = os.sep.join(parts[:-1])
            ind[parent].append(parts[-1])
            ind[parent] = [parts.pop()]
        self._dirindex = ind

    def _has(self, fspath):
        # A path exists if it is a zip member or an implied directory.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1479 register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self,path):
        # Path of the PKG-INFO file whose contents back this provider.
        self.path = path

    def has_metadata(self,name):
        # Only the PKG-INFO pseudo-entry exists.
        return name=='PKG-INFO'

    def get_metadata(self,name):
        """Return the PKG-INFO text; raise KeyError for any other name."""
        if name=='PKG-INFO':
            # Close the handle explicitly instead of leaking it until
            # garbage collection (the original never closed the file).
            f = open(self.path,'rU')
            try:
                return f.read()
            finally:
                f.close()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self,name):
        # Yield the non-blank, non-comment lines of PKG-INFO.
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the importable location; `egg_info` is where the
        # distribution's metadata files live.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Mirror ZipProvider.__init__, but start from an importer rather
        # than a module object.
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        # NOTE(review): the prefix-test if/else lines appear elided in
        # this view; only one of the next two assignments applies.
        self.module_path = os.path.join(importer.archive, importer.prefix)
        self.module_path = importer.archive
        self._setup_prefix()
1587 """PEP 302 Importer that wraps Python's "normal" import algorithm"""
1589 def __init__(self, path=None):
    def find_module(self, fullname, path=None):
        # PEP 302 finder built on the classic imp.find_module machinery.
        # NOTE(review): several branch bodies appear elided in this view.
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
        if self.path is None:
        file, filename, etc = imp.find_module(subname, path)
        return ImpLoader(file, filename, etc)
1608 """PEP 302 Loader that wraps Python's "normal" import algorithm"""
    # Hold the open file + description tuple returned by imp.find_module.
    # NOTE(review): the self.file / self.etc assignments appear elided.
    def __init__(self, file, filename, etc):
        self.filename = filename
    def load_module(self, fullname):
        # NOTE(review): the surrounding try/finally appears elided here.
        mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        if self.file: self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery.  The returned importer is only cached if it was created by a
    path hook.

    NOTE(review): the try/except scaffolding (cache hit, hook ImportError
    handling) appears elided in this view.
    """
    importer = sys.path_importer_cache[path_item]
    # Ask each installed path hook to claim the path item.
    for hook in sys.path_hooks:
        importer = hook(path_item)
    sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
        # Fall back to a wrapper around the builtin import machinery.
        importer = ImpWrapper(path_item)
1668 _declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Install `distribution_finder` as the handler for `importer_type`.

    `importer_type` is a PEP 302 importer class (the type of a sys.path
    item handler).  `distribution_finder` is called with the importer
    instance, the path item, and an `only` flag, and must yield the
    ``Distribution`` objects found on that path item; see
    ``pkg_resources.find_on_path`` for a reference implementation.
    """
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Pick the finder registered for this path item's importer type,
    # then delegate to it.
    importer_obj = get_importer(path_item)
    finder_fn = _find_adapter(_distribution_finders, importer_obj)
    return finder_fn(importer_obj, path_item, only)
1686 def find_in_zip(importer, path_item, only=False):
1687 metadata = EggMetadata(importer)
1688 if metadata.has_metadata('PKG-INFO'):
1689 yield Distribution.from_filename(path_item, metadata=metadata)
1691 return # don't yield nested distros
1692 for subitem in metadata.resource_listdir('/'):
1693 if subitem.endswith('.egg'):
1694 subpath = os.path.join(path_item, subitem)
1695 for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
1698 register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # Prefer the C implementation; fall back to the pure-Python module.
    # (Python 2 modules.)  NOTE(review): try/except lines appear elided.
    from cStringIO import StringIO
    from StringIO import StringIO
    return StringIO(*args,**kw)
1709 def find_nothing(importer, path_item, only=False):
1711 register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    # NOTE(review): several control-flow lines (yields/continues/else
    # branches) appear elided in this view.
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # The path item is itself an unpacked egg directory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
        # scan for .egg and .egg-info in directory
        for entry in os.listdir(path_item):
            lower = entry.lower()
            if lower.endswith('.egg-info'):
                fullpath = os.path.join(path_item, entry)
                if os.path.isdir(fullpath):
                    # egg-info directory, allow getting metadata
                    metadata = PathMetadata(path_item, fullpath)
                metadata = FileMetadata(fullpath)
                yield Distribution.from_location(
                    path_item,entry,metadata,precedence=DEVELOP_DIST
            elif not only and lower.endswith('.egg'):
                for dist in find_distributions(os.path.join(path_item, entry)):
            elif not only and lower.endswith('.egg-link'):
                # Follow .egg-link indirection files, one target per line.
                # (Python 2 `file()` builtin.)
                for line in file(os.path.join(path_item, entry)):
                    if not line.strip(): continue
                    for item in find_distributions(os.path.join(path_item,line.rstrip())):
1748 register_finder(ImpWrapper, find_on_path)
1750 _declare_state('dict', _namespace_handlers = {})
1751 _declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
    """Install `namespace_handler` as the handler for `importer_type`.

    `importer_type` is a PEP 302 importer class (the type of a sys.path
    item handler).  `namespace_handler` is a callable of the form::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    A namespace handler is only invoked after the importer has agreed it
    can handle the relevant path item, and it should return a subpath
    only if the module's __path__ does not already contain an equivalent
    one.  See ``pkg_resources.file_ns_handler`` for an example.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    # NOTE(review): several early-return and guard lines appear elided
    # in this view.
    importer = get_importer(path_item)
    if importer is None:
    loader = importer.find_module(packageName)
    module = sys.modules.get(packageName)
    # Create a fresh namespace module if the package isn't loaded yet.
    module = sys.modules[packageName] = imp.new_module(packageName)
    module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # Append the new subpath, then reload so the loader sees the
        # extended __path__ (restored afterwards in case it was replaced).
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
1791 def declare_namespace(packageName):
1792 """Declare that package 'packageName' is a namespace package"""
1796 if packageName in _namespace_packages:
1799 path, parent = sys.path, None
1800 if '.' in packageName:
1801 parent = '.'.join(packageName.split('.')[:-1])
1802 declare_namespace(parent)
1805 path = sys.modules[parent].__path__
1806 except AttributeError:
1807 raise TypeError("Not a package:", parent)
1809 # Track what packages are namespaces, so when new path items are added,
1810 # they can be updated
1811 _namespace_packages.setdefault(parent,[]).append(packageName)
1812 _namespace_packages.setdefault(packageName,[])
1814 for path_item in path:
1815 # Ensure all the parent's path items are reflected in the child,
1817 _handle_ns(packageName, path_item)
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # NOTE(review): import-lock acquire/release lines appear elided here.
    for package in _namespace_packages.get(parent,()):
        subpath = _handle_ns(package, path_item)
        # Recurse so sub-namespace-packages pick up the new subpath too.
        if subpath: fixup_namespace_packages(subpath,package)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            # NOTE(review): the duplicate-path early return appears elided.
    # Only return the path if it's not already there
1844 register_namespace_handler(ImpWrapper,file_ns_handler)
1845 register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1848 def null_ns_handler(importer, path_item, packageName, module):
1851 register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks first, then apply the platform's case folding.
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename,_cache={}):
    # Memoized normalize_path(); the mutable default arg is an
    # intentional per-process cache.
    # NOTE(review): the try/except KeyError scaffolding appears elided.
    return _cache[filename]
    _cache[filename] = result = normalize_path(filename)
1865 def _set_parent_ns(packageName):
1866 parts = packageName.split('.')
1869 parent = '.'.join(parts)
1870 setattr(sys.modules[parent], name, sys.modules[packageName])
1873 def yield_lines(strs):
1874 """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
1875 if isinstance(strs,basestring):
1876 for s in strs.splitlines():
1878 if s and not s.startswith('#'): # skip blank lines/comments
1882 for s in yield_lines(ss):
# Pre-compiled matchers for the requirement / entry-point mini-grammar.
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses "Name-ver-pyver-plat" egg basenames.
# NOTE(review): the leading name-group line of this pattern appears
# elided in this view.
EGG_NAME = re.compile(
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE

# Version-string tokenizer and pre-release spelling normalization:
# pre/preview/rc sort as 'c'; '-' marks a patch level; dev sorts lowest.
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
    # Split `s` into comparable chunks, normalizing pre-release spellings.
    # NOTE(review): the continue/else branches appear elided in this view.
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    # NOTE(review): the parts-list initialization and the append/return
    # tail of this function appear elided in this view.
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
class EntryPoint(object):
    """Object representing an advertised importable object"""

    # NOTE(review): numerous lines of this class appear elided in this
    # view (method headers, try/except scaffolding, returns); the code
    # below reproduces only the visible statements.

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping through Requirement.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras

        # __str__ fragment: "name = module:attr.path [extra1,extra2]".
        s = "%s = %s" % (self.name, self.module_name)
        s += ':' + '.'.join(self.attrs)
        s += ' [%s]' % ','.join(self.extras)

        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        # Import the target module, then walk the dotted attribute path.
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            entry = getattr(entry,attr)
        except AttributeError:
            raise ImportError("%r has no %r attribute" % (entry,attr))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # Resolve this entry point's requirements into the working set.
        # (Python 2: map() used for its side effect.)
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        name,value = src.split('=',1)
        value,extras = value.split('[',1)
        # Piggyback on Requirement's extras grammar for validation.
        req = Requirement.parse("x["+extras)
        if req.specs: raise ValueError
        value,attrs = value.split(':',1)
        if not MODULE(attrs.rstrip()):
        attrs = attrs.rstrip().split('.')
        "EntryPoint must be in 'name=module:attrs [extras]' format",
        return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            raise ValueError("Duplicate entry point", group, ep.name)

    parse_group = classmethod(parse_group)

    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # Accept either a {group: lines} dict or INI-style text sections.
        if isinstance(data,dict):
        data = split_sections(data)
        for group, lines in data:
            raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)

    parse_map = classmethod(parse_map)
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""

    # NOTE(review): numerous lines of this class appear elided in this
    # view (method/def headers, try: lines, returns, closing parens);
    # the code below reproduces only the visible statements.

    # __init__ keyword parameters and body (its def line is elided):
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Metadata access is delegated to this provider via __getattr__.
        self._provider = metadata or empty_provider

    def from_location(cls,location,basename,metadata=None,**kw):
        # Alternate constructor: parse name/version/pyver/plat out of an
        # egg or egg-info basename.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            project_name, version, py_version, platform = match.group(
                'name','ver','pyver','plat'
        location, metadata, project_name=project_name, version=version,
        py_version=py_version, platform=platform, **kw

    from_location = classmethod(from_location)

        # hashcmp fragment: sort key of version, precedence, key, location.
        getattr(self,'parsed_version',()), self.precedence, self.key,
        -len(self.location or ''), self.location, self.py_version,

    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

        # key property fragment: cached lowercase project name.
        except AttributeError:
            self._key = key = self.project_name.lower()

    def parsed_version(self):
        # Cached parse_version() of self.version.
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)

    parsed_version = property(parsed_version)

        # version property fragment: explicit value, or lazily read from
        # the PKG-INFO 'Version:' header.
            return self._version
        except AttributeError:
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            "Missing 'Version:' header and/or PKG-INFO file", self

    version = property(version)

        # _dep_map fragment: requirements grouped by extra name
        # (None key = unconditional requirements).
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))

    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        deps.extend(dm.get(None,()))
        deps.extend(dm[safe_extra(ext)])
        "%s has no such extra feature %r" % (self, ext)

    def _get_metadata(self,name):
        # Generator over the lines of metadata file `name`, if present.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            # Declared namespace packages must also see the new location.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        filename += '-'+self.platform

        # __repr__ fragment.
        return "%s (%s)" % (self,self.location)

        # __str__ fragment: "Name version" with graceful fallback.
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,

    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        raise ImportError("Entry point %r not found" % ((group,name),))

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
            # Cached parse of entry_points.txt.
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
        if group is not None:
            return ep_map.get(group,{})

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath= [(p and _normalize_cached(p) or p) for p in path]
        for p, item in enumerate(npath):
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                npath.insert(p, nloc)
        if path is sys.path:
            self.check_version_conflict()
        # p is the spot where we found or inserted loc; now remove duplicates
        np = npath.index(nloc, p+1)
        del npath[np], path[np]

    def check_version_conflict(self):
        # Warn when a top-level module of this distribution was already
        # imported from somewhere other than self.location.
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(
        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),

    def has_version(self):
        issue_warning("Unbuilt egg for "+repr(self))

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        'project_name', 'version', 'py_version', 'platform', 'location',
        kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

        # extras property fragment: all named extras with requirements.
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
def issue_warning(*args,**kw):
    # Emit a warning attributed to the first caller outside this module.
    # NOTE(review): the level/globals setup and try/except lines appear
    # elided in this view.
    # find the first stack frame that is *not* code in
    # the pkg_resources module, to use for the warning
    while sys._getframe(level).f_globals is g:
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.

    NOTE(review): many control-flow lines of this function (while/try/
    raise scaffolding) appear elided in this view.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Scan a comma-separated list of ITEMs until TERMINATOR matches,
        # following \-continuations onto subsequent lines.
        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                line = lines.next(); p = 0
            except StopIteration:
                "\\ must not appear on the last nonblank line"
            match = ITEM(line,p)
            raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
            items.append(match.group(*groups))
            match = COMMA(line,p)
            p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                "Expected ',' or end-of-list in",line,"at",line[p:]
        match = TERMINATOR(line,p)
        if match: p = match.end() # skip the terminator, if any
        return line, p, items

    # Per-line parse: distribution name, optional [extras], version specs.
    match = DISTRO(line)
    raise ValueError("Missing distribution spec", line)
    project_name = match.group(1)
    match = OBRACKET(line,p)
    line, p, extras = scan_list(
        DISTRO, CBRACKET, line, p, (1,), "'extra' name"
    line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
    specs = [(op,safe_version(val)) for op,val in specs]
    yield Requirement(project_name, specs, extras)
2468 def _sort_dists(dists):
2469 tmp = [(dist.hashcmp,dist) for dist in dists]
2471 dists[::-1] = [d for hc,d in tmp]
2490 def __init__(self, project_name, specs, extras):
2491 """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
# keep both the raw and normalized project names; `key` is the
# lower-cased normalized name used for dictionary/index lookups
2492 self.unsafe_name, project_name = project_name, safe_name(project_name)
2493 self.project_name, self.key = project_name, project_name.lower()
# decorate each (op, version) spec with its parsed version and the
# comparison state-machine row for that operator
2494 index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
# (listing gap: line 2495 — presumably `index.sort()` — is elided)
2496 self.specs = [(op,ver) for parsed,trans,op,ver in index]
2497 self.index, self.extras = index, tuple(map(safe_extra,extras))
# (listing gap: line 2498 — the `self.hashCmp = (` tuple opener — elided)
2499 self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2500 frozenset(self.extras)
# (listing gap: line 2501 — closing the hashCmp tuple — is elided)
# cache the hash; hashCmp is the canonical equality/hash basis
2502 self.__hash = hash(self.hashCmp)
# fragment of Requirement.__str__ (the `def __str__(self):` line 2504 is
# elided): renders "name[extra1,extra2]op1ver1,op2ver2"
2505 specs = ','.join([''.join(s) for s in self.specs])
2506 extras = ','.join(self.extras)
2507 if extras: extras = '[%s]' % extras
2508 return '%s%s%s' % (self.project_name, extras, specs)
def __eq__(self, other):
    """Requirements are equal iff their normalized ``hashCmp`` tuples match.

    Anything that is not a ``Requirement`` compares unequal.
    """
    if not isinstance(other, Requirement):
        return False
    return self.hashCmp == other.hashCmp
# Membership test: `dist_or_version_string in requirement`.  Distributions
# must match this requirement's key; the version is then run through the
# precomputed comparison state machine rows stored in self.index.
2513 def __contains__(self,item):
2514 if isinstance(item,Distribution):
2515 if item.key != self.key: return False
2516 if self.index: item = item.parsed_version # only get if we need it
2517 elif isinstance(item,basestring):
2518 item = parse_version(item)
# (listing gap: line 2519 — presumably `last = None` initialization,
# which the loop below depends on — is elided)
2520 for parsed,trans,op,ver in self.index:
# trans maps cmp()'s -1/0/1 result onto an action code
2521 action = trans[cmp(item,parsed)]
# 'F'/'T' decide immediately; '+'/'-' record a provisional answer
2522 if action=='F': return False
2523 elif action=='T': return True
2524 elif action=='+': last = True
2525 elif action=='-' or last is None: last = False
2526 if last is None: last = True # no rules encountered
# (listing gap: the final `return last`, line 2527, is elided)
def __repr__(self):
    """Debug form that round-trips through ``Requirement.parse``."""
    spec = str(self)
    return "Requirement.parse(%r)" % spec
# fragment of Requirement.parse (exposed as a staticmethod below): parse a
# string containing exactly one requirement spec, raising ValueError for
# zero or multiple specs.  (listing gap: the `def parse(s):` header and
# the branches around lines 2538-2540 and 2543 are elided)
2537 reqs = list(parse_requirements(s))
2541 raise ValueError("Expected only one requirement", s)
2542 raise ValueError("No requirements found", s)
2544 parse = staticmethod(parse)
# fragment of _get_mro (the `def _get_mro(cls):` line 2557 is elided):
# return the method-resolution order of `cls`, excluding `cls` itself.
2558 """Get an mro for a type or classic class"""
2559 if not isinstance(cls,type):
# classic (old-style, Python 2) class: wrap it in a new-style subclass so
# it gains a __mro__; slot [0] is the wrapper, hence the [1:] below
2560 class cls(cls,object): pass
2561 return cls.__mro__[1:]
2564 def _find_adapter(registry, ob):
2565 """Return an adapter factory for `ob` from `registry`"""
# walk the MRO of ob's class, most-specific first
2566 for t in _get_mro(getattr(ob, '__class__', type(ob))):
# (listing gap: lines 2567-2568 — presumably `if t in registry: return
# registry[t]` — are elided)
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    `path` is a file path; its dirname (including any intermediate
    directories) is created if missing.  A `path` with no directory
    component is a no-op instead of crashing on ``os.makedirs('')``, and a
    directory created concurrently between the ``isdir`` check and the
    ``makedirs`` call (TOCTOU) is tolerated rather than raising.
    """
    dirname = os.path.dirname(path)
    if dirname and not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # another process/thread may have created it first; only
            # propagate if the directory still doesn't exist
            if not os.path.isdir(dirname):
                raise
2577 def split_sections(s):
2578 """Split a string or iterable thereof into (section,content) pairs
2580 Each ``section`` is a stripped version of the section header ("[section]")
2581 and each ``content`` is a list of stripped lines excluding blank lines and
2582 comment-only lines. If there are any such lines before the first section
2583 header, they're returned in a first ``section`` of ``None``.
# (listing gap: lines 2584-2586 — the docstring close and the
# `section = None` / `content = []` initialization — are elided)
2587 for line in yield_lines(s):
2588 if line.startswith("["):
2589 if line.endswith("]"):
# flush the previous section before starting a new one; the `or` also
# emits a leading None-section when there was pre-header content
2590 if section or content:
2591 yield section, content
2592 section = line[1:-1].strip()
# (listing gap: lines 2593-2594 — resetting `content` and the `else:`
# for a "[" line without a closing "]" — are elided)
2595 raise ValueError("Invalid section heading", line)
# (listing gap: line 2596 — the `else:` for ordinary content lines —
# is elided)
2597 content.append(line)
2599 # wrap up last segment
2600 yield section, content
# Sandbox-bypassing wrapper around tempfile.mkstemp: temporarily restores
# the real os.open (captured at import time as `os_open`) so the sandbox's
# patched os.open doesn't block temp-file creation.
2602 def _mkstemp(*args,**kw):
2603 from tempfile import mkstemp
# (listing gap: lines 2604-2605 — saving `old_open = os.open` and the
# `try:` — are elided)
2606 os.open = os_open # temporarily bypass sandboxing
2607 return mkstemp(*args,**kw)
# (listing gap: line 2608 — the `finally:` — is elided)
2609 os.open = old_open # and then put it back
2612 # Set up global resource manager (deliberately not state-saved)
2613 _manager = ResourceManager()
# (listing gap: line 2614 — presumably `def _initialize(g):` — is elided)
# re-export the ResourceManager's public methods as module-level functions
2615 for name in dir(_manager):
2616 if not name.startswith('_'):
2617 g[name] = getattr(_manager, name)
2618 _initialize(globals())
2620 # Prepare the master working set and make the ``require()`` API available
2622 _declare_state('object', working_set = WorkingSet())
2624 # Does the main program list any requirements?
# (listing gap: the enclosing `try:` before this import is elided)
2625 from __main__ import __requires__
# (listing gap: line 2626 — `except ImportError:` — is elided)
2627 pass # No: just use the default working set based on sys.path
# (listing gap: line 2628 — the `else:` branch header — is elided)
2629 # Yes: ensure the requirements are met, by prefixing sys.path if necessary
# (listing gap: line 2630 — an inner `try:` — is elided)
2631 working_set.require(__requires__)
2632 except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path
2633 working_set = WorkingSet([]) # by starting with an empty path
# (listing gap: line 2634 — another `try:` — is elided)
2635 for dist in working_set.resolve(
2636 parse_requirements(__requires__), Environment()
# (listing gap: line 2637 — closing the resolve() call — is elided)
2638 working_set.add(dist)
2639 except DistributionNotFound:
# (listing gap: line 2640 — the handler body, presumably re-raising or
# exiting — is elided)
2641 for entry in sys.path: # add any missing entries from sys.path
2642 if entry not in working_set.entries:
2643 working_set.add_entry(entry)
2644 sys.path[:] = working_set.entries # then copy back to sys.path
# public module-level API bound to the master working set
2646 require = working_set.require
2647 iter_entry_points = working_set.iter_entry_points
2648 add_activation_listener = working_set.subscribe
2649 run_script = working_set.run_script
2650 run_main = run_script # backward compatibility
2651 # Activate all distributions already on sys.path, and ensure that
2652 # all distributions added to the working set in the future (e.g. by
2653 # calling ``require()``) will get activated as well.
2654 add_activation_listener(lambda dist: dist.activate())
# rebuild entries from sys.path so activation order matches path order
# (Python 2 idiom: map() is used purely for its side effect here)
2655 working_set.entries=[]; map(working_set.add_entry,sys.path) # match order