1 """Package resource API
4 A resource is a logical file contained within a package, or a logical
5 subdirectory thereof. The package resource API expects resource names
6 to have their path parts separated with ``/``, *not* whatever the local
7 path separator is. Do not use os.path operations to manipulate resource
8 names being passed into the API.
10 The package resource API is designed to work with normal filesystem packages,
11 .egg files, and unpacked .egg files. It can also work in a limited way with
12 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 import sys, os, zipimport, time, re, imp
21 from sets import ImmutableSet as frozenset
23 # capture these to bypass sandboxing
24 from os import utime, rename, unlink, mkdir
25 from os import open as os_open
26 from os.path import isdir, split
def _bypass_ensure_directory(name, mode=0777):
    # Sandbox-bypassing version of ensure_directory(): recursively create the
    # parent directories of `name` using the raw os functions captured above,
    # so extraction works even when sandboxing wraps os.mkdir et al.
    dirname, filename = split(name)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        # NOTE(review): the mkdir(dirname, mode) call appears to be elided
        # from this view; also note the recursive call does not propagate
        # `mode` -- confirm against the full source.
def _declare_state(vartype, **kw):
    # Register module-level state variables (name -> type tag) so they can be
    # captured/restored by the __getstate__/__setstate__ helpers below.
    # NOTE(review): the binding of `g` (presumably globals()) and the storing
    # of initial values are elided from this view.
    for name, val in kw.iteritems():
        _state_vars[name] = vartype

# NOTE(review): the 'def __getstate__():' header is elided from this view.
    # Capture each registered state variable using its _sget_* handler.
    for k, v in _state_vars.iteritems():
        state[k] = g['_sget_'+v](g[k])

def __setstate__(state):
    # Restore each captured state variable using its _sset_* handler.
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)

def _sset_dict(key, ob, state):
    # Restore a dict state variable in place.
    # NOTE(review): the body (presumably ob.clear(); ob.update(state)) is
    # elided from this view.

def _sget_object(val):
    # Capture an object's state via its own __getstate__.
    return val.__getstate__()

def _sset_object(key, ob, state):
    # Restore an object's state via its own __setstate__.
    ob.__setstate__(state)

# No-op capture/restore handlers for state variables that need no pickling.
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform(); m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        # Substitute the *running* OS X version for the build-time version.
        plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
    # NOTE(review): the final 'return plat' is elided from this view.
# NOTE(review): the '__all__ = [' opening and closing ']' of this public-API
# listing are elided from this view.
# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',

# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',

# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',

# Exceptions
'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',

# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename',

# filesystem utilities
'ensure_directory', 'normalize_path',

# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',

# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    # NOTE(review): the 'def __repr__(self):' header is elided from this view.
        return self.__class__.__name__+repr(self.args)
# Raised by WorkingSet.find()/resolve() when the active distribution for a
# project does not satisfy the requested requirement.
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
# Raised by WorkingSet.resolve() when no installed or obtainable distribution
# satisfies a requirement.
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
# Raised when a requirement references an "extra" that the matched
# distribution does not declare.
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
179 _provider_factories = {}
180 PY_MAJOR = sys.version[:3]
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        # A Requirement resolves to an active (or newly activated) distro.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # NOTE(review): the try/except wrapping the first sys.modules lookup is
    # elided from this view; as shown, the import fallback is unreachable.
    module = sys.modules[moduleOrReq]
    __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Memoized (via the mutable default argument) lookup of the running
    # Mac OS version, split into string components, e.g. ['10', '4', '11'].
    # NOTE(review): the cache-hit check and the return statement are elided
    # from this view.
    from platform import mac_ver
    _cache.append(mac_ver()[0].split('.'))
214 def _macosx_arch(machine):
215 return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Older Pythons report e.g. 'darwin-...'; rewrite as
        # macosx-<major>.<minor>-<arch> from the running OS version.
        # NOTE(review): the try/except wrapping this branch is elided from
        # this view.
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
            _macosx_arch(machine))
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
    # NOTE(review): the final 'return plat' is elided from this view.
# Regexes for dissecting platform strings such as 'macosx-10.4-ppc' and
# legacy 'darwin-8.11.1-Power_Macintosh' (groups: version parts, then arch).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    provMac = macosVersionString.match(provided)
    # NOTE(review): several guard and return lines of this function are
    # elided from this view; the fragments below are reproduced as-is.

    # is this a Mac package?
    # this is backwards compatibility for packages built before
    # setuptools 0.6. All packages built after this point will
    # use the new macosx designation.
    provDarwin = darwinVersionString.match(provided)
    dversion = int(provDarwin.group(1))
    macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    if dversion == 7 and macosversion >= "10.3" or \
       dversion == 8 and macosversion >= "10.4":
        #warnings.warn("Mac eggs should be rebuilt to "
        #    "use the macosx designation instead of darwin.",
        #    category=DeprecationWarning)
    return False # egg isn't macosx or legacy darwin

    # are they the same major version and machine type?
    if provMac.group(1) != reqMac.group(1) or \
       provMac.group(3) != reqMac.group(3):
    # is the required OS major update >= the provided one?
    if int(provMac.group(2)) > int(reqMac.group(2)):

    # XXX Linux and other platforms' special cases should go here
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script with the *caller's* globals as its namespace.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    # NOTE(review): a statement between these lines (presumably clearing the
    # namespace) is elided from this view.
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script # backward compatibility
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize in stages: string -> Requirement -> Distribution.
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    # NOTE(review): the final 'return dist' is elided from this view.
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
class IMetadataProvider:
    # Documentation-only interface: objects providing access to a
    # distribution's metadata.  Methods are declared interface-style,
    # without 'self'.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Documentation-only interface extending IMetadataProvider with
    # resource (data file) access; methods declared without 'self'.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    # NOTE(review): many interior lines of this class are elided from this
    # view; fragments below are reproduced with comments only.

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # NOTE(review): initialization of .entries/.entry_keys/.by_key/
        # .callbacks and the sys.path default are elided from this view.
        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        reflect that.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    # NOTE(review): the 'def find(self, req):' header is elided from this view.
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req) # XXX add more info
        # (the final 'return dist' is elided from this view)

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        # NOTE(review): the per-distribution loop and the yield statements
        # are elided from this view.
        entries = dist.get_entry_map(group)
        for ep in entries.values():
        elif name in entries:

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Run the script with the *caller's* globals as its namespace.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        # NOTE(review): a statement between these lines (presumably clearing
        # the namespace) is elided from this view.
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    # NOTE(review): the '__iter__' method header is elided from this view.
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        for item in self.entries:
            for key in self.entry_keys[item]:
                # (the duplicate-suppression check is elided from this view)
                yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        # NOTE(review): the guards around the next two statements (likely
        # 'if insert:' and 'if entry is None:') are elided from this view.
        dist.insert_on(self.entries, entry)
        entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return # ignore hidden distros

        # If we have a __requires__ then we can already tell if this
        # dist is unsatisfactory, in which case we won't add it.
        if __requires__ is not None:
            for thisreqstr in __requires__:
                for thisreq in parse_requirements(thisreqstr):
                    if thisreq.key == dist.key:
                        if dist not in thisreq:
                            # (the early return is elided from this view)

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers of the newly activated distribution.
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution``.
        """
        # NOTE(review): the main processing loop and several guards are
        # elided from this view; the statements below are fragments.
        requirements = list(requirements)[::-1] # set up the stack
        processed = {} # set of processed requirements
        best = {} # key -> dist

        req = requirements.pop(0) # process dependencies breadth-first
        # Ignore cyclic or redundant dependencies
        dist = best.get(req.key)
        # Find the best distribution and add it to the map
        dist = self.by_key.get(req.key)
        env = Environment(self.entries)
        dist = best[req.key] = env.best_match(req, self, installer)
        raise DistributionNotFound(req) # XXX put more info here
        to_activate.append(dist)
        # Oops, the "best" so far conflicts with a dependency
        raise VersionConflict(dist,req) # XXX put more info here
        requirements.extend(dist.requires(req.extras)[::-1])
        processed[req] = True

        return to_activate # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
        # NOTE(review): the closing '):' of this signature is elided from
        # this view.
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions) # add plugins+libs to sys.path
            print "Couldn't load", errors # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        plugin_projects.sort() # scan project names in alphabetic order

        # NOTE(review): the conditional scaffolding selecting between these
        # two 'env' assignments is elided from this view.
        env = Environment(self.entries)
        env = full_env + plugin_env

        shadow_set = self.__class__([])
        map(shadow_set.add, self) # put all our entries in shadow_set

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                # NOTE(review): the 'try:' wrapping this resolve call is
                # elided from this view.
                resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError,v:
                    error_info[dist] = v # save error info
                    continue # try the next older version of project
                    break # give up on this project, keep going
                map(shadow_set.add, resolvees)
                distributions.update(dict.fromkeys(resolvees))
                # success, no need to try any more versions of this project

        distributions = list(distributions)
        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))
        # NOTE(review): the activation loop and return are elided from this
        # view.

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            # (the early return for duplicates is elided from this view)
        self.callbacks.append(callback)
        # (per the docstring, replay over existing distributions follows in
        # lines elided from this view)

    def _added_new(self, dist):
        # Notify every subscribed callback about the new distribution.
        for callback in self.callbacks:
            # (the callback(dist) invocation is elided from this view)

    def __getstate__(self):
        # NOTE(review): the 'return (' opening this tuple (and its close) are
        # elided from this view; copies are returned so unpickling is safe.
        self.entries[:], self.entry_keys.copy(), self.by_key.copy(),

    def __setstate__(self, (entries, keys, by_key, callbacks)):
        # Restore from copies so the supplied state stays independent.
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE(review): many interior lines of this class are elided from this
    # view; fragments below are reproduced with comments only.

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE(review): the defaults above are evaluated once, when the class
        # is defined; storage of `python` and setup of the _distmap/_cache
        # containers are elided from this view.
        self.platform = platform
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path
        for item in search_path:
            for dist in find_distributions(item):
                # (the self.add(dist) call is elided from this view)

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        # NOTE(review): the try/except scaffolding around this cache lookup
        # is elided from this view; statements below are fragments.
        return self._cache[project_name]
        project_name = project_name.lower()
        if project_name not in self._distmap:
            # (the empty-result branch is elided from this view)
        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
            # (sorting of `dists` is elided from this view)
        return self._cache[project_name]

    # NOTE(review): the 'add' method header is elided from this view.
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                # (the dists.append(dist) is elided from this view)
            if dist.key in self._cache:
                # Keep the cached list in newest-to-oldest order.
                _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)

        If a suitable distribution isn't active, this method returns the
        newest platform-dependent distribution in the environment that meets
        the ``Requirement`` in `req`. If no suitable platform-dependent
        distribution is found, then the newest platform-independent
        distribution that meets the requirement is returned. (A platform-
        dependent distribution will typically have code compiled or
        specialized for that platform.)

        Otherwise, if `installer` is supplied, then the result of calling the
        environment's ``obtain(req, installer)`` method will be returned.
        """
        dist = working_set.find(req)
        # first try to find a platform-dependent dist
        for dist in self[req.key]:
            if dist in req and dist.platform is not None:
                # (the 'return dist' is elided from this view)
        # then try any other dist
        for dist in self[req.key]:
            # (the membership test and return are elided from this view)
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    # NOTE(review): the '__iter__' method header is elided from this view.
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            # (the self.add(other) is elided from this view)
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    # (the self.add(dist) is elided from this view)
        # NOTE(review): the 'else:' guard for this raise and the final
        # 'return self' are elided from this view.
        raise TypeError("Can't add %r to environment" % (other,))

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unrestricted environment so nothing is filtered out.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            # (the 'new += env' accumulation and 'return new' are elided
            # from this view)
# Deprecated alias; listed under "backward compatibility only" in __all__.
AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:
    the resource manager that raised this exception; ``cache_path``, the
    base directory for resource extraction; and ``original_error``, the
    exception instance that caused extraction to fail (both set by
    ``ResourceManager.extraction_error()``).
    """
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for resource extraction; None means fall back to
    # get_default_cache().
    extraction_path = None

    # NOTE(review): the '__init__' method header is elided from this view.
        # Tracks extraction targets created via get_cache_path() so that
        # cleanup_resources() can find them later.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        # NOTE(review): the argument/closing paren of this delegation are
        # elided from this view.
        return get_provider(package_or_requirement).resource_isdir(

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        # NOTE(review): the argument/closing paren of this delegation are
        # elided from this view.
        return get_provider(package_or_requirement).get_resource_filename(

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        # NOTE(review): the argument/closing paren of this delegation are
        # elided from this view.
        return get_provider(package_or_requirement).get_resource_stream(

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        # NOTE(review): the argument/closing paren of this delegation are
        # elided from this view.
        return get_provider(package_or_requirement).get_resource_string(

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        # NOTE(review): the argument/closing paren of this delegation are
        # elided from this view.
        return get_provider(package_or_requirement).resource_listdir(

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()
        # Build a detailed, user-actionable error.  NOTE(review): several
        # literal lines of the message template are elided from this view.
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
The Python egg cache directory is currently set to:
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        err.cache_path = cache_path
        err.original_error = old_exc
        # NOTE(review): the final 'raise err' is elided from this view.

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        # NOTE(review): the try/except wrapping of these statements and the
        # final 'return target_path' are elided from this view.
        _bypass_ensure_directory(target_path)
        self.extraction_error()
        self.cached_files[target_path] = 1

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            # NOTE(review): the exception-raising wrapper around this message
            # is elided from this view.
            "Can't change extraction path, files already extracted"
        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # NOTE(review): the method body runs past the end of this view.
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".

    Raises RuntimeError on Windows when no suitable environment variables
    are set to locate a home/application-data directory.
    """
    try:
        # Explicit override always wins.
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data' # XXX this may be locale-specific!
    # Candidate (env-var combo, subdirectory) pairs, best first.
    app_homes = [
        (('APPDATA',), None), # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data), # 95/98/ME
    ]

    for keys, subdir in app_homes:
        # Assemble a base directory from the environment variables; skip the
        # candidate if any of its variables is unset.
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')

    raise RuntimeError(
        # fixed typo: "enviroment" -> "environment"
        "Please set the PYTHON_EGG_CACHE environment variable"
    )
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Runs of characters outside [A-Za-z0-9.] collapse to a single '_', and
    the result is lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return underscored.lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1174 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1180 def __init__(self, module):
1181 self.loader = getattr(module, '__loader__', None)
1182 self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1184 def get_resource_filename(self, manager, resource_name):
1185 return self._fn(self.module_path, resource_name)
1187 def get_resource_stream(self, manager, resource_name):
1188 return StringIO(self.get_resource_string(manager, resource_name))
1190 def get_resource_string(self, manager, resource_name):
1191 return self._get(self._fn(self.module_path, resource_name))
1193 def has_resource(self, resource_name):
1194 return self._has(self._fn(self.module_path, resource_name))
1196 def has_metadata(self, name):
1197 return self.egg_info and self._has(self._fn(self.egg_info,name))
1199 def get_metadata(self, name):
1200 if not self.egg_info:
1202 return self._get(self._fn(self.egg_info,name))
1204 def get_metadata_lines(self, name):
1205 return yield_lines(self.get_metadata(name))
1207 def resource_isdir(self,resource_name):
1208 return self._isdir(self._fn(self.module_path, resource_name))
1210 def metadata_isdir(self,name):
1211 return self.egg_info and self._isdir(self._fn(self.egg_info,name))
1214 def resource_listdir(self,resource_name):
1215 return self._listdir(self._fn(self.module_path,resource_name))
1217 def metadata_listdir(self,name):
1219 return self._listdir(self._fn(self.egg_info,name))
    def run_script(self, script_name, namespace):
        # Execute an egg's EGG-INFO/scripts/<script_name> inside `namespace`.
        # Raises ResolutionError when no such script is stored.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # normalize all newline conventions to '\n'
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # real file on disk: run it directly so tracebacks/debuggers work
            execfile(script_filename, namespace, namespace)
        # NOTE(review): the 'else:' introducing the zipped-egg branch below,
        # and the closing ')' of the cache assignment, are elided in this
        # excerpt — restore before use.  Priming linecache lets tracebacks
        # show source even though the file only exists inside the zip.
        from linecache import cache
        cache[script_filename] = (
            len(script_text), 0, script_text.split('\n'), script_filename
        script_code = compile(script_text,script_filename,'exec')
        exec script_code in namespace, namespace
1240 def _has(self, path):
1241 raise NotImplementedError(
1242 "Can't perform this operation for unregistered loader type"
1245 def _isdir(self, path):
1246 raise NotImplementedError(
1247 "Can't perform this operation for unregistered loader type"
1250 def _listdir(self, path):
1251 raise NotImplementedError(
1252 "Can't perform this operation for unregistered loader type"
    def _fn(self, base, resource_name):
        # Join `base` with the '/'-separated resource name using the local
        # path separator.  NOTE(review): one original line between the def
        # and the return is elided in this excerpt — confirm against upstream
        # before relying on edge-case behavior (e.g. empty resource names).
        return os.path.join(base, *resource_name.split('/'))
1260 def _get(self, path):
1261 if hasattr(self.loader, 'get_data'):
1262 return self.loader.get_data(path)
1263 raise NotImplementedError(
1264 "Can't perform this operation for loaders without 'get_data()'"
1267 register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        """Walk up from module_path looking for an enclosing .egg directory.

        Records egg_name/egg_info/egg_root when one is found.
        NOTE(review): the excerpt dropped the while/old/break loop-control
        lines; restored here so the upward walk terminates at the root.
        """
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        while path != old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # binary mode: resource contents are returned verbatim
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        # NOTE(review): the excerpt dropped the finally/close lines; without
        # them every read leaks an open file handle.
        stream = open(path, 'rb')
        try:
            return stream.read()
        finally:
            stream.close()

# Ordinary filesystem imports have a None loader; serve them from disk.
register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self, path: False
    _get = lambda self, path: ''
    _listdir = lambda self, path: []
    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module to
        # bind, and the module-level singleton below must be constructible
        # with no arguments.  NOTE(review): these two members were elided in
        # the excerpt; without them EmptyProvider() raises TypeError.
        pass

# shared do-nothing provider used as the default Distribution metadata
empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # NOTE(review): several lines of this class are elided in this excerpt
    # (the `eagers = None` class attribute, try/except scaffolding, closing
    # parens, returns, and the `def _index(self):` header).  Gaps are flagged
    # inline below; restore against upstream before use.

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # zipimport keeps a per-archive directory cache; reuse it directly
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        # NOTE(review): closing ')' of this raise is elided in the excerpt.
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        # NOTE(review): closing ')' of this raise is elided in the excerpt.
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)

    def get_resource_filename(self, manager, resource_name):
        # Extraction to a real file only makes sense for eggs, not bare zips
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # NOTE(review): the loop binding `name` over the eager list is
            # elided here — as shown, `name` is undefined.
            self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # a directory entry: extract every child, then report the directory
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                # NOTE(review): closing ')' of this call is elided.
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
            return os.path.dirname(last) # return the extracted directory name

        # decode the zip directory entry: DOS-packed date/time plus size
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        # NOTE(review): the 'date_time = (' opener for the next two lines is
        # elided in this excerpt.
        (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
        (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        timestamp = time.mktime(date_time)

        # NOTE(review): the try: wrapper around extraction is elided below.
        real_path = manager.get_cache_path(
            self.egg_name, self._parts(zip_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, don't bother extracting
        # extract to a temp name, stamp it to match the zip, then rename
        outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
        os.write(outf, self.loader.get_data(zip_path))
        utime(tmpnam, (timestamp,timestamp))
        manager.postprocess(tmpnam, real_path)
        rename(tmpnam, real_path)
        # NOTE(review): the except os.error: recovery wrapper is elided here;
        # the block below handles a concurrent extraction race.
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, somebody did it just ahead of
            elif os.name=='nt': # Windows, del old file and retry
                rename(tmpnam, real_path)
        manager.extraction_error() # report a user-friendly error

    def _get_eager_resources(self):
        # lazily collect the entries of native_libs.txt + eager_resources.txt
        # NOTE(review): the `eagers = []` initializer and final return are
        # elided in this excerpt.
        if self.eagers is None:
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers

    # NOTE(review): the `def _index(self):` header and try:/if/else control
    # lines are elided below; the body lazily builds and caches a mapping of
    # parent zip directory -> list of child names.
            return self._dirindex
        except AttributeError:
            for path in self.zipinfo:
                parts = path.split(os.sep)
                parent = os.sep.join(parts[:-1])
                ind[parent].append(parts[-1])
                ind[parent] = [parts.pop()]
            self._dirindex = ind

    def _has(self, fspath):
        # present either as a file entry or as a synthesized directory
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1476 register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # NOTE(review): the excerpt dropped this assignment; without it,
        # get_metadata() has no self.path to read from.
        self.path = path

    def has_metadata(self, name):
        return name == 'PKG-INFO'

    def get_metadata(self, name):
        # only the PKG-INFO file itself is served; everything else errors
        if name == 'PKG-INFO':
            return open(self.path, 'rU').read()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the sys.path entry; `egg_info` is the metadata directory.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # NOTE(review): the excerpt dropped the if/else around these two
        # assignments; restored so an importer bound to a subdirectory of
        # the archive maps module_path inside the zip.
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
1584 """PEP 302 Importer that wraps Python's "normal" import algorithm"""
1586 def __init__(self, path=None):
    def find_module(self, fullname, path=None):
        # PEP 302 find_module built on the (py2) `imp` module.
        subname = fullname.split(".")[-1]
        # a dotted name can't be found on a path-less (default) wrapper
        if subname != fullname and self.path is None:
        # NOTE(review): the 'return None' under the guard above, the
        # path-list setup (path = [self.path] / None), and the try/except
        # ImportError around imp.find_module are all elided in this excerpt.
        if self.path is None:
        file, filename, etc = imp.find_module(subname, path)
        return ImpLoader(file, filename, etc)
1605 """PEP 302 Loader that wraps Python's "normal" import algorithm"""
1607 def __init__(self, file, filename, etc):
1609 self.filename = filename
    def load_module(self, fullname):
        # NOTE(review): the try:/finally: wrapper around imp.load_module and
        # the final 'return mod' are elided in this excerpt; the close must
        # run even when loading fails.
        mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        if self.file: self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery. The returned importer is only cached if it was created by a
    path hook.
    """
    # NOTE(review): the try/except KeyError around the cache lookup, the
    # ImportError handling + break/else of the hook loop, and the final
    # 'return importer' are elided in this excerpt.
    importer = sys.path_importer_cache[path_item]
    for hook in sys.path_hooks:
        importer = hook(path_item)
    # cache hook-created importers only; None marks "no hook claimed it"
    sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
        # fall back to a wrapper over the builtin import machinery
        importer = ImpWrapper(path_item)
1665 _declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Record a distribution finder for a PEP 302 importer type.

    `importer_type` is the type/class of a sys.path-item handler
    ("importer"); `distribution_finder` is a callable that, given a path
    item and the importer instance, yields the ``Distribution`` objects
    found there (see ``pkg_resources.find_on_path`` for a reference
    implementation).
    """
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield the distributions reachable through `path_item`."""
    importer = get_importer(path_item)
    # dispatch to the finder registered for this importer's type
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found in the zip/egg handled by `importer`.

    NOTE(review): the excerpt dropped the ``if only:`` guard and the nested
    ``yield dist`` — as shown, the bare return fired unconditionally and the
    recursion discarded its results; both are restored here.
    """
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the zip itself is a distribution (an .egg file)
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        return  # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            # recurse into eggs bundled inside this "basket" zip
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand

    On first call this replaces the module-level ``StringIO`` name with the
    fastest available implementation (cStringIO when present), so later
    calls bypass the thunk entirely.
    NOTE(review): the ``global``/``try``/``except`` lines were elided in
    this excerpt; restored here.
    """
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args, **kw)
1706 def find_nothing(importer, path_item, only=False):
1708 register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    # NOTE(review): several structural lines of this function (the 'else:'
    # between the unpacked-egg branch and the directory scan, closing
    # parens, and the 'yield dist' / 'yield item' / 'break' lines of the
    # .egg and .egg-link branches) are elided in this excerpt.
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # the path item is itself an unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                elif not only and lower.endswith('.egg'):
                    for dist in find_distributions(os.path.join(path_item, entry)):
                elif not only and lower.endswith('.egg-link'):
                    # py2-only builtin file(); each non-blank line of an
                    # .egg-link names a directory to scan for distributions
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):

register_finder(ImpWrapper, find_on_path)
# Registries for namespace-package handling, tracked as restorable state:
# handlers by importer type, and declared namespace packages by parent.
_declare_state('dict', _namespace_handlers = {})
_declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
    """Record a namespace-package handler for a PEP 302 importer type.

    `importer_type` is the type/class of a sys.path-item handler
    ("importer"); `namespace_handler` is a callable of the form::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Handlers run only after the importer has already agreed to handle the
    path item, and should return a subpath only when the module's
    ``__path__`` does not already contain an equivalent one.  See
    ``pkg_resources.file_ns_handler`` for an example.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
    # NOTE(review): the 'return None' under the guard above, the
    # 'if loader is None: return None' after the lookup, and the
    # 'if module is None:' header for the module-creation branch are all
    # elided in this excerpt.
    loader = importer.find_module(packageName)
    module = sys.modules.get(packageName)
    module = sys.modules[packageName] = imp.new_module(packageName)
    module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # preserve __path__ across load_module, which may replace it
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    # NOTE(review): the imp.acquire_lock()/try/finally scaffolding, the
    # early 'return' for already-declared packages, and the try: around the
    # parent __path__ lookup are elided in this excerpt.
    if packageName in _namespace_packages:
    path, parent = sys.path, None
    if '.' in packageName:
        # recursively declare every ancestor package as a namespace too
        parent = '.'.join(packageName.split('.')[:-1])
        declare_namespace(parent)
        path = sys.modules[parent].__path__
        except AttributeError:
            raise TypeError("Not a package:", parent)

    # Track what packages are namespaces, so when new path items are added,
    # they can be updated
    _namespace_packages.setdefault(parent,[]).append(packageName)
    _namespace_packages.setdefault(packageName,[])

    for path_item in path:
        # Ensure all the parent's path items are reflected in the child,
        _handle_ns(packageName, path_item)
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # NOTE(review): the imp.acquire_lock()/try/finally wrapper around this
    # loop is elided in this excerpt.
    for package in _namespace_packages.get(parent,()):
        subpath = _handle_ns(package, path_item)
        # recurse so sub-namespace-packages pick the new path item up too
        if subpath: fixup_namespace_packages(subpath,package)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer

    NOTE(review): the loop's ``break``/``else`` and the final
    ``return subpath`` were elided in this excerpt; restored here so the
    handler actually reports new subpaths.
    """
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item) == normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
# Filesystem and zipfile importers both map packages to real subdirectories.
register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1845 def null_ns_handler(importer, path_item, packageName, module):
1848 register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Return `filename` in canonical form for path comparisons."""
    # resolve symlinks first, then normalize case (for case-folding OSes)
    real = os.path.realpath(filename)
    return os.path.normcase(real)
def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path().

    The mutable default-arg dict is the (intentional) process-wide cache.
    NOTE(review): the try/except scaffolding and final return were elided
    in this excerpt; restored here.
    """
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
1862 def _set_parent_ns(packageName):
1863 parts = packageName.split('.')
1866 parent = '.'.join(parts)
1867 setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence

    NOTE(review): the strip/yield lines and the else-branch header were
    elided in this excerpt; restored here.  (py2-only: ``basestring``.)
    """
    if isinstance(strs, basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'):     # skip blank lines/comments
                yield s
    else:
        # any other iterable: flatten recursively
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# --- Requirement/entry-point parsing primitives (bound .match methods) ---
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
    # NOTE(review): the leading r"(?P<name>[^-]+)" line and the closing ')'
    # of this re.compile() call are elided in this excerpt.
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE

# version-string tokenizer and token canonicalization used by parse_version;
# 'dev' maps to '@' so it sorts before every other pre-release tag
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
    """Yield comparable chunks of version string `s` (see parse_version).

    NOTE(review): the ``continue``/``else``/``yield '*'+part`` lines were
    elided in this excerpt; restored here.
    """
    for part in component_re.split(s):
        part = replace(part, part)
        if not part or part == '.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8)    # pad for numeric comparison
        else:
            # alpha parts get a '*' prefix so they sort before zfilled numbers
            yield '*' + part

    yield '*final'  # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.

    NOTE(review): the ``parts = []`` initializer, inner pops, append, and
    final return were elided in this excerpt; restored here.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part < '*final':   # remove '-' before a prerelease tag
                while parts and parts[-1] == '*final-':
                    parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == '00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
class EntryPoint(object):
    """Object representing an advertised importable object"""

    # NOTE(review): assorted lines of this class ('self.name = name',
    # 'self.dist = dist', the __str__/__repr__ headers, try: lines, returns,
    # and several guards) are elided in this excerpt; gaps are flagged below.

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        # NOTE(review): 'self.name = name' is elided here.
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # validate/normalize the extras by parsing a dummy requirement
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        # NOTE(review): 'self.dist = dist' is elided here.

    # NOTE(review): 'def __str__(self):' and its 'if self.attrs:' /
    # 'if self.extras:' guards and final 'return s' are elided below.
        s = "%s = %s" % (self.name, self.module_name)
        s += ':' + '.'.join(self.attrs)
        s += ' [%s]' % ','.join(self.extras)

    # NOTE(review): the 'def __repr__(self):' header is elided here.
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        # import the module, then walk the dotted attribute chain
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            # NOTE(review): the 'try:' for this getattr and the final
            # 'return entry' are elided here.
            entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # resolve and add all needed distributions to the working set
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        # NOTE(review): the try/except ValueError scaffolding of this parser
        # (including the defaults for extras/attrs and the error raise that
        # wraps the format message below) is elided in this excerpt.
        name,value = src.split('=',1)
        value,extras = value.split('[',1)
        req = Requirement.parse("x["+extras)
        if req.specs: raise ValueError
        value,attrs = value.split(':',1)
        if not MODULE(attrs.rstrip()):
        attrs = attrs.rstrip().split('.')
            "EntryPoint must be in 'name=module:attrs [extras]' format",
        return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        # NOTE(review): the result-dict setup, duplicate check's 'if'
        # header, the dict insertion, and the return are elided below.
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            raise ValueError("Duplicate entry point", group, ep.name)

    parse_group = classmethod(parse_group)

    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
        # NOTE(review): the items()/split_sections branch pairing, the
        # 'if group is None:' guards, and the final return are elided below.
            data = split_sections(data)
        for group, lines in data:
            raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)

    parse_map = classmethod(parse_map)
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""

    # NOTE(review): many single lines of this class (def headers, try:
    # lines, returns, closing parens, guards) are elided in this excerpt;
    # the gaps are flagged inline below.

    # NOTE(review): the 'def __init__(self,' header line is elided here.
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # metadata provider defaults to the shared do-nothing provider
        self._provider = metadata or empty_provider

    def from_location(cls,location,basename,metadata=None,**kw):
        # parse name/version/pyver/platform out of an egg(-info) basename
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            # NOTE(review): the 'if match:' guard is elided here.
            project_name, version, py_version, platform = match.group(
                'name','ver','pyver','plat'
        # NOTE(review): the 'return cls(' call header is elided here.
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
    from_location = classmethod(from_location)

    # NOTE(review): the 'hashcmp' property header is elided; the tuple below
    # is the sort key used by __cmp__/__hash__ and _sort_dists().
            getattr(self,'parsed_version',()), self.precedence, self.key,
            -len(self.location or ''), self.location, self.py_version,

    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    # NOTE(review): the 'def key(self):' header and 'try: return self._key'
    # are elided; `key` caches the lowercased project name.
        except AttributeError:
            self._key = key = self.project_name.lower()

    def parsed_version(self):
        # cached parse_version() of self.version
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)

    parsed_version = property(parsed_version)

    # NOTE(review): the 'def version(self):' header and 'try:' are elided.
            return self._version
        except AttributeError:
            # fall back to the 'Version:' header inside PKG-INFO
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            # NOTE(review): the ValueError raise wrapping this message is
            # elided here.
                "Missing 'Version:' header and/or PKG-INFO file", self

    version = property(version)

    # NOTE(review): the '_dep_map' def header and 'try:' are elided; the
    # map is {extra_name_or_None: [Requirement, ...]} built from metadata.
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            # NOTE(review): 'return dm' is elided here.

    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        # NOTE(review): the dm/deps setup, the loop over `extras`, the
        # KeyError handler raising UnknownExtra with the message below, and
        # the final return are elided in this excerpt.
        deps.extend(dm.get(None,()))
        deps.extend(dm[safe_extra(ext)])
                "%s has no such extra feature %r" % (self, ext)

    def _get_metadata(self,name):
        # generator: metadata lines, or nothing if the file is absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                # NOTE(review): 'yield line' is elided here.

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            # only fix up namespace packages for the real import path
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)

    # NOTE(review): the 'def egg_name(self):' header is elided here.
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        # NOTE(review): the closing paren, 'if self.platform:' guard, and
        # the final return are elided around the next line.
            filename += '-'+self.platform

    # NOTE(review): the '__repr__' def header and its no-location branch
    # are elided here.
            return "%s (%s)" % (self,self.location)

    # NOTE(review): the 'def __str__(self):' header is elided here.
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    def from_filename(cls,filename,metadata=None, **kw):
        # NOTE(review): the '**kw' line and closing paren of this call are
        # elided here.
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        # NOTE(review): the 'if ep is None:' guard and the final
        # 'return ep.load()' are elided here.
            raise ImportError("Entry point %r not found" % ((group,name),))

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        # NOTE(review): the 'try:' before this lookup, the closing paren of
        # parse_map, and the final 'return ep_map' are elided here.
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
        if group is not None:
            return ep_map.get(group,{})

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        # NOTE(review): the early 'if not loc: return' guard is elided here.
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath= [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            # NOTE(review): the 'if item==nloc: break' arm, the insertions
            # into `path`, the for/else tail appending loc, and the 'return'
            # statements of this method are elided in this excerpt.
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                npath.insert(p, nloc)
            if path is sys.path:
                self.check_version_conflict()

        # p is the spot where we found or inserted loc; now remove duplicates
        # NOTE(review): the while/try/except ValueError loop around the
        # following lines is elided in this excerpt.
                np = npath.index(nloc, p+1)
                del npath[np], path[np]

    def check_version_conflict(self):
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            # NOTE(review): the '): continue' tail of this condition and the
            # 'continue'/'issue_warning(' lines around the message below are
            # elided in this excerpt.
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),

    def has_version(self):
        # NOTE(review): the try/except ValueError around 'self.version' and
        # the 'return False'/'return True' lines are elided; returns False
        # when the version cannot be determined.
            issue_warning("Unbuilt egg for "+repr(self))

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        # NOTE(review): the 'for attr in (' header and the "'precedence'):"
        # tail of the attribute tuple are elided around the next line.
            'project_name', 'version', 'py_version', 'platform', 'location',
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    # NOTE(review): the 'def extras(self):' header is elided here; lists
    # the named extras that have declared dependencies.
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
def issue_warning(*args,**kw):
    # Emit a warning attributed to the first caller *outside* pkg_resources.
    # NOTE(review): the initialization of `level`/`g` and the try/except
    # (with the 'level += 1' loop body) are elided in this excerpt.
    # find the first stack frame that is *not* code in
    # the pkg_resources module, to use for the warning
    while sys._getframe(level).f_globals is g:
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Scan a comma-separated list of ITEM matches starting at position
        # p, following backslash continuations onto later lines; returns
        # (line, p, items).  NOTE(review): the 'items = []' initializer,
        # try:, raise wrappers, and several 'if not match:' guards of this
        # scanner are elided in this excerpt.
        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                line = lines.next(); p = 0
                except StopIteration:
                    "\\ must not appear on the last nonblank line"
            match = ITEM(line,p)
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
            items.append(match.group(*groups))
            match = COMMA(line,p)
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                "Expected ',' or end-of-list in",line,"at",line[p:]
        match = TERMINATOR(line,p)
        if match: p = match.end()   # skip the terminator, if any
        return line, p, items

    # NOTE(review): the 'for line in lines:' driver-loop header, the
    # 'if not match:' guard, the extras default, and 'p = match.end()' are
    # elided below.
        match = DISTRO(line)
        raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        match = OBRACKET(line,p)
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
2465 def _sort_dists(dists):
# Order `dists` in place from newest to oldest using each distribution's
# `hashcmp` sort key (decorate-sort-undecorate with a reversed writeback).
2466 tmp = [(dist.hashcmp,dist) for dist in dists]
# NOTE(review): a `tmp.sort()` step appears to be elided between these
# two lines in this view -- without it the reversed assignment below
# would not actually order the list.  TODO confirm against full source.
2468 dists[::-1] = [d for hc,d in tmp]
2487 def __init__(self, project_name, specs, extras):
2488 """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
# Keep the caller's raw name plus the normalized safe name and its
# lower-cased lookup key.
2489 self.unsafe_name, project_name = project_name, safe_name(project_name)
2490 self.project_name, self.key = project_name, project_name.lower()
# Each (op, version) spec becomes (parsed_version, state-machine row for
# op, op, raw version) so __contains__ can evaluate rules quickly.
2491 index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
2493 self.specs = [(op,ver) for parsed,trans,op,ver in index]
2494 self.index, self.extras = index, tuple(map(safe_extra,extras))
# The `self.hashCmp = (` assignment header is elided in this view; the
# lines below are its components (key, parsed specs, extras), used by
# __eq__ and the cached hash.
2496 self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2497 frozenset(self.extras)
2499 self.__hash = hash(self.hashCmp)
2502 specs = ','.join([''.join(s) for s in self.specs])
2503 extras = ','.join(self.extras)
2504 if extras: extras = '[%s]' % extras
2505 return '%s%s%s' % (self.project_name, extras, specs)
def __eq__(self, other):
    """Requirements compare equal when their precomputed hashCmp keys match."""
    if not isinstance(other, Requirement):
        return False
    return self.hashCmp == other.hashCmp
2510 def __contains__(self,item):
# Membership test: ``dist_or_version in requirement``.  Accepts a
# Distribution (whose key must match ours) or a version string.
2511 if isinstance(item,Distribution):
2512 if item.key != self.key: return False
2513 if self.index: item = item.parsed_version # only get if we need it
2514 elif isinstance(item,basestring):
2515 item = parse_version(item)
# Walk the precompiled (parsed_version, transition-row, op, raw) rules;
# each row maps cmp(item, parsed) in {-1, 0, 1} to an action character.
# (`last` is initialized to None on a line elided from this view, and the
# final `return last` also appears to be elided -- TODO confirm.)
2517 for parsed,trans,op,ver in self.index:
2518 action = trans[cmp(item,parsed)]
2519 if action=='F': return False
2520 elif action=='T': return True
2521 elif action=='+': last = True
2522 elif action=='-' or last is None: last = False
2523 if last is None: last = True # no rules encountered
def __repr__(self):
    """Round-trippable representation built from the str() form."""
    spec_text = str(self)
    return "Requirement.parse(%r)" % spec_text
2534 reqs = list(parse_requirements(s))
2538 raise ValueError("Expected only one requirement", s)
2539 raise ValueError("No requirements found", s)
2541 parse = staticmethod(parse)
2555 """Get an mro for a type or classic class"""
2556 if not isinstance(cls,type):
2557 class cls(cls,object): pass
2558 return cls.__mro__[1:]
2561 def _find_adapter(registry, ob):
2562 """Return an adapter factory for `ob` from `registry`"""
# Walk the MRO of ob's class (classic classes are handled by _get_mro).
2563 for t in _get_mro(getattr(ob, '__class__', type(ob))):
# (Loop body elided in this view: presumably returns registry[t] for the
# first MRO entry present in the registry -- TODO confirm.)
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Creates any missing intermediate directories (like ``mkdir -p``).
    If `path` has no directory component (a bare relative filename),
    this is a no-op: previously ``os.makedirs('')`` would raise OSError.
    """
    dirname = os.path.dirname(path)
    # Guard against the empty dirname of a bare filename; os.makedirs('')
    # raises OSError even though there is nothing to create.
    if dirname and not os.path.isdir(dirname):
        os.makedirs(dirname)
2574 def split_sections(s):
2575 """Split a string or iterable thereof into (section,content) pairs
2577 Each ``section`` is a stripped version of the section header ("[section]")
2578 and each ``content`` is a list of stripped lines excluding blank lines and
2579 comment-only lines. If there are any such lines before the first section
2580 header, they're returned in a first ``section`` of ``None``.
# (Initialization of `section` and `content` is elided in this view.)
2584 for line in yield_lines(s):
2585 if line.startswith("["):
# A header must be fully bracketed; emit the previous segment first,
# then start collecting under the new section name.
2586 if line.endswith("]"):
2587 if section or content:
2588 yield section, content
2589 section = line[1:-1].strip()
# ("[..." without a closing "]" reaches the error below via an elided else.)
2592 raise ValueError("Invalid section heading", line)
2594 content.append(line)
2596 # wrap up last segment
2597 yield section, content
2599 def _mkstemp(*args,**kw):
# Sandbox-bypassing wrapper around tempfile.mkstemp: temporarily swaps
# the raw `os_open` (captured at import time, see module top) into
# os.open for the duration of the call.  (The `old_open = os.open` line
# and the try/finally scaffolding are elided in this view.)
2600 from tempfile import mkstemp
2603 os.open = os_open # temporarily bypass sandboxing
2604 return mkstemp(*args,**kw)
2606 os.open = old_open # and then put it back
2609 # Set up global resource manager (deliberately not state-saved)
2610 _manager = ResourceManager()
# Re-export the manager's public methods as module-level functions.
# (The enclosing `_initialize(g)` definition header is elided in this view.)
2612 for name in dir(_manager):
2613 if not name.startswith('_'):
2614 g[name] = getattr(_manager, name)
2615 _initialize(globals())
2617 # Prepare the master working set and make the ``require()`` API available
2619 _declare_state('object', working_set = WorkingSet())
2621 # Does the main program list any requirements?
# (A try/except ImportError around this import is elided in this view.)
2622 from __main__ import __requires__
2624 pass # No: just use the default working set based on sys.path
2626 # Yes: ensure the requirements are met, by prefixing sys.path if necessary
2628 working_set.require(__requires__)
2629 except (VersionConflict, DistributionNotFound): # try it without defaults already on sys.path
2630 working_set = WorkingSet([]) # by starting with an empty path
2632 for dist in working_set.resolve(
2633 parse_requirements(__requires__), Environment()
2635 working_set.add(dist)
2636 except DistributionNotFound:
# (Error handling for an unsatisfiable __requires__ is elided here -- TODO confirm.)
2638 for entry in sys.path: # add any missing entries from sys.path
2639 if entry not in working_set.entries:
2640 working_set.add_entry(entry)
2641 sys.path[:] = working_set.entries # then copy back to sys.path
# Re-export the master working set's public API at module level.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate every distribution already on sys.path, and make sure any
# distribution added to the working set later (e.g. via ``require()``)
# gets activated as well.
add_activation_listener(lambda dist: dist.activate())
# Rebuild the entry list so it matches sys.path order exactly.
working_set.entries = []
for _path_entry in sys.path:
    working_set.add_entry(_path_entry)