1 """Package resource API
4 A resource is a logical file contained within a package, or a logical
5 subdirectory thereof. The package resource API expects resource names
6 to have their path parts separated with ``/``, *not* whatever the local
7 path separator is. Do not use os.path operations to manipulate resource
8 names being passed into the API.
10 The package resource API is designed to work with normal filesystem packages,
11 .egg files, and unpacked .egg files. It can also work in a limited way with
12 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 import sys, os, zipimport, time, re, imp, new, pkgutil # XXX
21 from sets import ImmutableSet as frozenset
23 # capture these to bypass sandboxing
24 from os import utime, rename, unlink, mkdir
25 from os import open as os_open
26 from os.path import isdir, split
def _bypass_ensure_directory(name, mode=0777):
    # Sandbox-bypassing version of ensure_directory()
    # Recursively create the parent directories of `name`, using the os
    # functions captured at import time so sandbox replacements are skipped.
    dirname, filename = split(name)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        # NOTE(review): a ``mkdir(dirname, mode)`` call appears to be elided
        # from this view; without it `mode` is unused — confirm upstream.
def _declare_state(vartype, **kw):
    # Record module-level state variables (name -> kind string) so the
    # module's __getstate__/__setstate__ helpers can snapshot/restore them.
    # NOTE(review): the lines publishing the values into globals() appear
    # to be elided from this view.
    for name, val in kw.iteritems():  # Python 2 dict iteration
        _state_vars[name] = vartype
53 for k, v in _state_vars.iteritems():
54 state[k] = g['_sget_'+v](g[k])
def __setstate__(state):
    # Restore module-level state captured by __getstate__: each saved value
    # is written back through the ``_sset_<kind>`` helper registered for it.
    # NOTE(review): the ``g = globals()`` binding is elided from this view.
    for k, v in state.iteritems():
        g['_sset_'+_state_vars[k]](k, g[k], v)
66 def _sset_dict(key, ob, state):
70 def _sget_object(val):
71 return val.__getstate__()
def _sset_object(key, ob, state):
    """State-setter for 'object'-kind state vars: delegate to __setstate__()."""
    ob.__setstate__(state)
76 _sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform(); m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        # On a running Mac, substitute the *current* OS version into the tag.
        # NOTE(review): a surrounding try/except and the final
        # ``return plat`` appear to be elided from this view.
        plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
125 # Basic resource access and distribution/entry point discovery
126 'require', 'run_script', 'get_provider', 'get_distribution',
127 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
128 'resource_string', 'resource_stream', 'resource_filename',
129 'resource_listdir', 'resource_exists', 'resource_isdir',
131 # Environmental control
132 'declare_namespace', 'working_set', 'add_activation_listener',
133 'find_distributions', 'set_extraction_path', 'cleanup_resources',
136 # Primary implementation classes
137 'Environment', 'WorkingSet', 'ResourceManager',
138 'Distribution', 'Requirement', 'EntryPoint',
141 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
144 # Parsing functions and string utilities
145 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
146 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
147 'safe_extra', 'to_filename',
149 # filesystem utilities
150 'ensure_directory', 'normalize_path',
152 # Distribution "precedence" constants
153 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
155 # "Provider" interfaces, implementations, and registration/lookup APIs
156 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
157 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
158 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
159 'register_finder', 'register_namespace_handler', 'register_loader_type',
160 'fixup_namespace_packages', 'get_importer',
162 # Deprecated/backward compatibility only
163 'run_main', 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    # NOTE(review): the ``def __repr__(self):`` line is elided in this view;
    # the body below renders the class name plus the exception args.
        return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
179 _provider_factories = {}
180 PY_MAJOR = sys.version[:3]
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        # Requirements resolve through the global working set, activating
        # the distribution on demand via require() when not already active.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # NOTE(review): a try/except KeyError that imports the module only when
    # it is missing from sys.modules appears to be elided from this view.
    module = sys.modules[moduleOrReq]
    __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Memoized Mac OS version as a list of components; the mutable default
    # argument is used deliberately as a per-process cache.
    # NOTE(review): the ``if not _cache:`` guard and the return of the cached
    # value appear to be elided from this view.
    from platform import mac_ver
    _cache.append(mac_ver()[0].split('.'))
214 def _macosx_arch(machine):
215 return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Older Pythons report 'darwin-...' here; rebuild a macosx tag from
        # the running OS version and machine type.
        # NOTE(review): a try/except and a trailing ``return plat`` appear
        # to be elided from this view.
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
            _macosx_arch(machine))
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
# Platform-tag patterns for Mac eggs: the modern 'macosx-X.Y-arch' form and
# the legacy pre-setuptools-0.6 'darwin-X.Y.Z-arch' form.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True # easy case

    # Mac OS X special cases
    # NOTE(review): several guard lines (e.g. the ``if reqMac:`` /
    # ``if not provMac:`` branches and their return statements) appear to be
    # elided from this view; the remaining lines are kept as-is.
    reqMac = macosVersionString.match(required)
    provMac = macosVersionString.match(provided)

    # is this a Mac package?
    # this is backwards compatibility for packages built before
    # setuptools 0.6. All packages built after this point will
    # use the new macosx designation.
    provDarwin = darwinVersionString.match(provided)
    dversion = int(provDarwin.group(1))
    macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    if dversion == 7 and macosversion >= "10.3" or \
        dversion == 8 and macosversion >= "10.4":
        #warnings.warn("Mac eggs should be rebuilt to "
        #    "use the macosx designation instead of darwin.",
        #    category=DeprecationWarning)
    return False # egg isn't macosx or legacy darwin

    # are they the same major version and machine type?
    if provMac.group(1) != reqMac.group(1) or \
        provMac.group(3) != reqMac.group(3):

    # is the required OS major update >= the provided one?
    if int(provMac.group(2)) > int(reqMac.group(2)):

    # XXX Linux and other platforms' special cases should go here
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    ns = sys._getframe(1).f_globals  # caller's globals become the script namespace
    name = ns['__name__']
    # NOTE(review): an ``ns.clear()`` between saving and restoring __name__
    # appears to be elided from this view.
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script # backward compatibility
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string

    `dist` may be a project-name string, a ``Requirement``, or a
    ``Distribution``; strings and Requirements are resolved to the currently
    active distribution. Raises ``TypeError`` for any other argument type.
    """
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    # Fix: the resolved Distribution must be returned — every caller
    # (load_entry_point, get_entry_map, get_entry_info) uses the return value.
    return dist
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
class IMetadataProvider:
    # Documentation-only interface describing metadata access; methods are
    # declared without ``self`` in the historical interface style and have
    # no bodies beyond their docstrings.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # Documentation-only interface; see NullProvider and its subclasses for
    # concrete implementations.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""
    # NOTE(review): this view elides many lines (attribute initialization,
    # some def lines, guard branches and returns); the surviving lines are
    # kept verbatim with review notes where the elision is visible.

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # NOTE(review): initialization of .entries/.entry_keys/.by_key/
        # .callbacks and the sys.path default appear to be elided here.
        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        match it.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    # NOTE(review): the ``def find(self, req):`` line is elided in this view.
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req) # XXX add more info

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        # NOTE(review): the surrounding ``for dist in self:`` loop and the
        # yield statements are elided in this view.
            entries = dist.get_entry_map(group)
            for ep in entries.values():
            elif name in entries:

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        ns = sys._getframe(1).f_globals  # run in the caller's namespace
        name = ns['__name__']
        # NOTE(review): an ``ns.clear()`` appears to be elided here.
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    # NOTE(review): the ``def __iter__(self):`` line is elided in this view.
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        for item in self.entries:
            for key in self.entry_keys[item]:
                yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        # NOTE(review): the ``if insert:`` and ``if entry is None:`` guards
        # around the next two lines appear to be elided from this view.
        dist.insert_on(self.entries, entry)
        entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return # ignore hidden distros
        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution``.
        """
        # NOTE(review): the while-loop header, several guards, and the
        # ``to_activate`` initialization appear to be elided from this view.
        requirements = list(requirements)[::-1] # set up the stack
        processed = {} # set of processed requirements
        best = {} # key -> dist
            req = requirements.pop(0) # process dependencies breadth-first
            # Ignore cyclic or redundant dependencies
            dist = best.get(req.key)
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                    env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                        raise DistributionNotFound(req) # XXX put more info here
                to_activate.append(dist)
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist,req) # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True
        return to_activate # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
        """Find all activatable distributions in `plugin_env`

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            map(working_set.add, distributions) # add plugins+libs to sys.path
            print "Couldn't load", errors # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        # NOTE(review): the accumulator initialization, the full_env branch
        # headers, and the try: around resolve() appear to be elided here.
        plugin_projects = list(plugin_env)
        plugin_projects.sort() # scan project names in alphabetic order
            env = Environment(self.entries)
            env = full_env + plugin_env
        shadow_set = self.__class__([])
        map(shadow_set.add, self) # put all our entries in shadow_set

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                    resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError,v:
                    error_info[dist] = v # save error info
                        continue # try the next older version of project
                        break # give up on this project, keep going
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))
                    # success, no need to try any more versions of this project

        distributions = list(distributions)
        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        # NOTE(review): the activation loop and ``return needed`` appear
        # to be elided from this view.
        needed = self.resolve(parse_requirements(requirements))

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        # NOTE(review): the early ``return`` for already-registered callbacks
        # and the replay loop over existing dists appear to be elided here.
        if callback in self.callbacks:
        self.callbacks.append(callback)

    def _added_new(self, dist):
        # Notify every subscriber about a newly activated distribution.
        for callback in self.callbacks:

    def __getstate__(self):
        # Snapshot the four mutable attributes as copies (pickle support).
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),

    def __setstate__(self, (entries, keys, by_key, callbacks)):
        # Python 2 tuple-parameter unpacking; restores the snapshot taken
        # by __getstate__, copying each container defensively.
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""
    # NOTE(review): this view elides several lines (attribute setup, some
    # guards and returns); surviving lines are kept verbatim with notes.

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE(review): defaults above are evaluated once at class-definition
        # time; _distmap/_cache and self.python setup appear elided here.
        self.platform = platform
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                # NOTE(review): the self.add(dist) call appears elided here.

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        # NOTE(review): the try/except KeyError around the cache lookup and
        # the sorting of the result list appear to be elided from this view.
        return self._cache[project_name]
        project_name = project_name.lower()
        if project_name not in self._distmap:
        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
        return self._cache[project_name]

    # NOTE(review): the ``def add(self, dist):`` line is elided in this view.
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                # Keep any cached, sorted list in sync with the new entry.
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        # NOTE(review): the ``if dist is not None: return dist`` short-circuit
        # and the in-loop match test appear to be elided from this view.
        dist = working_set.find(req)
        for dist in self[req.key]:
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    # NOTE(review): the ``def __iter__(self):`` line is elided in this view.
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        # NOTE(review): the add() calls inside both branches and the trailing
        # ``return self`` appear to be elided from this view.
        if isinstance(other,Distribution):
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
            raise TypeError("Can't add %r to environment" % (other,))

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Build an unrestricted (platform=None, python=None) environment and
        # fold both operands into it.
        # NOTE(review): the ``new += env`` and ``return new`` lines appear
        # to be elided from this view.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
841 AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

        The resource manager that raised this exception

        The base directory for resource extraction

        The exception instance that caused extraction to fail
    """
class ResourceManager:
    """Manage resource extraction and packages"""
    # NOTE(review): this view elides several lines (try/except pairs,
    # closing parentheses of multi-line calls, some returns); surviving
    # lines are kept verbatim with notes where the elision is visible.

    # Class-level default; set_extraction_path() overrides per-instance.
    extraction_path = None

    # NOTE(review): the ``def __init__(self):`` line is elided in this view.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        # NOTE(review): the argument lines of this call are elided.
        return get_provider(package_or_requirement).resource_isdir(

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        # NOTE(review): the argument lines of this call are elided.
        return get_provider(package_or_requirement).get_resource_filename(

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        # NOTE(review): the argument lines of this call are elided.
        return get_provider(package_or_requirement).get_resource_stream(

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        # NOTE(review): the argument lines of this call are elided.
        return get_provider(package_or_requirement).get_resource_string(

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        # NOTE(review): the argument lines of this call are elided.
        return get_provider(package_or_requirement).resource_listdir(

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        # Wrap the low-level failure in a user-oriented ExtractionError that
        # carries the cache path and original exception for inspection.
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
The Python egg cache directory is currently set to:
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        err.cache_path = cache_path
        err.original_error = old_exc

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        # NOTE(review): the try/except around directory creation and the
        # trailing ``return target_path`` appear to be elided from this view.
            _bypass_ensure_directory(target_path)
            self.extraction_error()
        self.cached_files[target_path] = 1

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            # NOTE(review): the ``raise ValueError(`` line wrapping this
            # message appears to be elided from this view.
            "Can't change extraction path, files already extracted"

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX body elided in this view
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    # NOTE(review): the try/except around the environment lookup, the
    # os.name branching, and the ``app_homes = [`` list header appear to be
    # elided from this view; surviving lines are kept verbatim.
    return os.environ['PYTHON_EGG_CACHE']
    return os.path.expanduser('~/.python-eggs')
    app_data = 'Application Data' # XXX this may be locale-specific!
        (('APPDATA',), None), # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('WINDIR',), app_data), # 95/98/ME
    # Try each candidate env-var combination in priority order.
    for keys, subdir in app_homes:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            dirname = os.path.join(dirname,subdir)
            return os.path.join(dirname, 'Python-Eggs')
    # (sic: "enviroment" typo is part of the original runtime message)
    "Please set the PYTHON_EGG_CACHE enviroment variable"
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Fix: the docstring was unterminated in the source, which swallowed the
    # function body; closed it so the substitution actually executes.
    return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    # Fix: the docstring was unterminated in the source, which swallowed the
    # function body; closed it so both transformations actually execute.
    version = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    # Fix: the docstring was unterminated in the source, which swallowed the
    # function body; closed it. (Note the pattern preserves '.' characters.)
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    # Fix: the docstring was unterminated in the source, which swallowed the
    # function body; closed it so the replacement actually executes.
    return name.replace('-','_')
1150 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    def __init__(self, module):
        # Capture the module's PEP 302 loader (if any) and the directory
        # containing the module's file as the root for resource lookups.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    def get_resource_filename(self, manager, resource_name):
        # Resolve the resource name relative to the module's directory.
        return self._fn(self.module_path, resource_name)
    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw resource contents in an in-memory file object.
        return StringIO(self.get_resource_string(manager, resource_name))
    def get_resource_string(self, manager, resource_name):
        # Read the resource's contents via the subclass-provided _get hook.
        return self._get(self._fn(self.module_path, resource_name))
    def has_resource(self, resource_name):
        # Existence test via the loader-specific _has() hook.
        return self._has(self._fn(self.module_path, resource_name))
    def has_metadata(self, name):
        # False when no egg-info location is known at all.
        return self.egg_info and self._has(self._fn(self.egg_info,name))
1175 def get_metadata(self, name):
1176 if not self.egg_info:
1178 return self._get(self._fn(self.egg_info,name))
    def get_metadata_lines(self, name):
        # Convenience: the metadata file as non-blank, non-comment lines.
        return yield_lines(self.get_metadata(name))
    def resource_isdir(self,resource_name):
        # Directory test via the loader-specific _isdir() hook.
        return self._isdir(self._fn(self.module_path, resource_name))
    def metadata_isdir(self,name):
        # False when no egg-info location is known.
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
    def resource_listdir(self,resource_name):
        # Listing via the loader-specific _listdir() hook.
        return self._listdir(self._fn(self.module_path,resource_name))
    def metadata_listdir(self,name):
        # NOTE(review): unlike the other metadata_* methods this does not
        # guard against a missing egg_info -- confirm against the full source.
        return self._listdir(self._fn(self.egg_info,name))
    def run_script(self,script_name,namespace):
        """Execute the named metadata script (from 'scripts/') in `namespace`."""
        # NOTE(review): an else-branch framing the compile/exec path appears
        # to be missing from this copy -- confirm against the full source.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize DOS/Mac line endings so compile() accepts the text.
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists on disk: run it directly (Python 2 execfile).
            execfile(script_filename, namespace, namespace)
            # Seed linecache so tracebacks can show the script source even
            # though it did not come from a real file.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            script_code = compile(script_text,script_filename,'exec')
            exec script_code in namespace, namespace
    def _has(self, path):
        # Subclasses must override; NullProvider knows no way to test paths.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _isdir(self, path):
        # Subclasses must override; see _has() above.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _listdir(self, path):
        # Subclasses must override; see _has() above.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
    def _fn(self, base, resource_name):
        # Map a '/'-separated resource name onto a native filesystem path.
        return os.path.join(base, *resource_name.split('/'))
    def _get(self, path):
        # PEP 302 optional extension: loaders exposing get_data() can hand
        # back raw file contents; anything else is unsupported here.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
# Fallback: any loader type without a more specific registration.
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""
    def __init__(self,module):
        NullProvider.__init__(self,module)
        self._setup_prefix()
    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        if path.lower().endswith('.egg'):
            # Found the enclosing .egg: record its name, root and EGG-INFO.
            self.egg_name = os.path.basename(path)
            self.egg_info = os.path.join(path, 'EGG-INFO')
            self.egg_root = path
        # NOTE(review): this walks up one path component; presumably it
        # repeats until an .egg is found -- the loop framing appears to be
        # missing from this copy, confirm against the full source.
        path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""
    def _has(self, path):
        return os.path.exists(path)
    def _isdir(self,path):
        return os.path.isdir(path)
    def _listdir(self,path):
        return os.listdir(path)
    def get_resource_stream(self, manager, resource_name):
        # Binary mode so callers get the bytes unchanged on every platform.
        return open(self._fn(self.module_path, resource_name), 'rb')
    def _get(self, path):
        stream = open(path, 'rb')
        # NOTE(review): the stream is not explicitly closed here; a
        # try/finally may be missing from this copy -- confirm.
        return stream.read()
# Plain filesystem modules have a None __loader__, hence type(None).
register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""
    # Stub out the loader hooks: nothing exists, reads are empty.
    _isdir = _has = lambda self,path: False
    _get = lambda self,path: ''
    _listdir = lambda self,path: []
# Shared do-nothing provider used when a Distribution has no metadata.
empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""
    # NOTE(review): several statements of this class appear to be missing
    # from this copy (initializers, try/except frames, loop headers); the
    # comments below describe only what the visible code shows.
    def __init__(self, module):
        EggProvider.__init__(self,module)
        # Reuse zipimport's already-parsed central directory for the archive.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep
    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources (native libs etc.) are extracted together.
            self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)
    def _extract_resource(self, manager, zip_path):
        if zip_path in self._index():
            # Directory entry: extract each child recursively.
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
            return os.path.dirname(last) # return the extracted directory name
        # Decode the DOS date/time/size fields from the zip directory entry.
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
        (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        timestamp = time.mktime(date_time)
        real_path = manager.get_cache_path(
            self.egg_name, self._parts(zip_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, don't bother extracting
        # Write to a temp name, stamp it, then rename into place so readers
        # never observe a half-written file.
        outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
        os.write(outf, self.loader.get_data(zip_path))
        utime(tmpnam, (timestamp,timestamp))
        manager.postprocess(tmpnam, real_path)
        rename(tmpnam, real_path)
        if os.path.isfile(real_path):
            stat = os.stat(real_path)
            if stat.st_size==size and stat.st_mtime==timestamp:
                # size and stamp match, somebody did it just ahead of
            elif os.name=='nt': # Windows, del old file and retry
                rename(tmpnam, real_path)
        manager.extraction_error() # report a user-friendly error
    def _get_eager_resources(self):
        # Cache the union of native_libs.txt and eager_resources.txt.
        if self.eagers is None:
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        # Lazily build {dir-subpath: [child names]} from the zip listing.
            return self._dirindex
        except AttributeError:
            for path in self.zipinfo:
                parts = path.split(os.sep)
                parent = os.sep.join(parts[:-1])
                ind[parent].append(parts[-1])
                ind[parent] = [parts.pop()]
            self._dirindex = ind
    def _has(self, fspath):
        # A path exists if it is a file in the zip or a known directory.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()
    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()
    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))
    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
# zipimporter-loaded modules (eggs/zips) get zip-aware resource handling.
register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Remember where the PKG-INFO file lives; get_metadata() reads it.
        self.path = path

    def has_metadata(self, name):
        # PKG-INFO is the only metadata this provider knows about.
        return name=='PKG-INFO'

    def get_metadata(self, name):
        if name=='PKG-INFO':
            # 'rU' = universal-newline text mode (this codebase predates io).
            return open(self.path,'rU').read()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self, name):
        # The file's non-blank, non-comment lines.
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the importable location; `egg_info` holds the metadata.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Mirror ZipProvider's setup, but from a zipimporter rather than an
        # already-imported module.
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        if importer.prefix:
            # Importing from a subdirectory inside the zip ("basket" layout):
            # the module path must include that prefix.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
1560 """PEP 302 Importer that wraps Python's "normal" import algorithm"""
1562 def __init__(self, path=None):
    def find_module(self, fullname, path=None):
        """Locate `fullname` via imp.find_module and wrap it in an ImpLoader."""
        # NOTE(review): the path-selection branches appear to be truncated in
        # this copy (both if-bodies are empty) -- confirm against the full
        # source before relying on this logic.
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
        if self.path is None:
        file, filename, etc = imp.find_module(subname, path)
        return ImpLoader(file, filename, etc)
1581 """PEP 302 Loader that wraps Python's "normal" import algorithm"""
1583 def __init__(self, file, filename, etc):
1585 self.filename = filename
    def load_module(self, fullname):
        """Load `fullname` with the standard machinery, closing the file after."""
        # NOTE(review): the try/finally framing and the return of `mod`
        # appear to be missing from this copy -- confirm against the source.
        mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        if self.file: self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery. The returned importer is only cached if it was created by a
    path hook.
    """
    # NOTE(review): the cache-miss try/except framing and hook-failure
    # handling appear to be missing from this copy -- confirm.
    importer = sys.path_importer_cache[path_item]
    for hook in sys.path_hooks:
        importer = hook(path_item)
    sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
        # No hook claimed the path: fall back to the builtin machinery.
        importer = ImpWrapper(path_item)
# Registry mapping importer types to distribution-finder callables;
# declared through the module's pickle-friendly state machinery.
_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Register a distribution finder for a PEP 302 importer type.

    `importer_type` is the type or class of a sys.path item handler;
    `distribution_finder` is a callable taking ``(importer, path_item,
    only)`` and yielding the ``Distribution`` objects found on that path
    item (see ``pkg_resources.find_on_path`` for an example).
    """
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield the distributions reachable through `path_item`.

    Looks up the importer for `path_item`, selects the finder registered
    for that importer's type, and returns the finder's result directly.
    """
    imp_obj = get_importer(path_item)
    chosen_finder = _find_adapter(_distribution_finders, imp_obj)
    return chosen_finder(imp_obj, path_item, only)
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found inside a zipped egg at `path_item`."""
    # NOTE(review): the `if only:` guard and the inner `yield dist` appear
    # to be missing from this copy -- confirm against the full source.
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The zip itself carries distribution metadata.
        yield Distribution.from_filename(path_item, metadata=metadata)
        return # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            # Recurse into eggs stored inside the zip ("basket" layout).
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # Replace this thunk with the real class on first use; prefer the
    # C-accelerated cStringIO, falling back to the pure-Python module.
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()
# Default: unknown importer types contain no distributions.
register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    # NOTE(review): several framing statements (returns, else branches,
    # inner yields) appear to be missing from this copy -- confirm.
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # The path entry is itself an unpacked egg directory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
        # scan for .egg and .egg-info in directory
        for entry in os.listdir(path_item):
            lower = entry.lower()
            if lower.endswith('.egg-info'):
                fullpath = os.path.join(path_item, entry)
                if os.path.isdir(fullpath):
                    # egg-info directory, allow getting metadata
                    metadata = PathMetadata(path_item, fullpath)
                metadata = FileMetadata(fullpath)
                yield Distribution.from_location(
                    path_item,entry,metadata,precedence=DEVELOP_DIST
            elif not only and lower.endswith('.egg'):
                for dist in find_distributions(os.path.join(path_item, entry)):
            elif not only and lower.endswith('.egg-link'):
                # .egg-link files list the real location(s), one per line.
                for line in file(os.path.join(path_item, entry)):
                    if not line.strip(): continue
                    for item in find_distributions(os.path.join(path_item,line.rstrip())):
# Builtin-import path entries are scanned with the filesystem finder.
register_finder(ImpWrapper, find_on_path)
# Namespace-package registries: handler per importer type, plus the set of
# declared namespace packages; pickle-friendly module state.
_declare_state('dict', _namespace_handlers = {})
_declare_state('dict', _namespace_packages = {})
def register_namespace_handler(importer_type, namespace_handler):
    """Register a namespace-package handler for a PEP 302 importer type.

    `importer_type` is the type or class of a sys.path item handler, and
    `namespace_handler` is a callable of the form::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    A handler is consulted only after its importer has accepted the path
    item, and should return a subpath only when the module's __path__ does
    not already contain an equivalent one. See
    ``pkg_resources.file_ns_handler`` for an example.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    # NOTE(review): early-return statements and the `if module is None:`
    # framing appear to be missing from this copy -- confirm.
    importer = get_importer(path_item)
    if importer is None:
    loader = importer.find_module(packageName)
    module = sys.modules.get(packageName)
    # Create the package module on first sight and hook it to its parent.
    module = sys.modules[packageName] = imp.new_module(packageName)
    module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    # Ask the importer-specific handler whether path_item contributes a
    # subpath for this package; if so, graft it into __path__.
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    # NOTE(review): the import-lock acquire/release framing and the early
    # return for already-declared packages appear to be missing -- confirm.
    if packageName in _namespace_packages:
    path, parent = sys.path, None
    if '.' in packageName:
        # Implicitly declare every parent package as a namespace too, and
        # search the parent's __path__ rather than sys.path.
        parent = '.'.join(packageName.split('.')[:-1])
        declare_namespace(parent)
        path = sys.modules[parent].__path__
        except AttributeError:
            raise TypeError("Not a package:", parent)
    # Track what packages are namespaces, so when new path items are added,
    # they can be updated
    _namespace_packages.setdefault(parent,[]).append(packageName)
    _namespace_packages.setdefault(packageName,[])
    for path_item in path:
        # Ensure all the parent's path items are reflected in the child,
        _handle_ns(packageName, path_item)
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # For every namespace package registered under `parent`, graft
    # path_item in; recurse into any subpath that was actually added.
    for package in _namespace_packages.get(parent,()):
        subpath = _handle_ns(package, path_item)
        if subpath: fixup_namespace_packages(subpath,package)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            # An equivalent path is already present; contribute nothing.
            break
    else:
        # Only return the path if it's not already there
        return subpath
# Filesystem and zipfile importers share the same subpath computation.
register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importers that contribute no subpaths."""
    return None
# Default: unknown importer types contribute no namespace subpaths.
register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Return a canonical form of `filename` for equality comparisons.

    Symlinks are resolved and case is folded on case-insensitive
    platforms, so two paths naming the same file compare equal.
    """
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename,_cache={}):
    # NOTE: the mutable default is an intentional per-process memo cache.
    try:
        return _cache[filename]
    except KeyError:
        # Cache miss: normalize once and remember the result.
        _cache[filename] = result = normalize_path(filename)
        return result
def _set_parent_ns(packageName):
    # Bind the child module as an attribute of its parent package, so that
    # ``parent.child`` works after namespace-package fixups. Top-level
    # packages (no dot) have no parent and nothing to do.
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'): # skip blank lines/comments
                yield s
    else:
        # A (possibly nested) iterable of strings: flatten recursively.
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# -- Requirement / egg-name parsing helpers (bound .match methods) --
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# NOTE(review): the first alternative of the EGG_NAME pattern (the
# project-name group) appears to be missing from this copy -- confirm.
EGG_NAME = re.compile(
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
# Version-part canonicalization: split into digit/alpha/./-' runs and map
# known pre-release spellings onto single sortable markers.
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
    # Split `s` into comparable chunks: numbers are zero-padded to 8 digits,
    # known tags are canonicalized via `replace`, dots are dropped.
    # NOTE(review): the `continue` and the non-numeric branch appear to be
    # missing from this copy -- confirm against the full source.
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    # NOTE(review): the accumulator initialization, the non-'*' branch and
    # the final return appear to be missing from this copy -- confirm.
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
class EntryPoint(object):
    """Object representing an advertised importable object"""
    # NOTE(review): several method headers (__str__, __repr__) and framing
    # statements appear to be missing from this copy; the comments below
    # describe only what the visible code shows.
    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping through a Requirement.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        s = "%s = %s" % (self.name, self.module_name)
        s += ':' + '.'.join(self.attrs)
        s += ' [%s]' % ','.join(self.extras)
        return "EntryPoint.parse(%r)" % str(self)
    def load(self, require=True, env=None, installer=None):
        # Import the target module and walk the dotted attribute path.
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
    def require(self, env=None, installer=None):
        # Resolve and activate the distributions this entry point needs.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        name,value = src.split('=',1)
        value,extras = value.split('[',1)
        req = Requirement.parse("x["+extras)
        if req.specs: raise ValueError
        value,attrs = value.split(':',1)
        if not MODULE(attrs.rstrip()):
        attrs = attrs.rstrip().split('.')
        "EntryPoint must be in 'name=module:attrs [extras]' format",
        return cls(name.strip(), value.strip(), attrs, extras, dist)
    parse = classmethod(parse)
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            raise ValueError("Duplicate entry point", group, ep.name)
    parse_group = classmethod(parse_group)
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # Accept either a dict {group: lines} or pre-split section pairs.
        if isinstance(data,dict):
        data = split_sections(data)
        for group, lines in data:
            raise ValueError("Entry points must be listed in groups")
        group = group.strip()
            raise ValueError("Duplicate group name", group)
        maps[group] = cls.parse_group(group, lines, dist)
    parse_map = classmethod(parse_map)
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # NOTE(review): many lines of this class appear to be missing from this
    # copy (method headers, try/except frames, returns); the comments below
    # describe only what the visible code shows.
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider
    def from_location(cls,location,basename,metadata=None,**kw):
        # Build a Distribution from a path entry plus its basename, parsing
        # project/version/pyver/platform out of egg-style filenames.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            project_name, version, py_version, platform = match.group(
                'name','ver','pyver','plat'
        location, metadata, project_name=project_name, version=version,
        py_version=py_version, platform=platform, **kw
    from_location = classmethod(from_location)
        # hashcmp tuple: the sort key used by __cmp__/__hash__ below.
        getattr(self,'parsed_version',()), self.precedence, self.key,
        -len(self.location or ''), self.location, self.py_version,
    def __cmp__(self, other): return cmp(self.hashcmp, other)
    def __hash__(self): return hash(self.hashcmp)
    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)
        except AttributeError:
            self._key = key = self.project_name.lower()
    def parsed_version(self):
            return self._parsed_version
        except AttributeError:
            # Parsed lazily from .version and cached on the instance.
            self._parsed_version = pv = parse_version(self.version)
    parsed_version = property(parsed_version)
        # version: the explicit value if given, else parsed from PKG-INFO.
            return self._version
        except AttributeError:
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
                "Missing 'Version:' header and/or PKG-INFO file", self
    version = property(version)
        # _dep_map: {extra-or-None: [Requirement, ...]} built lazily from
        # requires.txt and the legacy depends.txt.
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
    _dep_map = property(_dep_map)
    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        deps.extend(dm.get(None,()))
            deps.extend(dm[safe_extra(ext)])
                "%s has no such extra feature %r" % (self, ext)
    def _get_metadata(self,name):
        # Generator over metadata lines; empty when the file is absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            # Really importing: wire up any namespace packages we provide.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules: declare_namespace(pkg)
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
            filename += '-'+self.platform
        return "%s (%s)" % (self,self.location)
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)
    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)
    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
    from_filename = classmethod(from_filename)
    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))
    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
            raise ImportError("Entry point %r not found" % ((group,name),))
    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
            ep_map = self._ep_map
        except AttributeError:
            # Parsed lazily from entry_points.txt and cached on the instance.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            if group is not None:
                return ep_map.get(group,{})
    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)
    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath= [(p and _normalize_cached(p) or p) for p in path]
        for p, item in enumerate(npath):
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                npath.insert(p, nloc)
        if path is sys.path:
            self.check_version_conflict()
        # p is the spot where we found or inserted loc; now remove duplicates
            np = npath.index(nloc, p+1)
            del npath[np], path[np]
    def check_version_conflict(self):
        # Warn when a top-level module of this dist was already imported
        # from somewhere other than our own location.
        if self.key=='setuptools':
            return # ignore the inevitable setuptools self-conflicts :(
        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or fn.startswith(loc)):
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
    def has_version(self):
            issue_warning("Unbuilt egg for "+repr(self))
    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
            'project_name', 'version', 'py_version', 'platform', 'location',
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)
        # extras: every named extra that has at least one requirement.
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
def issue_warning(*args,**kw):
    # Emit a warning attributed to the first caller outside this module.
    # NOTE(review): the initialization of `level` and `g` appears to be
    # missing from this copy -- confirm against the full source.
    # find the first stack frame that is *not* code in
    # the pkg_resources module, to use for the warning
    while sys._getframe(level).f_globals is g:
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # NOTE(review): the outer per-line loop and several try/raise frames
    # appear to be missing from this copy -- confirm against the source.
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))
    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Parse a comma-separated list of ITEM tokens starting at line[p],
        # following backslash continuations onto later lines, until
        # TERMINATOR matches; returns the updated (line, p) and the items.
        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                line = lines.next(); p = 0
                except StopIteration:
                    "\\ must not appear on the last nonblank line"
            match = ITEM(line,p)
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
            items.append(match.group(*groups))
            match = COMMA(line,p)
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                "Expected ',' or end-of-list in",line,"at",line[p:]
        match = TERMINATOR(line,p)
        if match: p = match.end() # skip the terminator, if any
        return line, p, items
    match = DISTRO(line)
        raise ValueError("Missing distribution spec", line)
    project_name = match.group(1)
    # Optional "[extra1,extra2]" list, then the version-spec list.
    match = OBRACKET(line,p)
        line, p, extras = scan_list(
            DISTRO, CBRACKET, line, p, (1,), "'extra' name"
    line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
    specs = [(op,safe_version(val)) for op,val in specs]
    yield Requirement(project_name, specs, extras)
2441 def _sort_dists(dists):
2442 tmp = [(dist.hashcmp,dist) for dist in dists]
2444 dists[::-1] = [d for hc,d in tmp]
    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        # Keep both the raw and canonical (safe) project names; ``key`` is the
        # lowercased form used for case-insensitive lookups.
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        # Decorate each (op, version) spec with its parsed version and the
        # comparison state-machine transition row for that operator.
        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
        self.specs = [(op,ver) for parsed,trans,op,ver in index]
        self.index, self.extras = index, tuple(map(safe_extra,extras))
        # NOTE(review): lines appear to be missing here -- the two expressions
        # below look like elements of a ``self.hashCmp = (`` tuple whose
        # header (and an ``index.sort()``) are absent; confirm against the
        # full source.
        self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
        frozenset(self.extras)
        self.__hash = hash(self.hashCmp)
        # NOTE(review): fragment -- the ``def __str__(self):`` header is not
        # visible in this excerpt; confirm against the full source.
        # Render e.g. "Project[extra1,extra2]>=1.2,<2.0".
        specs = ','.join([''.join(s) for s in self.specs])
        extras = ','.join(self.extras)
        if extras: extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, specs)
2483 def __eq__(self,other):
2484 return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
    def __contains__(self,item):
        """True if `item` (a Distribution or version string) satisfies us."""
        if isinstance(item,Distribution):
            # Wrong project entirely: can never match.
            if item.key != self.key: return False
            if self.index: item = item.parsed_version # only get if we need it
        elif isinstance(item,basestring):
            item = parse_version(item)
        # NOTE(review): the ``last = None`` initialization (and likely a
        # trailing ``return last``) are not visible in this excerpt; confirm
        # against the full source.
        # Run the parsed version through each spec's comparison
        # state-machine row: 'F' = definite fail, 'T' = definite pass,
        # '+'/'-' tentatively set the running verdict.
        for parsed,trans,op,ver in self.index:
            action = trans[cmp(item,parsed)]
            if action=='F': return False
            elif action=='T': return True
            elif action=='+': last = True
            elif action=='-' or last is None: last = False
        if last is None: last = True # no rules encountered
2506 def __repr__(self): return "Requirement.parse(%r)" % str(self)
        # NOTE(review): fragment -- the ``def parse(s):`` header and the
        # ``if reqs:`` / length guards that select between the two raises are
        # not visible in this excerpt; confirm against the full source.
        reqs = list(parse_requirements(s))
        # Exactly one requirement is expected from a single-spec string.
        raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)
    parse = staticmethod(parse)
    """Get an mro for a type or classic class"""
    # NOTE(review): fragment -- the ``def _get_mro(cls):`` header is not
    # visible in this excerpt; confirm against the full source.
    if not isinstance(cls,type):
        # Old-style (classic) class: graft ``object`` on so the result gains
        # a __mro__; slot [0] is the grafted wrapper itself, hence the slice.
        class cls(cls,object): pass
    return cls.__mro__[1:]
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO of ob's class (falling back to type(ob) for objects with
    # no __class__) from most to least specific.
    # NOTE(review): the registry lookup and return inside the loop body are
    # not visible in this excerpt; confirm against the full source.
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Uses EAFP rather than check-then-create: the original
    ``isdir`` + ``makedirs`` sequence had a TOCTOU race where a concurrent
    process creating the same directory between the check and the create
    would make ``makedirs`` raise.  Here we attempt the create and only
    re-raise if the directory genuinely does not exist afterwards.
    """
    dirname = os.path.dirname(path)
    try:
        os.makedirs(dirname)
    except OSError:
        # Either the directory already exists (fine), or creation truly
        # failed (permissions, bad path): re-check and propagate real errors.
        if not os.path.isdir(dirname):
            raise
def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    # NOTE(review): the ``section``/``content`` initializations and the two
    # ``else:`` branch headers are not visible in this excerpt; confirm
    # against the full source.
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # Flush the previous section before starting the new one.
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                # A "[" line without a closing "]" is malformed.
                raise ValueError("Invalid section heading", line)
            # Ordinary content line: accumulate under the current section.
            content.append(line)

    # wrap up last segment
    yield section, content
def _mkstemp(*args,**kw):
    # Sandbox-bypassing wrapper around tempfile.mkstemp: swap the captured
    # real ``os_open`` in while mkstemp runs, then restore whatever was there.
    from tempfile import mkstemp
    # NOTE(review): the ``old_open = os.open`` save and the try/finally
    # scaffolding are not visible in this excerpt; confirm against the full
    # source.
    os.open = os_open # temporarily bypass sandboxing
    return mkstemp(*args,**kw)
    os.open = old_open # and then put it back
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
# Re-export the manager's public methods as module-level functions.
# NOTE(review): the enclosing ``def _initialize(g):`` header is not visible
# in this excerpt; confirm against the full source.
for name in dir(_manager):
    if not name.startswith('_'):
        g[name] = getattr(_manager, name)
_initialize(globals())

# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())

# Does the main program list any requirements?
# NOTE(review): the try/except ImportError/else scaffolding around this
# import and the first ``require`` call is not visible in this excerpt.
from __main__ import __requires__
pass # No: just use the default working set based on sys.path
# Yes: ensure the requirements are met, by prefixing sys.path if necessary
working_set.require(__requires__)
except VersionConflict: # try it without defaults already on sys.path
    working_set = WorkingSet([]) # by starting with an empty path
    # Resolve the requirements from scratch and activate what we find.
    for dist in working_set.resolve(
        parse_requirements(__requires__), Environment()
        working_set.add(dist)
    for entry in sys.path: # add any missing entries from sys.path
        if entry not in working_set.entries:
            working_set.add_entry(entry)
    sys.path[:] = working_set.entries # then copy back to sys.path

# Public module-level aliases bound to the master working set.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]; map(working_set.add_entry,sys.path) # match order