[Python-checkins] distutils2: Remove obsolete files.

eric.araujo python-checkins at python.org
Tue Sep 13 13:43:49 CEST 2011


http://hg.python.org/distutils2/rev/ee75e8d9738c
changeset: 1145:ee75e8d9738c
user: Éric Araujo <merwok at netwok.org>
date: Tue Sep 13 13:30:34 2011 +0200
summary:
 Remove obsolete files.
test_distutils2.py adds nothing over runtests.py or python -m unittest;
pkgutil is no longer the home of the PEP 376 implementation
(d2.database is).
files:
 distutils2/_backport/pkgutil.py            | 1222 ----------
 distutils2/_backport/tests/test_pkgutil.py |  608 ----
 test_distutils2.py                         |    5 -
 3 files changed, 0 insertions(+), 1835 deletions(-)
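
For context on the pkgutil removal: the PEP 376 query API that used to live
in distutils2._backport.pkgutil is now provided by d2.database. A minimal
sketch of querying installed distributions through that module, assuming
"d2" abbreviates distutils2 and that distutils2.database keeps the names the
removed module exported (get_distributions, get_distribution and the same
use_egg_info parameter); 'docutils' is just an arbitrary example name:

    # Sketch only: PEP 376 queries via distutils2.database instead of the
    # removed distutils2._backport.pkgutil (names assumed to be unchanged).
    from distutils2.database import get_distributions, get_distribution

    # Enumerate installed distributions, including legacy .egg-info ones.
    for dist in get_distributions(use_egg_info=True):
        print dist.name, dist.metadata['Version']

    # Look up one distribution by name; None means it is not installed.
    dist = get_distribution('docutils')
    if dist is not None:
        print dist.metadata['Version']
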
diff --git a/distutils2/_backport/pkgutil.py b/distutils2/_backport/pkgutil.py
deleted file mode 100644
--- a/distutils2/_backport/pkgutil.py
+++ /dev/null
@@ -1,1222 +0,0 @@
-"""Utilities to support packages."""
-
-import imp
-import sys
-
-from csv import reader as csv_reader
-import os
-import re
-from stat import ST_SIZE
-from types import ModuleType
-import warnings
-
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-
-from distutils2.errors import DistutilsError
-from distutils2.metadata import Metadata
-from distutils2.version import suggest_normalized_version, VersionPredicate
-try:
- import cStringIO as StringIO
-except ImportError:
- import StringIO
-
-
-__all__ = [
- 'get_importer', 'iter_importers', 'get_loader', 'find_loader',
- 'walk_packages', 'iter_modules', 'get_data',
- 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
- 'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
- 'get_distributions', 'get_distribution', 'get_file_users',
- 'provides_distribution', 'obsoletes_distribution',
- 'enable_cache', 'disable_cache', 'clear_cache',
-]
-
-
-##########################
-# PEP 302 Implementation #
-##########################
-
-def read_code(stream):
- # This helper is needed in order for the :pep:`302` emulation to
- # correctly handle compiled files
- import marshal
-
- magic = stream.read(4)
- if magic != imp.get_magic():
- return None
-
- stream.read(4) # Skip timestamp
- return marshal.load(stream)
-
-
-def simplegeneric(func):
- """Make a trivial single-dispatch generic function"""
- registry = {}
-
- def wrapper(*args, ** kw):
- ob = args[0]
- try:
- cls = ob.__class__
- except AttributeError:
- cls = type(ob)
- try:
- mro = cls.__mro__
- except AttributeError:
- try:
-
- class cls(cls, object):
- pass
- mro = cls.__mro__[1:]
- except TypeError:
- mro = object, # must be an ExtensionClass or some such :(
- for t in mro:
- if t in registry:
- return registry[t](*args, ** kw)
- else:
- return func(*args, ** kw)
- try:
- wrapper.__name__ = func.__name__
- except (TypeError, AttributeError):
- pass # Python 2.3 doesn't allow functions to be renamed
-
- def register(typ, func=None):
- if func is None:
- return lambda f: register(typ, f)
- registry[typ] = func
- return func
-
- wrapper.__dict__ = func.__dict__
- wrapper.__doc__ = func.__doc__
- wrapper.register = register
- return wrapper
-
-
-def walk_packages(path=None, prefix='', onerror=None):
- """Yields ``(module_loader, name, ispkg)`` for all modules recursively
- on *path*, or, if *path* is ``None``, all accessible modules.
-
- :parameter path: should be either ``None`` or a list of paths to look for
- modules in.
- :parameter prefix: is a string to output on the front of every module name
- on output.
-
- Note that this function must import all packages (NOT all
- modules!) on the given path, in order to access the ``__path__``
- attribute to find submodules.
-
- *onerror* is a function which gets called with one argument (the
- name of the package which was being imported) if any exception
- occurs while trying to import a package. If no onerror function is
- supplied, ``ImportErrors`` are caught and ignored, while all other
- exceptions are propagated, terminating the search.
-
- Examples:
-
- * list all modules python can access::
-
- walk_packages()
-
- * list all submodules of ctypes::
-
- walk_packages(ctypes.__path__, ctypes.__name__+'.')
-
- """
-
- def seen(p, m={}):
- if p in m:
- return True
- m[p] = True
-
- for importer, name, ispkg in iter_modules(path, prefix):
- yield importer, name, ispkg
-
- if ispkg:
- try:
- __import__(name)
- except ImportError:
- if onerror is not None:
- onerror(name)
- except Exception:
- if onerror is not None:
- onerror(name)
- else:
- raise
- else:
- path = getattr(sys.modules[name], '__path__', None) or []
-
- # don't traverse path items we've seen before
- path = [p for p in path if not seen(p)]
-
- for item in walk_packages(path, name + '.', onerror):
- yield item
-
-
-def iter_modules(path=None, prefix=''):
- """Yields ``(module_loader, name, ispkg)`` for all submodules on path,
- or, if *path* is ``None``, all top-level modules on ``sys.path``.
-
- :parameter path: should be either None or a list of paths to look for
- modules in.
- :parameter prefix: is a string to output on the front of every module name
- on output.
-
- """
-
- if path is None:
- importers = iter_importers()
- else:
- importers = map(get_importer, path)
-
- yielded = {}
- for i in importers:
- for name, ispkg in iter_importer_modules(i, prefix):
- if name not in yielded:
- yielded[name] = 1
- yield i, name, ispkg
-
-
-#@simplegeneric
-def iter_importer_modules(importer, prefix=''):
- if not hasattr(importer, 'iter_modules'):
- return []
- return importer.iter_modules(prefix)
-
-iter_importer_modules = simplegeneric(iter_importer_modules)
-
-
-class ImpImporter(object):
- """:pep:`302` Importer that wraps Python's "classic" import algorithm
-
- ``ImpImporter(dirname)`` produces a :pep:`302` importer that searches that
- directory. ``ImpImporter(None)`` produces a :pep:`302` importer that
- searches the current ``sys.path``, plus any modules that are frozen
- or built-in.
-
- Note that :class:`ImpImporter` does not currently support being used by
- placement on ``sys.meta_path``.
- """
-
- def __init__(self, path=None):
- self.path = path
-
- def find_module(self, fullname, path=None):
- # Note: we ignore 'path' argument since it is only used via meta_path
- subname = fullname.split(".")[-1]
- if subname != fullname and self.path is None:
- return None
- if self.path is None:
- path = None
- else:
- path = [os.path.realpath(self.path)]
- try:
- file, filename, etc = imp.find_module(subname, path)
- except ImportError:
- return None
- return ImpLoader(fullname, file, filename, etc)
-
- def iter_modules(self, prefix=''):
- if self.path is None or not os.path.isdir(self.path):
- return
-
- yielded = {}
- import inspect
-
- filenames = os.listdir(self.path)
- filenames.sort() # handle packages before same-named modules
-
- for fn in filenames:
- modname = inspect.getmodulename(fn)
- if modname == '__init__' or modname in yielded:
- continue
-
- path = os.path.join(self.path, fn)
- ispkg = False
-
- if not modname and os.path.isdir(path) and '.' not in fn:
- modname = fn
- for fn in os.listdir(path):
- subname = inspect.getmodulename(fn)
- if subname == '__init__':
- ispkg = True
- break
- else:
- continue # not a package
-
- if modname and '.' not in modname:
- yielded[modname] = 1
- yield prefix + modname, ispkg
-
-
-class ImpLoader(object):
- """:pep:`302` Loader that wraps Python's "classic" import algorithm """
-
- code = source = None
-
- def __init__(self, fullname, file, filename, etc):
- self.file = file
- self.filename = filename
- self.fullname = fullname
- self.etc = etc
-
- def load_module(self, fullname):
- self._reopen()
- try:
- mod = imp.load_module(fullname, self.file, self.filename, self.etc)
- finally:
- if self.file:
- self.file.close()
- # Note: we don't set __loader__ because we want the module to look
- # normal; i.e. this is just a wrapper for standard import machinery
- return mod
-
- def get_data(self, pathname):
- return open(pathname, "rb").read()
-
- def _reopen(self):
- if self.file and self.file.closed:
- mod_type = self.etc[2]
- if mod_type == imp.PY_SOURCE:
- self.file = open(self.filename, 'rU')
- elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
- self.file = open(self.filename, 'rb')
-
- def _fix_name(self, fullname):
- if fullname is None:
- fullname = self.fullname
- elif fullname != self.fullname:
- raise ImportError("Loader for module %s cannot handle "
- "module %s" % (self.fullname, fullname))
- return fullname
-
- def is_package(self, fullname):
- fullname = self._fix_name(fullname)
- return self.etc[2] == imp.PKG_DIRECTORY
-
- def get_code(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.code is None:
- mod_type = self.etc[2]
- if mod_type == imp.PY_SOURCE:
- source = self.get_source(fullname)
- self.code = compile(source, self.filename, 'exec')
- elif mod_type == imp.PY_COMPILED:
- self._reopen()
- try:
- self.code = read_code(self.file)
- finally:
- self.file.close()
- elif mod_type == imp.PKG_DIRECTORY:
- self.code = self._get_delegate().get_code()
- return self.code
-
- def get_source(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.source is None:
- mod_type = self.etc[2]
- if mod_type == imp.PY_SOURCE:
- self._reopen()
- try:
- self.source = self.file.read()
- finally:
- self.file.close()
- elif mod_type == imp.PY_COMPILED:
- if os.path.exists(self.filename[:-1]):
- f = open(self.filename[:-1], 'rU')
- self.source = f.read()
- f.close()
- elif mod_type == imp.PKG_DIRECTORY:
- self.source = self._get_delegate().get_source()
- return self.source
-
- def _get_delegate(self):
- return ImpImporter(self.filename).find_module('__init__')
-
- def get_filename(self, fullname=None):
- fullname = self._fix_name(fullname)
- mod_type = self.etc[2]
- if mod_type == imp.PKG_DIRECTORY:
- return self._get_delegate().get_filename()
- elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
- return self.filename
- return None
-
-
-try:
- import zipimport
- from zipimport import zipimporter
-
- def iter_zipimport_modules(importer, prefix=''):
- dirlist = sorted(zipimport._zip_directory_cache[importer.archive])
- _prefix = importer.prefix
- plen = len(_prefix)
- yielded = {}
- import inspect
- for fn in dirlist:
- if not fn.startswith(_prefix):
- continue
-
- fn = fn[plen:].split(os.sep)
-
- if len(fn) == 2 and fn[1].startswith('__init__.py'):
- if fn[0] not in yielded:
- yielded[fn[0]] = 1
- yield fn[0], True
-
- if len(fn) != 1:
- continue
-
- modname = inspect.getmodulename(fn[0])
- if modname == '__init__':
- continue
-
- if modname and '.' not in modname and modname not in yielded:
- yielded[modname] = 1
- yield prefix + modname, False
-
- iter_importer_modules.register(zipimporter, iter_zipimport_modules)
-
-except ImportError:
- pass
-
-
-def get_importer(path_item):
- """Retrieve a :pep:`302` importer for the given path item
-
- The returned importer is cached in ``sys.path_importer_cache``
- if it was newly created by a path hook.
-
- If there is no importer, a wrapper around the basic import
- machinery is returned. This wrapper is never inserted into
- the importer cache (``None`` is inserted instead).
-
- The cache (or part of it) can be cleared manually if a
- rescan of ``sys.path_hooks`` is necessary.
- """
- try:
- importer = sys.path_importer_cache[path_item]
- except KeyError:
- for path_hook in sys.path_hooks:
- try:
- importer = path_hook(path_item)
- break
- except ImportError:
- pass
- else:
- importer = None
- sys.path_importer_cache.setdefault(path_item, importer)
-
- if importer is None:
- try:
- importer = ImpImporter(path_item)
- except ImportError:
- importer = None
- return importer
-
-
-def iter_importers(fullname=""):
- """Yield :pep:`302` importers for the given module name
-
- If fullname contains a '.', the importers will be for the package
- containing fullname, otherwise they will be importers for sys.meta_path,
- sys.path, and Python's "classic" import machinery, in that order. If
- the named module is in a package, that package is imported as a side
- effect of invoking this function.
-
- Non :pep:`302` mechanisms (e.g. the Windows registry) used by the
- standard import machinery to find files in alternative locations
- are partially supported, but are searched AFTER ``sys.path``. Normally,
- these locations are searched BEFORE sys.path, preventing ``sys.path``
- entries from shadowing them.
-
- For this to cause a visible difference in behaviour, there must
- be a module or package name that is accessible via both sys.path
- and one of the non :pep:`302` file system mechanisms. In this case,
- the emulation will find the former version, while the builtin
- import mechanism will find the latter.
-
- Items of the following types can be affected by this discrepancy:
- :data:`imp.C_EXTENSION`, :data:`imp.PY_SOURCE`, :data:`imp.PY_COMPILED`,
- :data:`imp.PKG_DIRECTORY`
- """
- if fullname.startswith('.'):
- raise ImportError("Relative module names not supported")
- if '.' in fullname:
- # Get the containing package's __path__
- pkg = '.'.join(fullname.split('.')[:-1])
- if pkg not in sys.modules:
- __import__(pkg)
- path = getattr(sys.modules[pkg], '__path__', None) or []
- else:
- for importer in sys.meta_path:
- yield importer
- path = sys.path
- for item in path:
- yield get_importer(item)
- if '.' not in fullname:
- yield ImpImporter()
-
-
-def get_loader(module_or_name):
- """Get a :pep:`302` "loader" object for module_or_name
-
- If the module or package is accessible via the normal import
- mechanism, a wrapper around the relevant part of that machinery
- is returned. Returns None if the module cannot be found or imported.
- If the named module is not already imported, its containing package
- (if any) is imported, in order to establish the package ``__path__``.
-
- This function uses :func:`iter_importers`, and is thus subject to the same
- limitations regarding platform-specific special import locations such
- as the Windows registry.
- """
- if module_or_name in sys.modules:
- module_or_name = sys.modules[module_or_name]
- if isinstance(module_or_name, ModuleType):
- module = module_or_name
- loader = getattr(module, '__loader__', None)
- if loader is not None:
- return loader
- fullname = module.__name__
- else:
- fullname = module_or_name
- return find_loader(fullname)
-
-
-def find_loader(fullname):
- """Find a :pep:`302` "loader" object for fullname
-
- If fullname contains dots, path must be the containing package's
- ``__path__``. Returns ``None`` if the module cannot be found or imported.
- This function uses :func:`iter_importers`, and is thus subject to the same
- limitations regarding platform-specific special import locations such as
- the Windows registry.
- """
- for importer in iter_importers(fullname):
- loader = importer.find_module(fullname)
- if loader is not None:
- return loader
-
- return None
-
-
-def extend_path(path, name):
- """Extend a package's path.
-
- Intended use is to place the following code in a package's
- ``__init__.py``::
-
- from pkgutil import extend_path
- __path__ = extend_path(__path__, __name__)
-
- This will add to the package's ``__path__`` all subdirectories of
- directories on ``sys.path`` named after the package. This is useful
- if one wants to distribute different parts of a single logical
- package as multiple directories.
-
- It also looks for ``*.pkg`` files beginning where ``*`` matches the name
- argument. This feature is similar to ``*.pth`` files (see ``site.py``),
- except that it doesn't special-case lines starting with ``import``.
- A ``*.pkg`` file is trusted at face value: apart from checking for
- duplicates, all entries found in a ``*.pkg`` file are added to the
- path, regardless of whether they exist on the filesystem. (This
- is a feature.)
-
- If the input path is not a list (as is the case for frozen
- packages) it is returned unchanged. The input path is not
- modified; an extended copy is returned. Items are only appended
- to the copy at the end.
-
- It is assumed that sys.path is a sequence. Items of sys.path that
- are not (unicode or 8-bit) strings referring to existing
- directories are ignored. Unicode items of sys.path that cause
- errors when used as filenames may cause this function to raise an
- exception (in line with ``os.path.isdir()`` behavior).
- """
-
- if not isinstance(path, list):
- # This could happen e.g. when this is called from inside a
- # frozen package. Return the path unchanged in that case.
- return path
-
- pname = os.path.join(*name.split('.')) # Reconstitute as relative path
- # Just in case os.extsep != '.'
- sname = os.extsep.join(name.split('.'))
- sname_pkg = sname + os.extsep + "pkg"
- init_py = "__init__" + os.extsep + "py"
-
- path = path[:] # Start with a copy of the existing path
-
- for dir in sys.path:
- if not isinstance(dir, basestring) or not os.path.isdir(dir):
- continue
- subdir = os.path.join(dir, pname)
- # XXX This may still add duplicate entries to path on
- # case-insensitive filesystems
- initfile = os.path.join(subdir, init_py)
- if subdir not in path and os.path.isfile(initfile):
- path.append(subdir)
- # XXX Is this the right thing for subpackages like zope.app?
- # It looks for a file named "zope.app.pkg"
- pkgfile = os.path.join(dir, sname_pkg)
- if os.path.isfile(pkgfile):
- try:
- f = open(pkgfile)
- except IOError, msg:
- sys.stderr.write("Can't open %s: %s\n" %
- (pkgfile, msg))
- else:
- for line in f:
- line = line.rstrip('\n')
- if not line or line.startswith('#'):
- continue
- path.append(line) # Don't check for existence!
- f.close()
-
- return path
-
-
-def get_data(package, resource):
- """Get a resource from a package.
-
- This is a wrapper round the :pep:`302` loader get_data API. The package
- argument should be the name of a package, in standard module format
- (``foo.bar``). The resource argument should be in the form of a relative
- filename, using ``'/'`` as the path separator. The parent directory name
- ``'..'`` is not allowed, and nor is a rooted name (starting with a
- ``'/'``).
-
- The function returns a binary string, which is the contents of the
- specified resource.
-
- For packages located in the filesystem, which have already been imported,
- this is the rough equivalent of::
-
- d = os.path.dirname(sys.modules[package].__file__)
- data = open(os.path.join(d, resource), 'rb').read()
-
- If the package cannot be located or loaded, or it uses a :pep:`302` loader
- which does not support :func:`get_data`, then ``None`` is returned.
- """
-
- loader = get_loader(package)
- if loader is None or not hasattr(loader, 'get_data'):
- return None
- mod = sys.modules.get(package) or loader.load_module(package)
- if mod is None or not hasattr(mod, '__file__'):
- return None
-
- # Modify the resource name to be compatible with the loader.get_data
- # signature - an os.path format "filename" starting with the dirname of
- # the package's __file__
- parts = resource.split('/')
- parts.insert(0, os.path.dirname(mod.__file__))
- resource_name = os.path.join(*parts)
- return loader.get_data(resource_name)
-
-
-##########################
-# PEP 376 Implementation #
-##########################
-
-DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')
-
-# Cache
-_cache_name = {} # maps names to Distribution instances
-_cache_name_egg = {} # maps names to EggInfoDistribution instances
-_cache_path = {} # maps paths to Distribution instances
-_cache_path_egg = {} # maps paths to EggInfoDistribution instances
-_cache_generated = False # indicates if .dist-info distributions are cached
-_cache_generated_egg = False # indicates if .dist-info and .egg are cached
-_cache_enabled = True
-
-
-def enable_cache():
- """
- Enables the internal cache.
-
- Note that this function will not clear the cache in any case, for that
- functionality see :func:`clear_cache`.
- """
- global _cache_enabled
-
- _cache_enabled = True
-
-
-def disable_cache():
- """
- Disables the internal cache.
-
- Note that this function will not clear the cache in any case, for that
- functionality see :func:`clear_cache`.
- """
- global _cache_enabled
-
- _cache_enabled = False
-
-
-def clear_cache():
- """ Clears the internal cache. """
- global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
- _cache_generated, _cache_generated_egg
-
- _cache_name = {}
- _cache_name_egg = {}
- _cache_path = {}
- _cache_path_egg = {}
- _cache_generated = False
- _cache_generated_egg = False
-
-
-def _yield_distributions(include_dist, include_egg, paths=sys.path):
- """
- Yield .dist-info and .egg(-info) distributions, based on the arguments
-
- :parameter include_dist: yield .dist-info distributions
- :parameter include_egg: yield .egg(-info) distributions
- """
- for path in paths:
- realpath = os.path.realpath(path)
- if not os.path.isdir(realpath):
- continue
- for dir in os.listdir(realpath):
- dist_path = os.path.join(realpath, dir)
- if include_dist and dir.endswith('.dist-info'):
- yield Distribution(dist_path)
- elif include_egg and (dir.endswith('.egg-info') or
- dir.endswith('.egg')):
- yield EggInfoDistribution(dist_path)
-
-def _generate_cache(use_egg_info=False, paths=sys.path):
- global _cache_generated, _cache_generated_egg
-
- if _cache_generated_egg or (_cache_generated and not use_egg_info):
- return
- else:
- gen_dist = not _cache_generated
- gen_egg = use_egg_info
-
- for dist in _yield_distributions(gen_dist, gen_egg, paths):
- if isinstance(dist, Distribution):
- _cache_path[dist.path] = dist
- if not dist.name in _cache_name:
- _cache_name[dist.name] = []
- _cache_name[dist.name].append(dist)
- else:
- _cache_path_egg[dist.path] = dist
- if not dist.name in _cache_name_egg:
- _cache_name_egg[dist.name] = []
- _cache_name_egg[dist.name].append(dist)
-
- if gen_dist:
- _cache_generated = True
- if gen_egg:
- _cache_generated_egg = True
-
-
-class Distribution(object):
- """Created with the *path* of the ``.dist-info`` directory provided to the
- constructor. It reads the metadata contained in ``METADATA`` when it is
- instantiated."""
-
- # Attribute documenting for Sphinx style documentation, see for more info:
- # http://sphinx.pocoo.org/ext/autodoc.html#dir-autoattribute
- name = ''
- """The name of the distribution."""
- metadata = None
- """A :class:`distutils2.metadata.Metadata` instance loaded with
- the distribution's ``METADATA`` file."""
- requested = False
- """A boolean that indicates whether the ``REQUESTED`` metadata file is
- present (in other words, whether the package was installed by user
- request or it was installed as a dependency)."""
-
- def __init__(self, path):
- if _cache_enabled and path in _cache_path:
- self.metadata = _cache_path[path].metadata
- else:
- metadata_path = os.path.join(path, 'METADATA')
- self.metadata = Metadata(path=metadata_path)
-
- self.path = path
- self.name = self.metadata['name']
-
- if _cache_enabled and not path in _cache_path:
- _cache_path[path] = self
-
- def __repr__(self):
- return '%s-%s at %s' % (self.name, self.metadata.version, self.path)
-
- def _get_records(self, local=False):
- RECORD = self.get_distinfo_file('RECORD')
- record_reader = csv_reader(RECORD, delimiter=',')
- for row in record_reader:
- path, md5, size = row[:] + [None for i in xrange(len(row), 3)]
- if local:
- path = path.replace('/', os.sep)
- path = os.path.join(sys.prefix, path)
- yield path, md5, size
-
- def get_resource_path(self, relative_path):
- resources_file = self.get_distinfo_file('RESOURCES')
- resources_reader = csv_reader(resources_file, delimiter=',')
- for relative, destination in resources_reader:
- if relative == relative_path:
- return destination
- raise KeyError('No resource file with relative path %s was installed' %
- relative_path)
-
- def get_installed_files(self, local=False):
- """
- Iterates over the ``RECORD`` entries and returns a tuple
- ``(path, md5, size)`` for each line. If *local* is ``True``,
- the returned path is transformed into a local absolute path.
- Otherwise the raw value from RECORD is returned.
-
- A local absolute path is an absolute path in which occurrences of
- ``'/'`` have been replaced by the system separator given by ``os.sep``.
-
- :parameter local: flag to say if the path should be returned as a local
- absolute path
-
- :type local: boolean
- :returns: iterator of (path, md5, size)
- """
- return self._get_records(local)
-
- def uses(self, path):
- """
- Returns ``True`` if path is listed in ``RECORD``. *path* can be a local
- absolute path or a relative ``'/'``-separated path.
-
- :rtype: boolean
- """
- for p, md5, size in self._get_records():
- local_absolute = os.path.join(sys.prefix, p)
- if path == p or path == local_absolute:
- return True
- return False
-
- def get_distinfo_file(self, path, binary=False):
- """
- Returns a file located under the ``.dist-info`` directory. Returns a
- ``file`` instance for the file pointed by *path*.
-
- :parameter path: a ``'/'``-separated path relative to the
- ``.dist-info`` directory or an absolute path;
- If *path* is an absolute path and doesn't start
- with the ``.dist-info`` directory path,
- a :class:`DistutilsError` is raised
- :type path: string
- :parameter binary: If *binary* is ``True``, opens the file in read-only
- binary mode (``rb``), otherwise opens it in
- read-only mode (``r``).
- :rtype: file object
- """
- open_flags = 'r'
- if binary:
- open_flags += 'b'
-
- # Check if it is an absolute path
- if path.find(os.sep) >= 0:
- # it's an absolute path?
- distinfo_dirname, path = path.split(os.sep)[-2:]
- if distinfo_dirname != self.path.split(os.sep)[-1]:
- raise DistutilsError("Requested dist-info file does not "
- "belong to the %s distribution. '%s' was requested." \
- % (self.name, os.sep.join([distinfo_dirname, path])))
-
- # The file must be relative
- if path not in DIST_FILES:
- raise DistutilsError("Requested an invalid dist-info file: "
- "%s" % path)
-
- # Convert the relative path back to absolute
- path = os.path.join(self.path, path)
- return open(path, open_flags)
-
- def get_distinfo_files(self, local=False):
- """
- Iterates over the ``RECORD`` entries and returns paths for each line if
- the path is pointing to a file located in the ``.dist-info`` directory
- or one of its subdirectories.
-
- :parameter local: If *local* is ``True``, each returned path is
- transformed into a local absolute path. Otherwise the
- raw value from ``RECORD`` is returned.
- :type local: boolean
- :returns: iterator of paths
- """
- for path, md5, size in self._get_records(local):
- yield path
-
- def __eq__(self, other):
- return isinstance(other, Distribution) and self.path == other.path
-
- # See http://docs.python.org/reference/datamodel#object.__hash__
- __hash__ = object.__hash__
-
-
-class EggInfoDistribution(object):
- """Created with the *path* of the ``.egg-info`` directory or file provided
- to the constructor. It reads the metadata contained in the file itself, or
- if the given path happens to be a directory, the metadata is read from the
- file ``PKG-INFO`` under that directory."""
-
- name = ''
- """The name of the distribution."""
- metadata = None
- """A :class:`distutils2.metadata.Metadata` instance loaded with
- the distribution's ``METADATA`` file."""
- _REQUIREMENT = re.compile(\
- r'(?P<name>[-A-Za-z0-9_.]+)\s*' \
- r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*' \
- r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*' \
- r'(?P<extras>\[.*\])?')
-
- def __init__(self, path, display_warnings=False):
- self.path = path
- self.display_warnings = display_warnings
- if _cache_enabled and path in _cache_path_egg:
- self.metadata = _cache_path_egg[path].metadata
- self.name = self.metadata['Name']
- return
-
- # reused from Distribute's pkg_resources
- def yield_lines(strs):
- """Yield non-empty/non-comment lines of a ``basestring``
- or sequence"""
- if isinstance(strs, basestring):
- for s in strs.splitlines():
- s = s.strip()
- # skip blank lines/comments
- if s and not s.startswith('#'):
- yield s
- else:
- for ss in strs:
- for s in yield_lines(ss):
- yield s
-
- requires = None
- if path.endswith('.egg'):
- if os.path.isdir(path):
- meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
- self.metadata = Metadata(path=meta_path)
- try:
- req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
- requires = open(req_path, 'r').read()
- except IOError:
- requires = None
- else:
- # FIXME handle the case where zipfile is not available
- zipf = zipimport.zipimporter(path)
- fileobj = StringIO.StringIO(zipf.get_data('EGG-INFO/PKG-INFO'))
- self.metadata = Metadata(fileobj=fileobj)
- try:
- requires = zipf.get_data('EGG-INFO/requires.txt')
- except IOError:
- requires = None
- self.name = self.metadata['Name']
- elif path.endswith('.egg-info'):
- if os.path.isdir(path):
- path = os.path.join(path, 'PKG-INFO')
- try:
- req_f = open(os.path.join(path, 'requires.txt'), 'r')
- requires = req_f.read()
- except IOError:
- requires = None
- self.metadata = Metadata(path=path)
- self.name = self.metadata['name']
- else:
- raise ValueError('The path must end with .egg-info or .egg')
-
-
- if requires is not None:
- if self.metadata['Metadata-Version'] == '1.1':
- # we can't have 1.1 metadata *and* Setuptools requires
- for field in ('Obsoletes', 'Requires', 'Provides'):
- del self.metadata[field]
-
- reqs = []
-
- if requires is not None:
- for line in yield_lines(requires):
- if line[0] == '[' and self.display_warnings:
- warnings.warn('distutils2 does not support extensions '
- 'in requires.txt')
- break
- else:
- match = self._REQUIREMENT.match(line.strip())
- if not match:
- # this happens when we encounter extras
- # since they are written at the end of the file
- # we just exit
- break
- #raise ValueError('Distribution %s has ill formed '
- # 'requires.txt file (%s)' %
- # (self.name, line))
- else:
- if match.group('extras'):
- s = (('Distribution %s uses extra requirements '
- 'which are not supported in distutils') \
- % (self.name))
- warnings.warn(s)
- name = match.group('name')
- version = None
- if match.group('first'):
- version = match.group('first')
- if match.group('rest'):
- version += match.group('rest')
- version = version.replace(' ', '') # trim spaces
- if version is None:
- reqs.append(name)
- else:
- reqs.append('%s (%s)' % (name, version))
-
- if len(reqs) > 0:
- self.metadata['Requires-Dist'] += reqs
-
-
- if _cache_enabled:
- _cache_path_egg[self.path] = self
-
- def __repr__(self):
- return '%s-%s at %s' % (self.name, self.metadata.version, self.path)
-
- def get_installed_files(self, local=False):
-
- def _md5(path):
- f = open(path)
- try:
- content = f.read()
- finally:
- f.close()
- return md5(content).hexdigest()
-
- def _size(path):
- return os.stat(path)[ST_SIZE]
-
- path = self.path
- if local:
- path = path.replace('/', os.sep)
-
- # XXX What about scripts and data files ?
- if os.path.isfile(path):
- return [(path, _md5(path), _size(path))]
- else:
- files = []
- for root, dir, files_ in os.walk(path):
- for item in files_:
- item = os.path.join(root, item)
- files.append((item, _md5(item), _size(item)))
- return files
-
- return []
-
- def uses(self, path):
- return False
-
- def __eq__(self, other):
- return isinstance(other, EggInfoDistribution) and \
- self.path == other.path
-
- # See http://docs.python.org/reference/datamodel#object.__hash__
- __hash__ = object.__hash__
-
-
-def distinfo_dirname(name, version):
- """
- The *name* and *version* parameters are converted into their
- filename-escaped form, i.e. any ``'-'`` characters are replaced
- with ``'_'`` other than the one in ``'dist-info'`` and the one
- separating the name from the version number.
-
- :parameter name: is converted to a standard distribution name by replacing
- any runs of non- alphanumeric characters with a single
- ``'-'``.
- :type name: string
- :parameter version: is converted to a standard version string. Spaces
- become dots, and all other non-alphanumeric characters
- (except dots) become dashes, with runs of multiple
- dashes condensed to a single dash.
- :type version: string
- :returns: directory name
- :rtype: string"""
- file_extension = '.dist-info'
- name = name.replace('-', '_')
- normalized_version = suggest_normalized_version(version)
- # Because this is a lookup procedure, something will be returned even if
- # it is a version that cannot be normalized
- if normalized_version is None:
- # Unable to achieve normality?
- normalized_version = version
- return '-'.join([name, normalized_version]) + file_extension
-
-
-def get_distributions(use_egg_info=False, paths=sys.path):
- """
- Provides an iterator that looks for ``.dist-info`` directories in
- ``sys.path`` and returns :class:`Distribution` instances for each one of
- them. If the parameter *use_egg_info* is ``True``, then the ``.egg-info``
- files and directories are iterated as well.
-
- :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution`
- instances
- """
- if not _cache_enabled:
- for dist in _yield_distributions(True, use_egg_info, paths):
- yield dist
- else:
- _generate_cache(use_egg_info, paths)
-
- for dist in _cache_path.itervalues():
- yield dist
-
- if use_egg_info:
- for dist in _cache_path_egg.itervalues():
- yield dist
-
-
-def get_distribution(name, use_egg_info=False, paths=None):
- """
- Scans all elements in ``sys.path`` and looks for all directories
- ending with ``.dist-info``. Returns a :class:`Distribution`
- corresponding to the ``.dist-info`` directory that contains the
- ``METADATA`` that matches *name* for the *name* metadata field.
- If no distribution exists with the given *name* and the parameter
- *use_egg_info* is set to ``True``, then all files and directories ending
- with ``.egg-info`` are scanned. A :class:`EggInfoDistribution` instance is
- returned if one is found that has metadata that matches *name* for the
- *name* metadata field.
-
- This function only returns the first result found, as no more than one
- value is expected. If the directory is not found, ``None`` is returned.
-
- :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
- """
- if paths == None:
- paths = sys.path
-
- if not _cache_enabled:
- for dist in _yield_distributions(True, use_egg_info, paths):
- if dist.name == name:
- return dist
- else:
- _generate_cache(use_egg_info, paths)
-
- if name in _cache_name:
- return _cache_name[name][0]
- elif use_egg_info and name in _cache_name_egg:
- return _cache_name_egg[name][0]
- else:
- return None
-
-
-def obsoletes_distribution(name, version=None, use_egg_info=False):
- """
- Iterates over all distributions to find which distributions obsolete
- *name*.
-
- If a *version* is provided, it will be used to filter the results.
- If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
- distributions will be considered as well.
-
- :type name: string
- :type version: string
- :parameter name:
- """
- for dist in get_distributions(use_egg_info):
- obsoleted = (dist.metadata['Obsoletes-Dist'] +
- dist.metadata['Obsoletes'])
- for obs in obsoleted:
- o_components = obs.split(' ', 1)
- if len(o_components) == 1 or version is None:
- if name == o_components[0]:
- yield dist
- break
- else:
- try:
- predicate = VersionPredicate(obs)
- except ValueError:
- raise DistutilsError(('Distribution %s has ill formed' +
- ' obsoletes field') % (dist.name,))
- if name == o_components[0] and predicate.match(version):
- yield dist
- break
-
-
-def provides_distribution(name, version=None, use_egg_info=False):
- """
- Iterates over all distributions to find which distributions provide *name*.
- If a *version* is provided, it will be used to filter the results. Scans
- all elements in ``sys.path`` and looks for all directories ending with
- ``.dist-info``. Returns a :class:`Distribution` corresponding to the
- ``.dist-info`` directory that contains a ``METADATA`` that matches *name*
- for the name metadata. If the argument *use_egg_info* is set to ``True``,
- then all files and directories ending with ``.egg-info`` are considered
- as well and returns an :class:`EggInfoDistribution` instance.
-
- This function only returns the first result found, since no more than
- one value is expected. If the directory is not found, returns ``None``.
-
- :parameter version: a version specifier that indicates the version
- required, conforming to the format in ``PEP-345``
-
- :type name: string
- :type version: string
- """
- predicate = None
- if not version is None:
- try:
- predicate = VersionPredicate(name + ' (' + version + ')')
- except ValueError:
- raise DistutilsError('Invalid name or version')
-
- for dist in get_distributions(use_egg_info):
- provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
-
- for p in provided:
- p_components = p.rsplit(' ', 1)
- if len(p_components) == 1 or predicate is None:
- if name == p_components[0]:
- yield dist
- break
- else:
- p_name, p_ver = p_components
- if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
- raise DistutilsError(('Distribution %s has invalid ' +
- 'provides field: %s') \
- % (dist.name, p))
- p_ver = p_ver[1:-1] # trim off the parenthesis
- if p_name == name and predicate.match(p_ver):
- yield dist
- break
-
-
-def get_file_users(path):
- """
- Iterates over all distributions to find out which distributions use
- *path*.
-
- :parameter path: can be a local absolute path or a relative
- ``'/'``-separated path.
- :type path: string
- :rtype: iterator of :class:`Distribution` instances
- """
- for dist in get_distributions():
- if dist.uses(path):
- yield dist
-
-def resource_path(distribution_name, relative_path):
- dist = get_distribution(distribution_name)
- if dist != None:
- return dist.get_resource_path(relative_path)
- raise LookupError('No distribution named %s is installed.' %
- distribution_name)
-
-def resource_open(distribution_name, relative_path, * args, ** kwargs):
- file = open(resource_path(distribution_name, relative_path), * args,
- ** kwargs)
- return file
\ No newline at end of file
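
The PEP 302 half of the removed backport duplicated helpers that already
exist in the standard library's pkgutil, so the get_data behaviour described
in the docstrings above remains available there. A minimal sketch of that
call, where 'mypkg' and 'data/config.txt' are hypothetical names used only
for illustration:

    # Sketch only: reading a bundled resource with the stdlib pkgutil,
    # which offers the same get_data API as the removed backport.
    # 'mypkg' and 'data/config.txt' are hypothetical example names.
    import pkgutil

    data = pkgutil.get_data('mypkg', 'data/config.txt')
    if data is not None:
        print len(data), 'bytes read'
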
diff --git a/distutils2/_backport/tests/test_pkgutil.py b/distutils2/_backport/tests/test_pkgutil.py
deleted file mode 100644
--- a/distutils2/_backport/tests/test_pkgutil.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Tests for PEP 376 pkgutil functionality"""
-import imp
-import sys
-
-import csv
-import os
-import shutil
-import tempfile
-import zipfile
-try:
- from hashlib import md5
-except ImportError:
- from distutils2._backport.hashlib import md5
-
-from distutils2.errors import DistutilsError
-from distutils2.metadata import Metadata
-from distutils2.tests import unittest, run_unittest, support, TESTFN
-
-from distutils2._backport import pkgutil
-from distutils2._backport.pkgutil import (
- Distribution, EggInfoDistribution, get_distribution, get_distributions,
- provides_distribution, obsoletes_distribution, get_file_users,
- distinfo_dirname, _yield_distributions)
-
-try:
- from os.path import relpath
-except ImportError:
- try:
- from unittest.compatibility import relpath
- except ImportError:
- from unittest2.compatibility import relpath
-
-# Adapted from Python 2.7's trunk
-
-# TODO Add a test for getting a distribution that is provided by another
-# distribution.
-
-# TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini)
-
-
-class TestPkgUtilData(unittest.TestCase):
-
- def setUp(self):
- super(TestPkgUtilData, self).setUp()
- self.dirname = tempfile.mkdtemp()
- sys.path.insert(0, self.dirname)
- pkgutil.disable_cache()
-
- def tearDown(self):
- super(TestPkgUtilData, self).tearDown()
- del sys.path[0]
- pkgutil.enable_cache()
- shutil.rmtree(self.dirname)
-
- def test_getdata_filesys(self):
- pkg = 'test_getdata_filesys'
-
- # Include a LF and a CRLF, to test that binary data is read back
- RESOURCE_DATA = 'Hello, world!\nSecond line\r\nThird line'
-
- # Make a package with some resources
- package_dir = os.path.join(self.dirname, pkg)
- os.mkdir(package_dir)
- # Empty init.py
- f = open(os.path.join(package_dir, '__init__.py'), "wb")
- try:
- pass
- finally:
- f.close()
- # Resource files, res.txt, sub/res.txt
- f = open(os.path.join(package_dir, 'res.txt'), "wb")
- try:
- f.write(RESOURCE_DATA)
- finally:
- f.close()
- os.mkdir(os.path.join(package_dir, 'sub'))
- f = open(os.path.join(package_dir, 'sub', 'res.txt'), "wb")
- try:
- f.write(RESOURCE_DATA)
- finally:
- f.close()
-
- # Check we can read the resources
- res1 = pkgutil.get_data(pkg, 'res.txt')
- self.assertEqual(res1, RESOURCE_DATA)
- res2 = pkgutil.get_data(pkg, 'sub/res.txt')
- self.assertEqual(res2, RESOURCE_DATA)
-
- del sys.modules[pkg]
-
- def test_getdata_zipfile(self):
- zip = 'test_getdata_zipfile.zip'
- pkg = 'test_getdata_zipfile'
-
- # Include a LF and a CRLF, to test that binary data is read back
- RESOURCE_DATA = 'Hello, world!\nSecond line\r\nThird line'
-
- # Make a package with some resources
- zip_file = os.path.join(self.dirname, zip)
- z = zipfile.ZipFile(zip_file, 'w')
- try:
- # Empty init.py
- z.writestr(pkg + '/__init__.py', "")
- # Resource files, res.txt, sub/res.txt
- z.writestr(pkg + '/res.txt', RESOURCE_DATA)
- z.writestr(pkg + '/sub/res.txt', RESOURCE_DATA)
- finally:
- z.close()
-
- # Check we can read the resources
- sys.path.insert(0, zip_file)
- res1 = pkgutil.get_data(pkg, 'res.txt')
- self.assertEqual(res1, RESOURCE_DATA)
- res2 = pkgutil.get_data(pkg, 'sub/res.txt')
- self.assertEqual(res2, RESOURCE_DATA)
-
- names = []
- for loader, name, ispkg in pkgutil.iter_modules([zip_file]):
- names.append(name)
- self.assertEqual(names, ['test_getdata_zipfile'])
-
- del sys.path[0]
-
- del sys.modules[pkg]
-
-# Adapted from Python 2.7's trunk
-
-
-class TestPkgUtilPEP302(unittest.TestCase):
-
- class MyTestLoader(object):
-
- def load_module(self, fullname):
- # Create an empty module
- mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
- mod.__file__ = "<%s>" % self.__class__.__name__
- mod.__loader__ = self
- # Make it a package
- mod.__path__ = []
- # Count how many times the module is reloaded
- mod.__dict__['loads'] = mod.__dict__.get('loads', 0) + 1
- return mod
-
- def get_data(self, path):
- return "Hello, world!"
-
- class MyTestImporter(object):
-
- def find_module(self, fullname, path=None):
- return TestPkgUtilPEP302.MyTestLoader()
-
- def setUp(self):
- super(TestPkgUtilPEP302, self).setUp()
- pkgutil.disable_cache()
- sys.meta_path.insert(0, self.MyTestImporter())
-
- def tearDown(self):
- del sys.meta_path[0]
- pkgutil.enable_cache()
- super(TestPkgUtilPEP302, self).tearDown()
-
- def test_getdata_pep302(self):
- # Use a dummy importer/loader
- self.assertEqual(pkgutil.get_data('foo', 'dummy'), "Hello, world!")
- del sys.modules['foo']
-
- def test_alreadyloaded(self):
- # Ensure that get_data works without reloading - the "loads" module
- # variable in the example loader should count how many times a reload
- # occurs.
- import foo
- self.assertEqual(foo.loads, 1)
- self.assertEqual(pkgutil.get_data('foo', 'dummy'), "Hello, world!")
- self.assertEqual(foo.loads, 1)
- del sys.modules['foo']
-
-
-class TestPkgUtilDistribution(unittest.TestCase):
- # Tests the pkgutil.Distribution class
-
- def setUp(self):
- super(TestPkgUtilDistribution, self).setUp()
- self.fake_dists_path = os.path.abspath(
- os.path.join(os.path.dirname(__file__), 'fake_dists'))
- pkgutil.disable_cache()
-
- self.distinfo_dirs = [os.path.join(self.fake_dists_path, dir)
- for dir in os.listdir(self.fake_dists_path)
- if dir.endswith('.dist-info')]
-
- def get_hexdigest(file):
- md5_hash = md5()
- md5_hash.update(open(file).read())
- return md5_hash.hexdigest()
-
- def record_pieces(file):
- path = relpath(file, sys.prefix)
- digest = get_hexdigest(file)
- size = os.path.getsize(file)
- return [path, digest, size]
-
- self.records = {}
- for distinfo_dir in self.distinfo_dirs:
- # Setup the RECORD file for this dist
- record_file = os.path.join(distinfo_dir, 'RECORD')
- record_writer = csv.writer(open(record_file, 'w'), delimiter=',',
- quoting=csv.QUOTE_NONE)
- dist_location = distinfo_dir.replace('.dist-info', '')
-
- for path, dirs, files in os.walk(dist_location):
- for f in files:
- record_writer.writerow(record_pieces(
- os.path.join(path, f)))
- for file in ['INSTALLER', 'METADATA', 'REQUESTED']:
- record_writer.writerow(record_pieces(
- os.path.join(distinfo_dir, file)))
- record_writer.writerow([relpath(record_file, sys.prefix)])
- del record_writer # causes the RECORD file to close
- record_reader = csv.reader(open(record_file, 'rb'))
- record_data = []
- for row in record_reader:
- path, md5_, size = row[:] + \
- [None for i in xrange(len(row), 3)]
- record_data.append([path, (md5_, size, )])
- self.records[distinfo_dir] = dict(record_data)
-
- def tearDown(self):
- self.records = None
- for distinfo_dir in self.distinfo_dirs:
- record_file = os.path.join(distinfo_dir, 'RECORD')
- open(record_file, 'w').close()
- pkgutil.enable_cache()
- super(TestPkgUtilDistribution, self).tearDown()
-
- def test_instantiation(self):
- # Test the Distribution class's instantiation provides us with usable
- # attributes.
- here = os.path.abspath(os.path.dirname(__file__))
- name = 'choxie'
- version = '2.0.0.9'
- dist_path = os.path.join(here, 'fake_dists',
- distinfo_dirname(name, version))
- dist = Distribution(dist_path)
-
- self.assertEqual(dist.name, name)
- self.assertTrue(isinstance(dist.metadata, Metadata))
- self.assertEqual(dist.metadata['version'], version)
- self.assertTrue(isinstance(dist.requested, type(bool())))
-
- def test_installed_files(self):
- # Test the iteration of installed files.
- # Test the distribution's installed files
- for distinfo_dir in self.distinfo_dirs:
- dist = Distribution(distinfo_dir)
- for path, md5_, size in dist.get_installed_files():
- record_data = self.records[dist.path]
- self.assertIn(path, record_data)
- self.assertEqual(md5_, record_data[path][0])
- self.assertEqual(size, record_data[path][1])
-
- def test_uses(self):
- # Test to determine if a distribution uses a specified file.
- # Criteria to test against
- distinfo_name = 'grammar-1.0a4'
- distinfo_dir = os.path.join(self.fake_dists_path,
- distinfo_name + '.dist-info')
- true_path = [self.fake_dists_path, distinfo_name, \
- 'grammar', 'utils.py']
- true_path = relpath(os.path.join(*true_path), sys.prefix)
- false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff',
- '__init__.py']
- false_path = relpath(os.path.join(*false_path), sys.prefix)
-
- # Test if the distribution uses the file in question
- dist = Distribution(distinfo_dir)
- self.assertTrue(dist.uses(true_path))
- self.assertFalse(dist.uses(false_path))
-
- def test_get_distinfo_file(self):
- # Test the retrieval of dist-info file objects.
- distinfo_name = 'choxie-2.0.0.9'
- other_distinfo_name = 'grammar-1.0a4'
- distinfo_dir = os.path.join(self.fake_dists_path,
- distinfo_name + '.dist-info')
- dist = Distribution(distinfo_dir)
- # Test for known good file matches
- distinfo_files = [
- # Relative paths
- 'INSTALLER', 'METADATA',
- # Absolute paths
- os.path.join(distinfo_dir, 'RECORD'),
- os.path.join(distinfo_dir, 'REQUESTED'),
- ]
-
- for distfile in distinfo_files:
- value = dist.get_distinfo_file(distfile)
- self.assertTrue(isinstance(value, file))
- # Is it the correct file?
- self.assertEqual(value.name, os.path.join(distinfo_dir, distfile))
-
- # Test an absolute path that is part of another distributions dist-info
- other_distinfo_file = os.path.join(self.fake_dists_path,
- other_distinfo_name + '.dist-info', 'REQUESTED')
- self.assertRaises(DistutilsError, dist.get_distinfo_file,
- other_distinfo_file)
- # Test for a file that does not exist and should not exist
- self.assertRaises(DistutilsError, dist.get_distinfo_file, \
- 'ENTRYPOINTS')
-
- def test_get_distinfo_files(self):
- # Test for the iteration of RECORD path entries.
- distinfo_name = 'towel_stuff-0.1'
- distinfo_dir = os.path.join(self.fake_dists_path,
- distinfo_name + '.dist-info')
- dist = Distribution(distinfo_dir)
- # Test for the iteration of the raw path
- distinfo_record_paths = self.records[distinfo_dir].keys()
- found = [path for path in dist.get_distinfo_files()]
- self.assertEqual(sorted(found), sorted(distinfo_record_paths))
- # Test for the iteration of local absolute paths
- distinfo_record_paths = [os.path.join(sys.prefix, path)
- for path in self.records[distinfo_dir]]
- found = [path for path in dist.get_distinfo_files(local=True)]
- self.assertEqual(sorted(found), sorted(distinfo_record_paths))
-
- def test_get_resources_path(self):
- distinfo_name = 'babar-0.1'
- distinfo_dir = os.path.join(self.fake_dists_path,
- distinfo_name + '.dist-info')
- dist = Distribution(distinfo_dir)
- resource_path = dist.get_resource_path('babar.png')
- self.assertEqual(resource_path, 'babar.png')
- self.assertRaises(KeyError, dist.get_resource_path, 'notexist')
-
-
-
-class TestPkgUtilPEP376(support.LoggingCatcher, support.WarningsCatcher,
- unittest.TestCase):
- # Tests for the new functionality added in PEP 376.
-
- def setUp(self):
- super(TestPkgUtilPEP376, self).setUp()
- pkgutil.disable_cache()
- # Setup the path environment with our fake distributions
- current_path = os.path.abspath(os.path.dirname(__file__))
- self.sys_path = sys.path[:]
- self.fake_dists_path = os.path.join(current_path, 'fake_dists')
- sys.path.insert(0, self.fake_dists_path)
-
- def tearDown(self):
- sys.path[:] = self.sys_path
- pkgutil.enable_cache()
- super(TestPkgUtilPEP376, self).tearDown()
-
- def test_distinfo_dirname(self):
- # Given a name and a version, we expect the distinfo_dirname function
- # to return a standard distribution information directory name.
-
- items = [# (name, version, standard_dirname)
- # Test for a very simple single word name and decimal
- # version number
- ('docutils', '0.5', 'docutils-0.5.dist-info'),
- # Test for another except this time with a '-' in the name, which
- # needs to be transformed during the name lookup
- ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'),
- # Test for both '-' in the name and a funky version number
- ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
- ]
-
- # Loop through the items to validate the results
- for name, version, standard_dirname in items:
- dirname = distinfo_dirname(name, version)
- self.assertEqual(dirname, standard_dirname)
-
- def test_get_distributions(self):
- # Lookup all distributions found in the ``sys.path``.
- # This test could potentially pick up other installed distributions
- fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'),
- ('towel-stuff', '0.1'), ('babar', '0.1')]
- found_dists = []
-
- # Verify the fake dists have been found.
- dists = [dist for dist in get_distributions()]
- for dist in dists:
- if not isinstance(dist, Distribution):
- self.fail("item received was not a Distribution instance: "
- "%s" % type(dist))
- if dist.name in dict(fake_dists) and \
- dist.path.startswith(self.fake_dists_path):
- found_dists.append((dist.name, dist.metadata['version'], ))
- else:
- # check that it doesn't find anything more than this
- self.assertFalse(dist.path.startswith(self.fake_dists_path))
- # otherwise we don't care what other distributions are found
-
- # Finally, test that we found all that we were looking for
- self.assertListEqual(sorted(found_dists), sorted(fake_dists))
-
- # Now, test if the egg-info distributions are found correctly as well
- fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'),
- ('coconuts-aster', '10.3'),
- ('banana', '0.4'), ('strawberry', '0.6'),
- ('truffles', '5.0'), ('nut', 'funkyversion')]
- found_dists = []
-
- dists = [dist for dist in get_distributions(use_egg_info=True)]
- for dist in dists:
- if not (isinstance(dist, Distribution) or \
- isinstance(dist, EggInfoDistribution)):
- self.fail("item received was not a Distribution or "
- "EggInfoDistribution instance: %s" % type(dist))
- if dist.name in dict(fake_dists) and \
- dist.path.startswith(self.fake_dists_path):
- found_dists.append((dist.name, dist.metadata['version']))
- else:
- self.assertFalse(dist.path.startswith(self.fake_dists_path))
-
- self.assertListEqual(sorted(fake_dists), sorted(found_dists))
-
- def test_get_distribution(self):
- # Test for looking up a distribution by name.
- # Test the lookup of the towel-stuff distribution
- name = 'towel-stuff' # Note: This is different from the directory name
-
- # Lookup the distribution
- dist = get_distribution(name)
- self.assertTrue(isinstance(dist, Distribution))
- self.assertEqual(dist.name, name)
-
- # Verify that an unknown distribution returns None
- self.assertEqual(None, get_distribution('bogus'))
-
- # Verify partial name matching doesn't work
- self.assertEqual(None, get_distribution('towel'))
-
- # Verify that it does not find egg-info distributions, when not
- # instructed to
- self.assertEqual(None, get_distribution('bacon'))
- self.assertEqual(None, get_distribution('cheese'))
- self.assertEqual(None, get_distribution('strawberry'))
- self.assertEqual(None, get_distribution('banana'))
-
- # Now check that it works well in both situations, when egg-info
- # is a file and directory respectively.
- dist = get_distribution('cheese', use_egg_info=True)
- self.assertTrue(isinstance(dist, EggInfoDistribution))
- self.assertEqual(dist.name, 'cheese')
-
- dist = get_distribution('bacon', use_egg_info=True)
- self.assertTrue(isinstance(dist, EggInfoDistribution))
- self.assertEqual(dist.name, 'bacon')
-
- dist = get_distribution('banana', use_egg_info=True)
- self.assertTrue(isinstance(dist, EggInfoDistribution))
- self.assertEqual(dist.name, 'banana')
-
- dist = get_distribution('strawberry', use_egg_info=True)
- self.assertTrue(isinstance(dist, EggInfoDistribution))
- self.assertEqual(dist.name, 'strawberry')
-
- def test_get_file_users(self):
- # Test the iteration of distributions that use a file.
- name = 'towel_stuff-0.1'
- path = os.path.join(self.fake_dists_path, name,
- 'towel_stuff', '__init__.py')
- for dist in get_file_users(path):
- self.assertTrue(isinstance(dist, Distribution))
- self.assertEqual(dist.name, name)
-
- def test_provides(self):
- # Test for looking up distributions by what they provide
- checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
-
- l = [dist.name for dist in provides_distribution('truffles')]
- checkLists(l, ['choxie', 'towel-stuff'])
-
- l = [dist.name for dist in provides_distribution('truffles', '1.0')]
- checkLists(l, ['choxie'])
-
- l = [dist.name for dist in provides_distribution('truffles', '1.0',
- use_egg_info=True)]
- checkLists(l, ['choxie', 'cheese'])
-
- l = [dist.name for dist in provides_distribution('truffles', '1.1.2')]
- checkLists(l, ['towel-stuff'])
-
- l = [dist.name for dist in provides_distribution('truffles', '1.1')]
- checkLists(l, ['towel-stuff'])
-
- l = [dist.name for dist in provides_distribution('truffles', \
- '!=1.1,<=2.0')]
- checkLists(l, ['choxie'])
-
- l = [dist.name for dist in provides_distribution('truffles', \
- '!=1.1,<=2.0',
- use_egg_info=True)]
- checkLists(l, ['choxie', 'bacon', 'cheese'])
-
- l = [dist.name for dist in provides_distribution('truffles', '>1.0')]
- checkLists(l, ['towel-stuff'])
-
- l = [dist.name for dist in provides_distribution('truffles', '>1.5')]
- checkLists(l, [])
-
- l = [dist.name for dist in provides_distribution('truffles', '>1.5',
- use_egg_info=True)]
- checkLists(l, ['bacon'])
-
- l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
- checkLists(l, ['choxie', 'towel-stuff'])
-
- l = [dist.name for dist in provides_distribution('strawberry', '0.6',
- use_egg_info=True)]
- checkLists(l, ['coconuts-aster'])
-
- l = [dist.name for dist in provides_distribution('strawberry', '>=0.5',
- use_egg_info=True)]
- checkLists(l, ['coconuts-aster'])
-
- l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
- use_egg_info=True)]
- checkLists(l, [])
-
- l = [dist.name for dist in provides_distribution('banana', '0.4',
- use_egg_info=True)]
- checkLists(l, ['coconuts-aster'])
-
- l = [dist.name for dist in provides_distribution('banana', '>=0.3',
- use_egg_info=True)]
- checkLists(l, ['coconuts-aster'])
-
- l = [dist.name for dist in provides_distribution('banana', '!=0.4',
- use_egg_info=True)]
- checkLists(l, [])
-
- def test_obsoletes(self):
- # Test looking for distributions based on what they obsolete
- checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
- checkLists(l, [])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '1.0',
- use_egg_info=True)]
- checkLists(l, ['cheese', 'bacon'])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
- checkLists(l, ['choxie'])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '0.8',
- use_egg_info=True)]
- checkLists(l, ['choxie', 'cheese'])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')]
- checkLists(l, ['choxie', 'towel-stuff'])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', \
- '0.5.2.3')]
- checkLists(l, ['choxie', 'towel-stuff'])
-
- l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')]
- checkLists(l, ['towel-stuff'])
-
- def test_yield_distribution(self):
- # tests the internal function _yield_distributions
- checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
-
- eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
- ('truffles', '5.0'), ('cheese', '2.0.2'),
- ('coconuts-aster', '10.3'), ('nut', 'funkyversion')]
- dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'),
- ('towel-stuff', '0.1'), ('babar', '0.1')]
-
- checkLists([], _yield_distributions(False, False))
-
- found = [(dist.name, dist.metadata['Version'])
- for dist in _yield_distributions(False, True)
- if dist.path.startswith(self.fake_dists_path)]
- checkLists(eggs, found)
-
- found = [(dist.name, dist.metadata['Version'])
- for dist in _yield_distributions(True, False)
- if dist.path.startswith(self.fake_dists_path)]
- checkLists(dists, found)
-
- found = [(dist.name, dist.metadata['Version'])
- for dist in _yield_distributions(True, True)
- if dist.path.startswith(self.fake_dists_path)]
- checkLists(dists + eggs, found)
-
-
-def test_suite():
- suite = unittest.TestSuite()
- load = unittest.defaultTestLoader.loadTestsFromTestCase
- suite.addTest(load(TestPkgUtilData))
- suite.addTest(load(TestPkgUtilDistribution))
- suite.addTest(load(TestPkgUtilPEP302))
- suite.addTest(load(TestPkgUtilPEP376))
- return suite
-
-
-def test_main():
- run_unittest(test_suite())
-
-
-if __name__ == "__main__":
- test_main()
diff --git a/test_distutils2.py b/test_distutils2.py
deleted file mode 100644
--- a/test_distutils2.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import sys
-from distutils2.tests.__main__ import test_main
-
-if __name__ == '__main__':
- test_main()
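
The removed test_distutils2.py was only a thin wrapper around
distutils2.tests.__main__, which runtests.py or python -m unittest already
covers. A minimal sketch of running the suite through standard unittest
discovery instead, assuming it is run from the root of a checkout under
Python 2.7 (older interpreters would need the bundled unittest2):

    # Sketch only: discover and run the distutils2 tests without the
    # removed wrapper script (checkout layout assumed from the diff paths).
    import unittest

    if __name__ == '__main__':
        suite = unittest.defaultTestLoader.discover('distutils2/tests',
                                                    pattern='test_*.py',
                                                    top_level_dir='.')
        unittest.TextTestRunner(verbosity=1).run(suite)
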
-- 
Repository URL: http://hg.python.org/distutils2

