Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
setup: upgrade bundled zetuptoolz to zetuptoolz-0.6c15dev and make it unpacked and directly loaded by setup.py
Also fix the relevant "make clean" target behavior.
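(The change amounts to putting the unpacked egg directory at the front of sys.path before importing setuptools, instead of going through ez_setup.py. A minimal sketch of that pattern, as used by the new setup.py hunk below:)

    import glob, os, sys

    # Locate the unpacked setuptools/zetuptoolz egg bundled next to setup.py.
    egg = os.path.realpath(glob.glob('setuptools-*.egg')[0])

    # Make the bundled copy importable ahead of any system-wide setuptools,
    # and record where a build step should install it from if needed.
    sys.path.insert(0, egg)
    import setuptools
    setuptools.bootstrap_install_from = egg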
This commit is contained in:
parent 7b3a9a3d0e
commit ba8f0c27f0
Makefile (5 lines changed)

@@ -255,10 +255,11 @@ test-clean:
 clean:
 	rm -rf build _trial_temp _test_memory .checked-deps .built
-	rm -f `find src/allmydata -name '*.so' -or -name '*.pyc'`
+	rm -f `find src *.egg -name '*.so' -or -name '*.pyc'`
 	rm -rf src/allmydata_tahoe.egg-info
 	rm -rf support dist
-	rm -rf *.egg *.pyc
+	rm -rf `ls -d *.egg | grep -v setuptools-`
+	rm -rf *.pyc
 	rm -rf misc/dependencies/build misc/dependencies/temp
 	rm -rf misc/dependencies/tahoe_deps.egg-info
 	rm -f bin/tahoe bin/tahoe-script.py
ez_setup.py (deleted file, 250 lines)

@@ -1,250 +0,0 @@
#!/usr/bin/env python
"""Bootstrap setuptools installation

If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::

    from ez_setup import use_setuptools
    use_setuptools()

If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.

This file can also be run as a script to install or upgrade setuptools.
"""
import os, sys
DEFAULT_VERSION = "0.6c12dev"
DEFAULT_DIR = "misc/dependencies/"
DEFAULT_URL = "file:"+DEFAULT_DIR

md5_data = {
    'setuptools-0.6c12dev.egg': '770da1c9e5446cf04273f0f1cdb8c09a',
}

import sys, os

def _validate_md5(egg_name, data):
    if egg_name in md5_data:
        from md5 import md5
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data

def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    min_version="0.6c12dev", download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available as
    an egg for download under the `download_base` URL (which should end with a
    '/'). `to_dir` is the directory where setuptools will be downloaded, if it
    is not already available. If `download_delay` is specified, it is the
    number of seconds that will be paused before initiating a download, should
    one be required. If an older version of setuptools is installed but hasn't
    been imported yet, this routine will go ahead and install the required
    version and then use it. If an older version of setuptools has already been
    imported then we can't upgrade to the new one, so this routine will print a
    message to ``sys.stderr`` and raise SystemExit in an attempt to abort the
    calling script.
    """
    if min_version is None:
        min_version = version

    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        return do_download()
    try:
        pkg_resources.require("setuptools>="+min_version); return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            print >>sys.stderr, (
            "The required version of setuptools (>=%s) is not available, and\n"
            "can't be installed while this script is running. Please install\n"
            " a more recent version first, using 'easy_install -U setuptools'."
            "\n\n(Currently using %r)"
            ) % (min_version, e.args[0])
            sys.exit(2)
        else:
            del pkg_resources, sys.modules['pkg_resources']    # reload ok
            return do_download()
    except pkg_resources.DistributionNotFound:
        return do_download()

def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s.egg" % (version,)
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.

(Note: if this machine does not have network access, please obtain the file

   %s

and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)

def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg])   # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            print >>sys.stderr, (
            "You have an obsolete version of setuptools installed. Please\n"
            "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)

    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'

def update_md5(filenames):
    """Update our built-in md5 registry"""

    import re
    from md5 import md5

    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()

    data = ["    %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)

    import inspect
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()

    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)

    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()


if __name__=='__main__':
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
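(For contrast with the new direct-load approach, this is how the removed bootstrap was meant to be invoked from a setup.py, per the docstring above and the deleted setup.py lines below; a sketch of the old pattern only:)

    # Old-style bootstrap: defer to ez_setup.py, which locates or downloads a
    # setuptools egg and inserts it onto sys.path before setup() runs.
    from ez_setup import use_setuptools
    use_setuptools(download_delay=0, min_version="0.6c12dev")

    from setuptools import setup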
setup.py (19 lines changed)

@@ -9,7 +9,7 @@
 #
 # See the docs/about.html file for licensing information.
 
-import os, shutil, stat, subprocess, sys, zipfile, re
+import glob, os, shutil, stat, subprocess, sys, zipfile, re
 
 ##### sys.path management
 
@@ -38,23 +38,13 @@ def read_version_py(infname):
 
 version = read_version_py("src/allmydata/_version.py")
 
-try:
-    from ez_setup import use_setuptools
-except ImportError:
-    pass
-else:
-    # This invokes our own customized version of ez_setup.py to make sure
-    # that setuptools v0.6c12dev (which is our own toothpick of setuptools)
-    # is used to build. Note that we can use any version of setuptools >=
-    # 0.6c6 to *run* -- see _auto_deps.py for run-time dependencies (a.k.a.
-    # "install_requires") -- this is only for build-time dependencies (a.k.a.
-    # "setup_requires").
-    use_setuptools(download_delay=0, min_version="0.6c12dev")
+egg = os.path.realpath(glob.glob('setuptools-*.egg')[0])
+sys.path.insert(0, egg)
+import setuptools; setuptools.bootstrap_install_from = egg
 
 from setuptools import find_packages, setup
 from setuptools.command import sdist
 from setuptools import Command
-from pkg_resources import require
 
 # Make the dependency-version-requirement, which is used by the Makefile at
 # build-time, also available to the app at runtime:
@@ -255,6 +245,7 @@ class MakeExecutable(Command):
             f.write(line)
         f.close()
         if sys.platform == "win32":
+            from pkg_resources import require
             setuptools_egg = require("setuptools")[0].location
             if os.path.isfile(setuptools_egg):
                 z = zipfile.ZipFile(setuptools_egg, 'r')
setuptools-0.6c15dev.egg/EGG-INFO/PKG-INFO (new file, 183 lines)

Metadata-Version: 1.0
Name: setuptools
Version: 0.6c15dev
Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
Home-page: http://pypi.python.org/pypi/setuptools
Author: Phillip J. Eby
Author-email: distutils-sig@python.org
License: PSF or ZPL
Description: ===============================
Installing and Using Setuptools
===============================

.. contents:: **Table of Contents**


-------------------------
Installation Instructions
-------------------------

Windows
=======

Install setuptools using the provided ``.exe`` installer. If you've previously
installed older versions of setuptools, please delete all ``setuptools*.egg``
and ``setuptools.pth`` files from your system's ``site-packages`` directory
(and any other ``sys.path`` directories) FIRST.

If you are upgrading a previous version of setuptools that was installed using
an ``.exe`` installer, please be sure to also *uninstall that older version*
via your system's "Add/Remove Programs" feature, BEFORE installing the newer
version.

Once installation is complete, you will find an ``easy_install.exe`` program in
your Python ``Scripts`` subdirectory. Be sure to add this directory to your
``PATH`` environment variable, if you haven't already done so.


RPM-Based Systems
=================

Install setuptools using the provided source RPM. The included ``.spec`` file
assumes you are installing using the default ``python`` executable, and is not
specific to a particular Python version. The ``easy_install`` executable will
be installed to a system ``bin`` directory such as ``/usr/bin``.

If you wish to install to a location other than the default Python
installation's default ``site-packages`` directory (and ``$prefix/bin`` for
scripts), please use the ``.egg``-based installation approach described in the
following section.


Cygwin, Mac OS X, Linux, Other
==============================

1. Download the appropriate egg for your version of Python (e.g.
   ``setuptools-0.6c9-py2.4.egg``). Do NOT rename it.

2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
   Setuptools will install itself using the matching version of Python (e.g.
   ``python2.4``), and will place the ``easy_install`` executable in the
   default location for installing Python scripts (as determined by the
   standard distutils configuration files, or by the Python installation).

If you want to install setuptools to somewhere other than ``site-packages`` or
your default distutils installation locations for libraries and scripts, you
may include EasyInstall command-line options such as ``--prefix``,
``--install-dir``, and so on, following the ``.egg`` filename on the same
command line. For example::

    sh setuptools-0.6c9-py2.4.egg --prefix=~

You can use ``--help`` to get a full options list, but we recommend consulting
the `EasyInstall manual`_ for detailed instructions, especially `the section
on custom installation locations`_.

.. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
.. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations


Cygwin Note
-----------

If you are trying to install setuptools for the **Windows** version of Python
(as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
sure that an appropriate executable (``python2.3``, ``python2.4``, or
``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For
example, doing the following at a Cygwin bash prompt will install setuptools
for the **Windows** Python found at ``C:\\Python24``::

    ln -s /cygdrive/c/Python24/python.exe python2.4
    PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
    rm python2.4


Downloads
=========

All setuptools downloads can be found at `the project's home page in the Python
Package Index`_. Scroll to the very bottom of the page to find the links.

.. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools#files

In addition to the PyPI downloads, the development version of ``setuptools``
is available from the `Python SVN sandbox`_, and in-development versions of the
`0.6 branch`_ are available as well.

.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06

.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev

--------------------------------
Using Setuptools and EasyInstall
--------------------------------

Here are some of the available manuals, tutorials, and other resources for
learning about Setuptools, Python Eggs, and EasyInstall:

* `The EasyInstall user's guide and reference manual`_
* `The setuptools Developer's Guide`_
* `The pkg_resources API reference`_
* `Package Compatibility Notes`_ (user-maintained)
* `The Internal Structure of Python Eggs`_

Questions, comments, and bug reports should be directed to the `distutils-sig
mailing list`_. If you have written (or know of) any tutorials, documentation,
plug-ins, or other resources for setuptools users, please let us know about
them there, so this reference list can be updated. If you have working,
*tested* patches to correct problems or add features, you may submit them to
the `setuptools bug tracker`_.

.. _setuptools bug tracker: http://bugs.python.org/setuptools/
.. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
.. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
.. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
.. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
.. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/


-------
Credits
-------

* The original design for the ``.egg`` format and the ``pkg_resources`` API was
  co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
  version of ``pkg_resources``, and supplied the OS X operating system version
  compatibility algorithm.

* Ian Bicking implemented many early "creature comfort" features of
  easy_install, including support for downloading via Sourceforge and
  Subversion repositories. Ian's comments on the Web-SIG about WSGI
  application deployment also inspired the concept of "entry points" in eggs,
  and he has given talks at PyCon and elsewhere to inform and educate the
  community about eggs and setuptools.

* Jim Fulton contributed time and effort to build automated tests of various
  aspects of ``easy_install``, and supplied the doctests for the command-line
  ``.exe`` wrappers on Windows.

* Phillip J. Eby is the principal author and maintainer of setuptools, and
  first proposed the idea of an importable binary distribution format for
  Python application plug-ins.

* Significant parts of the implementation of setuptools were funded by the Open
  Source Applications Foundation, to provide a plug-in infrastructure for the
  Chandler PIM application. In addition, many OSAF staffers (such as Mike
  "Code Bear" Taylor) contributed their time and stress as guinea pigs for the
  use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)

.. _files:
Keywords: CPAN PyPI distutils eggs package management
Platform: UNKNOWN
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Python Software Foundation License
Classifier: License :: OSI Approved :: Zope Public License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
setuptools-0.6c15dev.egg/EGG-INFO/SOURCES.txt (new file, 43 lines)

README.txt
easy_install.py
pkg_resources.py
setup.cfg
setup.py
setuptools/__init__.py
setuptools/archive_util.py
setuptools/depends.py
setuptools/dist.py
setuptools/extension.py
setuptools/package_index.py
setuptools/sandbox.py
setuptools/site-patch.py
setuptools.egg-info/PKG-INFO
setuptools.egg-info/SOURCES.txt
setuptools.egg-info/dependency_links.txt
setuptools.egg-info/entry_points.txt
setuptools.egg-info/top_level.txt
setuptools.egg-info/zip-safe
setuptools/command/__init__.py
setuptools/command/alias.py
setuptools/command/bdist_egg.py
setuptools/command/bdist_rpm.py
setuptools/command/bdist_wininst.py
setuptools/command/build_ext.py
setuptools/command/build_py.py
setuptools/command/develop.py
setuptools/command/easy_install.py
setuptools/command/egg_info.py
setuptools/command/install.py
setuptools/command/install_egg_info.py
setuptools/command/install_lib.py
setuptools/command/install_scripts.py
setuptools/command/register.py
setuptools/command/rotate.py
setuptools/command/saveopts.py
setuptools/command/sdist.py
setuptools/command/setopt.py
setuptools/command/test.py
setuptools/command/upload.py
setuptools/tests/__init__.py
setuptools/tests/test_packageindex.py
setuptools/tests/test_resources.py
setuptools-0.6c15dev.egg/EGG-INFO/dependency_links.txt (new file, 1 line; contains a single blank line)
setuptools-0.6c15dev.egg/EGG-INFO/entry_points.txt (new file, 58 lines)

[distutils.commands]
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
rotate = setuptools.command.rotate:rotate
develop = setuptools.command.develop:develop
setopt = setuptools.command.setopt:setopt
build_py = setuptools.command.build_py:build_py
saveopts = setuptools.command.saveopts:saveopts
egg_info = setuptools.command.egg_info:egg_info
register = setuptools.command.register:register
install_egg_info = setuptools.command.install_egg_info:install_egg_info
alias = setuptools.command.alias:alias
easy_install = setuptools.command.easy_install:easy_install
install_scripts = setuptools.command.install_scripts:install_scripts
bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
bdist_egg = setuptools.command.bdist_egg:bdist_egg
install = setuptools.command.install:install
test = setuptools.command.test:test
install_lib = setuptools.command.install_lib:install_lib
build_ext = setuptools.command.build_ext:build_ext
sdist = setuptools.command.sdist:sdist

[egg_info.writers]
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
requires.txt = setuptools.command.egg_info:write_requirements
PKG-INFO = setuptools.command.egg_info:write_pkg_info
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
top_level.txt = setuptools.command.egg_info:write_toplevel_names
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
entry_points.txt = setuptools.command.egg_info:write_entries
depends.txt = setuptools.command.egg_info:warn_depends_obsolete

[console_scripts]
easy_install = setuptools.command.easy_install:main
easy_install-2.5 = setuptools.command.easy_install:main

[setuptools.file_finders]
svn_cvs = setuptools.command.sdist:_default_revctrl

[distutils.setup_keywords]
dependency_links = setuptools.dist:assert_string_list
entry_points = setuptools.dist:check_entry_points
extras_require = setuptools.dist:check_extras
test_runner = setuptools.dist:check_importable
package_data = setuptools.dist:check_package_data
install_requires = setuptools.dist:check_requirements
include_package_data = setuptools.dist:assert_bool
exclude_package_data = setuptools.dist:check_package_data
namespace_packages = setuptools.dist:check_nsp
test_suite = setuptools.dist:check_test_suite
eager_resources = setuptools.dist:assert_string_list
zip_safe = setuptools.dist:assert_bool
test_loader = setuptools.dist:check_importable
packages = setuptools.dist:check_packages
tests_require = setuptools.dist:check_requirements

[setuptools.installation]
eggsecutable = setuptools.command.easy_install:bootstrap
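(entry_points.txt is the metadata that pkg_resources reads when resolving plugin-style lookups. A small sketch of how a group such as console_scripts is queried at runtime, assuming this egg is on sys.path; the group and names come from the file above:)

    import pkg_resources

    # Enumerate every console script advertised by installed distributions.
    for ep in pkg_resources.iter_entry_points('console_scripts'):
        print ep    # e.g. "easy_install = setuptools.command.easy_install:main"

    # Load the callable behind setuptools' own "easy_install" script.
    main = pkg_resources.load_entry_point('setuptools', 'console_scripts',
                                          'easy_install')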
setuptools-0.6c15dev.egg/EGG-INFO/top_level.txt (new file, 3 lines)

easy_install
pkg_resources
setuptools
setuptools-0.6c15dev.egg/EGG-INFO/zip-safe (new file, 1 line; contains a single blank line)
setuptools-0.6c15dev.egg/easy_install.py (new file, 5 lines)

"""Run the EasyInstall command"""

if __name__ == '__main__':
    from setuptools.command.easy_install import main
    main()
setuptools-0.6c15dev.egg/pkg_resources.py (new file, 2625 lines; file diff suppressed because it is too large)
setuptools-0.6c15dev.egg/setuptools/__init__.py (new file, 82 lines)

"""Extensions to the 'distutils' for large or complex distributions"""
from setuptools.extension import Extension, Library
from setuptools.dist import Distribution, Feature, _get_unpatched
import distutils.core, setuptools.command
from setuptools.depends import Require
from distutils.core import Command as _Command
from distutils.util import convert_path
import os.path

__version__ = '0.6c15'
__all__ = [
    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
    'find_packages'
]

bootstrap_install_from = None

def find_packages(where='.', exclude=()):
    """Return a list all Python packages found within directory 'where'

    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
    will be converted to the appropriate local path syntax. 'exclude' is a
    sequence of package names to exclude; '*' can be used as a wildcard in the
    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
    'foo' itself).
    """
    out = []
    stack=[(convert_path(where), '')]
    while stack:
        where,prefix = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where,name)
            if ('.' not in name and os.path.isdir(fn) and
                os.path.isfile(os.path.join(fn,'__init__.py'))
            ):
                out.append(prefix+name); stack.append((fn,prefix+name+'.'))
    for pat in list(exclude)+['ez_setup']:
        from fnmatch import fnmatchcase
        out = [item for item in out if not fnmatchcase(item,pat)]
    return out

setup = distutils.core.setup

_Command = _get_unpatched(_Command)

class Command(_Command):
    __doc__ = _Command.__doc__

    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        # Add support for keyword arguments
        _Command.__init__(self,dist)
        for k,v in kw.items():
            setattr(self,k,v)

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        for k,v in kw.items():
            setattr(cmd,k,v)    # update command with keywords
        return cmd

import distutils.core
distutils.core.Command = Command    # we can't patch distutils.cmd, alas

def findall(dir = os.curdir):
    """Find all files under 'dir' and return the list of full filenames
    (relative to 'dir').
    """
    all_files = []
    for base, dirs, files in os.walk(dir):
        if base==os.curdir or base.startswith(os.curdir+os.sep):
            base = base[2:]
        if base:
            files = [os.path.join(base, f) for f in files]
        all_files.extend(filter(os.path.isfile, files))
    return all_files

import distutils.filelist
distutils.filelist.findall = findall    # fix findall bug in distutils.
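(find_packages() above is what a setup.py typically feeds to setup(); a brief usage sketch with illustrative project and directory names, not taken from this repository:)

    from setuptools import setup, find_packages

    # Collect every directory under src/ that has an __init__.py, skipping
    # test subpackages; '*' acts as a wildcard in the exclude patterns.
    setup(
        name='example',          # illustrative name
        version='0.1',
        packages=find_packages('src', exclude=['*.test', '*.test.*']),
        package_dir={'': 'src'},
    )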
setuptools-0.6c15dev.egg/setuptools/archive_util.py (new file, 205 lines)

"""Utilities for extracting common archive formats"""


__all__ = [
    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]

import zipfile, tarfile, os, shutil
from pkg_resources import ensure_directory
from distutils.errors import DistutilsError

class UnrecognizedFormat(DistutilsError):
    """Couldn't recognize the archive type"""

def default_filter(src,dst):
    """The default progress/filter callback; returns True for all files"""
    return dst


def unpack_archive(filename, extract_dir, progress_filter=default_filter,
    drivers=None
):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

    `progress_filter` is a function taking two arguments: a source path
    internal to the archive ('/'-separated), and a filesystem path where it
    will be extracted. The callback must return the desired extract path
    (which may be the same as the one passed in), or else ``None`` to skip
    that file or directory. The callback can thus be used to report on the
    progress of the extraction, as well as to filter the items extracted or
    alter their extraction paths.

    `drivers`, if supplied, must be a non-empty sequence of functions with the
    same signature as this function (minus the `drivers` argument), that raise
    ``UnrecognizedFormat`` if they do not support extracting the designated
    archive type. The `drivers` are tried in sequence until one is found that
    does not raise an error, or until all are exhausted (in which case
    ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
    drivers, the module's ``extraction_drivers`` constant will be used, which
    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
    order.
    """
    for driver in drivers or extraction_drivers:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            continue
        else:
            return
    else:
        raise UnrecognizedFormat(
            "Not a recognized archive type: %s" % filename
        )


def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % (filename,))

    paths = {filename:('',extract_dir)}
    for base, dirs, files in os.walk(filename):
        src,dst = paths[base]
        for d in dirs:
            paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
        for f in files:
            name = src+f
            target = os.path.join(dst,f)
            target = progress_filter(src+f, target)
            if not target:
                continue    # skip non-files
            ensure_directory(target)
            f = os.path.join(base,f)
            shutil.copyfile(f, target)
            shutil.copystat(f, target)


def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    z = zipfile.ZipFile(filename)
    try:
        for info in z.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            target = progress_filter(name, target)
            if not target:
                continue
            if name.endswith('/'):
                # directory
                ensure_directory(target)
            else:
                # file
                ensure_directory(target)
                data = z.read(info.filename)
                f = open(target,'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        z.close()


def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        )

    try:
        tarobj.chown = lambda *args: None   # don't do any chowning!
        for member in tarobj:
            if member.isfile() or member.isdir():
                name = member.name
                # don't extract absolute paths or ones with .. in them
                if not name.startswith('/') and '..' not in name:
                    dst = os.path.join(extract_dir, *name.split('/'))
                    dst = progress_filter(name, dst)
                    if dst:
                        if dst.endswith(os.sep):
                            dst = dst[:-1]
                        try:
                            tarobj._extract_member(member,dst)  # XXX Ugh
                        except tarfile.ExtractError:
                            pass    # chown/chmod/mkfifo/mknode/makedev failed
        return True
    finally:
        tarobj.close()


extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
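(A short usage sketch of unpack_archive() with a progress_filter callback, per the docstring above; the archive and destination paths are illustrative:)

    from setuptools.archive_util import unpack_archive

    def report(src, dst):
        # Log each member as it is considered; returning dst keeps the default
        # extraction path, while returning None would skip the member.
        print "extracting", src, "->", dst
        return dst

    # Tries unpack_directory, unpack_zipfile and unpack_tarfile in turn and
    # raises UnrecognizedFormat if none of them accept the input.
    unpack_archive('example-1.0.egg', 'build/example-1.0',
                   progress_filter=report)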
setuptools-0.6c15dev.egg/setuptools/cli.exe (new binary file; contents not shown)
setuptools-0.6c15dev.egg/setuptools/command/__init__.py (new file, 20 lines)

__all__ = [
    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
    'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
    'register', 'bdist_wininst',
]

import sys
if sys.version>='2.5':
    # In Python 2.5 and above, distutils includes its own upload command
    __all__.remove('upload')

from distutils.command.bdist import bdist

if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

del bdist, sys
setuptools-0.6c15dev.egg/setuptools/command/alias.py (new file, 79 lines)

import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *
from setuptools.command.setopt import edit_config, option_base, config_file

def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    for c in '"', "'", "\\", "#":
        if c in arg: return repr(arg)
    if arg.split()!=[arg]:
        return repr(arg)
    return arg


class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.remove and len(self.args)!=1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            print "Command Aliases"
            print "---------------"
            for alias in aliases:
                print "setup.py alias", format_alias(alias, aliases)
            return

        elif len(self.args)==1:
            alias, = self.args
            if self.remove:
                command = None
            elif alias in aliases:
                print "setup.py alias", format_alias(alias, aliases)
                return
            else:
                print "No alias definition found for %r" % alias
                return
        else:
            alias = self.args[0]
            command = ' '.join(map(shquote,self.args[1:]))

        edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)


def format_alias(name, aliases):
    source, command = aliases[name]
    if source == config_file('global'):
        source = '--global-config '
    elif source == config_file('user'):
        source = '--user-config '
    elif source == config_file('local'):
        source = ''
    else:
        source = '--filename=%r' % source
    return source+name+' '+command
533
setuptools-0.6c15dev.egg/setuptools/command/bdist_egg.py
Normal file
533
setuptools-0.6c15dev.egg/setuptools/command/bdist_egg.py
Normal file
@ -0,0 +1,533 @@
|
|||||||
|
"""setuptools.command.bdist_egg
|
||||||
|
|
||||||
|
Build .egg distributions"""
|
||||||
|
|
||||||
|
# This module should be kept compatible with Python 2.3
|
||||||
|
import sys, os, marshal
|
||||||
|
from setuptools import Command
|
||||||
|
from distutils.dir_util import remove_tree, mkpath
|
||||||
|
from distutils.sysconfig import get_python_version, get_python_lib
|
||||||
|
from distutils import log
|
||||||
|
from distutils.errors import DistutilsSetupError
|
||||||
|
from pkg_resources import get_build_platform, Distribution, ensure_directory
|
||||||
|
from pkg_resources import EntryPoint
|
||||||
|
from types import CodeType
|
||||||
|
from setuptools.extension import Library
|
||||||
|
|
||||||
|
def strip_module(filename):
|
||||||
|
if '.' in filename:
|
||||||
|
filename = os.path.splitext(filename)[0]
|
||||||
|
if filename.endswith('module'):
|
||||||
|
filename = filename[:-6]
|
||||||
|
return filename
|
||||||
|
|
||||||
|
def write_stub(resource, pyfile):
|
||||||
|
f = open(pyfile,'w')
|
||||||
|
f.write('\n'.join([
|
||||||
|
"def __bootstrap__():",
|
||||||
|
" global __bootstrap__, __loader__, __file__",
|
||||||
|
" import sys, pkg_resources, imp",
|
||||||
|
" __file__ = pkg_resources.resource_filename(__name__,%r)"
|
||||||
|
% resource,
|
||||||
|
" __loader__ = None; del __bootstrap__, __loader__",
|
||||||
|
" imp.load_dynamic(__name__,__file__)",
|
||||||
|
"__bootstrap__()",
|
||||||
|
"" # terminal \n
|
||||||
|
]))
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
# stub __init__.py for packages distributed without one
|
||||||
|
NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
|
||||||
|
|
||||||
|
class bdist_egg(Command):
|
||||||
|
|
||||||
|
description = "create an \"egg\" distribution"
|
||||||
|
|
||||||
|
user_options = [
|
||||||
|
('bdist-dir=', 'b',
|
||||||
|
"temporary directory for creating the distribution"),
|
||||||
|
('plat-name=', 'p',
|
||||||
|
"platform name to embed in generated filenames "
|
||||||
|
"(default: %s)" % get_build_platform()),
|
||||||
|
('exclude-source-files', None,
|
||||||
|
"remove all .py files from the generated egg"),
|
||||||
|
('keep-temp', 'k',
|
||||||
|
"keep the pseudo-installation tree around after " +
|
||||||
|
"creating the distribution archive"),
|
||||||
|
('dist-dir=', 'd',
|
||||||
|
"directory to put final built distributions in"),
|
||||||
|
('skip-build', None,
|
||||||
|
"skip rebuilding everything (for testing/debugging)"),
|
||||||
|
]
|
||||||
|
|
||||||
|
boolean_options = [
|
||||||
|
'keep-temp', 'skip-build', 'exclude-source-files'
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def initialize_options (self):
|
||||||
|
self.bdist_dir = None
|
||||||
|
self.plat_name = None
|
||||||
|
self.keep_temp = 0
|
||||||
|
self.dist_dir = None
|
||||||
|
self.skip_build = 0
|
||||||
|
self.egg_output = None
|
||||||
|
self.exclude_source_files = None
|
||||||
|
|
||||||
|
|
||||||
|
def finalize_options(self):
|
||||||
|
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
|
||||||
|
self.egg_info = ei_cmd.egg_info
|
||||||
|
|
||||||
|
if self.bdist_dir is None:
|
||||||
|
bdist_base = self.get_finalized_command('bdist').bdist_base
|
||||||
|
self.bdist_dir = os.path.join(bdist_base, 'egg')
|
||||||
|
|
||||||
|
if self.plat_name is None:
|
||||||
|
self.plat_name = get_build_platform()
|
||||||
|
|
||||||
|
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
|
||||||
|
|
||||||
|
if self.egg_output is None:
|
||||||
|
|
||||||
|
# Compute filename of the output egg
|
||||||
|
basename = Distribution(
|
||||||
|
None, None, ei_cmd.egg_name, ei_cmd.egg_version,
|
||||||
|
get_python_version(),
|
||||||
|
self.distribution.has_ext_modules() and self.plat_name
|
||||||
|
).egg_name()
|
||||||
|
|
||||||
|
self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def do_install_data(self):
|
||||||
|
# Hack for packages that install data to install's --install-lib
|
||||||
|
self.get_finalized_command('install').install_lib = self.bdist_dir
|
||||||
|
|
||||||
|
site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
|
||||||
|
old, self.distribution.data_files = self.distribution.data_files,[]
|
||||||
|
|
||||||
|
for item in old:
|
||||||
|
if isinstance(item,tuple) and len(item)==2:
|
||||||
|
if os.path.isabs(item[0]):
|
||||||
|
realpath = os.path.realpath(item[0])
|
||||||
|
normalized = os.path.normcase(realpath)
|
||||||
|
if normalized==site_packages or normalized.startswith(
|
||||||
|
site_packages+os.sep
|
||||||
|
):
|
||||||
|
item = realpath[len(site_packages)+1:], item[1]
|
||||||
|
# XXX else: raise ???
|
||||||
|
self.distribution.data_files.append(item)
|
||||||
|
|
||||||
|
try:
|
||||||
|
log.info("installing package data to %s" % self.bdist_dir)
|
||||||
|
self.call_command('install_data', force=0, root=None)
|
||||||
|
finally:
|
||||||
|
self.distribution.data_files = old
|
||||||
|
|
||||||
|
|
||||||
|
def get_outputs(self):
|
||||||
|
return [self.egg_output]
|
||||||
|
|
||||||
|
|
||||||
|
def call_command(self,cmdname,**kw):
|
||||||
|
"""Invoke reinitialized command `cmdname` with keyword args"""
|
||||||
|
for dirname in INSTALL_DIRECTORY_ATTRS:
|
||||||
|
kw.setdefault(dirname,self.bdist_dir)
|
||||||
|
kw.setdefault('skip_build',self.skip_build)
|
||||||
|
kw.setdefault('dry_run', self.dry_run)
|
||||||
|
cmd = self.reinitialize_command(cmdname, **kw)
|
||||||
|
self.run_command(cmdname)
|
||||||
|
return cmd
|
||||||
|
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
# Generate metadata first
|
||||||
|
self.run_command("egg_info")
|
||||||
|
# We run install_lib before install_data, because some data hacks
|
||||||
|
# pull their data path from the install_lib command.
|
||||||
|
log.info("installing library code to %s" % self.bdist_dir)
|
||||||
|
instcmd = self.get_finalized_command('install')
|
||||||
|
old_root = instcmd.root; instcmd.root = None
|
||||||
|
if self.distribution.has_c_libraries() and not self.skip_build:
|
||||||
|
self.run_command('build_clib')
|
||||||
|
cmd = self.call_command('install_lib', warn_dir=0)
|
||||||
|
instcmd.root = old_root
|
||||||
|
|
||||||
|
all_outputs, ext_outputs = self.get_ext_outputs()
|
||||||
|
self.stubs = []
|
||||||
|
to_compile = []
|
||||||
|
for (p,ext_name) in enumerate(ext_outputs):
|
||||||
|
filename,ext = os.path.splitext(ext_name)
|
||||||
|
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
|
||||||
|
self.stubs.append(pyfile)
|
||||||
|
log.info("creating stub loader for %s" % ext_name)
|
||||||
|
if not self.dry_run:
|
||||||
|
write_stub(os.path.basename(ext_name), pyfile)
|
||||||
|
to_compile.append(pyfile)
|
||||||
|
ext_outputs[p] = ext_name.replace(os.sep,'/')
|
||||||
|
|
||||||
|
to_compile.extend(self.make_init_files())
|
||||||
|
if to_compile:
|
||||||
|
cmd.byte_compile(to_compile)
|
||||||
|
if self.distribution.data_files:
|
||||||
|
self.do_install_data()
|
||||||
|
|
||||||
|
# Make the EGG-INFO directory
|
||||||
|
archive_root = self.bdist_dir
|
||||||
|
egg_info = os.path.join(archive_root,'EGG-INFO')
|
||||||
|
self.mkpath(egg_info)
|
||||||
|
if self.distribution.scripts:
|
||||||
|
script_dir = os.path.join(egg_info, 'scripts')
|
||||||
|
log.info("installing scripts to %s" % script_dir)
|
||||||
|
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
|
||||||
|
|
||||||
|
self.copy_metadata_to(egg_info)
|
||||||
|
native_libs = os.path.join(egg_info, "native_libs.txt")
|
||||||
|
if all_outputs:
|
||||||
|
log.info("writing %s" % native_libs)
|
||||||
|
if not self.dry_run:
|
||||||
|
ensure_directory(native_libs)
|
||||||
|
libs_file = open(native_libs, 'wt')
|
||||||
|
libs_file.write('\n'.join(all_outputs))
|
||||||
|
libs_file.write('\n')
|
||||||
|
libs_file.close()
|
||||||
|
elif os.path.isfile(native_libs):
|
||||||
|
log.info("removing %s" % native_libs)
|
||||||
|
if not self.dry_run:
|
||||||
|
os.unlink(native_libs)
|
||||||
|
|
||||||
|
write_safety_flag(
|
||||||
|
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
|
||||||
|
)
|
||||||
|
|
||||||
|
if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
|
||||||
|
log.warn(
|
||||||
|
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
|
||||||
|
"Use the install_requires/extras_require setup() args instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.exclude_source_files:
|
||||||
|
self.zap_pyfiles()
|
||||||
|
|
||||||
|
# Make the archive
|
||||||
|
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
|
||||||
|
dry_run=self.dry_run, mode=self.gen_header())
|
||||||
|
if not self.keep_temp:
|
||||||
|
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
||||||
|
|
||||||
|
# Add to 'Distribution.dist_files' so that the "upload" command works
|
||||||
|
getattr(self.distribution,'dist_files',[]).append(
|
||||||
|
('bdist_egg',get_python_version(),self.egg_output))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def zap_pyfiles(self):
|
||||||
|
log.info("Removing .py files from temporary directory")
|
||||||
|
for base,dirs,files in walk_egg(self.bdist_dir):
|
||||||
|
for name in files:
|
||||||
|
if name.endswith('.py'):
|
||||||
|
path = os.path.join(base,name)
|
||||||
|
log.debug("Deleting %s", path)
|
||||||
|
os.unlink(path)
|
||||||
|
|
||||||
|
def zip_safe(self):
|
||||||
|
safe = getattr(self.distribution,'zip_safe',None)
|
||||||
|
if safe is not None:
|
||||||
|
return safe
|
||||||
|
log.warn("zip_safe flag not set; analyzing archive contents...")
|
||||||
|
return analyze_egg(self.bdist_dir, self.stubs)
|
||||||
|
|
||||||
|
def make_init_files(self):
|
||||||
|
"""Create missing package __init__ files"""
|
||||||
|
init_files = []
|
||||||
|
for base,dirs,files in walk_egg(self.bdist_dir):
|
||||||
|
if base==self.bdist_dir:
|
||||||
|
# don't put an __init__ in the root
|
||||||
|
continue
|
||||||
|
for name in files:
|
||||||
|
if name.endswith('.py'):
|
||||||
|
if '__init__.py' not in files:
|
||||||
|
pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
|
||||||
|
if self.distribution.has_contents_for(pkg):
|
||||||
|
log.warn("Creating missing __init__.py for %s",pkg)
|
||||||
|
filename = os.path.join(base,'__init__.py')
|
||||||
|
if not self.dry_run:
|
||||||
|
f = open(filename,'w'); f.write(NS_PKG_STUB)
|
||||||
|
f.close()
|
||||||
|
init_files.append(filename)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# not a package, don't traverse to subdirectories
|
||||||
|
dirs[:] = []
|
||||||
|
|
||||||
|
return init_files
|
||||||
|
|
||||||
|
def gen_header(self):
|
||||||
|
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
|
||||||
|
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
|
||||||
|
if ep is None:
|
||||||
|
return 'w' # not an eggsecutable, do it the usual way.
|
||||||
|
|
||||||
|
if not ep.attrs or ep.extras:
|
||||||
|
raise DistutilsSetupError(
|
||||||
|
"eggsecutable entry point (%r) cannot have 'extras' "
|
||||||
|
"or refer to a module" % (ep,)
|
||||||
|
)
|
||||||
|
|
||||||
|
pyver = sys.version[:3]
|
||||||
|
pkg = ep.module_name
|
||||||
|
full = '.'.join(ep.attrs)
|
||||||
|
base = ep.attrs[0]
|
||||||
|
basename = os.path.basename(self.egg_output)
|
||||||
|
|
||||||
|
header = (
|
||||||
|
"#!/bin/sh\n"
|
||||||
|
'if [ `basename $0` = "%(basename)s" ]\n'
|
||||||
|
'then exec python%(pyver)s -c "'
|
||||||
|
"import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
|
||||||
|
"from %(pkg)s import %(base)s; sys.exit(%(full)s())"
|
||||||
|
'" "$@"\n'
|
||||||
|
'else\n'
|
||||||
|
' echo $0 is not the correct name for this egg file.\n'
|
||||||
|
' echo Please rename it back to %(basename)s and try again.\n'
|
||||||
|
' exec false\n'
|
||||||
|
'fi\n'
|
||||||
|
|
||||||
|
) % locals()
|
||||||
|
|
||||||
|
if not self.dry_run:
|
||||||
|
mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
|
||||||
|
f = open(self.egg_output, 'w')
|
||||||
|
f.write(header)
|
||||||
|
f.close()
|
||||||
|
return 'a'
|
||||||
|
|
||||||
|
|
||||||
|
    def copy_metadata_to(self, target_dir):
        prefix = os.path.join(self.egg_info,'')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        paths = {self.bdist_dir:''}
        for base, dirs, files in os.walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base]+filename)
            for filename in dirs:
                paths[os.path.join(base,filename)] = paths[base]+filename+'/'

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext,Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs


NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())


def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = os.walk(egg_dir)
    base,dirs,files = walker.next()
    if 'EGG-INFO' in dirs:
        dirs.remove('EGG-INFO')
    yield base,dirs,files
    for bdf in walker:
        yield bdf

def analyze_egg(egg_dir, stubs):
    # check for existing flag in EGG-INFO
    for flag,fn in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
            return flag
    if not can_scan(): return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith('.py') or name.endswith('.pyw'):
                continue
            elif name.endswith('.pyc') or name.endswith('.pyo'):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe

def write_safety_flag(egg_dir, safe):
    # Write or remove zip safety flag file(s)
    for flag,fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe)!=flag:
                os.unlink(fn)
        elif safe is not None and bool(safe)==flag:
            f=open(fn,'wb'); f.write('\n'); f.close()

safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base,name)
    if filename[:-1] in stubs:
        return True     # Extension module
    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
    f = open(filename,'rb'); f.read(8)   # skip magic & date
    code = marshal.load(f); f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
        if sys.version[:3]=="2.4":  # -m works w/zipfiles in 2.5
            log.warn("%s: top-level module may be 'python -m' script", module)
            safe = False
    return safe
def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    for name in code.co_names: yield name
    for const in code.co_consts:
        if isinstance(const,basestring):
            yield const
        elif isinstance(const,CodeType):
            for name in iter_symbols(const):
                yield name

def can_scan():
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn("Please ask the author to include a 'zip_safe'"
             " setting (either True or False) in the package's setup.py")


# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory

INSTALL_DIRECTORY_ATTRS = [
    'install_lib', 'install_dir', 'install_data', 'install_base'
]

def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
    mode='w'
):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir)+1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)

    if compress is None:
        compress = (sys.version>="2.4")  # avoid 2.3 zipimport bug when 64 bits

    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        os.path.walk(base_dir, visit, z)
        z.close()
    else:
        os.path.walk(base_dir, visit, None)
    return zip_filename

#
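The make_zipfile() helper above is what bdist_egg ultimately calls to pack the staged build tree into the .egg archive. A minimal sketch of invoking it directly under Python 2 (the paths below are made up for illustration, not taken from this commit):

# Illustrative only: hypothetical paths; assumes the bundled setuptools package is importable.
from setuptools.command.bdist_egg import make_zipfile
make_zipfile('dist/example-0.1-py2.4.egg', 'build/bdist.linux-x86_64/egg',
             verbose=1, dry_run=0, compress=True, mode='w')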
82
setuptools-0.6c15dev.egg/setuptools/command/bdist_rpm.py
Normal file
@ -0,0 +1,82 @@
# This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs, and
# finally, a kludge to track .rpm files for uploading when run on Python <2.5.

from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
import sys, os

class bdist_rpm(_bdist_rpm):

    def initialize_options(self):
        _bdist_rpm.initialize_options(self)
        self.no_egg = None

    if sys.version<"2.5":
        # Track for uploading any .rpm file(s) moved to self.dist_dir
        def move_file(self, src, dst, level=1):
            _bdist_rpm.move_file(self, src, dst, level)
            if dst==self.dist_dir and src.endswith('.rpm'):
                getattr(self.distribution,'dist_files',[]).append(
                    ('bdist_rpm',
                     src.endswith('.src.rpm') and 'any' or sys.version[:3],
                     os.path.join(dst, os.path.basename(src)))
                )

    def run(self):
        self.run_command('egg_info')    # ensure distro name is up-to-date
        _bdist_rpm.run(self)

    def _make_spec_file(self):
        version = self.distribution.get_version()
        rpmversion = version.replace('-','_')
        spec = _bdist_rpm._make_spec_file(self)
        line23 = '%define version '+version
        line24 = '%define version '+rpmversion
        spec = [
            line.replace(
                "Source0: %{name}-%{version}.tar",
                "Source0: %{name}-%{unmangled_version}.tar"
            ).replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed "
            ).replace(
                "%setup",
                "%setup -n %{name}-%{unmangled_version}"
            ).replace(line23,line24)
            for line in spec
        ]
        spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
        return spec
82
setuptools-0.6c15dev.egg/setuptools/command/bdist_wininst.py
Normal file
@ -0,0 +1,82 @@
from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
import os, sys

class bdist_wininst(_bdist_wininst):
    _good_upload = _bad_upload = None

    def create_exe(self, arcname, fullname, bitmap=None):
        _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
        installer_name = self.get_installer_filename(fullname)
        if self.target_version:
            pyversion = self.target_version
            # fix 2.5+ bdist_wininst ignoring --target-version spec
            self._bad_upload = ('bdist_wininst', 'any', installer_name)
        else:
            pyversion = 'any'
        self._good_upload = ('bdist_wininst', pyversion, installer_name)

    def _fix_upload_names(self):
        good, bad = self._good_upload, self._bad_upload
        dist_files = getattr(self.distribution, 'dist_files', [])
        if bad in dist_files:
            dist_files.remove(bad)
        if good not in dist_files:
            dist_files.append(good)

    def reinitialize_command (self, command, reinit_subcommands=0):
        cmd = self.distribution.reinitialize_command(
            command, reinit_subcommands)
        if command in ('install', 'install_lib'):
            cmd.install_lib = None  # work around distutils bug
        return cmd

    def run(self):
        self._is_running = True
        try:
            _bdist_wininst.run(self)
            self._fix_upload_names()
        finally:
            self._is_running = False

    if not hasattr(_bdist_wininst, 'get_installer_filename'):
        def get_installer_filename(self, fullname):
            # Factored out to allow overriding in subclasses
            if self.target_version:
                # if we create an installer for a specific python version,
                # it's better to include this in the name
                installer_name = os.path.join(self.dist_dir,
                                              "%s.win32-py%s.exe" %
                                              (fullname, self.target_version))
            else:
                installer_name = os.path.join(self.dist_dir,
                                              "%s.win32.exe" % fullname)
            return installer_name
        # get_installer_filename()
285
setuptools-0.6c15dev.egg/setuptools/command/build_ext.py
Normal file
@ -0,0 +1,285 @@
from distutils.command.build_ext import build_ext as _du_build_ext
try:
    # Attempt to use Pyrex for building extensions, if available
    from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
    _build_ext = _du_build_ext

import os, sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
get_config_var("LDSHARED")  # make sure _config_vars is initialized
from distutils.sysconfig import _config_vars
from distutils import log
from distutils.errors import *

have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        from dl import RTLD_NOW
        have_rtld = True
        use_stubs = True
    except ImportError:
        pass

def if_dl(s):
    if have_rtld:
        return s
    return ''


class build_ext(_build_ext):
    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def copy_extensions_to_source(self):
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,os.path.basename(filename))
            src_filename = os.path.join(self.build_lib,filename)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
        # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
        def swig_sources(self, sources, *otherargs):
            # first do any Pyrex processing
            sources = _build_ext.swig_sources(self, sources) or sources
            # Then do any actual SWIG stuff on the remainder
            return _du_build_ext.swig_sources(self, sources, *otherargs)

    def get_ext_filename(self, fullname):
        filename = _build_ext.get_ext_filename(self,fullname)
        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            if isinstance(ext,Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn,libtype)
            elif use_stubs and ext._links_to_dynamic:
                d,fn = os.path.split(filename)
                return os.path.join(d,'dl-'+fn)
        return filename

    def initialize_options(self):
        _build_ext.initialize_options(self)
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}

    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions
                        if isinstance(ext,Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext
            ltd = ext._links_to_dynamic = \
                self.shlibs and self.links_to_dynamic(ext) or False
            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)
    def setup_shlib_compiler(self):
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        if sys.platform == "darwin":
            tmp = _config_vars.copy()
            try:
                # XXX Help! I don't have any idea whether these are right...
                _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
                _config_vars['CCSHARED'] = " -dynamiclib"
                _config_vars['SO'] = ".dylib"
                customize_compiler(compiler)
            finally:
                _config_vars.clear()
                _config_vars.update(tmp)
        else:
            customize_compiler(compiler)

        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name,value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        if isinstance(ext,Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self,ext)

    def build_extension(self, ext):
        _compiler = self.compiler
        try:
            if isinstance(ext,Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self,ext)
            if ext._needs_stub:
                self.write_stub(
                    self.get_finalized_command('build_py').build_lib, ext
                )
        finally:
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
        for libname in ext.libraries:
            if pkg+libname in libnames: return True
        return False

    def get_outputs(self):
        outputs = _build_ext.get_outputs(self)
        optimize = self.get_finalized_command('build_py').optimize
        for ext in self.extensions:
            if ext._needs_stub:
                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
                outputs.append(base+'.py')
                outputs.append(base+'.pyc')
                if optimize:
                    outputs.append(base+'.pyo')
        return outputs
    def write_stub(self, output_dir, ext, compile=False):
        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file+" already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file,'w')
            f.write('\n'.join([
                "def __bootstrap__():",
                "   global __bootstrap__, __file__, __loader__",
                "   import sys, os, pkg_resources, imp"+if_dl(", dl"),
                "   __file__ = pkg_resources.resource_filename(__name__,%r)"
                   % os.path.basename(ext._file_name),
                "   del __bootstrap__",
                "   if '__loader__' in globals():",
                "       del __loader__",
                if_dl("   old_flags = sys.getdlopenflags()"),
                "   old_dir = os.getcwd()",
                "   try:",
                "     os.chdir(os.path.dirname(__file__))",
                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                "     imp.load_dynamic(__name__,__file__)",
                "   finally:",
                if_dl("     sys.setdlopenflags(old_flags)"),
                "     os.chdir(old_dir)",
                "__bootstrap__()",
                ""  # terminal \n
            ]))
            f.close()
        if compile:
            from distutils.util import byte_compile
            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)


if use_stubs or os.name=='nt':
    # Build shared libraries
    #
    def link_shared_object(self, objects, output_libname, output_dir=None,
        libraries=None, library_dirs=None, runtime_library_dirs=None,
        export_symbols=None, debug=0, extra_preargs=None,
        extra_postargs=None, build_temp=None, target_lang=None
    ):  self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(self, objects, output_libname, output_dir=None,
        libraries=None, library_dirs=None, runtime_library_dirs=None,
        export_symbols=None, debug=0, extra_preargs=None,
        extra_postargs=None, build_temp=None, target_lang=None
    ):
        # XXX we need to either disallow these attrs on Library instances,
        #     or warn/abort here if set, or something...
        #libraries=None, library_dirs=None, runtime_library_dirs=None,
        #export_symbols=None, extra_preargs=None, extra_postargs=None,
        #build_temp=None

        assert output_dir is None   # distutils build_ext doesn't pass this
        output_dir,filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )
205
setuptools-0.6c15dev.egg/setuptools/command/build_py.py
Normal file
@ -0,0 +1,205 @@
import os.path, sys, fnmatch
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob

class build_py(_build_py):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """
    def finalize_options(self):
        _build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = self.distribution.exclude_package_data or {}
        if 'data_files' in self.__dict__: del self.__dict__['data_files']

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self,attr):
        if attr=='data_files':  # lazily compute data files
            self.data_files = files = self._get_data_files(); return files
        return _build_py.__getattr__(self,attr)

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        data = []
        for package in self.packages or ():
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = len(src_dir)+1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
            ]
            data.append( (package, src_dir, build_dir, filenames) )
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = self.manifest_files.get(package, [])[:]
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        lastdir = None
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(os.path.join(src_dir, filename), target)
    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d,f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d!=prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f==oldf:
                    continue    # it's a module, not data
                mf.setdefault(src_dirs[d],[]).append(path)

    def get_data_files(self): pass  # kludge 2.4 for lazy computation

    if sys.version<"2.4":    # Python 2.4 already has this code
        def get_outputs(self, include_bytecode=1):
            """Return complete list of files copied to the build directory

            This includes both '.py' files and data files, as well as '.pyc'
            and '.pyo' files if 'include_bytecode' is true.  (This method is
            needed for the 'install_lib' command to do its job properly, and to
            generate a correct installation manifest.)
            """
            return _build_py.get_outputs(self, include_bytecode) + [
                os.path.join(build_dir, filename)
                for package, src_dir, build_dir,filenames in self.data_files
                for filename in filenames
            ]

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = _build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg==package or pkg.startswith(package+'.'):
                break
        else:
            return init_py

        f = open(init_py,'rU')
        if 'declare_namespace' not in f.read():
            from distutils.errors import DistutilsError
            raise DistutilsError(
                "Namespace package problem: %s is a namespace package, but its\n"
                "__init__.py does not call declare_namespace()! Please fix it.\n"
                '(See the setuptools manual under "Namespace Packages" for '
                "details.)\n" % (package,)
            )
        f.close()
        return init_py

    def initialize_options(self):
        self.packages_checked={}
        _build_py.initialize_options(self)

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        globs = (self.exclude_package_data.get('', [])
                 + self.exclude_package_data.get(package, []))
        bad = []
        for pattern in globs:
            bad.extend(
                fnmatch.filter(
                    files, os.path.join(src_dir, convert_path(pattern))
                )
            )
        bad = dict.fromkeys(bad)
        seen = {}
        return [
            f for f in files if f not in bad
                and f not in seen and seen.setdefault(f,1)  # ditch dupes
        ]


def assert_relative(path):
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(
        """Error: setup script specifies an absolute path:

    %s

setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
""" % path
    )
164
setuptools-0.6c15dev.egg/setuptools/command/develop.py
Normal file
@ -0,0 +1,164 @@
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils import log
from distutils.errors import *
import sys, os, setuptools, glob

class develop(easy_install):
    """Set up package for development"""

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir
    def finalize_options(self):
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            raise DistutilsError(
                "Please rename %r to %r before using 'develop'"
                % (ei.egg_info, ei.broken_egg_info)
            )
        self.args = [ei.egg_name]
        easy_install.finalize_options(self)
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        target = normalize_path(self.egg_base)
        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to "+target
            )

        # Make a distribution for the package's source
        self.dist = Distribution(
            target,
            PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name = ei.egg_name
        )

        p = self.egg_base.replace(os.sep,'/')
        if p!= os.curdir:
            p = '../' * (p.count('/')+1)
        self.setup_path = p
        p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
        if p != normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory", p, normalize_path(os.curdir))

    def install_for_development(self):
        # Ensure metadata is up-to-date
        self.run_command('egg_info')
        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')
        self.install_site_py()  # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            f = open(self.egg_link,"w")
            f.write(self.egg_path + "\n" + self.setup_path)
            f.close()
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            contents = [line.rstrip() for line in file(self.egg_link)]
            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self,dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            f = open(script_path,'rU')
            script_text = f.read()
            f.close()
            self.install_script(dist, script_name, script_text, script_path)
1730
setuptools-0.6c15dev.egg/setuptools/command/easy_install.py
Normal file
File diff suppressed because it is too large
451
setuptools-0.6c15dev.egg/setuptools/command/egg_info.py
Normal file
@ -0,0 +1,451 @@
"""setuptools.command.egg_info
|
||||||
|
|
||||||
|
Create a distribution's .egg-info directory and contents"""
|
||||||
|
|
||||||
|
# This module should be kept compatible with Python 2.3
|
||||||
|
import os, re
|
||||||
|
from setuptools import Command
|
||||||
|
from distutils.errors import *
|
||||||
|
from distutils import log
|
||||||
|
from setuptools.command.sdist import sdist
|
||||||
|
from distutils.util import convert_path
|
||||||
|
from distutils.filelist import FileList
|
||||||
|
from pkg_resources import parse_requirements, safe_name, parse_version, \
|
||||||
|
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
|
||||||
|
from sdist import walk_revctrl
|
||||||
|
|
||||||
|
class egg_info(Command):
|
||||||
|
description = "create a distribution's .egg-info directory"
|
||||||
|
|
||||||
|
user_options = [
|
||||||
|
('egg-base=', 'e', "directory containing .egg-info directories"
|
||||||
|
" (default: top of the source tree)"),
|
||||||
|
('tag-svn-revision', 'r',
|
||||||
|
"Add subversion revision ID to version number"),
|
||||||
|
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
|
||||||
|
('tag-build=', 'b', "Specify explicit tag to add to version number"),
|
||||||
|
('no-svn-revision', 'R',
|
||||||
|
"Don't add subversion revision ID [default]"),
|
||||||
|
('no-date', 'D', "Don't include date stamp [default]"),
|
||||||
|
]
|
||||||
|
|
||||||
|
boolean_options = ['tag-date', 'tag-svn-revision']
|
||||||
|
negative_opt = {'no-svn-revision': 'tag-svn-revision',
|
||||||
|
'no-date': 'tag-date'}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def initialize_options(self):
|
||||||
|
self.egg_name = None
|
||||||
|
self.egg_version = None
|
||||||
|
self.egg_base = None
|
||||||
|
self.egg_info = None
|
||||||
|
self.tag_build = None
|
||||||
|
self.tag_svn_revision = 0
|
||||||
|
self.tag_date = 0
|
||||||
|
self.broken_egg_info = False
|
||||||
|
self.vtags = None
|
||||||
|
|
||||||
|
def save_version_info(self, filename):
|
||||||
|
from setopt import edit_config
|
||||||
|
edit_config(
|
||||||
|
filename,
|
||||||
|
{'egg_info':
|
||||||
|
{'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
    def finalize_options (self):
        self.egg_name = safe_name(self.distribution.get_name())
        self.vtags = self.tags()
        self.egg_version = self.tagged_version()

        try:
            list(
                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
            )
        except ValueError:
            raise DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name,self.egg_version)
            )

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('',os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name)+'.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name: self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key==self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)`.  If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def tagged_version(self):
        return safe_version(self.distribution.get_version() + self.vtags)

    def run(self):
        self.mkpath(self.egg_info)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            writer = ep.load(installer=installer)
            writer(self, ep.name, os.path.join(self.egg_info,ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def tags(self):
        version = ''
        if self.tag_build:
            version+=self.tag_build
        if self.tag_svn_revision and (
            os.path.exists('.svn') or os.path.exists('PKG-INFO')
        ):  version += '-r%s' % self.get_svn_revision()
        if self.tag_date:
            import time; version += time.strftime("-%Y%m%d")
        return version
    def get_svn_revision(self):
        revision = 0
        urlre = re.compile('url="([^"]+)"')
        revre = re.compile('committed-rev="(\d+)"')

        for base,dirs,files in os.walk(os.curdir):
            if '.svn' not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove('.svn')
            f = open(os.path.join(base,'.svn','entries'))
            data = f.read()
            f.close()

            if data.startswith('<?xml'):
                dirurl = urlre.search(data).group(1)    # get repository URL
                localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0])
            else:
                try: svnver = int(data.splitlines()[0])
                except: svnver=-1
                if svnver<8:
                    log.warn("unrecognized .svn/entries format; skipping %s", base)
                    dirs[:] = []
                    continue

                data = map(str.splitlines,data.split('\n\x0c\n'))
                del data[0][0]  # get rid of the '8' or '9'
                dirurl = data[0][3]
                localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0])
            if base==os.curdir:
                base_url = dirurl+'/'   # save the root url
            elif not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)

        return str(revision or get_pkg_info_revision())
    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name+'.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-"*78+'\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n'+'-'*78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei     # make it work for now

class FileList(FileList):
    """File list that accepts only existing, platform-independent paths"""

    def append(self, item):
        if item.endswith('\r'):     # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)
        if os.path.exists(path):
            self.files.append(path)


class manifest_maker(sdist):

    template = "MANIFEST.in"

    def initialize_options (self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()   # it must exist so it'll get in the list
        self.filelist.findall()
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def write_manifest (self):
        """Write the file list in 'self.filelist' (presumably as filled in
        by 'add_defaults()' and 'read_template()') to the manifest file
        named by 'self.manifest'.
        """
        files = self.filelist.files
        if os.sep!='/':
            files = [f.replace(os.sep,'/') for f in files]
        self.execute(write_file, (self.manifest, files),
                     "writing manifest file '%s'" % self.manifest)

    def warn(self, msg):    # suppress missing-file warnings from sdist
        if not msg.startswith("standard file not found:"):
            sdist.warn(self, msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)

    def prune_file_list (self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.exclude_pattern(None, prefix=build.build_base)
        self.filelist.exclude_pattern(None, prefix=base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)


def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    f = open(filename, "wb")        # always write POSIX-style manifest
    f.write("\n".join(contents))
    f.close()
def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name
        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

    safe = getattr(cmd.distribution,'zip_safe',None)
    import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)

def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = ['\n'.join(yield_lines(dist.install_requires or ()))]
    for extra,reqs in (dist.extras_require or {}).items():
        data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
    cmd.write_or_delete_file("requirements", filename, ''.join(data))

def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [k.split('.',1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)

def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value)+'\n'
    cmd.write_or_delete_file(argname, filename, value, force)

def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep,basestring) or ep is None:
        data = ep
    elif ep is not None:
        data = []
        for section, contents in ep.items():
            if not isinstance(contents,basestring):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(map(str,contents.values()))
            data.append('[%s]\n%s\n\n' % (section,contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)

def get_pkg_info_revision():
    # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
    # a subversion revision
    #
    if os.path.exists('PKG-INFO'):
        f = open('PKG-INFO','rU')
        for line in f:
            match = re.match(r"Version:.*-r(\d+)\s*$", line)
            if match:
                return int(match.group(1))
    return 0

#
123
setuptools-0.6c15dev.egg/setuptools/command/install.py
Normal file
@ -0,0 +1,123 @@
import setuptools, sys, glob
from distutils.command.install import install as _install
from distutils.errors import DistutilsArgError

class install(_install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = _install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        ('single-version-externally-managed', None,
            "used by system package builders to create 'flat' eggs"),
    ]
    boolean_options = _install.boolean_options + [
        'old-and-unmanageable', 'single-version-externally-managed',
    ]
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)
    sub_commands = [
        cmd for cmd in _install.sub_commands if cmd[0] not in _nc
    ] + new_commands

    def initialize_options(self):
        _install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None
        self.no_compile = None  # make DISTUTILS_DEBUG work right!

    def finalize_options(self):
        _install.finalize_options(self)
        if self.root:
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return _install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install?  Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return _install.run(self)

        # Attempt to detect whether we were called from setup() or by another
        # command.  If we were called by setup(), our caller will be the
        # 'run_command' method in 'distutils.dist', and *its* caller will be
        # the 'run_commands' method.  If we were called any other way, our
        # immediate caller *might* be 'run_command', but it won't have been
        # called by 'run_commands'.  This is slightly kludgy, but seems to
        # work.
        #
        caller = sys._getframe(2)
        caller_module = caller.f_globals.get('__name__','')
        caller_name = caller.f_code.co_name

        if caller_module != 'distutils.dist' or caller_name!='run_commands':
            # We weren't called from the command line or setup(), so we
            # should run in backward-compatibility mode to support bdist_*
            # commands.
            _install.run(self)
        else:
            self.do_egg_install()

    def do_egg_install(self):

        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution, args="x", root=self.root, record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run()
        setuptools.bootstrap_install_from = None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#
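Note: the caller-frame check in install.run() above can be puzzling at first read; the standalone sketch below (names are ours, not part of the diff) shows the same test in isolation:

import sys

def called_via_run_commands():
    # Mirrors install.run(): look two frames up and see whether we arrived
    # here through distutils.dist's run_commands(), i.e. a normal setup() run.
    caller = sys._getframe(2)
    return (caller.f_globals.get('__name__', '') == 'distutils.dist'
            and caller.f_code.co_name == 'run_commands')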
123
setuptools-0.6c15dev.egg/setuptools/command/install_egg_info.py
Normal file
@ -0,0 +1,123 @@
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, shutil, pkg_resources

class install_egg_info(Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        self.set_undefined_options('install_lib',('install_dir','install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name()+'.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = [self.target]

    def run(self):
        self.run_command('egg_info')
        target = self.target
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink,(self.target,),"Removing "+self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(self.copytree, (),
            "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src,dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform.  'dst' is a
            # platform-specific path.
            for skip in '.svn/','CVS/':
                if src.startswith(skip) or '/'+skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst
        unpack_archive(self.source, self.target, skimmer)

    def install_namespaces(self):
        nsp = self._get_all_ns_packages()
        if not nsp: return
        filename,ext = os.path.splitext(self.target)
        filename += '-nspkg.pth'; self.outputs.append(filename)
        log.info("Installing %s",filename)
        if not self.dry_run:
            f = open(filename,'wb')
            for pkg in nsp:
                pth = tuple(pkg.split('.'))
                trailer = '\n'
                if '.' in pkg:
                    trailer = (
                        "; m and setattr(sys.modules[%r], %r, m)\n"
                        % ('.'.join(pth[:-1]), pth[-1])
                    )
                f.write(
                    "import sys,new,os; "
                    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
                        "*%(pth)r); "
                    "ie = os.path.exists(os.path.join(p,'__init__.py')); "
                    "m = not ie and "
                        "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); "
                    "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
                    "(p not in mp) and mp.append(p)%(trailer)s"
                    % locals()
                )
            f.close()

    def _get_all_ns_packages(self):
        nsp = {}
        for pkg in self.distribution.namespace_packages or []:
            pkg = pkg.split('.')
            while pkg:
                nsp['.'.join(pkg)] = 1
                pkg.pop()
        nsp=list(nsp)
        nsp.sort()  # set up shorter names first
        return nsp
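Note: the -nspkg.pth file written by install_namespaces() above complements the declaration that each namespace package ships in its own __init__.py; the conventional one-liner (the package layout here is hypothetical) is:

# somenamespace/__init__.py of a hypothetical namespace package
__import__('pkg_resources').declare_namespace(__name__)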
76
setuptools-0.6c15dev.egg/setuptools/command/install_lib.py
Normal file
@ -0,0 +1,76 @@
from distutils.command.install_lib import install_lib as _install_lib
import os

class install_lib(_install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def _bytecode_filenames (self, py_filenames):
        bytecode_files = []
        for py_file in py_filenames:
            if not py_file.endswith('.py'):
                continue
            if self.compile:
                bytecode_files.append(py_file + "c")
            if self.optimize > 0:
                bytecode_files.append(py_file + "o")

        return bytecode_files

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        exclude = {}
        nsp = self.distribution.namespace_packages

        if (nsp and self.get_finalized_command('install')
               .single_version_externally_managed
        ):
            for pkg in nsp:
                parts = pkg.split('.')
                while parts:
                    pkgdir = os.path.join(self.install_dir, *parts)
                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
                        exclude[os.path.join(pkgdir,f)] = 1
                    parts.pop()
        return exclude

    def copy_tree(
        self, infile, outfile,
        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return _install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        outputs = _install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
82
setuptools-0.6c15dev.egg/setuptools/command/install_scripts.py
Normal file
@ -0,0 +1,82 @@
from distutils.command.install_scripts import install_scripts \
     as _install_scripts
from easy_install import get_script_args, sys_executable, chmod
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log

class install_scripts(_install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    def initialize_options(self):
        _install_scripts.initialize_options(self)
        self.no_ep = False

    def run(self):
        self.run_command("egg_info")
        if self.distribution.scripts:
            _install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        executable = getattr(bs_cmd,'executable',sys_executable)
        is_wininst = getattr(
            self.get_finalized_command("bdist_wininst"), '_is_running', False
        )
        for args in get_script_args(dist, executable, is_wininst):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode="t", *ignored):
        """Write an executable file to the scripts directory"""
        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        if not self.dry_run:
            ensure_directory(target)
            f = open(target,"w"+mode)
            f.write(contents)
            f.close()
            chmod(target,0755)
10
setuptools-0.6c15dev.egg/setuptools/command/register.py
Normal file
@ -0,0 +1,10 @@
from distutils.command.register import register as _register

class register(_register):
    __doc__ = _register.__doc__

    def run(self):
        # Make sure that we are using valid current name/version info
        self.run_command('egg_info')
        _register.run(self)
57
setuptools-0.6c15dev.egg/setuptools/command/rotate.py
Normal file
@ -0,0 +1,57 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *

class rotate(Command):
    """Delete older distributions"""

    description = "delete older distributions, keeping N newest files"
    user_options = [
        ('match=', 'm', "patterns to match (required)"),
        ('dist-dir=', 'd', "directory where the distributions are"),
        ('keep=', 'k', "number of matching distributions to keep"),
    ]

    boolean_options = []

    def initialize_options(self):
        self.match = None
        self.dist_dir = None
        self.keep = None

    def finalize_options(self):
        if self.match is None:
            raise DistutilsOptionError(
                "Must specify one or more (comma-separated) match patterns "
                "(e.g. '.zip' or '.egg')"
            )
        if self.keep is None:
            raise DistutilsOptionError("Must specify number of files to keep")
        try:
            self.keep = int(self.keep)
        except ValueError:
            raise DistutilsOptionError("--keep must be an integer")
        if isinstance(self.match, basestring):
            self.match = [
                convert_path(p.strip()) for p in self.match.split(',')
            ]
        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))

    def run(self):
        self.run_command("egg_info")
        from glob import glob
        for pattern in self.match:
            pattern = self.distribution.get_name()+'*'+pattern
            files = glob(os.path.join(self.dist_dir,pattern))
            files = [(os.path.getmtime(f),f) for f in files]
            files.sort()
            files.reverse()

            log.info("%d file(s) matching %s", len(files), pattern)
            files = files[self.keep:]
            for (t,f) in files:
                log.info("Deleting %s", f)
                if not self.dry_run:
                    os.unlink(f)
24
setuptools-0.6c15dev.egg/setuptools/command/saveopts.py
Normal file
@ -0,0 +1,24 @@
import distutils, os
from setuptools import Command
from setuptools.command.setopt import edit_config, option_base

class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        dist = self.distribution
        commands = dist.command_options.keys()
        settings = {}

        for cmd in commands:

            if cmd=='saveopts':
                continue    # don't save our own options!

            for opt,(src,val) in dist.get_option_dict(cmd).items():
                if src=="command line":
                    settings.setdefault(cmd,{})[opt] = val

        edit_config(self.filename, settings, self.dry_run)
246
setuptools-0.6c15dev.egg/setuptools/command/sdist.py
Normal file
@ -0,0 +1,246 @@
from distutils.command.sdist import sdist as _sdist
from distutils.util import convert_path
from distutils import log
from glob import glob
import os, re, sys, pkg_resources

entities = [
    ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
    ("&amp;", "&")
]

def unescape(data):
    for old,new in entities:
        data = data.replace(old,new)
    return data

def re_finder(pattern, postproc=None):
    def find(dirname, filename):
        f = open(filename,'rU')
        data = f.read()
        f.close()
        for match in pattern.finditer(data):
            path = match.group(1)
            if postproc:
                path = postproc(path)
            yield joinpath(dirname,path)
    return find

def joinpath(prefix,suffix):
    if not prefix:
        return suffix
    return os.path.join(prefix,suffix)

def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
        for item in ep.load()(dirname):
            yield item

def _default_revctrl(dirname=''):
    for path, finder in finders:
        path = joinpath(dirname,path)
        if os.path.isfile(path):
            for path in finder(dirname,path):
                if os.path.isfile(path):
                    yield path
                elif os.path.isdir(path):
                    for item in _default_revctrl(path):
                        yield item

def externals_finder(dirname, filename):
    """Find any 'svn:externals' directories"""
    found = False
    f = open(filename,'rb')
    for line in iter(f.readline, ''):    # can't use direct iter!
        parts = line.split()
        if len(parts)==2:
            kind,length = parts
            data = f.read(int(length))
            if kind=='K' and data=='svn:externals':
                found = True
            elif kind=='V' and found:
                f.close()
                break
    else:
        f.close()
        return

    for line in data.splitlines():
        parts = line.split()
        if parts:
            yield joinpath(dirname, parts[0])

entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)

def entries_finder(dirname, filename):
    f = open(filename,'rU')
    data = f.read()
    f.close()
    if data.startswith('<?xml'):
        for match in entries_pattern.finditer(data):
            yield joinpath(dirname,unescape(match.group(1)))
    else:
        svnver=-1
        try: svnver = int(data.splitlines()[0])
        except: pass
        if svnver<8:
            log.warn("unrecognized .svn/entries format in %s", dirname)
            return
        for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
            if not record or len(record)>=6 and record[5]=="delete":
                continue    # skip deleted
            yield joinpath(dirname, record[0])

finders = [
    (convert_path('CVS/Entries'),
        re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
    (convert_path('.svn/entries'), entries_finder),
    (convert_path('.svn/dir-props'), externals_finder),
    (convert_path('.svn/dir-prop-base'), externals_finder),  # svn 1.4
]

class sdist(_sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
    ]

    negative_opt = {}

    def run(self):
        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist = ei_cmd.filelist
        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
        self.check_readme()
        self.check_metadata()
        self.make_distribution()

        dist_files = getattr(self.distribution,'dist_files',[])
        for file in self.archive_files:
            data = ('sdist', '', file)
            if data not in dist_files:
                dist_files.append(data)

    def read_template(self):
        try:
            _sdist.read_template(self)
        except:
            # grody hack to close the template file (MANIFEST.in)
            # this prevents easy_install's attempt at deleting the file from
            # dying and thus masking the real error
            sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
            raise

    # Cribbed from old distutils code, to work around new distutils code
    # that tries to do some of the same stuff as we do, in a way that makes
    # us loop.

    def add_defaults (self):
        standards = [('README', 'README.txt'), self.distribution.script_name]

        for fn in standards:
            if type(fn) is tuple:
                alts = fn
                got_it = 0
                for fn in alts:
                    if os.path.exists(fn):
                        got_it = 1
                        self.filelist.append(fn)
                        break

                if not got_it:
                    self.warn("standard file not found: should have one of " +
                              ', '.join(alts))
            else:
                if os.path.exists(fn):
                    self.filelist.append(fn)
                else:
                    self.warn("standard file '%s' not found" % fn)

        optional = ['test/test*.py', 'setup.cfg']

        for pattern in optional:
            files = filter(os.path.isfile, glob(pattern))
            if files:
                self.filelist.extend(files)

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            self.filelist.extend(build_py.get_source_files())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            self.filelist.extend(build_ext.get_source_files())

        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.filelist.extend(build_clib.get_source_files())

        if self.distribution.has_scripts():
            build_scripts = self.get_finalized_command('build_scripts')
            self.filelist.extend(build_scripts.get_source_files())

    def check_readme(self):
        alts = ("README", "README.txt")
        for f in alts:
            if os.path.exists(f):
                return
        else:
            self.warn(
                "standard file not found: should have one of " +', '.join(alts)
            )

    def make_release_tree(self, base_dir, files):
        _sdist.make_release_tree(self, base_dir, files)

        # Save any egg_info command line options used to create this sdist
        dest = os.path.join(base_dir, 'setup.cfg')
        if hasattr(os,'link') and os.path.exists(dest):
            # unlink and re-copy, since it might be hard-linked, and
            # we don't want to change the source version
            os.unlink(dest)
            self.copy_file('setup.cfg', dest)

        self.get_finalized_command('egg_info').save_version_info(dest)
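Note: walk_revctrl() above collects files from any plugin registered in the 'setuptools.file_finders' entry-point group. A rough sketch of such a plugin (the module name and strategy are illustrative, not part of this commit):

# gitfinder.py -- hypothetical file-finder plugin
import os

def find_files(dirname=''):
    # A real finder would ask the VCS which files are tracked; this
    # placeholder simply walks the tree and yields every regular file.
    for base, dirs, files in os.walk(dirname or '.'):
        for name in files:
            yield os.path.join(base, name)

# registered in the plugin's setup.py with:
#   entry_points = {'setuptools.file_finders': ['git = gitfinder:find_files']}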
158
setuptools-0.6c15dev.egg/setuptools/command/setopt.py
Normal file
@ -0,0 +1,158 @@
import distutils, os
from setuptools import Command
from distutils.util import convert_path
from distutils import log
from distutils.errors import *

__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']

def config_file(kind="local"):
    """Get the filename of the distutils, local, global, or per-user config

    `kind` must be one of "local", "global", or "user"
    """
    if kind=='local':
        return 'setup.cfg'
    if kind=='global':
        return os.path.join(
            os.path.dirname(distutils.__file__),'distutils.cfg'
        )
    if kind=='user':
        dot = os.name=='posix' and '.' or ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError(
        "config_file() type must be 'local', 'global', or 'user'", kind
    )

def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name.  A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    from ConfigParser import RawConfigParser
    log.debug("Reading configuration from %s", filename)
    opts = RawConfigParser()
    opts.read([filename])
    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
        else:
            if not opts.has_section(section):
                log.debug("Adding new section [%s] to %s", section, filename)
                opts.add_section(section)
            for option,value in options.items():
                if value is None:
                    log.debug("Deleting %s.%s from %s",
                        section, option, filename
                    )
                    opts.remove_option(section,option)
                    if not opts.options(section):
                        log.info("Deleting empty [%s] section from %s",
                                  section, filename)
                        opts.remove_section(section)
                else:
                    log.debug(
                        "Setting %s.%s to %r in %s",
                        section, option, value, filename
                    )
                    opts.set(section,option,value)

    log.info("Writing %s", filename)
    if not dry_run:
        f = open(filename,'w'); opts.write(f); f.close()

class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g',
         "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u',
         "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f',
         "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config', 'user-config',
    ]

    def initialize_options(self):
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        filenames = []
        if self.global_config:
            filenames.append(config_file('global'))
        if self.user_config:
            filenames.append(config_file('user'))
        if self.filename is not None:
            filenames.append(self.filename)
        if not filenames:
            filenames.append(config_file('local'))
        if len(filenames)>1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option",
                filenames
            )
        self.filename, = filenames

class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        edit_config(
            self.filename, {
                self.command: {self.option.replace('-','_'):self.set_value}
            },
            self.dry_run
        )
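Note: edit_config() above can also be called directly; a small usage sketch (the file name and option values are illustrative):

from setuptools.command.setopt import edit_config

# Add "[easy_install] find_links = ..." to setup.cfg; passing None for a
# value deletes that option, and None for a whole section deletes the section.
edit_config('setup.cfg', {
    'easy_install': {'find_links': 'http://example.com/eggs/'},
})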
164
setuptools-0.6c15dev.egg/setuptools/command/test.py
Normal file
@ -0,0 +1,164 @@
from setuptools import Command
from distutils.errors import DistutilsOptionError
import sys
from pkg_resources import *
from unittest import TestLoader, main

class ScanningLoader(TestLoader):

    def loadTestsFromModule(self, module):
        """Return a suite of all tests cases contained in the given module

        If the module is a package, load tests from all the modules in it.
        If the module has an ``additional_tests`` function, call it and add
        the return value to the tests.
        """
        tests = []
        if module.__name__!='setuptools.tests.doctest':  # ugh
            tests.append(TestLoader.loadTestsFromModule(self,module))

        if hasattr(module, "additional_tests"):
            tests.append(module.additional_tests())

        if hasattr(module, '__path__'):
            for file in resource_listdir(module.__name__, ''):
                if file.endswith('.py') and file!='__init__.py':
                    submodule = module.__name__+'.'+file[:-3]
                else:
                    if resource_exists(
                        module.__name__, file+'/__init__.py'
                    ):
                        submodule = module.__name__+'.'+file
                    else:
                        continue
                tests.append(self.loadTestsFromName(submodule))

        if len(tests)!=1:
            return self.suiteClass(tests)
        else:
            return tests[0]  # don't create a nested suite for only one return

class test(Command):
    """Command to run unit tests after in-place build"""

    description = "run unit tests after in-place build"

    user_options = [
        ('test-module=','m', "Run 'test_suite' in specified module"),
        ('test-suite=','s',
            "Test suite to run (e.g. 'some_module.test_suite')"),
        ('test-runner=','r', "Test runner to use"),
    ]

    def initialize_options(self):
        self.test_runner = None
        self.test_suite = None
        self.test_module = None
        self.test_loader = None

    def finalize_options(self):
        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module+".test_suite"
        elif self.test_module:
            raise DistutilsOptionError(
                "You may specify a module or a suite, but not both"
            )

        self.test_args = [self.test_suite]

        if self.verbose:
            self.test_args.insert(0,'--verbose')
        if self.test_loader is None:
            self.test_loader = getattr(self.distribution,'test_loader',None)
        if self.test_loader is None:
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution,'test_runner',None)

    def with_project_on_sys_path(self, func):
        # Ensure metadata is up-to-date
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            func()
        finally:
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    def run(self):
        if self.distribution.install_requires:
            self.distribution.fetch_build_eggs(self.distribution.install_requires)
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)

        if self.test_suite:
            cmd = ' '.join(self.test_args)
            if self.dry_run:
                self.announce('skipping "unittest %s" (dry run)' % cmd)
            else:
                self.announce('running "unittest %s"' % cmd)
                self.with_project_on_sys_path(self.run_tests)

    def run_tests(self):
        import unittest
        loader_ep = EntryPoint.parse("x="+self.test_loader)
        loader_class = loader_ep.load(require=False)
        kw = {}
        if self.test_runner is not None:
            runner_ep = EntryPoint.parse("x="+self.test_runner)
            runner_class = runner_ep.load(require=False)
            kw['testRunner'] = runner_class()
        unittest.main(
            None, None, [unittest.__file__]+self.test_args,
            testLoader = loader_class(), **kw
        )
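Note: the test command above reads its defaults from the distribution object, so a project typically only needs something like the following in its setup.py (the project name and suite path are illustrative):

from setuptools import setup

setup(
    name="example",
    version="0.1",
    packages=["example"],
    test_suite="example.tests.test_all",   # what "setup.py test" will run
    tests_require=["mock"],                # fetched as build eggs before the run
)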
181
setuptools-0.6c15dev.egg/setuptools/command/upload.py
Normal file
@ -0,0 +1,181 @@
"""distutils.command.upload

Implements the Distutils 'upload' subcommand (upload package to PyPI)."""

from distutils.errors import *
from distutils.core import Command
from distutils.spawn import spawn
from distutils import log
try:
    from hashlib import md5
except ImportError:
    from md5 import md5
import os
import socket
import platform
import ConfigParser
import httplib
import base64
import urlparse
import cStringIO as StringIO

class upload(Command):

    description = "upload binary package to PyPI"

    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('sign', 's',
         'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
    ]
    boolean_options = ['show-response', 'sign']

    def initialize_options(self):
        self.username = ''
        self.password = ''
        self.repository = ''
        self.show_response = 0
        self.sign = False
        self.identity = None

    def finalize_options(self):
        if self.identity and not self.sign:
            raise DistutilsOptionError(
                "Must use --sign for --identity to have meaning"
            )
        if os.environ.has_key('HOME'):
            rc = os.path.join(os.environ['HOME'], '.pypirc')
            if os.path.exists(rc):
                self.announce('Using PyPI login from %s' % rc)
                config = ConfigParser.ConfigParser({
                    'username':'',
                    'password':'',
                    'repository':''})
                config.read(rc)
                if not self.repository:
                    self.repository = config.get('server-login', 'repository')
                if not self.username:
                    self.username = config.get('server-login', 'username')
                if not self.password:
                    self.password = config.get('server-login', 'password')
        if not self.repository:
            self.repository = self.DEFAULT_REPOSITORY

    def run(self):
        if not self.distribution.dist_files:
            raise DistutilsOptionError("No dist file created in earlier command")
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)

    def upload_file(self, command, pyversion, filename):
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data
        content = open(filename,'rb').read()
        basename = os.path.basename(filename)
        comment = ''
        if command=='bdist_egg' and self.distribution.has_ext_modules():
            comment = "built on %s" % platform.platform(terse=1)
        data = {
            ':action':'file_upload',
            'protcol_version':'1',
            'name':self.distribution.get_name(),
            'version':self.distribution.get_version(),
            'content':(basename,content),
            'filetype':command,
            'pyversion':pyversion,
            'md5_digest':md5(content).hexdigest(),
            }
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment

        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())

        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema

        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return

        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75
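Note: finalize_options() above reads login details from ~/.pypirc; a minimal file of the shape it expects (the values below are placeholders) looks like:

[server-login]
repository = http://pypi.python.org/pypi
username = your-pypi-username
password = your-pypi-password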
246
setuptools-0.6c15dev.egg/setuptools/depends.py
Normal file
@ -0,0 +1,246 @@
from __future__ import generators
import sys, imp, marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion, LooseVersion

__all__ = [
    'Require', 'find_module', 'get_module_constant', 'extract_constant'
]

class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(self,name,requested_version,module,homepage='',
            attribute=None,format=None
    ):

        if format is None and requested_version is not None:
            format = StrictVersion

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                attribute = '__version__'

        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name,self.requested_version)
        return self.name

    def version_ok(self,version):
        """Is 'version' sufficiently up-to-date?"""
        return self.attribute is None or self.format is None or \
            str(version)!="unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):

        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            try:
                f,p,i = find_module(self.module,paths)
                if f: f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module,self.attribute,default,paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self,paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self,paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(version)

def _iter_code(code):

    """Yield '(op,arg)' pair for each operation in code object 'code'"""

    from array import array
    from dis import HAVE_ARGUMENT, EXTENDED_ARG

    bytes = array('b',code.co_code)
    eof = len(code.co_code)

    ptr = 0
    extended_arg = 0

    while ptr<eof:

        op = bytes[ptr]

        if op>=HAVE_ARGUMENT:

            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
            ptr += 3

            if op==EXTENDED_ARG:
                extended_arg = arg * 65536L
                continue

        else:
            arg = None
            ptr += 1

        yield op,arg

def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""

    parts = module.split('.')

    while parts:
        part = parts.pop(0)
        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)

        if kind==PKG_DIRECTORY:
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            raise ImportError("Can't find %r in %s" % (parts,module))

    return info

def get_module_constant(module, symbol, default=-1, paths=None):

    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix,mode,kind) = find_module(module,paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind==PY_COMPILED:
            f.read(8)   # skip magic & date
            code = marshal.load(f)
        elif kind==PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind==PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module,f,path,(suffix,mode,kind))
            return getattr(sys.modules[module],symbol,None)

    finally:
        if f:
            f.close()

    return extract_constant(code,symbol,default)

def extract_constant(code,symbol,default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """

    if symbol not in code.co_names:
        # name's not there, can't possibly be an assigment
        return None

    name_idx = list(code.co_names).index(symbol)

    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for op, arg in _iter_code(code):

        if op==LOAD_CONST:
            const = code.co_consts[arg]
        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
            return const
        else:
            const = default

if sys.platform.startswith('java') or sys.platform == 'cli':
    # XXX it'd be better to test assertions about bytecode instead...
    del extract_constant, get_module_constant
    __all__.remove('extract_constant')
    __all__.remove('get_module_constant')
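Note: Require above describes a build/install prerequisite that can be probed without importing the target package; a small usage sketch (the distribution, module, and version names are illustrative):

from setuptools.depends import Require

# Describe a prerequisite: distribution "SomeLib", importable as 'somelib',
# carrying its version in somelib.__version__ (extracted via extract_constant).
dep = Require('SomeLib', '1.0', 'somelib')
print dep.full_name(), dep.is_present(), dep.is_current()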
861
setuptools-0.6c15dev.egg/setuptools/dist.py
Normal file
861
setuptools-0.6c15dev.egg/setuptools/dist.py
Normal file
@ -0,0 +1,861 @@
|
|||||||
|
__all__ = ['Distribution']
|
||||||
|
|
||||||
|
from distutils.core import Distribution as _Distribution
|
||||||
|
from setuptools.depends import Require
|
||||||
|
from setuptools.command.install import install
|
||||||
|
from setuptools.command.sdist import sdist
|
||||||
|
from setuptools.command.install_lib import install_lib
|
||||||
|
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
|
||||||
|
from distutils.errors import DistutilsSetupError
|
||||||
|
import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
|
||||||
|
import os, distutils.log, re
|
||||||
|
|
||||||
|
def _get_unpatched(cls):
|
||||||
|
"""Protect against re-patching the distutils if reloaded
|
||||||
|
|
||||||
|
Also ensures that no other distutils extension monkeypatched the distutils
|
||||||
|
first.
|
||||||
|
"""
|
||||||
|
while cls.__module__.startswith('setuptools'):
|
||||||
|
cls, = cls.__bases__
|
||||||
|
if not cls.__module__.startswith('distutils'):
|
||||||
|
raise AssertionError(
|
||||||
|
"distutils has already been patched by %r" % cls
|
||||||
|
)
|
||||||
|
return cls
|
||||||
|
|
||||||
|
_Distribution = _get_unpatched(_Distribution)

sequence = tuple, list

def check_importable(dist, attr, value):
    try:
        ep = pkg_resources.EntryPoint.parse('x='+value)
        assert not ep.extras
    except (TypeError,ValueError,AttributeError,AssertionError):
        raise DistutilsSetupError(
            "%r must be importable 'module:attrs' string (got %r)"
            % (attr,value)
        )

def assert_string_list(dist, attr, value):
    """Verify that value is a string list or None"""
    try:
        assert ''.join(value)!=value
    except (TypeError,ValueError,AttributeError,AssertionError):
        raise DistutilsSetupError(
            "%r must be a list of strings (got %r)" % (attr,value)
        )

def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    assert_string_list(dist,attr,value)
    for nsp in value:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        if '.' in nsp:
            parent = '.'.join(nsp.split('.')[:-1])
            if parent not in value:
                distutils.log.warn(
                    "WARNING: %r is declared as a package namespace, but %r"
                    " is not: please correct this in setup.py", nsp, parent
                )

def check_extras(dist, attr, value):
    """Verify that extras_require mapping is valid"""
    try:
        for k,v in value.items():
            list(pkg_resources.parse_requirements(v))
    except (TypeError,ValueError,AttributeError):
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        )

def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        raise DistutilsSetupError(
            "%r must be a boolean value (got %r)" % (attr,value)
        )

def check_requirements(dist, attr, value):
    """Verify that install_requires is a valid requirements list"""
    try:
        list(pkg_resources.parse_requirements(value))
    except (TypeError,ValueError):
        raise DistutilsSetupError(
            "%r must be a string or list of strings "
            "containing valid project/version requirement specifiers" % (attr,)
        )

def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError, e:
        raise DistutilsSetupError(e)

def check_test_suite(dist, attr, value):
    if not isinstance(value,basestring):
        raise DistutilsSetupError("test_suite must be a string")

def check_package_data(dist, attr, value):
    """Verify that value is a dictionary of package names to glob lists"""
    if isinstance(value,dict):
        for k,v in value.items():
            if not isinstance(k,str): break
            try: iter(v)
            except TypeError:
                break
        else:
            return
    raise DistutilsSetupError(
        attr+" must be a dictionary mapping package names to lists of "
        "wildcard patterns"
    )

def check_packages(dist, attr, value):
    for pkgname in value:
        if not re.match(r'\w+(\.\w+)*', pkgname):
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only"
                ".-separated package names in setup.py", pkgname
            )

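The validators above are not called directly by user code: setuptools registers each of them against a setup() keyword in the 'distutils.setup_keywords' entry-point group, and Distribution.finalize_options() (below) loads and calls them as validator(dist, attr, value). A minimal sketch of that calling convention, using a throwaway stand-in object (FakeDist is invented here purely for illustration):

    # Hedged illustration only: exercising one of the validators by hand.
    from distutils.errors import DistutilsSetupError

    class FakeDist(object):
        pass                    # stand-in; assert_bool never inspects it

    try:
        assert_bool(FakeDist(), 'zip_safe', 'yes')   # 'yes' is not a boolean
    except DistutilsSetupError, err:
        print "rejected:", err
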
class Distribution(_Distribution):
    """Distribution with support for features, tests, and package data

    This is an enhanced version of 'distutils.dist.Distribution' that
    effectively adds the following new optional keyword arguments to 'setup()':

     'install_requires' -- a string or sequence of strings specifying project
        versions that the distribution requires when installed, in the format
        used by 'pkg_resources.require()'.  They will be installed
        automatically when the package is installed.  If you wish to use
        packages that are not available in PyPI, or want to give your users an
        alternate download location, you can add a 'find_links' option to the
        '[easy_install]' section of your project's 'setup.cfg' file, and then
        setuptools will scan the listed web pages for links that satisfy the
        requirements.

     'extras_require' -- a dictionary mapping names of optional "extras" to the
        additional requirement(s) that using those extras incurs. For example,
        this::

            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])

        indicates that the distribution can optionally provide an extra
        capability called "reST", but it can only be used if docutils and
        reSTedit are installed.  If the user installs your package using
        EasyInstall and requests one of your extras, the corresponding
        additional requirements will be installed if needed.

     'features' -- a dictionary mapping option names to 'setuptools.Feature'
        objects.  Features are a portion of the distribution that can be
        included or excluded based on user options, inter-feature dependencies,
        and availability on the current system.  Excluded features are omitted
        from all setup commands, including source and binary distributions, so
        you can create multiple distributions from the same source tree.
        Feature names should be valid Python identifiers, except that they may
        contain the '-' (minus) sign.  Features can be included or excluded
        via the command line options '--with-X' and '--without-X', where 'X' is
        the name of the feature.  Whether a feature is included by default, and
        whether you are allowed to control this from the command line, is
        determined by the Feature object.  See the 'Feature' class for more
        information.

     'test_suite' -- the name of a test suite to run for the 'test' command.
        If the user runs 'python setup.py test', the package will be installed,
        and the named test suite will be run.  The format is the same as
        would be used on a 'unittest.py' command line.  That is, it is the
        dotted name of an object to import and call to generate a test suite.

     'package_data' -- a dictionary mapping package names to lists of filenames
        or globs to use to find data files contained in the named packages.
        If the dictionary has filenames or globs listed under '""' (the empty
        string), those names will be searched for in every package, in addition
        to any names for the specific package.  Data files found using these
        names/globs will be installed along with the package, in the same
        location as the package.  Note that globs are allowed to reference
        the contents of non-package subdirectories, as long as you use '/' as
        a path separator.  (Globs are automatically converted to
        platform-specific paths at runtime.)

    In addition to these new keywords, this class also has several new methods
    for manipulating the distribution's contents.  For example, the 'include()'
    and 'exclude()' methods can be thought of as in-place add and subtract
    commands that add or remove packages, modules, extensions, and so on from
    the distribution.  They are used by the feature subsystem to configure the
    distribution for the included and excluded features.
    """

    _patched_dist = None

    def patch_missing_pkg_info(self, attrs):
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        key = pkg_resources.safe_name(str(attrs['name'])).lower()
        dist = pkg_resources.working_set.by_key.get(key)
        if dist is not None and not dist.has_metadata('PKG-INFO'):
            dist._version = pkg_resources.safe_version(str(attrs['version']))
            self._patched_dist = dist

    def __init__ (self, attrs=None):
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        self.require_features = []
        self.features = {}
        self.dist_files = []
        self.patch_missing_pkg_info(attrs)
        # Make sure we have any eggs needed to interpret 'attrs'
        if attrs is not None:
            self.dependency_links = attrs.pop('dependency_links', [])
            assert_string_list(self,'dependency_links',self.dependency_links)
        if attrs and 'setup_requires' in attrs:
            self.fetch_build_eggs(attrs.pop('setup_requires'))
        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            if not hasattr(self,ep.name):
                setattr(self,ep.name,None)
        _Distribution.__init__(self,attrs)
        if isinstance(self.metadata.version, (int,long,float)):
            # Some people apparently take "version number" too literally :)
            self.metadata.version = str(self.metadata.version)

    def parse_command_line(self):
        """Process features after parsing command line options"""
        result = _Distribution.parse_command_line(self)
        if self.features:
            self._finalize_features()
        return result

    def _feature_attrname(self,name):
        """Convert feature name to corresponding option attribute name"""
        return 'with_'+name.replace('-','_')

    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        from pkg_resources import working_set, parse_requirements
        for dist in working_set.resolve(
            parse_requirements(requires), installer=self.fetch_build_egg
        ):
            working_set.add(dist)

    def finalize_options(self):
        _Distribution.finalize_options(self)
        if self.features:
            self._set_global_opts_from_features()

        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            value = getattr(self,ep.name,None)
            if value is not None:
                ep.require(installer=self.fetch_build_egg)
                ep.load()(self, ep.name, value)

    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""
        try:
            cmd = self._egg_fetcher
        except AttributeError:
            from setuptools.command.easy_install import easy_install
            dist = self.__class__({'script_args':['easy_install']})
            dist.parse_config_files()
            opts = dist.get_option_dict('easy_install')
            keep = (
                'find_links', 'site_dirs', 'index_url', 'optimize',
                'site_dirs', 'allow_hosts'
            )
            for key in opts.keys():
                if key not in keep:
                    del opts[key]   # don't use any other settings
            if self.dependency_links:
                links = self.dependency_links[:]
                if 'find_links' in opts:
                    links = opts['find_links'][1].split() + links
                opts['find_links'] = ('setup', links)
            cmd = easy_install(
                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
                always_copy=False, build_directory=None, editable=False,
                upgrade=False, multi_version=True, no_report = True
            )
            cmd.ensure_finalized()
            self._egg_fetcher = cmd
        return cmd.easy_install(req)

    def _set_global_opts_from_features(self):
        """Add --with-X/--without-X options based on optional features"""

        go = []
        no = self.negative_opt.copy()

        for name,feature in self.features.items():
            self._set_feature(name,None)
            feature.validate(self)

            if feature.optional:
                descr = feature.description
                incdef = ' (default)'
                excdef=''
                if not feature.include_by_default():
                    excdef, incdef = incdef, excdef

                go.append(('with-'+name, None, 'include '+descr+incdef))
                go.append(('without-'+name, None, 'exclude '+descr+excdef))
                no['without-'+name] = 'with-'+name

        self.global_options = self.feature_options = go + self.global_options
        self.negative_opt = self.feature_negopt = no

    def _finalize_features(self):
        """Add/remove features and resolve dependencies between them"""

        # First, flag all the enabled items (and thus their dependencies)
        for name,feature in self.features.items():
            enabled = self.feature_is_included(name)
            if enabled or (enabled is None and feature.include_by_default()):
                feature.include_in(self)
                self._set_feature(name,1)

        # Then disable the rest, so that off-by-default features don't
        # get flagged as errors when they're required by an enabled feature
        for name,feature in self.features.items():
            if not self.feature_is_included(name):
                feature.exclude_from(self)
                self._set_feature(name,0)

    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        if command in self.cmdclass:
            return self.cmdclass[command]

        for ep in pkg_resources.iter_entry_points('distutils.commands',command):
            ep.require(installer=self.fetch_build_egg)
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)

    def print_commands(self):
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load(False) # don't require extras, we're not running
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)

    def _set_feature(self,name,status):
        """Set feature's inclusion status"""
        setattr(self,self._feature_attrname(name),status)

    def feature_is_included(self,name):
        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
        return getattr(self,self._feature_attrname(name))

    def include_feature(self,name):
        """Request inclusion of feature named 'name'"""

        if self.feature_is_included(name)==0:
            descr = self.features[name].description
            raise DistutilsOptionError(
                descr + " is required, but was excluded or is not available"
            )
        self.features[name].include_in(self)
        self._set_feature(name,1)

    def include(self,**attrs):
        """Add items to distribution that are named in keyword arguments

        For example, 'dist.include(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.

        Currently, this method only supports inclusion for attributes that are
        lists or tuples.  If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        for k,v in attrs.items():
            include = getattr(self, '_include_'+k, None)
            if include:
                include(v)
            else:
                self._include_misc(k,v)

    def exclude_package(self,package):
        """Remove packages, modules, and extensions in named package"""

        pfx = package+'.'
        if self.packages:
            self.packages = [
                p for p in self.packages
                if p!=package and not p.startswith(pfx)
            ]

        if self.py_modules:
            self.py_modules = [
                p for p in self.py_modules
                if p!=package and not p.startswith(pfx)
            ]

        if self.ext_modules:
            self.ext_modules = [
                p for p in self.ext_modules
                if p.name!=package and not p.name.startswith(pfx)
            ]

    def has_contents_for(self,package):
        """Return true if 'exclude_package(package)' would do something"""

        pfx = package+'.'

        for p in self.iter_distribution_names():
            if p==package or p.startswith(pfx):
                return True

    def _exclude_misc(self,name,value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        if not isinstance(value,sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self,name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is not None and not isinstance(old,sequence):
            raise DistutilsSetupError(
                name+": this setting cannot be changed via include/exclude"
            )
        elif old:
            setattr(self,name,[item for item in old if item not in value])

    def _include_misc(self,name,value):
        """Handle 'include()' for list/tuple attrs without a special handler"""

        if not isinstance(value,sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list (%r)" % (name, value)
            )
        try:
            old = getattr(self,name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is None:
            setattr(self,name,value)
        elif not isinstance(old,sequence):
            raise DistutilsSetupError(
                name+": this setting cannot be changed via include/exclude"
            )
        else:
            setattr(self,name,old+[item for item in value if item not in old])

    def exclude(self,**attrs):
        """Remove items from distribution that are named in keyword arguments

        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
        the distribution's 'py_modules' attribute.  Excluding packages uses
        the 'exclude_package()' method, so all of the package's contained
        packages, modules, and extensions are also excluded.

        Currently, this method only supports exclusion from attributes that are
        lists or tuples.  If you need to add support for excluding from other
        attributes in this or a subclass, you can add an '_exclude_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
        handle whatever special exclusion logic is needed.
        """
        for k,v in attrs.items():
            exclude = getattr(self, '_exclude_'+k, None)
            if exclude:
                exclude(v)
            else:
                self._exclude_misc(k,v)

    def _exclude_packages(self,packages):
        if not isinstance(packages,sequence):
            raise DistutilsSetupError(
                "packages: setting must be a list or tuple (%r)" % (packages,)
            )
        map(self.exclude_package, packages)

    def _parse_command_opts(self, parser, args):
        # Remove --with-X/--without-X options when processing command args
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt

        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src,alias = aliases[command]
            del aliases[command]    # ensure each alias can expand only once!
            import shlex
            args[:1] = shlex.split(alias,True)
            command = args[0]

        nargs = _Distribution._parse_command_opts(self, parser, args)

        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class,'command_consumes_arguments',None):
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []

        return nargs

    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options

        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores.  If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.

        Note that options provided by config files are intentionally excluded.
        """

        d = {}

        for cmd,opts in self.command_options.items():

            for opt,(src,val) in opts.items():

                if src != "command line":
                    continue

                opt = opt.replace('_','-')

                if val==0:
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj,'negative_opt',{}))
                    for neg,pos in neg_opt.items():
                        if pos==opt:
                            opt=neg
                            val=None
                            break
                    else:
                        raise AssertionError("Shouldn't be able to get here")

                elif val==1:
                    val = None

                d.setdefault(cmd,{})[opt] = val

        return d

    def iter_distribution_names(self):
        """Yield all packages, modules, and extension names in distribution"""

        for pkg in self.packages or ():
            yield pkg

        for module in self.py_modules or ():
            yield module

        for ext in self.ext_modules or ():
            if isinstance(ext,tuple):
                name, buildinfo = ext
            else:
                name = ext.name
            if name.endswith('module'):
                name = name[:-6]
            yield name

# Install it throughout the distutils
for module in distutils.dist, distutils.core, distutils.cmd:
    module.Distribution = Distribution

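For readers skimming the diff, the keywords documented in the Distribution docstring above all arrive through an ordinary setup() call. A hypothetical setup.py exercising them (project name, module names, and requirement strings are invented for illustration):

    from setuptools import setup

    setup(
        name="example-project",                       # hypothetical project
        version="0.1",
        packages=["example"],
        install_requires=["ExampleDep>=1.0"],         # checked by check_requirements()
        extras_require={"docs": ["docutils>=0.3"]},   # checked by check_extras()
        package_data={"example": ["*.dat"]},          # checked by check_package_data()
        test_suite="example.tests.test_suite",        # checked by check_test_suite()
    )
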
class Feature:
    """A subset of the distribution that can be excluded if unneeded/wanted

    Features are created using these keyword arguments:

      'description' -- a short, human readable description of the feature, to
         be used in error messages, and option help messages.

      'standard' -- if true, the feature is included by default if it is
         available on the current system.  Otherwise, the feature is only
         included if requested via a command line '--with-X' option, or if
         another included feature requires it.  The default setting is 'False'.

      'available' -- if true, the feature is available for installation on the
         current system.  The default setting is 'True'.

      'optional' -- if true, the feature's inclusion can be controlled from the
         command line, using the '--with-X' or '--without-X' options.  If
         false, the feature's inclusion status is determined automatically,
         based on 'available', 'standard', and whether any other feature
         requires it.  The default setting is 'True'.

      'require_features' -- a string or sequence of strings naming features
         that should also be included if this feature is included.  Defaults to
         empty list.  May also contain 'Require' objects that should be
         added/removed from the distribution.

      'remove' -- a string or list of strings naming packages to be removed
         from the distribution if this feature is *not* included.  If the
         feature *is* included, this argument is ignored.  This argument exists
         to support removing features that "crosscut" a distribution, such as
         defining a 'tests' feature that removes all the 'tests' subpackages
         provided by other features.  The default for this argument is an empty
         list.  (Note: the named package(s) or modules must exist in the base
         distribution when the 'setup()' function is initially called.)

      other keywords -- any other keyword arguments are saved, and passed to
         the distribution's 'include()' and 'exclude()' methods when the
         feature is included or excluded, respectively.  So, for example, you
         could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
         added or removed from the distribution as appropriate.

    A feature must include at least one 'requires', 'remove', or other
    keyword argument.  Otherwise, it can't affect the distribution in any way.
    Note also that you can subclass 'Feature' to create your own specialized
    feature types that modify the distribution in other ways when included or
    excluded.  See the docstrings for the various methods here for more detail.
    Aside from the methods, the only feature attributes that distributions look
    at are 'description' and 'optional'.
    """
    def __init__(self, description, standard=False, available=True,
        optional=True, require_features=(), remove=(), **extras
    ):

        self.description = description
        self.standard = standard
        self.available = available
        self.optional = optional
        if isinstance(require_features,(str,Require)):
            require_features = require_features,

        self.require_features = [
            r for r in require_features if isinstance(r,str)
        ]
        er = [r for r in require_features if not isinstance(r,str)]
        if er: extras['require_features'] = er

        if isinstance(remove,str):
            remove = remove,
        self.remove = remove
        self.extras = extras

        if not remove and not require_features and not extras:
            raise DistutilsSetupError(
                "Feature %s: must define 'require_features', 'remove', or at least one"
                " of 'packages', 'py_modules', etc."
            )

    def include_by_default(self):
        """Should this feature be included by default?"""
        return self.available and self.standard

    def include_in(self,dist):

        """Ensure feature and its requirements are included in distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  Note that this method may be called more than once
        per feature, and so should be idempotent.

        """

        if not self.available:
            raise DistutilsPlatformError(
                self.description+" is required,"
                "but is not available on this platform"
            )

        dist.include(**self.extras)

        for f in self.require_features:
            dist.include_feature(f)

    def exclude_from(self,dist):

        """Ensure feature is excluded from distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  This method will be called at most once per
        feature, and only after all included features have been asked to
        include themselves.
        """

        dist.exclude(**self.extras)

        if self.remove:
            for item in self.remove:
                dist.exclude_package(item)

    def validate(self,dist):

        """Verify that feature makes sense in context of distribution

        This method is called by the distribution just before it parses its
        command line.  It checks to ensure that the 'remove' attribute, if any,
        contains only valid package/module names that are present in the base
        distribution when 'setup()' is called.  You may override it in a
        subclass to perform any other required validation of the feature
        against a target distribution.
        """

        for item in self.remove:
            if not dist.has_contents_for(item):
                raise DistutilsSetupError(
                    "%s wants to be able to remove %s, but the distribution"
                    " doesn't contain any packages or modules under %s"
                    % (self.description, item, item)
                )

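A Feature like the ones defined above is declared through the 'features' keyword to setup(); the matching --with-NAME/--without-NAME options are generated from it by _set_global_opts_from_features(). A hedged sketch of such a declaration (the feature name and module name are invented for illustration):

    from setuptools import setup, Feature

    setup(
        name="example-project",      # hypothetical
        version="0.1",
        packages=["example"],
        features={
            # 'python setup.py --without-speedups build' would omit this part
            "speedups": Feature(
                "optional C speedups",
                standard=True,                    # included by default
                py_modules=["example_speedups"],  # passed on to dist.include()
            ),
        },
    )
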
35
setuptools-0.6c15dev.egg/setuptools/extension.py
Normal file
35
setuptools-0.6c15dev.egg/setuptools/extension.py
Normal file
@ -0,0 +1,35 @@
from distutils.core import Extension as _Extension
from dist import _get_unpatched
_Extension = _get_unpatched(_Extension)

try:
    from Pyrex.Distutils.build_ext import build_ext
except ImportError:
    have_pyrex = False
else:
    have_pyrex = True


class Extension(_Extension):
    """Extension that uses '.c' files in place of '.pyx' files"""

    if not have_pyrex:
        # convert .pyx extensions to .c
        def __init__(self,*args,**kw):
            _Extension.__init__(self,*args,**kw)
            sources = []
            for s in self.sources:
                if s.endswith('.pyx'):
                    sources.append(s[:-3]+'c')
                else:
                    sources.append(s)
            self.sources = sources

class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""

import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
if 'distutils.command.build_ext' in sys.modules:
    sys.modules['distutils.command.build_ext'].Extension = Extension
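In short, when Pyrex is not importable the Extension subclass above rewrites '.pyx' sources to the pre-generated '.c' files expected to ship in the sdist. A small hypothetical illustration (module and file names are invented):

    from setuptools import Extension

    ext = Extension("example.fast", sources=["example/fast.pyx"])
    # With Pyrex absent, the constructor has rewritten the source list:
    print ext.sources        # -> ['example/fast.c']
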
779
setuptools-0.6c15dev.egg/setuptools/package_index.py
Normal file
779
setuptools-0.6c15dev.egg/setuptools/package_index.py
Normal file
@ -0,0 +1,779 @@
"""PyPI and direct package downloading"""
import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO
import httplib, urllib
from pkg_resources import *
from distutils import log
from distutils.errors import DistutilsError
try:
    from hashlib import md5
except ImportError:
    from md5 import md5
from fnmatch import translate
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
PYPI_MD5 = re.compile(
    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
    'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()

__all__ = [
    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
    'interpret_distro_name',
]

def parse_bdist_wininst(name):
    """Return (base,pyversion) or (None,None) for possible .exe name"""

    lower = name.lower()
    base, py_ver = None, None

    if lower.endswith('.exe'):
        if lower.endswith('.win32.exe'):
            base = name[:-10]
        elif lower.startswith('.win32-py',-16):
            py_ver = name[-7:-4]
            base = name[:-16]

    return base,py_ver

def egg_info_for_url(url):
    scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
    base = urllib2.unquote(path.split('/')[-1])
    if server=='sourceforge.net' and base=='download':    # XXX Yuck
        base = urllib2.unquote(path.split('/')[-2])
    if '#' in base: base, fragment = base.split('#',1)
    return base,fragment

def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
    for dist in distros_for_location(url, base, metadata): yield dist
    if fragment:
        match = EGG_FRAGMENT.match(fragment)
        if match:
            for dist in interpret_distro_name(
                url, match.group(1), metadata, precedence = CHECKOUT_DIST
            ):
                yield dist

def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        basename = basename[:-4]    # strip the .zip
    if basename.endswith('.egg') and '-' in basename:
        # only one, unambiguous interpretation
        return [Distribution.from_location(location, basename, metadata)]
    if basename.endswith('.exe'):
        win_base, py_ver = parse_bdist_wininst(basename)
        if win_base is not None:
            return interpret_distro_name(
                location, win_base, metadata, py_ver, BINARY_DIST, "win32"
            )
    # Try source distro extensions (.zip, .tgz, etc.)
    #
    for ext in EXTENSIONS:
        if basename.endswith(ext):
            basename = basename[:-len(ext)]
            return interpret_distro_name(location, basename, metadata)
    return []  # no extension matched

def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects based on a filename"""
    return distros_for_location(
        normalize_path(filename), os.path.basename(filename), metadata
    )


def interpret_distro_name(location, basename, metadata,
    py_version=None, precedence=SOURCE_DIST, platform=None
):
    """Generate alternative interpretations of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
    # Generate alternative interpretations of a source distro name
    # Because some packages are ambiguous as to name/versions split
    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version").  In practice,
    # the spurious interpretations should be ignored, because in the event
    # there's also an "adns" package, the spurious "python-1.1.0" version will
    # compare lower than any numeric version number, and is therefore unlikely
    # to match a request for it.  It's still a potential problem, though, and
    # in the long run PyPI and the distutils should go for "safe" names and
    # versions in distribution archive names (sdist and bdist).

    parts = basename.split('-')
    if not py_version:
        for i,p in enumerate(parts[2:]):
            if len(p)==5 and p.startswith('py2.'):
                return # It's a bdist_dumb, not an sdist -- bail out

    for p in range(1,len(parts)+1):
        yield Distribution(
            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
            py_version=py_version, precedence = precedence,
            platform = platform
        )

REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting

def find_external_links(url, page):
    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""

    for match in REL.finditer(page):
        tag, rel = match.groups()
        rels = map(str.strip, rel.lower().split(','))
        if 'homepage' in rels or 'download' in rels:
            for match in HREF.finditer(tag):
                yield urlparse.urljoin(url, htmldecode(match.group(1)))

    for tag in ("<th>Home Page", "<th>Download URL"):
        pos = page.find(tag)
        if pos!=-1:
            match = HREF.search(page,pos)
            if match:
                yield urlparse.urljoin(url, htmldecode(match.group(1)))

user_agent = "Python-urllib/%s setuptools/%s" % (
    urllib2.__version__, require('setuptools')[0].version
)

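distros_for_url() above is the routine that turns a raw link into candidate distributions, and interpret_distro_name() deliberately yields every plausible name/version split, as its comment explains. A quick hypothetical illustration of that ambiguity, using the example name from the comment:

    for d in interpret_distro_name('http://example.com/', 'adns-python-1.1.0', None):
        print d.project_name
    # -> 'adns', 'adns-python', 'adns-python-1.1.0', each paired with the
    #    leftover text as its version; the spurious splits lose any version
    #    comparison, as noted above.
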
class PackageIndex(Environment):
    """A distribution index that scans web pages for download URLs"""

    def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',),
        *args, **kw
    ):
        Environment.__init__(self,*args,**kw)
        self.index_url = index_url + "/"[:not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        self.allows = re.compile('|'.join(map(translate,hosts))).match
        self.to_scan = []

    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        if not URL_SCHEME(url):
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)

        if dists or not retrieve or url in self.fetched_urls:
            map(self.add, dists)
            return  # don't need the actual page

        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return

        self.info("Reading %s", url)
        self.fetched_urls[url] = True   # prevent multiple fetch attempts
        f = self.open_url(url, "Download error: %s -- Some packages may not be found!")
        if f is None: return
        self.fetched_urls[f.url] = True
        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()   # not html, we can't process it
            return

        base = f.url     # handle redirects
        page = f.read()
        f.close()
        if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
            page = self.process_index(url, page)
        for match in HREF.finditer(page):
            link = urlparse.urljoin(base, htmldecode(match.group(1)))
            self.process_url(link)

    def process_filename(self, fn, nested=False):
        # process filenames or directories
        if not os.path.exists(fn):
            self.warn("Not found: %s", fn)
            return

        if os.path.isdir(fn) and not nested:
            path = os.path.realpath(fn)
            for item in os.listdir(path):
                self.process_filename(os.path.join(path,item), True)

        dists = distros_for_filename(fn)
        if dists:
            self.debug("Found: %s", fn)
            map(self.add, dists)

    def url_ok(self, url, fatal=False):
        s = URL_SCHEME(url)
        if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]):
            return True
        msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
        if fatal:
            raise DistutilsError(msg % url)
        else:
            self.warn(msg, url)

    def scan_egg_links(self, search_path):
        for item in search_path:
            if os.path.isdir(item):
                for entry in os.listdir(item):
                    if entry.endswith('.egg-link'):
                        self.scan_egg_link(item, entry)

    def scan_egg_link(self, path, entry):
        lines = filter(None, map(str.strip, file(os.path.join(path, entry))))
        if len(lines)==2:
            for dist in find_distributions(os.path.join(path, lines[0])):
                dist.location = os.path.join(path, *lines)
                dist.precedence = SOURCE_DIST
                self.add(dist)

    def process_index(self,url,page):
        """Process the contents of a PyPI page"""
        def scan(link):
            # Process a URL to see if it's for a package page
            if link.startswith(self.index_url):
                parts = map(
                    urllib2.unquote, link[len(self.index_url):].split('/')
                )
                if len(parts)==2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None

        # process an index page into the package-page index
        for match in HREF.finditer(page):
            scan( urlparse.urljoin(url, htmldecode(match.group(1))) )

        pkg, ver = scan(url)   # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    if ver:
                        new_url+='#egg=%s-%s' % (pkg,ver)
                    else:
                        self.need_version_info(url)
                self.scan_url(new_url)

            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
            )
        else:
            return ""   # no sense double-scanning non-package pages

    def need_version_info(self, url):
        self.scan_all(
            "Page at %s links to .py file(s) without version info; an index "
            "scan is required.", url
        )

    def scan_all(self, msg=None, *args):
        if self.index_url not in self.fetched_urls:
            if msg: self.warn(msg,*args)
            self.info(
                "Scanning index of all packages (this may take a while)"
            )
        self.scan_url(self.index_url)

    def find_packages(self, requirement):
        self.scan_url(self.index_url + requirement.unsafe_name+'/')

        if not self.package_pages.get(requirement.key):
            # Fall back to safe version of the name
            self.scan_url(self.index_url + requirement.project_name+'/')

        if not self.package_pages.get(requirement.key):
            # We couldn't find the target package, so search the index page too
            self.not_found_in_index(requirement)

        for url in list(self.package_pages.get(requirement.key,())):
            # scan each page that might be related to the desired package
            self.scan_url(url)

    def obtain(self, requirement, installer=None):
        self.prescan(); self.find_packages(requirement)
        for dist in self[requirement.key]:
            if dist in requirement:
                return dist
            self.debug("%s does not match %s", requirement, dist)
        return super(PackageIndex, self).obtain(requirement,installer)

    def check_md5(self, cs, info, filename, tfp):
        if re.match('md5=[0-9a-f]{32}$', info):
            self.debug("Validating md5 checksum for %s", filename)
            if cs.hexdigest()!=info[4:]:
                tfp.close()
                os.unlink(filename)
                raise DistutilsError(
                    "MD5 validation failed for "+os.path.basename(filename)+
                    "; possible download problem?"
                )

    def add_find_links(self, urls):
        """Add `urls` to the list that will be prescanned for searches"""
        for url in urls:
            if (
                self.to_scan is None        # if we have already "gone online"
                or not URL_SCHEME(url)      # or it's a local file/directory
                or url.startswith('file:')
                or list(distros_for_url(url))   # or a direct package link
            ):
                # then go ahead and process it now
                self.scan_url(url)
            else:
                # otherwise, defer retrieval till later
                self.to_scan.append(url)

    def prescan(self):
        """Scan urls scheduled for prescanning (e.g. --find-links)"""
        if self.to_scan:
            map(self.scan_url, self.to_scan)
        self.to_scan = None     # from now on, go ahead and process immediately

    def not_found_in_index(self, requirement):
        if self[requirement.key]:   # we've seen at least one distro
            meth, msg = self.info, "Couldn't retrieve index page for %r"
        else:   # no distros seen for this name, might be misspelled
            meth, msg = (self.warn,
                "Couldn't find index page for %r (maybe misspelled?)")
        meth(msg, requirement.unsafe_name)
        self.scan_all()

    def download(self, spec, tmpdir):
        """Locate and/or download `spec` to `tmpdir`, returning a local path

        `spec` may be a ``Requirement`` object, or a string containing a URL,
        an existing local filename, or a project/version requirement spec
        (i.e. the string form of a ``Requirement`` object).  If it is the URL
        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
        automatically created alongside the downloaded file.

        If `spec` is a ``Requirement`` object or a string containing a
        project/version requirement spec, this method returns the location of
        a matching distribution (possibly after downloading it to `tmpdir`).
        If `spec` is a locally existing file or directory name, it is simply
        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
        of `tmpdir`, and the local filename is returned.  Various errors may be
        raised if a problem occurs during downloading.
        """
        if not isinstance(spec,Requirement):
            scheme = URL_SCHEME(spec)
            if scheme:
                # It's a url, download it to tmpdir
                found = self._download_url(scheme.group(1), spec, tmpdir)
                base, fragment = egg_info_for_url(spec)
                if base.endswith('.py'):
                    found = self.gen_setup(found,fragment,tmpdir)
                return found
            elif os.path.exists(spec):
                # Existing file or directory, just return it
                return spec
            else:
                try:
                    spec = Requirement.parse(spec)
                except ValueError:
                    raise DistutilsError(
                        "Not a URL, existing file, or requirement spec: %r" %
                        (spec,)
                    )
        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)

    def fetch_distribution(self,
        requirement, tmpdir, force_scan=False, source=False, develop_ok=False,
        local_index=None,
    ):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages.  If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename.  If no matching distribution is found,
        ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered.  Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}
        dist = None

        def find(env, req):
            # Find a matching distribution; may be called more than once

            for dist in env[req.key]:

                if dist.precedence==DEVELOP_DIST and not develop_ok:
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s",dist)
                        skipped[dist] = 1
                    continue

                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
                    return dist

        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(self, requirement)

        if local_index is not None:
            dist = dist or find(local_index, requirement)

        if dist is None and self.to_scan is not None:
            self.prescan()
            dist = find(self, requirement)

        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(self, requirement)

        if dist is None:
            self.warn(
                "No local packages or download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        else:
            self.info("Best match: %s", dist)
            return dist.clone(location=self.download(dist.location, tmpdir))

    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
        """Obtain a file suitable for fulfilling `requirement`

        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
        backward compatibility, this routine is identical but returns the
        ``location`` of the downloaded distribution instead of a distribution
        object.
        """
        dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
        if dist is not None:
            return dist.location
        return None

    def gen_setup(self, filename, fragment, tmpdir):
        match = EGG_FRAGMENT.match(fragment)
        dists = match and [d for d in
            interpret_distro_name(filename, match.group(1), None) if d.version
        ] or []

        if len(dists)==1:   # unambiguous ``#egg`` fragment
            basename = os.path.basename(filename)

            # Make sure the file has been downloaded to the temp dir.
            if os.path.dirname(filename) != tmpdir:
                dst = os.path.join(tmpdir, basename)
                from setuptools.command.easy_install import samefile
                if not samefile(filename, dst):
                    shutil.copy2(filename, dst)
                    filename=dst

            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
            file.write(
                "from setuptools import setup\n"
                "setup(name=%r, version=%r, py_modules=[%r])\n"
                % (
                    dists[0].project_name, dists[0].version,
                    os.path.splitext(basename)[0]
                )
            )
            file.close()
            return filename

        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment,dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )

    dl_blocksize = 8192
    def _download_to(self, url, filename):
        self.info("Downloading %s", url)
        # Download the file
        fp, tfp, info = None, None, None
        try:
            if '#' in url:
                url, info = url.split('#', 1)
            fp = self.open_url(url)
            if isinstance(fp, urllib2.HTTPError):
                raise DistutilsError(
                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
                )
            cs = md5()
            headers = fp.info()
            blocknum = 0
            bs = self.dl_blocksize
            size = -1
            if "content-length" in headers:
                size = int(headers["Content-Length"])
                self.reporthook(url, filename, blocknum, bs, size)
            tfp = open(filename,'wb')
            while True:
                block = fp.read(bs)
                if block:
                    cs.update(block)
                    tfp.write(block)
                    blocknum += 1
                    self.reporthook(url, filename, blocknum, bs, size)
                else:
                    break
            if info: self.check_md5(cs, info, filename, tfp)
            return headers
        finally:
            if fp: fp.close()
            if tfp: tfp.close()

    def reporthook(self, url, filename, blocknum, blksize, size):
        pass    # no-op

    def open_url(self, url, warning=None):
        if url.startswith('file:'): return local_open(url)
        try:
            return open_with_auth(url)
        except urllib2.HTTPError, v:
            return v
        except urllib2.URLError, v:
            reason = v.reason
        except httplib.HTTPException, v:
            reason = "%s: %s" % (v.__doc__ or v.__class__.__name__, v)
        if warning:
            self.warn(warning, reason)
        else:
            raise DistutilsError("Download error for %s: %s" % (url, reason))

    def _download_url(self, scheme, url, tmpdir):
        # Determine download filename
        #
        name, fragment = egg_info_for_url(url)
        if name:
            while '..' in name:
                name = name.replace('..','.').replace('\\','_')
        else:
            name = "__downloaded__"    # default if URL has no path contents

        if name.endswith('.egg.zip'):
            name = name[:-4]    # strip the extra .zip before download

        filename = os.path.join(tmpdir,name)

        # Download the file
        #
        if scheme=='svn' or scheme.startswith('svn+'):
            return self._download_svn(url, filename)
        elif scheme=='file':
            return urllib2.url2pathname(urlparse.urlparse(url)[2])
        else:
            self.url_ok(url, True)   # raises error if not allowed
            return self._attempt_download(url, filename)

    def scan_url(self, url):
        self.process_url(url, True)

    def _attempt_download(self, url, filename):
        headers = self._download_to(url, filename)
        if 'html' in headers.get('content-type','').lower():
            return self._download_html(url, headers, filename)
        else:
            return filename

    def _download_html(self, url, headers, filename):
        file = open(filename)
        for line in file:
            if line.strip():
                # Check for a subversion index page
                if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
                    # it's a subversion index page:
                    file.close()
                    os.unlink(filename)
                    return self._download_svn(url, filename)
                break   # not an index page
        file.close()
        os.unlink(filename)
        raise DistutilsError("Unexpected HTML page found at "+url)

    def _download_svn(self, url, filename):
        url = url.split('#',1)[0]   # remove any fragment for svn's sake
        self.info("Doing subversion checkout from %s to %s", url, filename)
        os.system("svn checkout -q %s %s" % (url, filename))
        return filename

    def debug(self, msg, *args):
        log.debug(msg, *args)

    def info(self, msg, *args):
        log.info(msg, *args)

    def warn(self, msg, *args):
        log.warn(msg, *args)

# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub

def uchr(c):
    if not isinstance(c, int):
        return c
    if c>255: return unichr(c)
    return chr(c)

def decode_entity(match):
    what = match.group(1)
    if what.startswith('#x'):
        what = int(what[2:], 16)
    elif what.startswith('#'):
        what = int(what[1:])
    else:
        from htmlentitydefs import name2codepoint
        what = name2codepoint.get(what, match.group(0))
    return uchr(what)

def htmldecode(text):
    """Decode HTML entities in the given text."""
    return entity_sub(decode_entity, text)

def open_with_auth(url):
    """Open a urllib2 request, handling HTTP authentication"""

    scheme, netloc, path, params, query, frag = urlparse.urlparse(url)

    if scheme in ('http', 'https'):
        auth, host = urllib.splituser(netloc)
    else:
        auth = None

    if auth:
        auth = "Basic " + urllib2.unquote(auth).encode('base64').strip()
        new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
        request = urllib2.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib2.Request(url)

    request.add_header('User-Agent', user_agent)
    fp = urllib2.urlopen(request)

    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
        if s2==scheme and h2==host:
            fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))

    return fp

def fix_sf_url(url):
    return url      # backward compatibility

def local_open(url):
    """Read a local path, with special support for directories"""
    scheme, server, path, param, query, frag = urlparse.urlparse(url)
    filename = urllib2.url2pathname(path)
    if os.path.isfile(filename):
        return urllib2.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        files = []
        for f in os.listdir(filename):
            if f=='index.html':
                body = open(os.path.join(filename,f),'rb').read()
                break
            elif os.path.isdir(os.path.join(filename,f)):
                f+='/'
            files.append("<a href=%r>%s</a>" % (f,f))
        else:
|
||||||
|
body = ("<html><head><title>%s</title>" % url) + \
|
||||||
|
"</head><body>%s</body></html>" % '\n'.join(files)
|
||||||
|
status, message = 200, "OK"
|
||||||
|
else:
|
||||||
|
status, message, body = 404, "Path not found", "Not found"
|
||||||
|
|
||||||
|
return urllib2.HTTPError(url, status, message,
|
||||||
|
{'content-type':'text/html'}, cStringIO.StringIO(body))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# this line is a kludge to keep the trailing blank lines for pje's editor
|
287
setuptools-0.6c15dev.egg/setuptools/sandbox.py
Normal file
@ -0,0 +1,287 @@
import os, sys, __builtin__, tempfile, operator, pkg_resources
_os = sys.modules[os.name]
_open = open
_file = file

from distutils.errors import DistutilsError
from pkg_resources import working_set

__all__ = [
    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]

def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    old_dir = os.getcwd()
    save_argv = sys.argv[:]
    save_path = sys.path[:]
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    temp_dir = os.path.join(setup_dir,'temp')
    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
    save_tmp = tempfile.tempdir
    save_modules = sys.modules.copy()
    pr_state = pkg_resources.__getstate__()
    try:
        tempfile.tempdir = temp_dir; os.chdir(setup_dir)
        try:
            sys.argv[:] = [setup_script]+list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist:dist.activate())
            DirectorySandbox(setup_dir).run(
                lambda: execfile(
                    "setup.py",
                    {'__file__':setup_script, '__name__':'__main__'}
                )
            )
        except SystemExit, v:
            if v.args and v.args[0]:
                raise
            # Normal exit, just return
    finally:
        pkg_resources.__setstate__(pr_state)
        sys.modules.update(save_modules)
        for key in list(sys.modules):
            if key not in save_modules: del sys.modules[key]
        os.chdir(old_dir)
        sys.path[:] = save_path
        sys.argv[:] = save_argv
        tempfile.tempdir = save_tmp

class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    _active = False

    def __init__(self):
        self._attrs = [
            name for name in dir(_os)
                if not name.startswith('_') and hasattr(self,name)
        ]

    def _copy(self, source):
        for name in self._attrs:
            setattr(os, name, getattr(source,name))

    def run(self, func):
        """Run 'func' under os sandboxing"""
        try:
            self._copy(self)
            __builtin__.file = self._file
            __builtin__.open = self._open
            self._active = True
            return func()
        finally:
            self._active = False
            __builtin__.open = _open
            __builtin__.file = _file
            self._copy(_os)

    def _mk_dual_path_wrapper(name):
        original = getattr(_os,name)
        def wrap(self,src,dst,*args,**kw):
            if self._active:
                src,dst = self._remap_pair(name,src,dst,*args,**kw)
            return original(src,dst,*args,**kw)
        return wrap

    for name in ["rename", "link", "symlink"]:
        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)

    def _mk_single_path_wrapper(name, original=None):
        original = original or getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
            return original(path,*args,**kw)
        return wrap

    _open = _mk_single_path_wrapper('open', _open)
    _file = _mk_single_path_wrapper('file', _file)
    for name in [
        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
        "startfile", "mkfifo", "mknod", "pathconf", "access"
    ]:
        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)

    def _mk_single_with_return(name):
        original = getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
                return self._remap_output(name, original(path,*args,**kw))
            return original(path,*args,**kw)
        return wrap

    for name in ['readlink', 'tempnam']:
        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)

    def _mk_query(name):
        original = getattr(_os,name)
        def wrap(self,*args,**kw):
            retval = original(*args,**kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval
        return wrap

    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os,name): locals()[name] = _mk_query(name)

    def _validate_path(self,path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self,operation,path,*args,**kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self,operation,path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self,operation,src,dst,*args,**kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation+'-from',src,*args,**kw),
            self._remap_input(operation+'-to',dst,*args,**kw)
        )

class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    write_ops = dict.fromkeys([
        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
    ])

    def __init__(self,sandbox):
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox,'')
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        raise SandboxViolation(operation, args, kw)

    def _open(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path,mode,*args,**kw)

    def tmpnam(self): self._violation("tmpnam")

    def _ok(self,path):
        if hasattr(_os,'devnull') and path==_os.devnull: return True
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            if realpath==self._sandbox or realpath.startswith(self._prefix):
                return True
        finally:
            self._active = active

    def _remap_input(self,operation,path,*args,**kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self,operation,src,dst,*args,**kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src,dst)

    def _file(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("file", path, mode, *args, **kw)
        return _file(path,mode,*args,**kw)

    def open(self, file, flags, mode=0777):
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode)
        return _os.open(file,flags,mode)

WRITE_FLAGS = reduce(
    operator.or_, [getattr(_os, a, 0) for a in
        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)

class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    def __str__(self):
        return """SandboxViolation: %s%r %s

The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.

This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand.  Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args

#
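The sandbox module above is what the bundled easy_install uses to confine a package's setup.py to its own build directory. A minimal sketch of driving it directly, assuming the bundled setuptools egg is importable and that ./example/setup.py is a hypothetical path not taken from this commit:

    from setuptools.sandbox import run_setup, SandboxViolation

    # Run a (hypothetical) setup script confined to its own directory;
    # any filesystem write outside ./example raises SandboxViolation.
    try:
        run_setup('./example/setup.py', ['--dry-run', 'install'])
    except SandboxViolation, v:
        print "setup.py tried to modify files outside its build area:", v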
74
setuptools-0.6c15dev.egg/setuptools/site-patch.py
Normal file
@ -0,0 +1,74 @@
def __boot():
    import sys, imp, os, os.path
    PYTHONPATH = os.environ.get('PYTHONPATH')
    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)

    pic = getattr(sys,'path_importer_cache',{})
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)
    #print "searching",stdpath,sys.path

    for item in stdpath:
        if item==mydir or not item:
            continue    # skip if current dir. on Windows, or my own directory
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                stream, path, descr = imp.find_module('site',[item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site',stream,path,descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")

    #print "loaded", __file__

    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp

    oldpos = getattr(sys,'__egginsert',0)   # save old insertion position
    sys.__egginsert = 0                     # and reset the current one

    for item in PYTHONPATH:
        addsitedir(item)

    sys.__egginsert += oldpos   # restore effective old position

    d,nd = makepath(stdpath[0])
    insert_at = None
    new_path = []

    for item in sys.path:
        p,np = makepath(item)

        if np==nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go here
            insert_at = len(new_path)

        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1

    sys.path[:] = new_path

if __name__=='site':
    __boot()
    del __boot
369
setuptools-0.6c15dev.egg/setuptools/tests/__init__.py
Normal file
@ -0,0 +1,369 @@
"""Tests for the 'setuptools' package"""
from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
import distutils.core, distutils.cmd
from distutils.errors import DistutilsOptionError, DistutilsPlatformError
from distutils.errors import DistutilsSetupError
import setuptools, setuptools.dist
from setuptools import Feature
from distutils.core import Extension
extract_constant, get_module_constant = None, None
from setuptools.depends import *
from distutils.version import StrictVersion, LooseVersion
from distutils.util import convert_path
import sys, os.path

def additional_tests():
    import doctest, unittest
    suite = unittest.TestSuite((
        doctest.DocFileSuite('api_tests.txt',
            optionflags=doctest.ELLIPSIS, package=__name__,
            ),
        ))
    if sys.platform == 'win32':
        suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
    return suite

def makeSetup(**args):
    """Return distribution from 'setup(**args)', without executing commands"""

    distutils.core._setup_stop_after = "commandline"

    # Don't let system command line leak into tests!
    args.setdefault('script_args',['install'])

    try:
        return setuptools.setup(**args)
    finally:
        distutils.core_setup_stop_after = None

class DependsTests(TestCase):

    def testExtractConst(self):
        if not extract_constant: return  # skip on non-bytecode platforms

        def f1():
            global x,y,z
            x = "test"
            y = z

        # unrecognized name
        self.assertEqual(extract_constant(f1.func_code,'q', -1), None)

        # constant assigned
        self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")

        # expression assigned
        self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)

        # recognized name, not assigned
        self.assertEqual(extract_constant(f1.func_code,'z', -1), None)

    def testFindModule(self):
        self.assertRaises(ImportError, find_module, 'no-such.-thing')
        self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
        f,p,i = find_module('setuptools.tests'); f.close()

    def testModuleExtract(self):
        if not get_module_constant: return  # skip on non-bytecode platforms
        from distutils import __version__
        self.assertEqual(
            get_module_constant('distutils','__version__'), __version__
        )
        self.assertEqual(
            get_module_constant('sys','version'), sys.version
        )
        self.assertEqual(
            get_module_constant('setuptools.tests','__doc__'),__doc__
        )

    def testRequire(self):
        if not extract_constant: return  # skip on non-bytecode platforms

        req = Require('Distutils','1.0.3','distutils')

        self.assertEqual(req.name, 'Distutils')
        self.assertEqual(req.module, 'distutils')
        self.assertEqual(req.requested_version, '1.0.3')
        self.assertEqual(req.attribute, '__version__')
        self.assertEqual(req.full_name(), 'Distutils-1.0.3')

        from distutils import __version__
        self.assertEqual(req.get_version(), __version__)
        self.failUnless(req.version_ok('1.0.9'))
        self.failIf(req.version_ok('0.9.1'))
        self.failIf(req.version_ok('unknown'))

        self.failUnless(req.is_present())
        self.failUnless(req.is_current())

        req = Require('Distutils 3000','03000','distutils',format=LooseVersion)
        self.failUnless(req.is_present())
        self.failIf(req.is_current())
        self.failIf(req.version_ok('unknown'))

        req = Require('Do-what-I-mean','1.0','d-w-i-m')
        self.failIf(req.is_present())
        self.failIf(req.is_current())

        req = Require('Tests', None, 'tests', homepage="http://example.com")
        self.assertEqual(req.format, None)
        self.assertEqual(req.attribute, None)
        self.assertEqual(req.requested_version, None)
        self.assertEqual(req.full_name(), 'Tests')
        self.assertEqual(req.homepage, 'http://example.com')

        paths = [os.path.dirname(p) for p in __path__]
        self.failUnless(req.is_present(paths))
        self.failUnless(req.is_current(paths))

class DistroTests(TestCase):

    def setUp(self):
        self.e1 = Extension('bar.ext',['bar.c'])
        self.e2 = Extension('c.y', ['y.c'])

        self.dist = makeSetup(
            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
            py_modules=['b.d','x'],
            ext_modules = (self.e1, self.e2),
            package_dir = {},
        )

    def testDistroType(self):
        self.failUnless(isinstance(self.dist,setuptools.dist.Distribution))

    def testExcludePackage(self):
        self.dist.exclude_package('a')
        self.assertEqual(self.dist.packages, ['b','c'])

        self.dist.exclude_package('b')
        self.assertEqual(self.dist.packages, ['c'])
        self.assertEqual(self.dist.py_modules, ['x'])
        self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])

        self.dist.exclude_package('c')
        self.assertEqual(self.dist.packages, [])
        self.assertEqual(self.dist.py_modules, ['x'])
        self.assertEqual(self.dist.ext_modules, [self.e1])

        # test removals from unspecified options
        makeSetup().exclude_package('x')

    def testIncludeExclude(self):
        # remove an extension
        self.dist.exclude(ext_modules=[self.e1])
        self.assertEqual(self.dist.ext_modules, [self.e2])

        # add it back in
        self.dist.include(ext_modules=[self.e1])
        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])

        # should not add duplicate
        self.dist.include(ext_modules=[self.e1])
        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])

    def testExcludePackages(self):
        self.dist.exclude(packages=['c','b','a'])
        self.assertEqual(self.dist.packages, [])
        self.assertEqual(self.dist.py_modules, ['x'])
        self.assertEqual(self.dist.ext_modules, [self.e1])

    def testEmpty(self):
        dist = makeSetup()
        dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
        dist = makeSetup()
        dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])

    def testContents(self):
        self.failUnless(self.dist.has_contents_for('a'))
        self.dist.exclude_package('a')
        self.failIf(self.dist.has_contents_for('a'))

        self.failUnless(self.dist.has_contents_for('b'))
        self.dist.exclude_package('b')
        self.failIf(self.dist.has_contents_for('b'))

        self.failUnless(self.dist.has_contents_for('c'))
        self.dist.exclude_package('c')
        self.failIf(self.dist.has_contents_for('c'))

    def testInvalidIncludeExclude(self):
        self.assertRaises(DistutilsSetupError,
            self.dist.include, nonexistent_option='x'
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.exclude, nonexistent_option='x'
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.include, packages={'x':'y'}
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.exclude, packages={'x':'y'}
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.include, ext_modules={'x':'y'}
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.exclude, ext_modules={'x':'y'}
        )

        self.assertRaises(DistutilsSetupError,
            self.dist.include, package_dir=['q']
        )
        self.assertRaises(DistutilsSetupError,
            self.dist.exclude, package_dir=['q']
        )

class FeatureTests(TestCase):

    def setUp(self):
        self.req = Require('Distutils','1.0.3','distutils')
        self.dist = makeSetup(
            features={
                'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
                'bar': Feature("bar", standard=True, packages=['pkg.bar'],
                               py_modules=['bar_et'], remove=['bar.ext'],
                       ),
                'baz': Feature(
                        "baz", optional=False, packages=['pkg.baz'],
                        scripts = ['scripts/baz_it'],
                        libraries=[('libfoo','foo/foofoo.c')]
                       ),
                'dwim': Feature("DWIM", available=False, remove='bazish'),
            },
            script_args=['--without-bar', 'install'],
            packages = ['pkg.bar', 'pkg.foo'],
            py_modules = ['bar_et', 'bazish'],
            ext_modules = [Extension('bar.ext',['bar.c'])]
        )

    def testDefaults(self):
        self.failIf(
            Feature(
                "test",standard=True,remove='x',available=False
            ).include_by_default()
        )
        self.failUnless(
            Feature("test",standard=True,remove='x').include_by_default()
        )
        # Feature must have either kwargs, removes, or require_features
        self.assertRaises(DistutilsSetupError, Feature, "test")

    def testAvailability(self):
        self.assertRaises(
            DistutilsPlatformError,
            self.dist.features['dwim'].include_in, self.dist
        )

    def testFeatureOptions(self):
        dist = self.dist
        self.failUnless(
            ('with-dwim',None,'include DWIM') in dist.feature_options
        )
        self.failUnless(
            ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
        )
        self.failUnless(
            ('with-bar',None,'include bar (default)') in dist.feature_options
        )
        self.failUnless(
            ('without-bar',None,'exclude bar') in dist.feature_options
        )
        self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
        self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
        self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
        self.failIf('without-baz' in dist.feature_negopt)

    def testUseFeatures(self):
        dist = self.dist
        self.assertEqual(dist.with_foo,1)
        self.assertEqual(dist.with_bar,0)
        self.assertEqual(dist.with_baz,1)
        self.failIf('bar_et' in dist.py_modules)
        self.failIf('pkg.bar' in dist.packages)
        self.failUnless('pkg.baz' in dist.packages)
        self.failUnless('scripts/baz_it' in dist.scripts)
        self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries)
        self.assertEqual(dist.ext_modules,[])
        self.assertEqual(dist.require_features, [self.req])

        # If we ask for bar, it should fail because we explicitly disabled
        # it on the command line
        self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')

    def testFeatureWithInvalidRemove(self):
        self.assertRaises(
            SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
        )

class TestCommandTests(TestCase):

    def testTestIsCommand(self):
        test_cmd = makeSetup().get_command_obj('test')
        self.failUnless(isinstance(test_cmd, distutils.cmd.Command))

    def testLongOptSuiteWNoDefault(self):
        ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
        ts1 = ts1.get_command_obj('test')
        ts1.ensure_finalized()
        self.assertEqual(ts1.test_suite, 'foo.tests.suite')

    def testDefaultSuite(self):
        ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
        ts2.ensure_finalized()
        self.assertEqual(ts2.test_suite, 'bar.tests.suite')

    def testDefaultWModuleOnCmdLine(self):
        ts3 = makeSetup(
            test_suite='bar.tests',
            script_args=['test','-m','foo.tests']
        ).get_command_obj('test')
        ts3.ensure_finalized()
        self.assertEqual(ts3.test_module, 'foo.tests')
        self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')

    def testConflictingOptions(self):
        ts4 = makeSetup(
            script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
        ).get_command_obj('test')
        self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)

    def testNoSuite(self):
        ts5 = makeSetup().get_command_obj('test')
        ts5.ensure_finalized()
        self.assertEqual(ts5.test_suite, None)
@ -0,0 +1,27 @@
"""Package Index Tests
"""
# More would be better!

import os, shutil, tempfile, unittest, urllib2
import pkg_resources
import setuptools.package_index

class TestPackageIndex(unittest.TestCase):

    def test_bad_urls(self):
        index = setuptools.package_index.PackageIndex()
        url = 'http://127.0.0.1/nonesuch/test_package_index'
        try:
            v = index.open_url(url)
        except Exception, v:
            self.assert_(url in str(v))
        else:
            self.assert_(isinstance(v,urllib2.HTTPError))

    def test_url_ok(self):
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        url = 'file:///tmp/test_package_index'
        self.assert_(index.url_ok(url, True))
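The small test file above exercises PackageIndex directly. A hedged sketch of the same pattern outside the test suite, assuming the bundled setuptools egg is importable; the host filter and URL are illustrative, not taken from this commit:

    from setuptools.package_index import PackageIndex

    # Restrict downloads to an allowed host list; url_ok() is the check
    # that _download_url() in package_index.py consults before fetching.
    index = PackageIndex(hosts=('www.example.com',))
    print index.url_ok('file:///tmp/some.egg', False)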
533
setuptools-0.6c15dev.egg/setuptools/tests/test_resources.py
Normal file
@ -0,0 +1,533 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
from unittest import TestCase, makeSuite; from pkg_resources import *
from setuptools.command.easy_install import get_script_header, is_sh
import os, pkg_resources, sys, StringIO
try: frozenset
except NameError:
    from sets import ImmutableSet as frozenset

class Metadata(EmptyProvider):
    """Mock object to return metadata as if from an on-disk distribution"""

    def __init__(self,*pairs):
        self.metadata = dict(pairs)

    def has_metadata(self,name):
        return name in self.metadata

    def get_metadata(self,name):
        return self.metadata[name]

    def get_metadata_lines(self,name):
        return yield_lines(self.get_metadata(name))

class DistroTests(TestCase):

    def testCollection(self):
        # empty path should produce no distributions
        ad = Environment([], platform=None, python=None)
        self.assertEqual(list(ad), [])
        self.assertEqual(ad['FooPkg'],[])
        ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
        ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
        ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))

        # Name is in there now
        self.failUnless(ad['FooPkg'])
        # But only 1 package
        self.assertEqual(list(ad), ['foopkg'])

        # Distributions sort by version
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
        )
        # Removing a distribution leaves sequence alone
        ad.remove(ad['FooPkg'][1])
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
        )
        # And inserting adds them in order
        ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
        )

        ws = WorkingSet([])
        foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
        foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
        req, = parse_requirements("FooPkg>=1.3")

        # Nominal case: no distros on path, should yield all applicable
        self.assertEqual(ad.best_match(req,ws).version, '1.9')
        # If a matching distro is already installed, should return only that
        ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')

        # If the first matching distro is unsuitable, it's a version conflict
        ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
        self.assertRaises(VersionConflict, ad.best_match, req, ws)

        # If more than one match on the path, the first one takes precedence
        ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
        self.assertEqual(ad.best_match(req,ws).version, '1.4')

    def checkFooPkg(self,d):
        self.assertEqual(d.project_name, "FooPkg")
        self.assertEqual(d.key, "foopkg")
        self.assertEqual(d.version, "1.3-1")
        self.assertEqual(d.py_version, "2.4")
        self.assertEqual(d.platform, "win32")
        self.assertEqual(d.parsed_version, parse_version("1.3-1"))

    def testDistroBasics(self):
        d = Distribution(
            "/some/path",
            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
        )
        self.checkFooPkg(d)

        d = Distribution("/some/path")
        self.assertEqual(d.py_version, sys.version[:3])
        self.assertEqual(d.platform, None)

    def testDistroParse(self):
        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
        self.checkFooPkg(d)
        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
        self.checkFooPkg(d)

    def testDistroMetadata(self):
        d = Distribution(
            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
            metadata = Metadata(
                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
            )
        )
        self.checkFooPkg(d)

    def distRequires(self, txt):
        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))

    def checkRequires(self, dist, txt, extras=()):
        self.assertEqual(
            list(dist.requires(extras)),
            list(parse_requirements(txt))
        )

    def testDistroDependsSimple(self):
        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
            self.checkRequires(self.distRequires(v), v)

    def testResolve(self):
        ad = Environment([]); ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        self.assertEqual( list(ws.resolve([],ad)), [] )
        # Request something not in the collection -> DistributionNotFound
        self.assertRaises(
            DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
        )
        Foo = Distribution.from_filename(
            "/foo_dir/Foo-1.2.egg",
            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
        )
        ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))

        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            self.assertEqual(targets, [Foo])
            map(ws.add,targets)
        self.assertRaises(VersionConflict, ws.resolve,
            parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([]) # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        self.assertRaises(
            DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
        )
        Baz = Distribution.from_filename(
            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
        )
        ad.add(Baz)

        # Activation list now includes resolved dependency
        self.assertEqual(
            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
        )
        # Requests for conflicting versions produce VersionConflict
        self.assertRaises( VersionConflict,
            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
        )

    def testDistroDependsOptions(self):
        d = self.distRequires("""
            Twisted>=1.5
            [docgen]
            ZConfig>=2.0
            docutils>=0.3
            [fastcgi]
            fcgiapp>=0.1""")
        self.checkRequires(d,"Twisted>=1.5")
        self.checkRequires(
            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
        )
        self.checkRequires(
            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
        )
        self.checkRequires(
            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
            ["docgen","fastcgi"]
        )
        self.checkRequires(
            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
            ["fastcgi", "docgen"]
        )
        self.assertRaises(UnknownExtra, d.requires, ["foo"])

class EntryPointTests(TestCase):

    def assertfields(self, ep):
        self.assertEqual(ep.name,"foo")
        self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
        self.assertEqual(ep.attrs, ("EntryPointTests",))
        self.assertEqual(ep.extras, ("x",))
        self.failUnless(ep.load() is EntryPointTests)
        self.assertEqual(
            str(ep),
            "foo = setuptools.tests.test_resources:EntryPointTests [x]"
        )

    def setUp(self):
        self.dist = Distribution.from_filename(
            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))

    def testBasics(self):
        ep = EntryPoint(
            "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
            ["x"], self.dist
        )
        self.assertfields(ep)

    def testParse(self):
        s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
        ep = EntryPoint.parse(s, self.dist)
        self.assertfields(ep)

        ep = EntryPoint.parse("bar baz= spammity[PING]")
        self.assertEqual(ep.name,"bar baz")
        self.assertEqual(ep.module_name,"spammity")
        self.assertEqual(ep.attrs, ())
        self.assertEqual(ep.extras, ("ping",))

        ep = EntryPoint.parse(" fizzly = wocka:foo")
        self.assertEqual(ep.name,"fizzly")
        self.assertEqual(ep.module_name,"wocka")
        self.assertEqual(ep.attrs, ("foo",))
        self.assertEqual(ep.extras, ())

    def testRejects(self):
        for ep in [
            "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
        ]:
            try: EntryPoint.parse(ep)
            except ValueError: pass
            else: raise AssertionError("Should've been bad", ep)

    def checkSubMap(self, m):
        self.assertEqual(len(m), len(self.submap_expect))
        for key, ep in self.submap_expect.iteritems():
            self.assertEqual(repr(m.get(key)), repr(ep))

    submap_expect = dict(
        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
        feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
        feature3=EntryPoint('feature3', 'this.module', extras=['something'])
    )
    submap_str = """
            # define features for blah blah
            feature1 = somemodule:somefunction
            feature2 = another.module:SomeClass [extra1,extra2]
            feature3 = this.module [something]
    """

    def testParseList(self):
        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
        self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
        self.assertRaises(ValueError, EntryPoint.parse_group, "x",
            ["foo=baz", "foo=bar"])

    def testParseMap(self):
        m = EntryPoint.parse_map({'xyz':self.submap_str})
        self.checkSubMap(m['xyz'])
        self.assertEqual(m.keys(),['xyz'])
        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
        self.checkSubMap(m['xyz'])
        self.assertEqual(m.keys(),['xyz'])
        self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
        self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)

class RequirementsTests(TestCase):

    def testBasics(self):
        r = Requirement.parse("Twisted>=1.2")
        self.assertEqual(str(r),"Twisted>=1.2")
        self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
        self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
        self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
        self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
        self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
        self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
        self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))

    def testOrdering(self):
        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
        self.assertEqual(r1,r2)
        self.assertEqual(str(r1),str(r2))
        self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")

    def testBasicContains(self):
        r = Requirement("Twisted", [('>=','1.2')], ())
        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
        twist11 = Distribution.from_filename("Twisted-1.1.egg")
        twist12 = Distribution.from_filename("Twisted-1.2.egg")
        self.failUnless(parse_version('1.2') in r)
        self.failUnless(parse_version('1.1') not in r)
        self.failUnless('1.2' in r)
        self.failUnless('1.1' not in r)
        self.failUnless(foo_dist not in r)
        self.failUnless(twist11 not in r)
        self.failUnless(twist12 in r)

    def testAdvancedContains(self):
        r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
        for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
            self.failUnless(v in r, (v,r))
        for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
            self.failUnless(v not in r, (v,r))

    def testOptionsAndHashing(self):
        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
        r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
        self.assertEqual(r1,r2)
        self.assertEqual(r1,r3)
        self.assertEqual(r1.extras, ("foo","bar"))
        self.assertEqual(r2.extras, ("bar","foo"))  # extras are normalized
        self.assertEqual(hash(r1), hash(r2))
        self.assertEqual(
            hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
                            frozenset(["foo","bar"])))
        )

    def testVersionEquality(self):
        r1 = Requirement.parse("setuptools==0.3a2")
        r2 = Requirement.parse("setuptools!=0.3a4")
        d = Distribution.from_filename

        self.failIf(d("setuptools-0.3a4.egg") in r1)
        self.failIf(d("setuptools-0.3a1.egg") in r1)
        self.failIf(d("setuptools-0.3a4.egg") in r2)

        self.failUnless(d("setuptools-0.3a2.egg") in r1)
        self.failUnless(d("setuptools-0.3a2.egg") in r2)
        self.failUnless(d("setuptools-0.3a3.egg") in r2)
        self.failUnless(d("setuptools-0.3a5.egg") in r2)

class ParseTests(TestCase):

    def testEmptyParse(self):
        self.assertEqual(list(parse_requirements('')), [])

    def testYielding(self):
        for inp,out in [
            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
            (['x\n\n','y'], ['x','y']),
        ]:
            self.assertEqual(list(pkg_resources.yield_lines(inp)),out)

    def testSplitting(self):
        self.assertEqual(
            list(
                pkg_resources.split_sections("""
                    x
                    [Y]
                    z

                    a
                    [b ]
                    # foo
                    c
                    [ d]
                    [q]
                    v
                    """
                )
            ),
            [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
        )
        self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))

    def testSafeName(self):
        self.assertEqual(safe_name("adns-python"), "adns-python")
        self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
        self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
        self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
        self.assertNotEqual(safe_name("peak.web"), "peak-web")

    def testSafeVersion(self):
        self.assertEqual(safe_version("1.2-1"), "1.2-1")
        self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
        self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
        self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
        self.assertEqual(safe_version("peak.web"), "peak.web")

    def testSimpleRequirements(self):
        self.assertEqual(
            list(parse_requirements('Twis-Ted>=1.2-1')),
            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
        )
        self.assertEqual(
            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
        )
        self.assertEqual(
            Requirement.parse("FooBar==1.99a3"),
            Requirement("FooBar", [('==','1.99a3')], ())
        )
        self.assertRaises(ValueError,Requirement.parse,">=2.3")
        self.assertRaises(ValueError,Requirement.parse,"x\\")
        self.assertRaises(ValueError,Requirement.parse,"x==2 q")
        self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
        self.assertRaises(ValueError,Requirement.parse,"#")

    def testVersionEquality(self):
        def c(s1,s2):
            p1, p2 = parse_version(s1),parse_version(s2)
            self.assertEqual(p1,p2, (s1,s2,p1,p2))

        c('1.2-rc1', '1.2rc1')
        c('0.4', '0.4.0')
        c('0.4.0.0', '0.4.0')
        c('0.4.0-0', '0.4-0')
        c('0pl1', '0.0pl1')
        c('0pre1', '0.0c1')
        c('0.0.0preview1', '0c1')
        c('0.0c1', '0-rc1')
        c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')

    def testVersionOrdering(self):
        def c(s1,s2):
            p1, p2 = parse_version(s1),parse_version(s2)
            self.failUnless(p1<p2, (s1,s2,p1,p2))

        c('2.1','2.1.1')
        c('2a1','2b0')
        c('2a1','2.1')
        c('2.3a1', '2.3')
        c('2.1-1', '2.1-2')
        c('2.1-1', '2.1.1')
        c('2.1', '2.1pl4')
        c('2.1a0-20040501', '2.1')
        c('1.1', '02.1')
        c('A56','B27')
        c('3.2', '3.2.pl0')
        c('3.2-1', '3.2pl1')
        c('3.2pl1', '3.2pl1-1')
        c('0.4', '4.0')
        c('0.0.4', '0.4.0')
        c('0pl1', '0.4pl1')
        c('2.1.0-rc1','2.1.0')
        c('2.1dev','2.1a0')

        torture ="""
        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
        0.77.2-1 0.77.1-1 0.77.0-1
        """.split()

        for p,v1 in enumerate(torture):
            for v2 in torture[p+1:]:
                c(v2,v1)

class ScriptHeaderTests(TestCase):
    non_ascii_exe = '/Users/José/bin/python'

    def test_get_script_header(self):
        if not sys.platform.startswith('java') or not is_sh(sys.executable):
            # This test is for non-Jython platforms
            self.assertEqual(get_script_header('#!/usr/local/bin/python'),
                             '#!%s\n' % os.path.normpath(sys.executable))
            self.assertEqual(get_script_header('#!/usr/bin/python -x'),
                             '#!%s -x\n' % os.path.normpath(sys.executable))
            self.assertEqual(get_script_header('#!/usr/bin/python',
                                               executable=self.non_ascii_exe),
                             '#!%s -x\n' % self.non_ascii_exe)

    def test_get_script_header_jython_workaround(self):
        platform = sys.platform
        sys.platform = 'java1.5.0_13'
        stdout = sys.stdout
        try:
            # A mock sys.executable that uses a shebang line (this file)
            exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
            self.assertEqual(
                get_script_header('#!/usr/local/bin/python', executable=exe),
                '#!/usr/bin/env %s\n' % exe)

            # Ensure we generate what is basically a broken shebang line
            # when there's options, with a warning emitted
            sys.stdout = StringIO.StringIO()
            self.assertEqual(get_script_header('#!/usr/bin/python -x',
                                               executable=exe),
                             '#!%s -x\n' % exe)
            self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
            sys.stdout = StringIO.StringIO()
            self.assertEqual(get_script_header('#!/usr/bin/python',
                                               executable=self.non_ascii_exe),
                             '#!%s -x\n' % self.non_ascii_exe)
            self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
        finally:
            sys.platform = platform
            sys.stdout = stdout