Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-31 08:25:35 +00:00)

Commit 3d2d61f853: Merge remote-tracking branch 'origin/master' into 3239.python2-new-style-regression-test
@@ -43,6 +43,17 @@ else
     JUNITXML=""
 fi
 
+# A prefix for the test command that ensure it will exit after no more than a
+# certain amount of time. Ideally, we would only enforce a "silent" period
+# timeout but there isn't obviously a ready-made tool for that. The test
+# suite only takes about 5 - 6 minutes on CircleCI right now. 15 minutes
+# seems like a moderately safe window.
+#
+# This is primarily aimed at catching hangs on the PyPy job which runs for
+# about 21 minutes and then gets killed by CircleCI in a way that fails the
+# job and bypasses our "allowed failure" logic.
+TIMEOUT="timeout --kill-after 1m 15m"
+
 # Run the test suite as a non-root user. This is the expected usage some
 # small areas of the test suite assume non-root privileges (such as unreadable
 # files being unreadable).
@@ -63,7 +74,7 @@ else
     alternative="false"
 fi
 
-${BOOTSTRAP_VENV}/bin/tox \
+${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
     -c ${PROJECT_ROOT}/tox.ini \
     --workdir /tmp/tahoe-lafs.tox \
     -e "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
newsfragments/2749.installation (new file)
@@ -0,0 +1 @@
+Tahoe-LAFS no longer makes start-up time assertions about the versions of its dependencies. It is the responsibility of the administrator of the installation to ensure the correct version of dependencies are supplied.

newsfragments/3238.minor (new file, empty)
@@ -3,15 +3,14 @@ Decentralized storage grid.
 
 community web site: U{https://tahoe-lafs.org/}
 """
-import six
 
-class PackagingError(EnvironmentError):
-    """
-    Raised when there is an error in packaging of Tahoe-LAFS or its
-    dependencies which makes it impossible to proceed safely.
-    """
-    pass
+__all__ = [
+    "__version__",
+    "full_version",
+    "branch",
+    "__appname__",
+    "__full_version__",
+]
 
 __version__ = "unknown"
 try:
@@ -38,470 +37,3 @@ __appname__ = "tahoe-lafs"
 # in the "application" part of the Tahoe versioning scheme:
 # https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Versioning
 __full_version__ = __appname__ + '/' + str(__version__)
-
-import os, platform, re, subprocess, sys, traceback
-
-_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I)
-_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I)
-
-_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I)
-_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I)
-
-_distname = None
-_version = None
-
-def get_linux_distro():
-    """ Tries to determine the name of the Linux OS distribution name.
-
-    First, try to parse a file named "/etc/lsb-release". If it exists, and
-    contains the "DISTRIB_ID=" line and the "DISTRIB_RELEASE=" line, then return
-    the strings parsed from that file.
-
-    If that doesn't work, then invoke platform.dist().
-
-    If that doesn't work, then try to execute "lsb_release", as standardized in
-    2001:
-
-    http://refspecs.freestandards.org/LSB_1.0.0/gLSB/lsbrelease.html
-
-    The current version of the standard is here:
-
-    http://refspecs.freestandards.org/LSB_3.2.0/LSB-Core-generic/LSB-Core-generic/lsbrelease.html
-
-    that lsb_release emitted, as strings.
-
-    Returns a tuple (distname,version). Distname is what LSB calls a
-    "distributor id", e.g. "Ubuntu". Version is what LSB calls a "release",
-    e.g. "8.04".
-
-    A version of this has been submitted to python as a patch for the standard
-    library module "platform":
-
-    http://bugs.python.org/issue3937
-    """
-    global _distname,_version
-    if _distname and _version:
-        return (_distname, _version)
-
-    try:
-        etclsbrel = open("/etc/lsb-release", "rU")
-        for line in etclsbrel:
-            m = _distributor_id_file_re.search(line)
-            if m:
-                _distname = m.group(1).strip()
-                if _distname and _version:
-                    return (_distname, _version)
-            m = _release_file_re.search(line)
-            if m:
-                _version = m.group(1).strip()
-                if _distname and _version:
-                    return (_distname, _version)
-    except EnvironmentError:
-        pass
-
-    (_distname, _version) = platform.dist()[:2]
-    if _distname and _version:
-        return (_distname, _version)
-
-    if os.path.isfile("/usr/bin/lsb_release") or os.path.isfile("/bin/lsb_release"):
-        try:
-            p = subprocess.Popen(["lsb_release", "--all"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            rc = p.wait()
-            if rc == 0:
-                for line in p.stdout.readlines():
-                    m = _distributor_id_cmdline_re.search(line)
-                    if m:
-                        _distname = m.group(1).strip()
-                        if _distname and _version:
-                            return (_distname, _version)
-
-                    m = _release_cmdline_re.search(p.stdout.read())
-                    if m:
-                        _version = m.group(1).strip()
-                        if _distname and _version:
-                            return (_distname, _version)
-        except EnvironmentError:
-            pass
-
-    if os.path.exists("/etc/arch-release"):
-        return ("Arch_Linux", "")
-
-    return (_distname,_version)
-
-def get_platform():
-    # Our version of platform.platform(), telling us both less and more than the
-    # Python Standard Library's version does.
-    # We omit details such as the Linux kernel version number, but we add a
-    # more detailed and correct rendition of the Linux distribution and
-    # distribution-version.
-    if "linux" in platform.system().lower():
-        return platform.system()+"-"+"_".join(get_linux_distro())+"-"+platform.machine()+"-"+"_".join([x for x in platform.architecture() if x])
-    else:
-        return platform.platform()
-
-
-from allmydata.util import verlib
-def normalized_version(verstr, what=None):
-    try:
-        suggested = verlib.suggest_normalized_version(verstr) or verstr
-        return verlib.NormalizedVersion(suggested)
-    except verlib.IrrationalVersionError:
-        raise
-    except StandardError:
-        cls, value, trace = sys.exc_info()
-        new_exc = PackagingError("could not parse %s due to %s: %s"
-                                 % (what or repr(verstr), cls.__name__, value))
-        six.reraise(cls, new_exc, trace)
-
-
-def get_openssl_version():
-    try:
-        from OpenSSL import SSL
-        return extract_openssl_version(SSL)
-    except Exception:
-        return ("unknown", None, None)
-
-def extract_openssl_version(ssl_module):
-    openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION)
-    if openssl_version.startswith('OpenSSL '):
-        openssl_version = openssl_version[8 :]
-
-    (version, _, comment) = openssl_version.partition(' ')
-
-    try:
-        openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS)
-        if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '):
-            comment += ", no heartbeats"
-    except Exception:
-        pass
-
-    return (version, None, comment if comment else None)
-
-def get_package_versions_and_locations():
-    import warnings
-    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
-        runtime_warning_messages, warning_imports, ignorable
-
-    def package_dir(srcfile):
-        return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))
-
-    # pkg_resources.require returns the distribution that pkg_resources attempted to put
-    # on sys.path, which can differ from the one that we actually import due to #1258,
-    # or any other bug that causes sys.path to be set up incorrectly. Therefore we
-    # must import the packages in order to check their versions and paths.
-
-    # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings
-    # (listed in _auto_deps.py).
-
-    warnings.filterwarnings("ignore", category=UserWarning, append=True)
-
-    for msg in global_deprecation_messages + deprecation_messages:
-        warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True)
-    for msg in runtime_warning_messages:
-        warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True)
-    try:
-        for modulename in warning_imports:
-            try:
-                __import__(modulename)
-            except ImportError:
-                pass
-    finally:
-        # Leave suppressions for UserWarnings and global_deprecation_messages active.
-        for _ in runtime_warning_messages + deprecation_messages:
-            warnings.filters.pop()
-
-    packages = []
-    pkg_resources_vers_and_locs = dict()
-
-    if not hasattr(sys, 'frozen'):
-        import pkg_resources
-        from _auto_deps import install_requires
-
-        pkg_resources_vers_and_locs = dict([(p.project_name.lower(), (str(p.version), p.location))
-                                            for p in pkg_resources.require(install_requires)])
-
-    def get_version(module):
-        if hasattr(module, '__version__'):
-            return str(getattr(module, '__version__'))
-        elif hasattr(module, 'version'):
-            ver = getattr(module, 'version')
-            if isinstance(ver, tuple):
-                return '.'.join(map(str, ver))
-            else:
-                return str(ver)
-        else:
-            return 'unknown'
-
-    for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports:
-        if modulename:
-            try:
-                __import__(modulename)
-                module = sys.modules[modulename]
-            except ImportError:
-                etype, emsg, etrace = sys.exc_info()
-                trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
-                packages.append( (pkgname, (None, None, trace_info)) )
-            else:
-                comment = None
-                if pkgname == __appname__:
-                    comment = "%s: %s" % (branch, full_version)
-                elif pkgname == 'setuptools' and hasattr(module, '_distribute'):
-                    # distribute does not report its version in any module variables
-                    comment = 'distribute'
-                ver = get_version(module)
-                loc = package_dir(module.__file__)
-                if ver == "unknown" and pkgname in pkg_resources_vers_and_locs:
-                    (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname]
-                    if loc == os.path.normcase(os.path.realpath(pr_loc)):
-                        ver = pr_ver
-                packages.append( (pkgname, (ver, loc, comment)) )
-        elif pkgname == 'python':
-            packages.append( (pkgname, (platform.python_version(), sys.executable, None)) )
-        elif pkgname == 'platform':
-            packages.append( (pkgname, (get_platform(), None, None)) )
-        elif pkgname == 'OpenSSL':
-            packages.append( (pkgname, get_openssl_version()) )
-
-    cross_check_errors = []
-
-    if len(pkg_resources_vers_and_locs) > 0:
-        imported_packages = set([p.lower() for (p, _) in packages])
-        extra_packages = []
-
-        for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
-            if pr_name not in imported_packages and pr_name not in ignorable:
-                extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )
-
-        cross_check_errors = cross_check(pkg_resources_vers_and_locs, packages)
-        packages += extra_packages
-
-    return packages, cross_check_errors
-
-
-def split_requirement(req):
-    """
-    Split up a single requirement string into the different version constraint pieces.
-
-    This is like req.split(",") except it doesn't split on , found inside [].
-
-    :return: A list of the split up pieces.
-    """
-    in_extras = False
-    pieces = []
-    chunk = ''
-    for ch in req:
-        if in_extras:
-            if ch == ']':
-                in_extras = False
-            chunk += ch
-        else:
-            if ch == '[':
-                in_extras = True
-                chunk += ch
-            elif ch == ',':
-                pieces.append(chunk)
-                chunk = ''
-            else:
-                chunk += ch
-    pieces.append(chunk)
-    return pieces
-
-
-def check_requirement(req, vers_and_locs):
-    # We support only conjunctions of <=, >=, and !=
-    reqlist = split_requirement(req)
-    name = reqlist[0].split('<=')[0].split('>=')[0].split('!=')[0].strip(' ').split('[')[0]
-    if name not in vers_and_locs:
-        raise PackagingError("no version info for %s" % (name,))
-    if req.strip(' ') == name:
-        return
-    (actual, location, comment) = vers_and_locs[name]
-    if actual is None:
-        # comment is (type, message, (filename, line number, function name, text)) for the original ImportError
-        raise ImportError("for requirement %r: %s" % (req, comment))
-    if actual == 'unknown':
-        return
-    try:
-        actualver = normalized_version(actual, what="actual version %r of %s from %r" %
-                                               (actual, name, location))
-        matched = match_requirement(req, reqlist, actualver)
-    except verlib.IrrationalVersionError:
-        # meh, it probably doesn't matter
-        return
-
-    if not matched:
-        msg = ("We require %s, but could only find version %s.\n" % (req, actual))
-        if location and location != 'unknown':
-            msg += "The version we found is from %r.\n" % (location,)
-        msg += ("To resolve this problem, uninstall that version, either using your\n"
-                "operating system's package manager or by moving aside the directory.")
-        raise PackagingError(msg)
-
-
-def match_requirement(req, reqlist, actualver):
-    for r in reqlist:
-        s = r.split('<=')
-        if len(s) == 2:
-            required = s[1].strip(' ')
-            if not (actualver <= normalized_version(required, what="required maximum version %r in %r" % (required, req))):
-                return False # maximum requirement not met
-        else:
-            s = r.split('>=')
-            if len(s) == 2:
-                required = s[1].strip(' ')
-                if not (actualver >= normalized_version(required, what="required minimum version %r in %r" % (required, req))):
-                    return False # minimum requirement not met
-            else:
-                s = r.split('!=')
-                if len(s) == 2:
-                    required = s[1].strip(' ')
-                    if not (actualver != normalized_version(required, what="excluded version %r in %r" % (required, req))):
-                        return False # not-equal requirement not met
-                else:
-                    raise PackagingError("no version info or could not understand requirement %r" % (req,))
-
-    return True
-
-
-def cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
-    """This function returns a list of errors due to any failed cross-checks."""
-
-    from _auto_deps import not_import_versionable
-
-    errors = []
-    not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']
-
-    for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list:
-        name = name.lower()
-        if name not in not_pkg_resourceable:
-            if name not in pkg_resources_vers_and_locs:
-                if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs:
-                    pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"]
-                    if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc))
-                            and imp_comment == "distribute"):
-                        errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r "
-                                      "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. "
-                                      "A version mismatch is expected, but a location mismatch is not."
-                                      % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc))
-                else:
-                    errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources."
-                                  % (name, imp_ver, imp_loc))
-                continue
-
-            pr_ver, pr_loc = pkg_resources_vers_and_locs[name]
-            if imp_ver is None and imp_loc is None:
-                errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible "
-                              "to import version %r from %r.\nThe exception trace was %r."
-                              % (name, pr_ver, pr_loc, imp_comment))
-                continue
-
-            # If the pkg_resources version is identical to the imported version, don't attempt
-            # to normalize them, since it is unnecessary and may fail (ticket #2499).
-            if imp_ver != 'unknown' and pr_ver == imp_ver:
-                continue
-
-            try:
-                pr_normver = normalized_version(pr_ver)
-            except verlib.IrrationalVersionError:
-                continue
-            except Exception as e:
-                errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
-                              "The version found by import was %r from %r. "
-                              "pkg_resources thought it should be found at %r. "
-                              "The exception was %s: %s"
-                              % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e))
-            else:
-                if imp_ver == 'unknown':
-                    if name not in not_import_versionable:
-                        errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. "
-                                      "pkg_resources thought it should be version %r at %r."
-                                      % (name, imp_loc, pr_ver, pr_loc))
-                else:
-                    try:
-                        imp_normver = normalized_version(imp_ver)
-                    except verlib.IrrationalVersionError:
-                        continue
-                    except Exception as e:
-                        errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
-                                      "pkg_resources thought it should be version %r at %r. "
-                                      "The exception was %s: %s"
-                                      % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e))
-                    else:
-                        if pr_ver == 'unknown' or (pr_normver != imp_normver):
-                            if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)):
-                                errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) "
-                                              "by pkg_resources, but version %r (normalized to %r, from %r) by import."
-                                              % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))
-
-    return errors
-
-
-_vers_and_locs_list, _cross_check_errors = get_package_versions_and_locations()
-
-
-def get_error_string(errors, debug=False):
-    from allmydata._auto_deps import install_requires
-
-    msg = "\n%s\n" % ("\n".join(errors),)
-    if debug:
-        msg += ("\n"
-                "For debugging purposes, the PYTHONPATH was\n"
-                "  %r\n"
-                "install_requires was\n"
-                "  %r\n"
-                "sys.path after importing pkg_resources was\n"
-                "  %s\n"
-                % (os.environ.get('PYTHONPATH'), install_requires, (os.pathsep+"\n  ").join(sys.path)) )
-    return msg
-
-def check_all_requirements():
-    """This function returns a list of errors due to any failed checks."""
-
-    from allmydata._auto_deps import install_requires
-
-    fatal_errors = []
-
-    # We require at least 2.6 on all platforms.
-    # (On Python 3, we'll have failed long before this point.)
-    if sys.version_info < (2, 6):
-        try:
-            version_string = ".".join(map(str, sys.version_info))
-        except Exception:
-            version_string = repr(sys.version_info)
-        fatal_errors.append("Tahoe-LAFS currently requires Python v2.6 or greater (but less than v3), not %s"
-                            % (version_string,))
-
-    vers_and_locs = dict(_vers_and_locs_list)
-    for requirement in install_requires:
-        try:
-            check_requirement(requirement, vers_and_locs)
-        except (ImportError, PackagingError) as e:
-            fatal_errors.append("%s: %s" % (e.__class__.__name__, e))
-
-    if fatal_errors:
-        raise PackagingError(get_error_string(fatal_errors + _cross_check_errors, debug=True))
-
-check_all_requirements()
-
-
-def get_package_versions():
-    return dict([(k, v) for k, (v, l, c) in _vers_and_locs_list])
-
-def get_package_locations():
-    return dict([(k, l) for k, (v, l, c) in _vers_and_locs_list])
-
-def get_package_versions_string(show_paths=False, debug=False):
-    res = []
-    for p, (v, loc, comment) in _vers_and_locs_list:
-        info = str(p) + ": " + str(v)
-        if comment:
-            info = info + " [%s]" % str(comment)
-        if show_paths:
-            info = info + " (%s)" % str(loc)
-        res.append(info)
-
-    output = "\n".join(res) + "\n"
-
-    if _cross_check_errors:
-        output += get_error_string(_cross_check_errors, debug=debug)
-
-    return output
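With the block above removed, importing allmydata no longer performs any dependency checking or version reporting at start-up (see the newsfragment earlier in this commit); those helpers now live in the new allmydata.version_checks module added later in this diff. A minimal usage sketch of the relocated public API, assuming an installed Tahoe-LAFS; the snippet is illustrative and not part of the patch:

    # Names come from the __all__ list of the new module shown later in this diff;
    # output will vary by installation.
    from allmydata.version_checks import (
        get_package_versions,          # dict: package name -> version string
        get_package_versions_string,   # human-readable report, one package per line
    )

    print(get_package_versions_string(show_paths=True))
    print(get_package_versions().get("tahoe-lafs"))

The remaining hunks below update the callers accordingly.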
@@ -17,7 +17,7 @@ from twisted.application import service
 from twisted.python.failure import Failure
 from foolscap.api import Tub, app_versions
 import foolscap.logging.log
-from allmydata import get_package_versions, get_package_versions_string
+from allmydata.version_checks import get_package_versions, get_package_versions_string
 from allmydata.util import log
 from allmydata.util import fileutil, iputil
 from allmydata.util.assertutil import _assert
@@ -6,6 +6,7 @@ from six.moves import StringIO
 from twisted.python import usage
 from twisted.internet import defer, task, threads
 
+from allmydata.version_checks import get_package_versions_string
 from allmydata.scripts.common import get_default_nodedir
 from allmydata.scripts import debug, create_node, cli, \
     stats_gatherer, admin, magic_folder_cli, tahoe_daemonize, tahoe_start, \

@@ -76,13 +77,11 @@ class Options(usage.Options):
     ]
 
     def opt_version(self):
-        import allmydata
-        print(allmydata.get_package_versions_string(debug=True), file=self.stdout)
+        print(get_package_versions_string(debug=True), file=self.stdout)
         self.no_command_needed = True
 
     def opt_version_and_path(self):
-        import allmydata
-        print(allmydata.get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
+        print(get_package_versions_string(show_paths=True, debug=True), file=self.stdout)
         self.no_command_needed = True
 
     opt_eliot_destination = opt_eliot_destination
@@ -33,6 +33,9 @@ import allmydata.util.log
 from allmydata.node import OldConfigError, OldConfigOptionError, UnescapedHashError, _Config, read_config, create_node_dir
 from allmydata.node import config_from_string
 from allmydata.frontends.auth import NeedRootcapLookupScheme
+from allmydata.version_checks import (
+    get_package_versions_string,
+)
 from allmydata import client
 from allmydata.storage_client import StorageFarmBroker
 from allmydata.util import base32, fileutil, encodingutil

@@ -532,7 +535,7 @@ class Basic(testutil.ReallyEqualMixin, testutil.NonASCIIPathMixin, unittest.Test
         self.failIfEqual(str(allmydata.__version__), "unknown")
         self.failUnless("." in str(allmydata.__full_version__),
                         "non-numeric version in '%s'" % allmydata.__version__)
-        all_versions = allmydata.get_package_versions_string()
+        all_versions = get_package_versions_string()
         self.failUnless(allmydata.__appname__ in all_versions)
         # also test stats
         stats = c.get_stats()
@@ -1,29 +0,0 @@
-
-from twisted.trial import unittest
-from twisted.python.monkey import MonkeyPatcher
-
-import allmydata
-import __builtin__
-
-
-class T(unittest.TestCase):
-    def test_report_import_error(self):
-        marker = "wheeeyo"
-        real_import_func = __import__
-        def raiseIE_from_this_particular_func(name, *args):
-            if name == "foolscap":
-                raise ImportError(marker + " foolscap cant be imported")
-            else:
-                return real_import_func(name, *args)
-
-        # Let's run as little code as possible with __import__ patched.
-        patcher = MonkeyPatcher((__builtin__, '__import__', raiseIE_from_this_particular_func))
-        vers_and_locs, errors = patcher.runWithPatches(allmydata.get_package_versions_and_locations)
-
-        foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap']
-        self.failUnlessEqual(len(foolscap_stuffs), 1)
-        comment = str(foolscap_stuffs[0][2])
-        self.failUnlessIn(marker, comment)
-        self.failUnlessIn('raiseIE_from_this_particular_func', comment)
-
-        self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e])
@@ -23,6 +23,7 @@ from allmydata.util import fileutil, pollmixin
 from allmydata.util.encodingutil import unicode_to_argv, unicode_to_output, \
     get_filesystem_encoding
 from allmydata.test import common_util
+from allmydata.version_checks import normalized_version
 import allmydata
 from allmydata import __appname__
 from .common_util import parse_cli, run_cli

@@ -112,8 +113,6 @@ class BinTahoe(common_util.SignalMixin, unittest.TestCase, RunBinTahoeMixin):
     def test_path(self):
         d = self.run_bintahoe(["--version-and-path"])
         def _cb(res):
-            from allmydata import normalized_version
-
             out, err, rc_or_sig = res
             self.failUnlessEqual(rc_or_sig, 0, str(res))
 
@@ -1,12 +1,16 @@
 
 import sys
 import pkg_resources
-from pkg_resources import Requirement
+from operator import (
+    setitem,
+)
 from twisted.trial import unittest
 
-from allmydata import check_requirement, cross_check, get_package_versions_and_locations, \
-     extract_openssl_version, PackagingError
+from allmydata.version_checks import (
+    _cross_check as cross_check,
+    _extract_openssl_version as extract_openssl_version,
+    _get_package_versions_and_locations as get_package_versions_and_locations,
+)
 from allmydata.util.verlib import NormalizedVersion as V, \
                                   IrrationalVersionError, \
                                   suggest_normalized_version as suggest

@@ -28,43 +32,6 @@ class MockSSL(object):
 
 
 class CheckRequirement(unittest.TestCase):
-    def test_check_requirement(self):
-        self._check_success("setuptools >= 0.6c6", {"setuptools": ("0.6", "", None)})
-        self._check_success("setuptools >= 0.6c6", {"setuptools": ("0.6", "", "distribute")})
-        self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.1.0", "", None)})
-        self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.3.0", "", None)})
-        self._check_success("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.4.1", "", None)})
-        self._check_success("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("11.0.0", "", None)})
-        self._check_success("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("12.2.0", "", None)})
-
-        self._check_success("zope.interface", {"zope.interface": ("unknown", "", None)})
-        self._check_success("mock", {"mock": ("0.6.0", "", None)})
-        self._check_success("foo >= 1.0", {"foo": ("1.0", "", None), "bar": ("2.0", "", None)})
-
-        self._check_success("foolscap[secure_connections] >= 0.6.0", {"foolscap": ("0.7.0", "", None)})
-
-        self._check_failure("foolscap[secure_connections] >= 0.6.0", {"foolscap": ("0.5.1", "", None)})
-        self._check_failure("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.2.0", "", None)})
-        self._check_failure("pycrypto >= 2.1.0, != 2.2, != 2.4", {"pycrypto": ("2.0.0", "", None)})
-        self._check_failure("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("10.2.0", "", None)})
-        self._check_failure("Twisted >= 11.0.0, <= 12.2.0", {"Twisted": ("13.0.0", "", None)})
-        self._check_failure("foo >= 1.0", {})
-
-        self.failUnlessRaises(ImportError, check_requirement,
-                              "foo >= 1.0", {"foo": (None, None, "foomodule")})
-
-    def _check_success(self, req, vers_and_locs):
-        check_requirement(req, vers_and_locs)
-
-        for pkg, ver in vers_and_locs.items():
-            self.failUnless(ver[0] in Requirement.parse(req), str((ver, req)))
-
-    def _check_failure(self, req, vers_and_locs):
-        self.failUnlessRaises(PackagingError, check_requirement, req, vers_and_locs)
-
-        for pkg, ver in vers_and_locs.items():
-            self.failIf(ver[0] in Requirement.parse(req), str((ver, req)))
-
     def test_packages_from_pkg_resources(self):
         if hasattr(sys, 'frozen'):
             raise unittest.SkipTest("This test doesn't apply to frozen builds.")

@@ -270,3 +237,26 @@ class VersionTestCase(unittest.TestCase):
 
         # zetuptoolz
        self.failUnlessEqual(suggest('0.6c16dev3'), '0.6c16.dev3')
+
+
+class T(unittest.TestCase):
+    def test_report_import_error(self):
+        """
+        get_package_versions_and_locations reports a dependency if a dependency
+        cannot be imported.
+        """
+        # Make sure we don't leave the system in a bad state.
+        self.addCleanup(
+            lambda foolscap=sys.modules["foolscap"]: setitem(
+                sys.modules,
+                "foolscap",
+                foolscap,
+            ),
+        )
+        # Make it look like Foolscap isn't installed.
+        sys.modules["foolscap"] = None
+        vers_and_locs, errors = get_package_versions_and_locations()
+
+        foolscap_stuffs = [stuff for (pkg, stuff) in vers_and_locs if pkg == 'foolscap']
+        self.failUnlessEqual(len(foolscap_stuffs), 1)
+        self.failUnless([e for e in errors if "dependency \'foolscap\' could not be imported" in e])
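The replacement test above simulates a missing dependency by storing None in sys.modules["foolscap"], which makes any subsequent import of that name raise ImportError, and registers an addCleanup that puts the real module object back. A small self-contained sketch of the same trick outside of trial, with the module name "json" chosen purely for illustration:

    import sys

    saved = sys.modules.get("json")
    sys.modules["json"] = None   # any later "import json" now raises ImportError
    try:
        try:
            import json
        except ImportError:
            print("import blocked, as the version check would observe")
    finally:
        # Put sys.modules back exactly as it was.
        if saved is None:
            del sys.modules["json"]
        else:
            sys.modules["json"] = saved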
src/allmydata/version_checks.py (new file, 378 lines)
@@ -0,0 +1,378 @@
+"""
+Produce reports about the versions of Python software in use by Tahoe-LAFS
+for debugging and auditing purposes.
+"""
+
+__all__ = [
+    "PackagingError",
+    "get_package_versions",
+    "get_package_versions_string",
+    "normalized_version",
+]
+
+import os, platform, re, subprocess, sys, traceback
+
+import six
+
+from . import (
+    __appname__,
+    full_version,
+    branch,
+)
+from .util import (
+    verlib,
+)
+
+class PackagingError(EnvironmentError):
+    """
+    Raised when there is an error in packaging of Tahoe-LAFS or its
+    dependencies which makes it impossible to proceed safely.
+    """
+
+def get_package_versions():
+    return dict([(k, v) for k, (v, l, c) in _vers_and_locs_list])
+
+def get_package_versions_string(show_paths=False, debug=False):
+    res = []
+    for p, (v, loc, comment) in _vers_and_locs_list:
+        info = str(p) + ": " + str(v)
+        if comment:
+            info = info + " [%s]" % str(comment)
+        if show_paths:
+            info = info + " (%s)" % str(loc)
+        res.append(info)
+
+    output = "\n".join(res) + "\n"
+
+    if _cross_check_errors:
+        output += _get_error_string(_cross_check_errors, debug=debug)
+
+    return output
+
+_distributor_id_cmdline_re = re.compile("(?:Distributor ID:)\s*(.*)", re.I)
+_release_cmdline_re = re.compile("(?:Release:)\s*(.*)", re.I)
+
+_distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I)
+_release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I)
+
+_distname = None
+_version = None
+
+def normalized_version(verstr, what=None):
+    try:
+        suggested = verlib.suggest_normalized_version(verstr) or verstr
+        return verlib.NormalizedVersion(suggested)
+    except verlib.IrrationalVersionError:
+        raise
+    except StandardError:
+        cls, value, trace = sys.exc_info()
+        new_exc = PackagingError("could not parse %s due to %s: %s"
+                                 % (what or repr(verstr), cls.__name__, value))
+        six.reraise(cls, new_exc, trace)
+
+def _get_error_string(errors, debug=False):
+    from allmydata._auto_deps import install_requires
+
+    msg = "\n%s\n" % ("\n".join(errors),)
+    if debug:
+        msg += ("\n"
+                "For debugging purposes, the PYTHONPATH was\n"
+                "  %r\n"
+                "install_requires was\n"
+                "  %r\n"
+                "sys.path after importing pkg_resources was\n"
+                "  %s\n"
+                % (os.environ.get('PYTHONPATH'), install_requires, (os.pathsep+"\n  ").join(sys.path)) )
+    return msg
+
+def _cross_check(pkg_resources_vers_and_locs, imported_vers_and_locs_list):
+    """This function returns a list of errors due to any failed cross-checks."""
+
+    from _auto_deps import not_import_versionable
+
+    errors = []
+    not_pkg_resourceable = ['python', 'platform', __appname__.lower(), 'openssl']
+
+    for name, (imp_ver, imp_loc, imp_comment) in imported_vers_and_locs_list:
+        name = name.lower()
+        if name not in not_pkg_resourceable:
+            if name not in pkg_resources_vers_and_locs:
+                if name == "setuptools" and "distribute" in pkg_resources_vers_and_locs:
+                    pr_ver, pr_loc = pkg_resources_vers_and_locs["distribute"]
+                    if not (os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc))
+                            and imp_comment == "distribute"):
+                        errors.append("Warning: dependency 'setuptools' found to be version %r of 'distribute' from %r "
+                                      "by pkg_resources, but 'import setuptools' gave version %r [%s] from %r. "
+                                      "A version mismatch is expected, but a location mismatch is not."
+                                      % (pr_ver, pr_loc, imp_ver, imp_comment or 'probably *not* distribute', imp_loc))
+                else:
+                    errors.append("Warning: dependency %r (version %r imported from %r) was not found by pkg_resources."
+                                  % (name, imp_ver, imp_loc))
+                continue
+
+            pr_ver, pr_loc = pkg_resources_vers_and_locs[name]
+            if imp_ver is None and imp_loc is None:
+                errors.append("Warning: dependency %r could not be imported. pkg_resources thought it should be possible "
+                              "to import version %r from %r.\nThe exception trace was %r."
+                              % (name, pr_ver, pr_loc, imp_comment))
+                continue
+
+            # If the pkg_resources version is identical to the imported version, don't attempt
+            # to normalize them, since it is unnecessary and may fail (ticket #2499).
+            if imp_ver != 'unknown' and pr_ver == imp_ver:
+                continue
+
+            try:
+                pr_normver = normalized_version(pr_ver)
+            except verlib.IrrationalVersionError:
+                continue
+            except Exception as e:
+                errors.append("Warning: version number %r found for dependency %r by pkg_resources could not be parsed. "
+                              "The version found by import was %r from %r. "
+                              "pkg_resources thought it should be found at %r. "
+                              "The exception was %s: %s"
+                              % (pr_ver, name, imp_ver, imp_loc, pr_loc, e.__class__.__name__, e))
+            else:
+                if imp_ver == 'unknown':
+                    if name not in not_import_versionable:
+                        errors.append("Warning: unexpectedly could not find a version number for dependency %r imported from %r. "
+                                      "pkg_resources thought it should be version %r at %r."
+                                      % (name, imp_loc, pr_ver, pr_loc))
+                else:
+                    try:
+                        imp_normver = normalized_version(imp_ver)
+                    except verlib.IrrationalVersionError:
+                        continue
+                    except Exception as e:
+                        errors.append("Warning: version number %r found for dependency %r (imported from %r) could not be parsed. "
+                                      "pkg_resources thought it should be version %r at %r. "
+                                      "The exception was %s: %s"
+                                      % (imp_ver, name, imp_loc, pr_ver, pr_loc, e.__class__.__name__, e))
+                    else:
+                        if pr_ver == 'unknown' or (pr_normver != imp_normver):
+                            if not os.path.normpath(os.path.realpath(pr_loc)) == os.path.normpath(os.path.realpath(imp_loc)):
+                                errors.append("Warning: dependency %r found to have version number %r (normalized to %r, from %r) "
+                                              "by pkg_resources, but version %r (normalized to %r, from %r) by import."
+                                              % (name, pr_ver, str(pr_normver), pr_loc, imp_ver, str(imp_normver), imp_loc))
+
+    return errors
+
+def _get_openssl_version():
+    try:
+        from OpenSSL import SSL
+        return _extract_openssl_version(SSL)
+    except Exception:
+        return ("unknown", None, None)
+
+def _extract_openssl_version(ssl_module):
+    openssl_version = ssl_module.SSLeay_version(ssl_module.SSLEAY_VERSION)
+    if openssl_version.startswith('OpenSSL '):
+        openssl_version = openssl_version[8 :]
+
+    (version, _, comment) = openssl_version.partition(' ')
+
+    try:
+        openssl_cflags = ssl_module.SSLeay_version(ssl_module.SSLEAY_CFLAGS)
+        if '-DOPENSSL_NO_HEARTBEATS' in openssl_cflags.split(' '):
+            comment += ", no heartbeats"
+    except Exception:
+        pass
+
+    return (version, None, comment if comment else None)
+
+def _get_linux_distro():
+    """ Tries to determine the name of the Linux OS distribution name.
+
+    First, try to parse a file named "/etc/lsb-release". If it exists, and
+    contains the "DISTRIB_ID=" line and the "DISTRIB_RELEASE=" line, then return
+    the strings parsed from that file.
+
+    If that doesn't work, then invoke platform.dist().
+
+    If that doesn't work, then try to execute "lsb_release", as standardized in
+    2001:
+
+    http://refspecs.freestandards.org/LSB_1.0.0/gLSB/lsbrelease.html
+
+    The current version of the standard is here:
+
+    http://refspecs.freestandards.org/LSB_3.2.0/LSB-Core-generic/LSB-Core-generic/lsbrelease.html
+
+    that lsb_release emitted, as strings.
+
+    Returns a tuple (distname,version). Distname is what LSB calls a
+    "distributor id", e.g. "Ubuntu". Version is what LSB calls a "release",
+    e.g. "8.04".
+
+    A version of this has been submitted to python as a patch for the standard
+    library module "platform":
+
+    http://bugs.python.org/issue3937
+    """
+    global _distname,_version
+    if _distname and _version:
+        return (_distname, _version)
+
+    try:
+        etclsbrel = open("/etc/lsb-release", "rU")
+        for line in etclsbrel:
+            m = _distributor_id_file_re.search(line)
+            if m:
+                _distname = m.group(1).strip()
+                if _distname and _version:
+                    return (_distname, _version)
+            m = _release_file_re.search(line)
+            if m:
+                _version = m.group(1).strip()
+                if _distname and _version:
+                    return (_distname, _version)
+    except EnvironmentError:
+        pass
+
+    (_distname, _version) = platform.dist()[:2]
+    if _distname and _version:
+        return (_distname, _version)
+
+    if os.path.isfile("/usr/bin/lsb_release") or os.path.isfile("/bin/lsb_release"):
+        try:
+            p = subprocess.Popen(["lsb_release", "--all"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            rc = p.wait()
+            if rc == 0:
+                for line in p.stdout.readlines():
+                    m = _distributor_id_cmdline_re.search(line)
+                    if m:
+                        _distname = m.group(1).strip()
+                        if _distname and _version:
+                            return (_distname, _version)
+
+                    m = _release_cmdline_re.search(p.stdout.read())
+                    if m:
+                        _version = m.group(1).strip()
+                        if _distname and _version:
+                            return (_distname, _version)
+        except EnvironmentError:
+            pass
+
+    if os.path.exists("/etc/arch-release"):
+        return ("Arch_Linux", "")
+
+    return (_distname,_version)
+
+def _get_platform():
+    # Our version of platform.platform(), telling us both less and more than the
+    # Python Standard Library's version does.
+    # We omit details such as the Linux kernel version number, but we add a
+    # more detailed and correct rendition of the Linux distribution and
+    # distribution-version.
+    if "linux" in platform.system().lower():
+        return (
+            platform.system() + "-" +
+            "_".join(_get_linux_distro()) + "-" +
+            platform.machine() + "-" +
+            "_".join([x for x in platform.architecture() if x])
+        )
+    else:
+        return platform.platform()
+
+def _get_package_versions_and_locations():
+    import warnings
+    from _auto_deps import package_imports, global_deprecation_messages, deprecation_messages, \
+        runtime_warning_messages, warning_imports, ignorable
+
+    def package_dir(srcfile):
+        return os.path.dirname(os.path.dirname(os.path.normcase(os.path.realpath(srcfile))))
+
+    # pkg_resources.require returns the distribution that pkg_resources attempted to put
+    # on sys.path, which can differ from the one that we actually import due to #1258,
+    # or any other bug that causes sys.path to be set up incorrectly. Therefore we
+    # must import the packages in order to check their versions and paths.
+
+    # This is to suppress all UserWarnings and various DeprecationWarnings and RuntimeWarnings
+    # (listed in _auto_deps.py).
+
+    warnings.filterwarnings("ignore", category=UserWarning, append=True)
+
+    for msg in global_deprecation_messages + deprecation_messages:
+        warnings.filterwarnings("ignore", category=DeprecationWarning, message=msg, append=True)
+    for msg in runtime_warning_messages:
+        warnings.filterwarnings("ignore", category=RuntimeWarning, message=msg, append=True)
+    try:
+        for modulename in warning_imports:
+            try:
+                __import__(modulename)
+            except ImportError:
+                pass
+    finally:
+        # Leave suppressions for UserWarnings and global_deprecation_messages active.
+        for _ in runtime_warning_messages + deprecation_messages:
+            warnings.filters.pop()
+
+    packages = []
+    pkg_resources_vers_and_locs = dict()
+
+    if not hasattr(sys, 'frozen'):
+        import pkg_resources
+        from _auto_deps import install_requires
+
+        pkg_resources_vers_and_locs = dict([(p.project_name.lower(), (str(p.version), p.location))
+                                            for p in pkg_resources.require(install_requires)])
+
+    def get_version(module):
+        if hasattr(module, '__version__'):
+            return str(getattr(module, '__version__'))
+        elif hasattr(module, 'version'):
+            ver = getattr(module, 'version')
+            if isinstance(ver, tuple):
+                return '.'.join(map(str, ver))
+            else:
+                return str(ver)
+        else:
+            return 'unknown'
+
+    for pkgname, modulename in [(__appname__, 'allmydata')] + package_imports:
+        if modulename:
+            try:
+                __import__(modulename)
+                module = sys.modules[modulename]
+            except ImportError:
+                etype, emsg, etrace = sys.exc_info()
+                trace_info = (etype, str(emsg), ([None] + traceback.extract_tb(etrace))[-1])
+                packages.append( (pkgname, (None, None, trace_info)) )
+            else:
+                comment = None
+                if pkgname == __appname__:
+                    comment = "%s: %s" % (branch, full_version)
+                elif pkgname == 'setuptools' and hasattr(module, '_distribute'):
+                    # distribute does not report its version in any module variables
+                    comment = 'distribute'
+                ver = get_version(module)
+                loc = package_dir(module.__file__)
+                if ver == "unknown" and pkgname in pkg_resources_vers_and_locs:
+                    (pr_ver, pr_loc) = pkg_resources_vers_and_locs[pkgname]
+                    if loc == os.path.normcase(os.path.realpath(pr_loc)):
+                        ver = pr_ver
+                packages.append( (pkgname, (ver, loc, comment)) )
+        elif pkgname == 'python':
+            packages.append( (pkgname, (platform.python_version(), sys.executable, None)) )
+        elif pkgname == 'platform':
+            packages.append( (pkgname, (_get_platform(), None, None)) )
+        elif pkgname == 'OpenSSL':
+            packages.append( (pkgname, _get_openssl_version()) )
+
+    cross_check_errors = []
+
+    if len(pkg_resources_vers_and_locs) > 0:
+        imported_packages = set([p.lower() for (p, _) in packages])
+        extra_packages = []
+
+        for pr_name, (pr_ver, pr_loc) in pkg_resources_vers_and_locs.iteritems():
+            if pr_name not in imported_packages and pr_name not in ignorable:
+                extra_packages.append( (pr_name, (pr_ver, pr_loc, "according to pkg_resources")) )
+
+        cross_check_errors = _cross_check(pkg_resources_vers_and_locs, packages)
+        packages += extra_packages
+
+    return packages, cross_check_errors
+
+
+_vers_and_locs_list, _cross_check_errors = _get_package_versions_and_locations()
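For orientation, a quick illustration of the version-normalization helper exposed by this new module; the snippet is not part of the patch and the version strings are invented, except that the last comparison mirrors the '0.6c16dev3' case exercised in the test suite above:

    from allmydata.version_checks import normalized_version

    assert normalized_version("1.9.0a1") < normalized_version("1.9.0")
    assert normalized_version("0.6c16dev3") == normalized_version("0.6c16.dev3")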
@@ -5,7 +5,7 @@ from nevow.static import File as nevow_File
 from nevow.util import resource_filename
 import allmydata
 import json
-from allmydata import get_package_versions_string
+from allmydata.version_checks import get_package_versions_string
 from allmydata.util import idlib
 from allmydata.web.common import (
     getxmlfile,

@@ -7,7 +7,7 @@ from nevow.static import File as nevow_File # TODO: merge with static.File?
 from nevow.util import resource_filename
 
 import allmydata # to display import path
-from allmydata import get_package_versions_string
+from allmydata.version_checks import get_package_versions_string
 from allmydata.util import log
 from allmydata.interfaces import IFileNode
 from allmydata.web import filenode, directory, unlinked, status, operations