Merge pull request #792 from jaraco/3394.py38-tests
Improve compatibility for later Pythons and restore the tahoe command on Python 3. Fixes: ticket:3394
This commit is contained in: commit 23e1223c94
newsfragments/3394.minor (new empty file)
setup.py (2 lines changed)
@@ -366,7 +366,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
       classifiers=trove_classifiers,
       # We support Python 2.7, and we're working on support for 3.6 (the
       # highest version that PyPy currently supports).
-      python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
+      python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*",
       install_requires=install_requires,
       extras_require={
           # Duplicate the Twisted pywin32 dependency here. See
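This is the hunk that unblocks installation on newer interpreters: pip consults python_requires before installing, so the old `<3.7` cap made every Python 3.7+ refuse the package outright. A minimal sketch of how such a specifier is evaluated, using the third-party `packaging` library (illustration only, not part of this PR):

    # How pip-style tools evaluate a python_requires specifier.
    from packaging.specifiers import SpecifierSet

    old = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7")
    new = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*")

    print("3.8" in old)  # False -- installation would be refused
    print("3.8" in new)  # True  -- the upper bound is gone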
@@ -559,7 +559,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
     """I represent a file handle to a particular file on an SFTP connection.
     I am used only for short immutable files opened in read-only mode.
     When I am created, the file contents start to be downloaded to memory.
-    self.async is used to delay read requests until the download has finished."""
+    self.async_ is used to delay read requests until the download has finished."""

     def __init__(self, userpath, filenode, metadata):
         PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath)
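All of the `self.async` to `self.async_` renames in this SFTP code exist because `async` (alongside `await`) became a reserved keyword in Python 3.7, so a module using it as a name fails to compile there at all. A small demonstration of the failure mode and the PEP 8 trailing-underscore convention:

    # "async" stopped being a legal identifier in Python 3.7.
    try:
        compile("handle.async = None", "<demo>", "exec")
        print("parses on Python 2 / <= 3.6")
    except SyntaxError:
        print("SyntaxError on Python >= 3.7: 'async' is a keyword")

    # PEP 8's convention for dodging a keyword clash is a single
    # trailing underscore -- hence self.async_:
    compile("handle.async_ = None", "<demo>", "exec")  # parses everywhere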
@@ -569,7 +569,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
                                 userpath=userpath, filenode=filenode)
         self.filenode = filenode
         self.metadata = metadata
-        self.async = download_to_data(filenode)
+        self.async_ = download_to_data(filenode)
         self.closed = False

     def readChunk(self, offset, length):
@@ -598,7 +598,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
             else:
                 eventually_callback(d)(data[offset:offset+length]) # truncated if offset+length > len(data)
             return data
-        self.async.addCallbacks(_read, eventually_errback(d))
+        self.async_.addCallbacks(_read, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -639,7 +639,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
     storing the file contents. In order to allow write requests to be satisfied
     immediately, there is effectively a FIFO queue between requests made to this
     file handle, and requests to my OverwriteableFileConsumer. This queue is
-    implemented by the callback chain of self.async.
+    implemented by the callback chain of self.async_.

     When first constructed, I am in an 'unopened' state that causes most
     operations to be delayed until 'open' is called."""
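A sketch (not Tahoe's actual code) of the "FIFO queue implemented by the callback chain" idea this docstring describes: callbacks added to a Deferred before it fires stay queued, and run in the order they were added once it does:

    from twisted.internet import defer

    queue = defer.Deferred()

    def op(result, label):
        print("running %s (previous result: %r)" % (label, result))
        return label

    # Write requests queue up behind the still-unfinished "open" step.
    queue.addCallback(op, "first write")
    queue.addCallback(op, "second write")

    # When the initial step completes, queued operations run in FIFO order.
    queue.callback("open finished")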
@@ -654,7 +654,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.flags = flags
         self.close_notify = close_notify
         self.convergence = convergence
-        self.async = defer.Deferred()
+        self.async_ = defer.Deferred()
         # Creating or truncating the file is a change, but if FXF_EXCL is set, a zero-length file has already been created.
         self.has_changed = (flags & (FXF_CREAT | FXF_TRUNC)) and not (flags & FXF_EXCL)
         self.closed = False
@@ -664,7 +664,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.filenode = None
         self.metadata = None

-        # self.consumer should only be relied on in callbacks for self.async, since it might
+        # self.consumer should only be relied on in callbacks for self.async_, since it might
         # not be set before then.
         self.consumer = None
@@ -691,7 +691,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             self.consumer = OverwriteableFileConsumer(0, tempfile_maker)
             self.consumer.download_done("download not needed")
         else:
-            self.async.addCallback(lambda ignored: filenode.get_best_readable_version())
+            self.async_.addCallback(lambda ignored: filenode.get_best_readable_version())

             def _read(version):
                 if noisy: self.log("_read", level=NOISY)
@@ -707,9 +707,9 @@ class GeneralSFTPFile(PrefixingLogMixin):
                 self.consumer.download_done(res)
             d.addBoth(_finished)
             # It is correct to drop d here.
-            self.async.addCallback(_read)
+            self.async_.addCallback(_read)

-        eventually_callback(self.async)(None)
+        eventually_callback(self.async_)(None)

         if noisy: self.log("open done", level=NOISY)
         return self
@@ -739,7 +739,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(".sync()", level=OPERATIONAL)

         d = defer.Deferred()
-        self.async.addBoth(eventually_callback(d))
+        self.async_.addBoth(eventually_callback(d))
         def _done(res):
             if noisy: self.log("_done(%r) in .sync()" % (res,), level=NOISY)
             return res
@@ -765,7 +765,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             d2.addBoth(eventually_callback(d))
             # It is correct to drop d2 here.
             return None
-        self.async.addCallbacks(_read, eventually_errback(d))
+        self.async_.addCallbacks(_read, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -802,8 +802,8 @@ class GeneralSFTPFile(PrefixingLogMixin):
             self.consumer.overwrite(write_offset, data)
             if noisy: self.log("overwrite done", level=NOISY)
             return None
-        self.async.addCallback(_write)
-        # don't addErrback to self.async, just allow subsequent async ops to fail.
+        self.async_.addCallback(_write)
+        # don't addErrback to self.async_, just allow subsequent async ops to fail.
         return defer.succeed(None)

     def _do_close(self, res, d=None):
@@ -812,7 +812,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         if self.consumer:
             status = self.consumer.close()

-        # We must close_notify before re-firing self.async.
+        # We must close_notify before re-firing self.async_.
         if self.close_notify:
             self.close_notify(self.userpath, self.parent, self.childname, self)
@@ -841,7 +841,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             # download.) Any reads that depended on file content that could not be downloaded
             # will have failed. It is important that we don't close the consumer until
             # previous read operations have completed.
-            self.async.addBoth(self._do_close)
+            self.async_.addBoth(self._do_close)
             return defer.succeed(None)

         # We must capture the abandoned, parent, and childname variables synchronously
@@ -875,16 +875,16 @@ class GeneralSFTPFile(PrefixingLogMixin):
             return d2

         # If the file has been abandoned, we don't want the close operation to get "stuck",
-        # even if self.async fails to re-fire. Completing the close independently of self.async
+        # even if self.async_ fails to re-fire. Completing the close independently of self.async_
         # in that case should ensure that dropping an ssh connection is sufficient to abandon
         # any heisenfiles that were not explicitly closed in that connection.
         if abandoned or not has_changed:
             d = defer.succeed(None)
-            self.async.addBoth(self._do_close)
+            self.async_.addBoth(self._do_close)
         else:
             d = defer.Deferred()
-            self.async.addCallback(_commit)
-            self.async.addBoth(self._do_close, d)
+            self.async_.addCallback(_commit)
+            self.async_.addBoth(self._do_close, d)
         d.addBoth(_convert_error, request)
         return d
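Note the pattern here: `_do_close` is attached with `addBoth`, so it runs whether the queued chain succeeded or failed — the Deferred analogue of a `finally` clause. A minimal sketch under assumed names (this is not the real `_do_close`):

    from twisted.internet import defer

    def do_close(result):
        # Runs on success *and* on failure, like "finally".
        print("closing; chain ended with %r" % (result,))
        return None  # returning a plain value absorbs a prior failure

    d = defer.Deferred()
    d.addCallback(lambda _: 1 / 0)  # a queued operation that fails
    d.addBoth(do_close)             # still runs despite the failure
    d.callback("open finished")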
@@ -908,7 +908,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             attrs = _populate_attrs(self.filenode, self.metadata, size=self.consumer.get_current_size())
             eventually_callback(d)(attrs)
             return None
-        self.async.addCallbacks(_get, eventually_errback(d))
+        self.async_.addCallbacks(_get, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -946,7 +946,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             self.consumer.set_current_size(size)
             eventually_callback(d)(None)
             return None
-        self.async.addCallbacks(_set, eventually_errback(d))
+        self.async_.addCallbacks(_set, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -9,10 +9,7 @@ import os.path
 import re
 import types
 import errno
-try:
-    import ConfigParser
-except ImportError:
-    import configparser as ConfigParser
+from six.moves import configparser
 import tempfile
 from io import BytesIO
 from base64 import b32decode, b32encode
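six.moves gives one import that resolves to the `ConfigParser` module on Python 2 and `configparser` on Python 3, which is why the try/except import dance above can be deleted. Illustrative use (not from this diff):

    from six.moves import configparser

    # The parser classes and exception types resolve to the same
    # objects under either Python.
    parser = configparser.RawConfigParser()
    try:
        parser.items("storage")
    except configparser.NoSectionError as e:
        print("no such section:", e)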
@@ -185,7 +182,7 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):

     # (try to) read the main config file
     config_fname = os.path.join(basedir, "tahoe.cfg")
-    parser = ConfigParser.SafeConfigParser()
+    parser = configparser.SafeConfigParser()
     try:
         parser = configutil.get_config(config_fname)
     except EnvironmentError as e:
@@ -208,7 +205,7 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None):
         _valid_config = _common_valid_config()

     # load configuration from in-memory string
-    parser = ConfigParser.SafeConfigParser()
+    parser = configparser.SafeConfigParser()
     parser.readfp(BytesIO(config_str))

     fname = "<in-memory>"
@@ -303,7 +300,7 @@ class _Config(object):
     def items(self, section, default=_None):
         try:
             return self.config.items(section)
-        except ConfigParser.NoSectionError:
+        except configparser.NoSectionError:
             if default is _None:
                 raise
             return default
@@ -318,7 +315,7 @@ class _Config(object):
                 raise UnescapedHashError(section, option, item)

             return item
-        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+        except (configparser.NoOptionError, configparser.NoSectionError):
             if default is _None:
                 raise MissingConfigEntry(
                     "{} is missing the [{}]{} entry".format(
@@ -2,7 +2,7 @@ from __future__ import print_function

 import os, sys, urllib, textwrap
 import codecs
-from ConfigParser import NoSectionError
+from six.moves.configparser import NoSectionError
 from os.path import join
 from twisted.python import usage
 from allmydata.util.assertutil import precondition
@@ -2,7 +2,8 @@ from __future__ import print_function

 import os
 from six.moves import cStringIO as StringIO
-import urlparse, httplib
+from six.moves import urllib, http_client
+import six
 import allmydata # for __full_version__

 from allmydata.util.encodingutil import quote_output
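In the same spirit, six.moves.urllib mirrors Python 3's package layout (so the old urlparse functions live under urllib.parse), and http_client resolves to httplib on Python 2 and http.client on Python 3. A quick illustration — constructing the connection object does not open a socket, so this is safe to run:

    from six.moves import urllib, http_client

    parts = urllib.parse.urlparse("http://127.0.0.1:3456/uri?t=json")
    conn = http_client.HTTPConnection(parts.hostname, parts.port)
    print(conn.host, conn.port)  # 127.0.0.1 3456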
@@ -12,9 +13,9 @@ from socket import error as socket_error
 # copied from twisted/web/client.py
 def parse_url(url, defaultPort=None):
     url = url.strip()
-    parsed = urlparse.urlparse(url)
+    parsed = urllib.parse.urlparse(url)
     scheme = parsed[0]
-    path = urlparse.urlunparse(('','')+parsed[2:])
+    path = urllib.parse.urlunparse(('','')+parsed[2:])
     if defaultPort is None:
         if scheme == 'https':
             defaultPort = 443
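The `('','') + parsed[2:]` idiom rebuilds everything after the netloc, so the path keeps its query string. The ported code's expected behavior, as a self-contained check:

    from six.moves import urllib

    parsed = urllib.parse.urlparse("http://127.0.0.1:3456/uri?t=json")
    path = urllib.parse.urlunparse(('', '') + tuple(parsed[2:]))
    assert parsed[0] == "http"
    assert path == "/uri?t=json"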
@@ -40,7 +41,7 @@ class BadResponse(object):
 def do_http(method, url, body=""):
     if isinstance(body, str):
         body = StringIO(body)
-    elif isinstance(body, unicode):
+    elif isinstance(body, six.text_type):
         raise TypeError("do_http body must be a bytestring, not unicode")
     else:
         # We must give a Content-Length header to twisted.web, otherwise it
@@ -51,9 +52,9 @@ def do_http(method, url, body=""):
     assert body.read
     scheme, host, port, path = parse_url(url)
     if scheme == "http":
-        c = httplib.HTTPConnection(host, port)
+        c = http_client.HTTPConnection(host, port)
     elif scheme == "https":
-        c = httplib.HTTPSConnection(host, port)
+        c = http_client.HTTPSConnection(host, port)
     else:
         raise ValueError("unknown scheme '%s', need http or https" % scheme)
     c.putrequest(method, path)
@@ -1,17 +1,19 @@

 import sys
+
+import six
 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode


 _default_nodedir = None
 if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, unicode), path)
+        precondition(isinstance(path, six.text_type), path)
         _default_nodedir = abspath_expanduser_unicode(path)

 if _default_nodedir is None:
     path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, unicode), path)
+    precondition(isinstance(path, six.text_type), path)
     _default_nodedir = path
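six.text_type is `unicode` on Python 2 and `str` on Python 3, so the precondition keeps meaning "this is text, not bytes" on both:

    import six

    assert isinstance(u"~/.tahoe", six.text_type)       # true on 2 and 3
    assert not isinstance(b"~/.tahoe", six.text_type)   # bytes are rejected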
@@ -2,6 +2,7 @@ from __future__ import print_function

 import os, sys
 from six.moves import StringIO
+import six

 from twisted.python import usage
 from twisted.internet import defer, task, threads
@@ -71,8 +72,8 @@ class Options(usage.Options):
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", unicode],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", unicode],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
     ]

     def opt_version(self):
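The fifth element of an optParameters row is a coercion callable that twisted.python.usage applies to the parsed value, so swapping `unicode` for `six.text_type` keeps the coercion working on both Pythons. A hypothetical options class showing the mechanism:

    import six
    from twisted.python import usage

    class DemoOptions(usage.Options):  # illustration only
        optParameters = [
            ["server", None, u"ws://example.invalid:4000/v1", "Server URL.", six.text_type],
        ]

    opts = DemoOptions()
    opts.parseOptions(["--server", "ws://other.invalid:4000/v1"])
    print(type(opts["server"]))  # unicode on Py2, str on Py3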
@@ -180,6 +181,8 @@ def _maybe_enable_eliot_logging(options, reactor):
     return options


 def run():
+    # TODO(3035): Remove tox-check when error becomes a warning
+    if 'TOX_ENV_NAME' not in os.environ:
         assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."

     if sys.platform == "win32":
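tox exports TOX_ENV_NAME to every environment it runs, so under tox the Python 3 assertion is skipped and CI can exercise the partially ported code, while someone running `tahoe` directly on Python 3 still gets the explicit message. The guard in isolation (environment name is an assumed example):

    import os, sys

    os.environ['TOX_ENV_NAME'] = 'py36'  # simulate running under tox
    if 'TOX_ENV_NAME' not in os.environ:
        assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3."
    print("reached: the assertion was skipped")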
@@ -809,7 +809,7 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Simulate a connection error
         def _socket_error(*args, **kwargs):
             raise socket_error('test error')
-        self.patch(allmydata.scripts.common_http.httplib.HTTPConnection,
+        self.patch(allmydata.scripts.common_http.http_client.HTTPConnection,
                    "endheaders", _socket_error)

         d = self.do_cli("mkdir")
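The test only needed its patch target renamed to follow the import change. trial's TestCase.patch monkeypatches an attribute and restores it automatically when the test ends; a hypothetical test showing the mechanism:

    import socket
    from twisted.trial import unittest

    class DemoTest(unittest.TestCase):  # illustration only
        def test_patch(self):
            self.patch(socket, "getfqdn", lambda: "example.invalid")
            self.assertEqual(socket.getfqdn(), "example.invalid")
            # the original socket.getfqdn is restored after the test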
tox.ini (4 lines changed)
@@ -58,9 +58,7 @@ commands =
     # version pinning we do limits the variability of this output
     pip freeze

-    # The tahoe script isn't sufficiently ported for this to succeed on
-    # Python 3.x yet.
-    !py36: tahoe --version
+    tahoe --version

     !coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}
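The `!py36:` prefix is tox's factor-conditional syntax: a command so marked runs only in environments whose name does not include the py36 factor (the `!coverage:` line in the hunk uses the same mechanism). Deleting the prefix makes `tahoe --version` run in every environment, including Python 3 — the "restore tahoe command on Python 3" half of this PR. An abbreviated illustration, with placeholder module names rather than Tahoe's full tox.ini:

    [testenv]
    commands =
        tahoe --version
        py36: trial allmydata.test.python3_tests
        !py36: trial allmydata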