Merge 'origin/master' into 3382.remove-multi-format-page

commit 0ca3192640
Sajith Sasidharan, 2020-09-15 18:25:07 -04:00
21 changed files with 207 additions and 74 deletions

.circleci/config.yml

@@ -1,8 +1,17 @@
 # https://circleci.com/docs/2.0/
-version: 2
+# We use version 2.1 of CircleCI's configuration format (the docs are still at
+# the 2.0 link) in order to have access to Windows executors. This means we
+# can't use dots in job names anymore. They have a new "parameters" feature
+# that is supposed to remove the need to have version numbers in job names (the
+# source of our dots), but switching to that is going to be a bigger refactor:
+#
+#   https://discuss.circleci.com/t/v2-1-job-name-validation/31123
+#   https://circleci.com/docs/2.0/reusing-config/
+#
+version: 2.1

 workflows:
   version: 2
   ci:
     jobs:
       # Platforms
@@ -11,13 +20,13 @@ workflows:
           requires:
             - "debian-9"

-      - "ubuntu-20.04"
-      - "ubuntu-18.04":
+      - "ubuntu-20-04"
+      - "ubuntu-18-04":
           requires:
-            - "ubuntu-20.04"
-      - "ubuntu-16.04":
+            - "ubuntu-20-04"
+      - "ubuntu-16-04":
           requires:
-            - "ubuntu-20.04"
+            - "ubuntu-20-04"

       - "fedora-29"
       - "fedora-28":
@@ -26,13 +35,13 @@ workflows:
       - "centos-8"

-      - "nixos-19.09"
+      - "nixos-19-09"

       # Test against PyPy 2.7
-      - "pypy2.7-buster"
+      - "pypy27-buster"

       # Just one Python 3.6 configuration while the port is in-progress.
-      - "python3.6"
+      - "python36"

       # Other assorted tasks and configurations
       - "lint"
@@ -67,13 +76,13 @@
     jobs:
       - "build-image-debian-8"
       - "build-image-debian-9"
-      - "build-image-ubuntu-16.04"
-      - "build-image-ubuntu-18.04"
-      - "build-image-ubuntu-20.04"
+      - "build-image-ubuntu-16-04"
+      - "build-image-ubuntu-18-04"
+      - "build-image-ubuntu-20-04"
       - "build-image-fedora-28"
       - "build-image-fedora-29"
       - "build-image-centos-8"
-      - "build-image-pypy-2.7-buster"
+      - "build-image-pypy27-buster"
       - "build-image-python36-ubuntu"
@@ -203,7 +212,7 @@ jobs:
         user: "nobody"


-  pypy2.7-buster:
+  pypy27-buster:
     <<: *DEBIAN
     docker:
       - image: "tahoelafsci/pypy:buster-py2"
@@ -260,21 +269,21 @@ jobs:
       - run: *RUN_TESTS


-  ubuntu-16.04:
+  ubuntu-16-04:
     <<: *DEBIAN
     docker:
       - image: "tahoelafsci/ubuntu:16.04-py2.7"
         user: "nobody"


-  ubuntu-18.04: &UBUNTU_18_04
+  ubuntu-18-04: &UBUNTU_18_04
     <<: *DEBIAN
     docker:
       - image: "tahoelafsci/ubuntu:18.04-py2.7"
         user: "nobody"


-  python3.6:
+  python36:
     <<: *UBUNTU_18_04
     docker:
       - image: "tahoelafsci/ubuntu:18.04-py3"
@@ -289,7 +298,7 @@ jobs:
       TAHOE_LAFS_TOX_ENVIRONMENT: "py36-coverage"


-  ubuntu-20.04:
+  ubuntu-20-04:
     <<: *DEBIAN
     docker:
       - image: "tahoelafsci/ubuntu:20.04"
@@ -331,7 +340,7 @@ jobs:
         user: "nobody"


-  nixos-19.09:
+  nixos-19-09:
     docker:
       # Run in a highly Nix-capable environment.
       - image: "nixorg/nix:circleci"
@@ -476,7 +485,7 @@ jobs:
       PYTHON_VERSION: "2.7"


-  build-image-ubuntu-16.04:
+  build-image-ubuntu-16-04:
     <<: *BUILD_IMAGE

     environment:
@@ -485,7 +494,7 @@ jobs:
       PYTHON_VERSION: "2.7"


-  build-image-ubuntu-18.04:
+  build-image-ubuntu-18-04:
     <<: *BUILD_IMAGE

     environment:
@@ -503,7 +512,7 @@ jobs:
       PYTHON_VERSION: "3"


-  build-image-ubuntu-20.04:
+  build-image-ubuntu-20-04:
     <<: *BUILD_IMAGE

     environment:
@@ -539,7 +548,7 @@ jobs:
       TAG: "29"


-  build-image-pypy-2.7-buster:
+  build-image-pypy27-buster:
     <<: *BUILD_IMAGE

     environment:

.codecov.yml Normal file (+34 lines)

@@ -0,0 +1,34 @@
+# Override defaults for codecov.io checks.
+#
+# Documentation is at https://docs.codecov.io/docs/codecov-yaml;
+# reference is at https://docs.codecov.io/docs/codecovyml-reference.
+#
+# To validate this file, use:
+#
+#   curl --data-binary @.codecov.yml https://codecov.io/validate
+#
+# Codecov's defaults seem to leave red marks in GitHub CI checks in a
+# rather arbitrary manner, probably because of non-determinism in
+# coverage (see https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2891)
+# and maybe because computers are bad with floating point numbers.
+
+# Allow coverage percentage a precision of zero decimals, and round to
+# the nearest number (for example, 89.957 rounds to 90%; 89.497 rounds
+# to 89%). Coverage above 90% is good, below 80% is bad.
+coverage:
+  round: nearest
+  range: 80..90
+  precision: 0
+
+  # Aim for a target test coverage of 90% in codecov/project check (do
+  # not allow project coverage to drop below that), and allow
+  # codecov/patch a threshold of 1% (allow coverage in changes to drop
+  # by that much, and no more). That should be good enough for us.
+  status:
+    project:
+      default:
+        target: 90%
+        threshold: 1%
+    patch:
+      default:
+        threshold: 1%
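
Aside: the rounding these settings request can be sanity-checked with plain Python, on the assumption that codecov's "round: nearest" at "precision: 0" behaves like the built-in round():

    # Sketch of the displayed-coverage rounding described above (assumption:
    # codecov's "nearest" matches Python's round-to-nearest).
    def displayed(percent):
        return int(round(percent, 0))  # precision 0 -> whole percentage points

    assert displayed(89.957) == 90  # lands on the 90% target
    assert displayed(89.497) == 89  # stays inside the 80..90 range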

newsfragments/3391.minor Normal file (empty)
newsfragments/3394.minor Normal file (empty)
newsfragments/3406.minor Normal file (empty)
newsfragments/3411.minor Normal file (empty)

setup.py

@@ -366,7 +366,7 @@ setup(name="tahoe-lafs", # also set in __init__.py
       classifiers=trove_classifiers,
       # We support Python 2.7, and we're working on support for 3.6 (the
       # highest version that PyPy currently supports).
-      python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7",
+      python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*",
       install_requires=install_requires,
       extras_require={
           # Duplicate the Twisted pywin32 dependency here. See
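
Aside: the effect of dropping the "<3.7" pin can be checked with the `packaging` library that pip uses to evaluate python_requires (the probe versions below are illustrative):

    from packaging.specifiers import SpecifierSet

    old = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <3.7")
    new = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*")

    assert "3.6" in old and "3.6" in new
    assert "3.7" not in old  # the old pin shut out 3.7 and later
    assert "3.7" in new      # the relaxed specifier admits them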

src/allmydata/frontends/sftp.py

@@ -559,7 +559,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
     """I represent a file handle to a particular file on an SFTP connection.
     I am used only for short immutable files opened in read-only mode.
     When I am created, the file contents start to be downloaded to memory.
-    self.async is used to delay read requests until the download has finished."""
+    self.async_ is used to delay read requests until the download has finished."""

     def __init__(self, userpath, filenode, metadata):
         PrefixingLogMixin.__init__(self, facility="tahoe.sftp", prefix=userpath)
@@ -569,7 +569,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
                          userpath=userpath, filenode=filenode)
         self.filenode = filenode
         self.metadata = metadata
-        self.async = download_to_data(filenode)
+        self.async_ = download_to_data(filenode)
         self.closed = False

     def readChunk(self, offset, length):
@@ -598,7 +598,7 @@ class ShortReadOnlySFTPFile(PrefixingLogMixin):
             else:
                 eventually_callback(d)(data[offset:offset+length])  # truncated if offset+length > len(data)
             return data
-        self.async.addCallbacks(_read, eventually_errback(d))
+        self.async_.addCallbacks(_read, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -639,7 +639,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
     storing the file contents. In order to allow write requests to be satisfied
     immediately, there is effectively a FIFO queue between requests made to this
     file handle, and requests to my OverwriteableFileConsumer. This queue is
-    implemented by the callback chain of self.async.
+    implemented by the callback chain of self.async_.

     When first constructed, I am in an 'unopened' state that causes most
     operations to be delayed until 'open' is called."""
@@ -654,7 +654,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.flags = flags
         self.close_notify = close_notify
         self.convergence = convergence
-        self.async = defer.Deferred()
+        self.async_ = defer.Deferred()
         # Creating or truncating the file is a change, but if FXF_EXCL is set, a zero-length file has already been created.
         self.has_changed = (flags & (FXF_CREAT | FXF_TRUNC)) and not (flags & FXF_EXCL)
         self.closed = False
@@ -664,7 +664,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.filenode = None
         self.metadata = None

-        # self.consumer should only be relied on in callbacks for self.async, since it might
+        # self.consumer should only be relied on in callbacks for self.async_, since it might
         # not be set before then.
         self.consumer = None
@@ -691,7 +691,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
                 self.consumer = OverwriteableFileConsumer(0, tempfile_maker)
                 self.consumer.download_done("download not needed")
             else:
-                self.async.addCallback(lambda ignored: filenode.get_best_readable_version())
+                self.async_.addCallback(lambda ignored: filenode.get_best_readable_version())

                 def _read(version):
                     if noisy: self.log("_read", level=NOISY)
@@ -707,9 +707,9 @@ class GeneralSFTPFile(PrefixingLogMixin):
                     self.consumer.download_done(res)
                 d.addBoth(_finished)
                 # It is correct to drop d here.
-                self.async.addCallback(_read)
+                self.async_.addCallback(_read)

-        eventually_callback(self.async)(None)
+        eventually_callback(self.async_)(None)

         if noisy: self.log("open done", level=NOISY)
         return self
@@ -739,7 +739,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         self.log(".sync()", level=OPERATIONAL)

         d = defer.Deferred()
-        self.async.addBoth(eventually_callback(d))
+        self.async_.addBoth(eventually_callback(d))
         def _done(res):
             if noisy: self.log("_done(%r) in .sync()" % (res,), level=NOISY)
             return res
@@ -765,7 +765,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             d2.addBoth(eventually_callback(d))
             # It is correct to drop d2 here.
             return None
-        self.async.addCallbacks(_read, eventually_errback(d))
+        self.async_.addCallbacks(_read, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -802,8 +802,8 @@ class GeneralSFTPFile(PrefixingLogMixin):
             self.consumer.overwrite(write_offset, data)
             if noisy: self.log("overwrite done", level=NOISY)
             return None
-        self.async.addCallback(_write)
-        # don't addErrback to self.async, just allow subsequent async ops to fail.
+        self.async_.addCallback(_write)
+        # don't addErrback to self.async_, just allow subsequent async ops to fail.
         return defer.succeed(None)

     def _do_close(self, res, d=None):
@@ -812,7 +812,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
         if self.consumer:
             status = self.consumer.close()

-        # We must close_notify before re-firing self.async.
+        # We must close_notify before re-firing self.async_.
         if self.close_notify:
             self.close_notify(self.userpath, self.parent, self.childname, self)
@@ -841,7 +841,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             # download.) Any reads that depended on file content that could not be downloaded
             # will have failed. It is important that we don't close the consumer until
             # previous read operations have completed.
-            self.async.addBoth(self._do_close)
+            self.async_.addBoth(self._do_close)
             return defer.succeed(None)

         # We must capture the abandoned, parent, and childname variables synchronously
@@ -875,16 +875,16 @@ class GeneralSFTPFile(PrefixingLogMixin):
             return d2

         # If the file has been abandoned, we don't want the close operation to get "stuck",
-        # even if self.async fails to re-fire. Completing the close independently of self.async
+        # even if self.async_ fails to re-fire. Completing the close independently of self.async_
         # in that case should ensure that dropping an ssh connection is sufficient to abandon
         # any heisenfiles that were not explicitly closed in that connection.
         if abandoned or not has_changed:
             d = defer.succeed(None)
-            self.async.addBoth(self._do_close)
+            self.async_.addBoth(self._do_close)
         else:
             d = defer.Deferred()
-            self.async.addCallback(_commit)
-            self.async.addBoth(self._do_close, d)
+            self.async_.addCallback(_commit)
+            self.async_.addBoth(self._do_close, d)
         d.addBoth(_convert_error, request)
         return d
@@ -908,7 +908,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             attrs = _populate_attrs(self.filenode, self.metadata, size=self.consumer.get_current_size())
             eventually_callback(d)(attrs)
             return None
-        self.async.addCallbacks(_get, eventually_errback(d))
+        self.async_.addCallbacks(_get, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
@@ -946,7 +946,7 @@ class GeneralSFTPFile(PrefixingLogMixin):
             self.consumer.set_current_size(size)
             eventually_callback(d)(None)
             return None
-        self.async.addCallbacks(_set, eventually_errback(d))
+        self.async_.addCallbacks(_set, eventually_errback(d))
         d.addBoth(_convert_error, request)
         return d
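
Aside: every self.async in this file becomes self.async_ because "async" is a full keyword from Python 3.7 on, so an attribute by that name is a syntax error there. A minimal illustration (hypothetical class, not Tahoe code):

    class Handle(object):
        def __init__(self):
            # self.async = None   # SyntaxError on Python 3.7+
            self.async_ = None    # PEP 8 convention: dodge a keyword with a trailing underscore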

src/allmydata/node.py

@@ -9,10 +9,7 @@ import os.path
 import re
 import types
 import errno
-try:
-    import ConfigParser
-except ImportError:
-    import configparser as ConfigParser
+from six.moves import configparser
 import tempfile
 from io import BytesIO
 from base64 import b32decode, b32encode
@@ -185,7 +182,7 @@ def read_config(basedir, portnumfile, generated_files=[], _valid_config=None):
     # (try to) read the main config file
     config_fname = os.path.join(basedir, "tahoe.cfg")
-    parser = ConfigParser.SafeConfigParser()
+    parser = configparser.SafeConfigParser()
     try:
         parser = configutil.get_config(config_fname)
     except EnvironmentError as e:
@@ -208,7 +205,7 @@ def config_from_string(basedir, portnumfile, config_str, _valid_config=None):
         _valid_config = _common_valid_config()

     # load configuration from in-memory string
-    parser = ConfigParser.SafeConfigParser()
+    parser = configparser.SafeConfigParser()
     parser.readfp(BytesIO(config_str))
     fname = "<in-memory>"
@@ -303,7 +300,7 @@ class _Config(object):
     def items(self, section, default=_None):
         try:
             return self.config.items(section)
-        except ConfigParser.NoSectionError:
+        except configparser.NoSectionError:
             if default is _None:
                 raise
             return default
@@ -318,7 +315,7 @@ class _Config(object):
                 raise UnescapedHashError(section, option, item)

             return item
-        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+        except (configparser.NoOptionError, configparser.NoSectionError):
             if default is _None:
                 raise MissingConfigEntry(
                     "{} is missing the [{}]{} entry".format(

src/allmydata/scripts/common.py

@@ -2,7 +2,7 @@ from __future__ import print_function

 import os, sys, urllib, textwrap
 import codecs
-from ConfigParser import NoSectionError
+from six.moves.configparser import NoSectionError
 from os.path import join
 from twisted.python import usage
 from allmydata.util.assertutil import precondition

src/allmydata/scripts/common_http.py

@@ -2,7 +2,8 @@ from __future__ import print_function

 import os
 from six.moves import cStringIO as StringIO
-import urlparse, httplib
+from six.moves import urllib, http_client
+import six

 import allmydata # for __full_version__
 from allmydata.util.encodingutil import quote_output
@@ -12,9 +13,9 @@ from socket import error as socket_error
 # copied from twisted/web/client.py
 def parse_url(url, defaultPort=None):
     url = url.strip()
-    parsed = urlparse.urlparse(url)
+    parsed = urllib.parse.urlparse(url)
     scheme = parsed[0]
-    path = urlparse.urlunparse(('','')+parsed[2:])
+    path = urllib.parse.urlunparse(('','')+parsed[2:])
     if defaultPort is None:
         if scheme == 'https':
             defaultPort = 443
@@ -40,7 +41,7 @@ class BadResponse(object):
 def do_http(method, url, body=""):
     if isinstance(body, str):
         body = StringIO(body)
-    elif isinstance(body, unicode):
+    elif isinstance(body, six.text_type):
         raise TypeError("do_http body must be a bytestring, not unicode")
     else:
         # We must give a Content-Length header to twisted.web, otherwise it
@@ -51,9 +52,9 @@ def do_http(method, url, body=""):
     assert body.read
     scheme, host, port, path = parse_url(url)
     if scheme == "http":
-        c = httplib.HTTPConnection(host, port)
+        c = http_client.HTTPConnection(host, port)
     elif scheme == "https":
-        c = httplib.HTTPSConnection(host, port)
+        c = http_client.HTTPSConnection(host, port)
     else:
         raise ValueError("unknown scheme '%s', need http or https" % scheme)
     c.putrequest(method, path)
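
Aside: these six.moves names resolve to urlparse and httplib on Python 2, and to urllib.parse and http.client on Python 3. A short sketch of the renamed spellings (the address is an example):

    from six.moves import urllib, http_client

    parsed = urllib.parse.urlparse("http://127.0.0.1:3456/uri")
    conn = http_client.HTTPConnection(parsed.hostname, parsed.port)
    # conn.request("GET", parsed.path) would issue the request against a live node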

src/allmydata/scripts/default_nodedir.py

@@ -1,17 +1,19 @@

 import sys
+import six

 from allmydata.util.assertutil import precondition
 from allmydata.util.fileutil import abspath_expanduser_unicode

 _default_nodedir = None
 if sys.platform == 'win32':
     from allmydata.windows import registry
     path = registry.get_base_dir_path()
     if path:
-        precondition(isinstance(path, unicode), path)
+        precondition(isinstance(path, six.text_type), path)
         _default_nodedir = abspath_expanduser_unicode(path)

 if _default_nodedir is None:
     path = abspath_expanduser_unicode(u"~/.tahoe")
-    precondition(isinstance(path, unicode), path)
+    precondition(isinstance(path, six.text_type), path)
     _default_nodedir = path
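
Aside: six.text_type is `unicode` on Python 2 and `str` on Python 3, which is why it can stand in for the bare `unicode` name in these preconditions (and in the optParameters below):

    import six

    assert isinstance(u"~/.tahoe", six.text_type)
    assert not isinstance(b"~/.tahoe", six.text_type)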

src/allmydata/scripts/runner.py

@@ -2,6 +2,7 @@ from __future__ import print_function

 import os, sys
 from six.moves import StringIO
+import six

 from twisted.python import usage
 from twisted.internet import defer, task, threads
@@ -71,8 +72,8 @@
     ]
     optParameters = [
         ["node-directory", "d", None, NODEDIR_HELP],
-        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", unicode],
-        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", unicode],
+        ["wormhole-server", None, u"ws://wormhole.tahoe-lafs.org:4000/v1", "The magic wormhole server to use.", six.text_type],
+        ["wormhole-invite-appid", None, u"tahoe-lafs.org/invite", "The appid to use on the wormhole server.", six.text_type],
     ]

     def opt_version(self):
@@ -180,7 +181,9 @@ def _maybe_enable_eliot_logging(options, reactor):
     return options


 def run():
-    assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."
+    # TODO(3035): Remove tox-check when error becomes a warning
+    if 'TOX_ENV_NAME' not in os.environ:
+        assert sys.version_info < (3,), u"Tahoe-LAFS does not run under Python 3. Please use Python 2.7.x."

     if sys.platform == "win32":
         from allmydata.windows.fixups import initialize

src/allmydata/storage/common.py

@@ -1,4 +1,14 @@
-from future.utils import PY3
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2, PY3
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 import os.path
 from allmydata.util import base32
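
Aside: this header (repeated in the storage modules below) is the project's standard porting boilerplate. On Python 2, future.builtins swaps in backports that behave like their Python 3 namesakes, so the module body can be written once for both interpreters. A small sketch of what that buys (assuming the `future` package is installed):

    from future.utils import PY2
    if PY2:
        from future.builtins import bytes, range  # noqa: F401

    assert bytes(b"abc")[0] == 97       # indexing bytes yields ints, as on Python 3
    assert list(range(3)) == [0, 1, 2]  # range is lazy, as on Python 3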

src/allmydata/storage/lease.py

@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import struct, time

 class LeaseInfo(object):

src/allmydata/storage/mutable.py

@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, stat, struct

 from allmydata.interfaces import BadWriteEnablerError

src/allmydata/storage/shares.py

@@ -1,4 +1,15 @@
 #! /usr/bin/python
+"""
+Ported to Python 3.
+"""
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

 from allmydata.storage.mutable import MutableShareFile
 from allmydata.storage.immutable import ShareFile
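
Aside: get_share_file(), which the new UtilTests below exercise, sniffs a share container on disk and returns the matching wrapper class. Roughly (a sketch of the dispatch, not the verbatim function):

    def get_share_file(filename):
        with open(filename, "rb") as f:
            prefix = f.read(32)
        if prefix == MutableShareFile.MAGIC:
            return MutableShareFile(filename)
        # anything without the mutable-container magic is treated as immutable
        return ShareFile(filename)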

src/allmydata/test/cli/test_cli.py

@@ -809,7 +809,7 @@ class Errors(GridTestMixin, CLITestMixin, unittest.TestCase):
         # Simulate a connection error
         def _socket_error(*args, **kwargs):
             raise socket_error('test error')
-        self.patch(allmydata.scripts.common_http.httplib.HTTPConnection,
+        self.patch(allmydata.scripts.common_http.http_client.HTTPConnection,
                    "endheaders", _socket_error)

         d = self.do_cli("mkdir")

src/allmydata/test/test_storage.py

@@ -29,10 +29,12 @@ import itertools
 from allmydata import interfaces
 from allmydata.util import fileutil, hashutil, base32
 from allmydata.storage.server import StorageServer
+from allmydata.storage.shares import get_share_file
 from allmydata.storage.mutable import MutableShareFile
-from allmydata.storage.immutable import BucketWriter, BucketReader
+from allmydata.storage.immutable import BucketWriter, BucketReader, ShareFile
 from allmydata.storage.common import DataTooLargeError, storage_index_to_dir, \
-     UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError
+     UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError, \
+     si_b2a, si_a2b
 from allmydata.storage.lease import LeaseInfo
 from allmydata.immutable.layout import WriteBucketProxy, WriteBucketProxy_v2, \
      ReadBucketProxy
@@ -52,6 +54,42 @@ from allmydata.storage_client import (
 from .common_py3 import FakeCanary, LoggingServiceParent, ShouldFailMixin


+class UtilTests(unittest.TestCase):
+    """Tests for allmydata.storage.common and .shares."""
+
+    def test_encoding(self):
+        """b2a/a2b are the same as base32."""
+        s = b"\xFF HELLO \xF3"
+        result = si_b2a(s)
+        self.assertEqual(base32.b2a(s), result)
+        self.assertEqual(si_a2b(result), s)
+
+    def test_storage_index_to_dir(self):
+        """storage_index_to_dir creates a native string path."""
+        s = b"\xFF HELLO \xF3"
+        path = storage_index_to_dir(s)
+        parts = os.path.split(path)
+        self.assertEqual(parts[0], parts[1][:2])
+        self.assertIsInstance(path, native_str)
+
+    def test_get_share_file_mutable(self):
+        """A mutable share is identified by get_share_file()."""
+        path = self.mktemp()
+        msf = MutableShareFile(path)
+        msf.create(b"12", b"abc")  # arbitrary values
+        loaded = get_share_file(path)
+        self.assertIsInstance(loaded, MutableShareFile)
+        self.assertEqual(loaded.home, path)
+
+    def test_get_share_file_immutable(self):
+        """An immutable share is identified by get_share_file()."""
+        path = self.mktemp()
+        _ = ShareFile(path, max_size=1000, create=True)
+        loaded = get_share_file(path)
+        self.assertIsInstance(loaded, ShareFile)
+        self.assertEqual(loaded.home, path)
+
+
 class FakeStatsProvider(object):
     def count(self, name, delta=1):
         pass

src/allmydata/util/_python3.py

@@ -35,8 +35,12 @@ PORTED_MODULES = [
     "allmydata.immutable.happiness_upload",
     "allmydata.interfaces",
     "allmydata.monitor",
+    "allmydata.storage.common",
     "allmydata.storage.crawler",
     "allmydata.storage.expirer",
+    "allmydata.storage.lease",
+    "allmydata.storage.mutable",
+    "allmydata.storage.shares",
     "allmydata.test.common_py3",
     "allmydata.uri",
     "allmydata.util._python3",

tox.ini

@@ -58,9 +58,7 @@ commands =
     # version pinning we do limits the variability of this output
     pip freeze

-    # The tahoe script isn't sufficiently ported for this to succeed on
-    # Python 3.x yet.
-    !py36: tahoe --version
+    tahoe --version

     !coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}