Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-18 10:46:24 +00:00)
Commit 57b93b2230: Merge remote-tracking branch 'origin/master' into 4015-more-linting
@@ -47,3 +47,7 @@ export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
 # above, it may still not be able to get us a compatible version unless we
 # explicitly ask for one.
 "${PIP}" install --upgrade setuptools==44.0.0 wheel
+
+# Just about every user of this image wants to use tox from the bootstrap
+# virtualenv so go ahead and install it now.
+"${PIP}" install "tox~=3.0"
@@ -3,18 +3,6 @@
 # https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
 set -euxo pipefail
 
-# Basic Python packages that you just need to have around to do anything,
-# practically speaking.
-BASIC_DEPS="pip wheel"
-
-# Python packages we need to support the test infrastructure. *Not* packages
-# Tahoe-LAFS itself (implementation or test suite) need.
-TEST_DEPS="tox~=3.0"
-
-# Python packages we need to generate test reports for CI infrastructure.
-# *Not* packages Tahoe-LAFS itself (implement or test suite) need.
-REPORTING_DEPS="python-subunit junitxml subunitreporter"
-
 # The filesystem location of the wheelhouse which we'll populate with wheels
 # for all of our dependencies.
 WHEELHOUSE_PATH="$1"
@@ -41,15 +29,5 @@ export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
 LANG="en_US.UTF-8" "${PIP}" \
     wheel \
     --wheel-dir "${WHEELHOUSE_PATH}" \
-    "${PROJECT_ROOT}"[test] \
-    ${BASIC_DEPS} \
-    ${TEST_DEPS} \
-    ${REPORTING_DEPS}
-
-# Not strictly wheelhouse population but ... Note we omit basic deps here.
-# They're in the wheelhouse if Tahoe-LAFS wants to drag them in but it will
-# have to ask.
-"${PIP}" \
-    install \
-    ${TEST_DEPS} \
-    ${REPORTING_DEPS}
+    "${PROJECT_ROOT}"[testenv] \
+    "${PROJECT_ROOT}"[test]
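Taken together, these two hunks make the wheelhouse step consume its dependency lists straight from the project's new "testenv" and "test" extras instead of from shell variables that duplicated them. A rough Python equivalent of the new pip wheel invocation, with hypothetical paths standing in for the script's arguments:

    import os
    import subprocess

    PIP = "/tmp/venv/bin/pip"            # hypothetical: the bootstrap venv's pip
    WHEELHOUSE_PATH = "/tmp/wheelhouse"  # hypothetical: the script's $1
    PROJECT_ROOT = "/tmp/project"        # hypothetical

    # Build wheels for Tahoe-LAFS plus everything its "testenv" and "test"
    # extras pull in, depositing them in the wheelhouse directory.
    subprocess.run(
        [PIP, "wheel", "--wheel-dir", WHEELHOUSE_PATH,
         PROJECT_ROOT + "[testenv]", PROJECT_ROOT + "[test]"],
        env={**os.environ, "LANG": "en_US.UTF-8"},
        check=True,
    )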
@@ -79,9 +79,10 @@ else
     alternative="false"
 fi
 
+WORKDIR=/tmp/tahoe-lafs.tox
 ${TIMEOUT} ${BOOTSTRAP_VENV}/bin/tox \
     -c ${PROJECT_ROOT}/tox.ini \
-    --workdir /tmp/tahoe-lafs.tox \
+    --workdir "${WORKDIR}" \
     -e "${TAHOE_LAFS_TOX_ENVIRONMENT}" \
     ${TAHOE_LAFS_TOX_ARGS} || "${alternative}"
 
@@ -93,5 +94,6 @@ if [ -n "${ARTIFACTS}" ]; then
 
     # Create a junitxml results area.
     mkdir -p "$(dirname "${JUNITXML}")"
-    "${BOOTSTRAP_VENV}"/bin/subunit2junitxml < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
+
+    "${WORKDIR}/${TAHOE_LAFS_TOX_ENVIRONMENT}/bin/subunit2junitxml" < "${SUBUNIT2}" > "${JUNITXML}" || "${alternative}"
 fi
@@ -26,12 +26,7 @@ shift || :
 
 # Tell pip where it can find any existing wheels.
 export PIP_FIND_LINKS="file://${WHEELHOUSE_PATH}"
-
-# It is tempting to also set PIP_NO_INDEX=1 but (a) that will cause problems
-# between the time dependencies change and the images are re-built and (b) the
-# upcoming-deprecations job wants to install some dependencies from github and
-# it's awkward to get that done any earlier than the tox run. So, we don't
-# set it.
+export PIP_NO_INDEX="1"
 
 # Get everything else installed in it, too.
 "${BOOTSTRAP_VENV}"/bin/tox \
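pip treats a PIP_<OPTION> environment variable like the corresponding command-line flag, which is why exporting these two values is enough to force every later install to resolve only against the wheelhouse. A minimal sketch of the same mechanism, with a hypothetical wheelhouse path:

    import os
    import subprocess

    env = dict(
        os.environ,
        PIP_FIND_LINKS="file:///tmp/wheelhouse",  # hypothetical wheelhouse
        PIP_NO_INDEX="1",                         # fail instead of falling back to PyPI
    )
    # Equivalent to: pip install --find-links file:///tmp/wheelhouse --no-index tahoe-lafs
    subprocess.run(["pip", "install", "tahoe-lafs"], env=env, check=True)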
@@ -4,6 +4,7 @@ Ported to Python 3.
 
 from __future__ import annotations
 
+import os
 import sys
 import shutil
 from time import sleep
@@ -49,6 +50,11 @@ from .util import (
 )
 
 
+# No reason for HTTP requests to take longer than two minutes in the
+# integration tests. See allmydata/scripts/common_http.py for usage.
+os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "120"
+
+
 # pytest customization hooks
 
 def pytest_addoption(parser):
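Since the knob is read from the environment at request time (see the do_http change further down), setting it here covers every CLI invocation the integration tests make. A small sketch of the same parsing convention, assuming only what the diff shows:

    import os

    def read_cli_http_timeout():
        # Unset means no timeout (block indefinitely); otherwise seconds.
        value = os.environ.get("__TAHOE_CLI_HTTP_TIMEOUT", None)
        return None if value is None else float(value)

    os.environ["__TAHOE_CLI_HTTP_TIMEOUT"] = "120"
    assert read_cli_http_timeout() == 120.0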
0  newsfragments/4005.minor (new file)
0  newsfragments/4012.minor (new file)
0  newsfragments/4019.minor (new file)
1  newsfragments/4020.minor (new file)
@@ -0,0 +1 @@
35
setup.py
35
setup.py
@@ -141,8 +141,10 @@ install_requires = [
 
     # HTTP server and client
     "klein",
+
     # 2.2.0 has a bug: https://github.com/pallets/werkzeug/issues/2465
-    "werkzeug != 2.2.0",
+    # 2.3.x has an incompatibility with Klein: https://github.com/twisted/klein/pull/575
+    "werkzeug != 2.2.0, < 2.3",
     "treq",
     "cbor2",
 
@@ -398,15 +400,31 @@ setup(name="tahoe-lafs", # also set in __init__.py
         "dulwich",
         "gpg",
     ],
-    "test": [
-        # Pin a specific pyflakes so we don't have different folks
-        # disagreeing on what is or is not a lint issue. We can bump
-        # this version from time to time, but we will do it
-        # intentionally.
-        "ruff==0.0.261",
+
+    # Here are the dependencies required to set up a reproducible test
+    # environment. This could be for CI or local development. These
+    # are *not* library dependencies of the test suite itself. They are
+    # the tools we use to run the test suite at all.
+    "testenv": [
+        # Pin all of these versions for the same reason you ever want to
+        # pin anything: to prevent new releases with regressions from
+        # introducing spurious failures into CI runs for whatever
+        # development work is happening at the time. The versions
+        # selected here are just the current versions at the time.
+        # Bumping them to keep up with future releases is fine as long
+        # as those releases are known to actually work.
+        "pip==22.0.3",
+        "wheel==0.37.1",
+        "setuptools==60.9.1",
+        "subunitreporter==22.2.0",
+        "python-subunit==1.4.2",
+        "junitxml==0.7",
+        "coverage ~= 5.0",
+    ],
+
+    # Here are the library dependencies of the test suite.
+    "test": [
         "mock",
         "tox ~= 3.0",
         "pytest",
         "pytest-twisted",
         "hypothesis >= 3.6.1",
@@ -415,7 +433,6 @@ setup(name="tahoe-lafs", # also set in __init__.py
         "fixtures",
         "beautifulsoup4",
         "html5lib",
-        "junitxml",
         # Pin old version until
         # https://github.com/paramiko/paramiko/issues/1961 is fixed.
         "paramiko < 2.9",
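With this split, the "testenv" extra carries the pinned tooling for reproducing a test environment while "test" keeps only the test suite's own library dependencies (junitxml moves from the latter to the former). Once the package is installed, the recorded extras can be inspected from the standard library; a sketch, assuming an installed tahoe-lafs:

    from importlib.metadata import requires

    # Each requirement string carries a marker naming its extra, in a form
    # like: pip (==22.0.3) ; extra == 'testenv' (exact formatting varies by
    # packaging tool).
    for requirement in requires("tahoe-lafs") or []:
        if "testenv" in requirement:
            print(requirement)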
@@ -1,19 +1,11 @@
 """
-Ported to Python 3.
+Blocking HTTP client APIs.
 """
-from __future__ import unicode_literals
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from future.utils import PY2
-if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
+import os
 from io import BytesIO
-from six.moves import urllib, http_client
-import six
+from http import client as http_client
+import urllib
 import allmydata # for __full_version__
 
 from allmydata.util.encodingutil import quote_output
@@ -51,7 +43,7 @@ class BadResponse(object):
 def do_http(method, url, body=b""):
     if isinstance(body, bytes):
         body = BytesIO(body)
-    elif isinstance(body, six.text_type):
+    elif isinstance(body, str):
         raise TypeError("do_http body must be a bytestring, not unicode")
     else:
         # We must give a Content-Length header to twisted.web, otherwise it
@@ -61,10 +53,17 @@ def do_http(method, url, body=b""):
     assert body.seek
     assert body.read
     scheme, host, port, path = parse_url(url)
+
+    # For testing purposes, allow setting a timeout on HTTP requests. If this
+    # ever become a user-facing feature, this should probably be a CLI option?
+    timeout = os.environ.get("__TAHOE_CLI_HTTP_TIMEOUT", None)
+    if timeout is not None:
+        timeout = float(timeout)
+
     if scheme == "http":
-        c = http_client.HTTPConnection(host, port)
+        c = http_client.HTTPConnection(host, port, timeout=timeout, blocksize=65536)
     elif scheme == "https":
-        c = http_client.HTTPSConnection(host, port)
+        c = http_client.HTTPSConnection(host, port, timeout=timeout, blocksize=65536)
     else:
         raise ValueError("unknown scheme '%s', need http or https" % scheme)
     c.putrequest(method, path)
@@ -85,7 +84,7 @@ def do_http(method, url, body=b""):
         return BadResponse(url, err)
 
     while True:
-        data = body.read(8192)
+        data = body.read(65536)
         if not data:
             break
         c.send(data)
@@ -94,16 +93,14 @@ def do_http(method, url, body=b""):
 
 
 def format_http_success(resp):
-    # ensure_text() shouldn't be necessary when Python 2 is dropped.
     return quote_output(
-        "%s %s" % (resp.status, six.ensure_text(resp.reason)),
+        "%s %s" % (resp.status, resp.reason),
         quotemarks=False)
 
 def format_http_error(msg, resp):
-    # ensure_text() shouldn't be necessary when Python 2 is dropped.
     return quote_output(
-        "%s: %s %s\n%s" % (msg, resp.status, six.ensure_text(resp.reason),
-                           six.ensure_text(resp.read())),
+        "%s: %s %s\n%r" % (msg, resp.status, resp.reason,
+                           resp.read()),
         quotemarks=False)
 
 def check_http_error(resp, stderr):
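Both keyword arguments introduced above are standard-library features: http.client.HTTPConnection accepts timeout (seconds, with None meaning block indefinitely) and, since Python 3.7, blocksize, and the manual loop streams the body in chunks matching that block size. A self-contained sketch against a hypothetical local server:

    from http import client as http_client
    from io import BytesIO

    body = BytesIO(b"x" * 200_000)
    conn = http_client.HTTPConnection(
        "127.0.0.1", 8080,   # hypothetical server
        timeout=120.0,
        blocksize=65536,
    )
    conn.putrequest("PUT", "/uri")
    conn.putheader("Content-Length", "200000")
    conn.endheaders()
    # Stream the body in 64 KiB chunks, as in the loop above.
    while True:
        chunk = body.read(65536)
        if not chunk:
            break
        conn.send(chunk)
    response = conn.getresponse()
    print(response.status, response.reason)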
@@ -5,7 +5,7 @@ HTTP client that talks to the HTTP storage server.
 from __future__ import annotations
 
 from eliot import start_action, register_exception_extractor
-from typing import Union, Optional, Sequence, Mapping, BinaryIO
+from typing import Union, Optional, Sequence, Mapping, BinaryIO, cast, TypedDict, Set
 from base64 import b64encode
 from io import BytesIO
 from os import SEEK_END
@@ -20,7 +20,7 @@ from werkzeug.datastructures import Range, ContentRange
 from twisted.web.http_headers import Headers
 from twisted.web import http
 from twisted.web.iweb import IPolicyForHTTPS, IResponse
-from twisted.internet.defer import inlineCallbacks, returnValue, fail, Deferred, succeed
+from twisted.internet.defer import inlineCallbacks, Deferred, succeed
 from twisted.internet.interfaces import (
     IOpenSSLClientConnectionCreator,
     IReactorTime,
@@ -447,24 +447,28 @@ class StorageClient(object):
             method, url, headers=headers, timeout=timeout, **kwargs
         )
 
-    def decode_cbor(self, response, schema: Schema):
+    async def decode_cbor(self, response, schema: Schema) -> object:
         """Given HTTP response, return decoded CBOR body."""
-
-        def got_content(f: BinaryIO):
-            data = f.read()
-            schema.validate_cbor(data)
-            return loads(data)
-
-        if response.code > 199 and response.code < 300:
-            content_type = get_content_type(response.headers)
-            if content_type == CBOR_MIME_TYPE:
-                return limited_content(response, self._clock).addCallback(got_content)
-            else:
-                raise ClientException(-1, "Server didn't send CBOR")
-        else:
-            return treq.content(response).addCallback(
-                lambda data: fail(ClientException(response.code, response.phrase, data))
-            )
+        with start_action(action_type="allmydata:storage:http-client:decode-cbor"):
+            if response.code > 199 and response.code < 300:
+                content_type = get_content_type(response.headers)
+                if content_type == CBOR_MIME_TYPE:
+                    f = await limited_content(response, self._clock)
+                    data = f.read()
+                    schema.validate_cbor(data)
+                    return loads(data)
+                else:
+                    raise ClientException(
+                        -1,
+                        "Server didn't send CBOR, content type is {}".format(
+                            content_type
+                        ),
+                    )
+            else:
+                data = (
+                    await limited_content(response, self._clock, max_length=10_000)
+                ).read()
+                raise ClientException(response.code, response.phrase, data)
 
 
 @define(hash=True)
@@ -475,20 +479,24 @@ class StorageClientGeneral(object):
 
     _client: StorageClient
 
-    @inlineCallbacks
-    def get_version(self):
+    @async_to_deferred
+    async def get_version(self):
         """
         Return the version metadata for the server.
         """
         url = self._client.relative_url("/storage/v1/version")
-        response = yield self._client.request("GET", url)
-        decoded_response = yield self._client.decode_cbor(
-            response, _SCHEMAS["get_version"]
+        response = await self._client.request("GET", url)
+        decoded_response = cast(
+            Mapping[bytes, object],
+            await self._client.decode_cbor(response, _SCHEMAS["get_version"]),
         )
         # Add some features we know are true because the HTTP API
         # specification requires them and because other parts of the storage
         # client implementation assumes they will be present.
-        decoded_response[b"http://allmydata.org/tahoe/protocols/storage/v1"].update(
+        cast(
+            Mapping[bytes, object],
+            decoded_response[b"http://allmydata.org/tahoe/protocols/storage/v1"],
+        ).update(
             {
                 b"tolerates-immutable-read-overrun": True,
                 b"delete-mutable-shares-with-zero-length-writev": True,
@@ -496,7 +504,7 @@ class StorageClientGeneral(object):
                 b"prevents-read-past-end-of-share-data": True,
             }
         )
-        returnValue(decoded_response)
+        return decoded_response
 
     @inlineCallbacks
     def add_or_renew_lease(
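@async_to_deferred is what lets these methods become native coroutines while callers keep receiving Deferreds. Its contract amounts to wrapping the coroutine with ensureDeferred; a minimal sketch of the pattern (the project's actual helper may differ in details):

    from functools import wraps
    from twisted.internet.defer import Deferred, ensureDeferred

    def async_to_deferred(f):
        # Run the wrapped async function and hand the caller a Deferred that
        # fires with its result, so pre-async call sites work unchanged.
        @wraps(f)
        def wrapper(*args, **kwargs) -> Deferred:
            return ensureDeferred(f(*args, **kwargs))
        return wrapper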
@@ -647,16 +655,16 @@ class StorageClientImmutables(object):
 
     _client: StorageClient
 
-    @inlineCallbacks
-    def create(
+    @async_to_deferred
+    async def create(
         self,
-        storage_index,
-        share_numbers,
-        allocated_size,
-        upload_secret,
-        lease_renew_secret,
-        lease_cancel_secret,
-    ):  # type: (bytes, set[int], int, bytes, bytes, bytes) -> Deferred[ImmutableCreateResult]
+        storage_index: bytes,
+        share_numbers: set[int],
+        allocated_size: int,
+        upload_secret: bytes,
+        lease_renew_secret: bytes,
+        lease_cancel_secret: bytes,
+    ) -> ImmutableCreateResult:
         """
         Create a new storage index for an immutable.
 
@@ -675,7 +683,7 @@ class StorageClientImmutables(object):
         )
         message = {"share-numbers": share_numbers, "allocated-size": allocated_size}
 
-        response = yield self._client.request(
+        response = await self._client.request(
             "POST",
             url,
             lease_renew_secret=lease_renew_secret,
@@ -683,14 +691,13 @@ class StorageClientImmutables(object):
             upload_secret=upload_secret,
             message_to_serialize=message,
         )
-        decoded_response = yield self._client.decode_cbor(
-            response, _SCHEMAS["allocate_buckets"]
+        decoded_response = cast(
+            Mapping[str, Set[int]],
+            await self._client.decode_cbor(response, _SCHEMAS["allocate_buckets"]),
         )
-        returnValue(
-            ImmutableCreateResult(
-                already_have=decoded_response["already-have"],
-                allocated=decoded_response["allocated"],
-            )
+        return ImmutableCreateResult(
+            already_have=decoded_response["already-have"],
+            allocated=decoded_response["allocated"],
         )
 
     @inlineCallbacks
@@ -716,10 +723,15 @@ class StorageClientImmutables(object):
                 response.code,
             )
 
-    @inlineCallbacks
-    def write_share_chunk(
-        self, storage_index, share_number, upload_secret, offset, data
-    ):  # type: (bytes, int, bytes, int, bytes) -> Deferred[UploadProgress]
+    @async_to_deferred
+    async def write_share_chunk(
+        self,
+        storage_index: bytes,
+        share_number: int,
+        upload_secret: bytes,
+        offset: int,
+        data: bytes,
+    ) -> UploadProgress:
         """
         Upload a chunk of data for a specific share.
 
@@ -737,7 +749,7 @@ class StorageClientImmutables(object):
                 _encode_si(storage_index), share_number
             )
         )
-        response = yield self._client.request(
+        response = await self._client.request(
             "PATCH",
             url,
             upload_secret=upload_secret,
@@ -761,13 +773,16 @@ class StorageClientImmutables(object):
             raise ClientException(
                 response.code,
             )
-        body = yield self._client.decode_cbor(
-            response, _SCHEMAS["immutable_write_share_chunk"]
+        body = cast(
+            Mapping[str, Sequence[Mapping[str, int]]],
+            await self._client.decode_cbor(
+                response, _SCHEMAS["immutable_write_share_chunk"]
+            ),
         )
         remaining = RangeMap()
         for chunk in body["required"]:
             remaining.set(True, chunk["begin"], chunk["end"])
-        returnValue(UploadProgress(finished=finished, required=remaining))
+        return UploadProgress(finished=finished, required=remaining)
 
     def read_share_chunk(
         self, storage_index, share_number, offset, length
@@ -779,21 +794,23 @@ class StorageClientImmutables(object):
             self._client, "immutable", storage_index, share_number, offset, length
         )
 
-    @inlineCallbacks
-    def list_shares(self, storage_index: bytes) -> Deferred[set[int]]:
+    @async_to_deferred
+    async def list_shares(self, storage_index: bytes) -> Set[int]:
         """
         Return the set of shares for a given storage index.
         """
         url = self._client.relative_url(
             "/storage/v1/immutable/{}/shares".format(_encode_si(storage_index))
         )
-        response = yield self._client.request(
+        response = await self._client.request(
             "GET",
             url,
         )
         if response.code == http.OK:
-            body = yield self._client.decode_cbor(response, _SCHEMAS["list_shares"])
-            returnValue(set(body))
+            return cast(
+                Set[int],
+                await self._client.decode_cbor(response, _SCHEMAS["list_shares"]),
+            )
         else:
             raise ClientException(response.code)
 
@@ -863,6 +880,13 @@ class ReadTestWriteResult:
     reads: Mapping[int, Sequence[bytes]]
 
 
+# Result type for mutable read/test/write HTTP response. Can't just use
+# dict[int,list[bytes]] because on Python 3.8 that will error out.
+MUTABLE_RTW = TypedDict(
+    "MUTABLE_RTW", {"success": bool, "data": Mapping[int, Sequence[bytes]]}
+)
+
+
 @frozen
 class StorageClientMutables:
     """
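The functional TypedDict form fits here because the decoded response has fixed keys with different value types per key, something a single Mapping[...] annotation cannot express, and because subscripting the dict builtin itself (dict[int, list[bytes]]) raises TypeError at runtime before Python 3.9. A standalone sketch of the pattern, with hypothetical names:

    from typing import Mapping, Sequence, TypedDict

    # Fixed keys, heterogeneous value types: a job for TypedDict.
    RTWResult = TypedDict(
        "RTWResult", {"success": bool, "data": Mapping[int, Sequence[bytes]]}
    )

    def summarize(result: RTWResult) -> str:
        shares = ", ".join(str(n) for n in result["data"])
        return ("ok" if result["success"] else "failed") + ": shares " + shares

    print(summarize({"success": True, "data": {0: [b"abc"], 1: [b"def"]}}))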
@@ -909,8 +933,11 @@ class StorageClientMutables:
             message_to_serialize=message,
         )
         if response.code == http.OK:
-            result = await self._client.decode_cbor(
-                response, _SCHEMAS["mutable_read_test_write"]
+            result = cast(
+                MUTABLE_RTW,
+                await self._client.decode_cbor(
+                    response, _SCHEMAS["mutable_read_test_write"]
+                ),
             )
             return ReadTestWriteResult(success=result["success"], reads=result["data"])
         else:
@@ -931,7 +958,7 @@ class StorageClientMutables:
         )
 
     @async_to_deferred
-    async def list_shares(self, storage_index: bytes) -> set[int]:
+    async def list_shares(self, storage_index: bytes) -> Set[int]:
         """
         List the share numbers for a given storage index.
         """
@@ -940,8 +967,11 @@ class StorageClientMutables:
         )
         response = await self._client.request("GET", url)
         if response.code == http.OK:
-            return await self._client.decode_cbor(
-                response, _SCHEMAS["mutable_list_shares"]
+            return cast(
+                Set[int],
+                await self._client.decode_cbor(
+                    response, _SCHEMAS["mutable_list_shares"]
+                ),
             )
         else:
             raise ClientException(response.code)
@@ -34,7 +34,7 @@ from hyperlink import DecodedURL
 from collections_extended import RangeMap
 from twisted.internet.task import Clock, Cooperator
 from twisted.internet.interfaces import IReactorTime, IReactorFromThreads
-from twisted.internet.defer import CancelledError, Deferred
+from twisted.internet.defer import CancelledError, Deferred, ensureDeferred
 from twisted.web import http
 from twisted.web.http_headers import Headers
 from werkzeug import routing
@@ -520,6 +520,7 @@ class HttpTestFixture(Fixture):
         Like ``result_of``, but supports fake reactor and ``treq`` testing
         infrastructure necessary to support asynchronous HTTP server endpoints.
         """
+        d = ensureDeferred(d)
         result = []
         error = []
         d.addCallbacks(result.append, error.append)
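The added ensureDeferred(d) is what lets the fixture accept either a coroutine or a plain Deferred now that client methods are async: a coroutine gets scheduled and wrapped, while an existing Deferred passes through unchanged. A quick sketch:

    from twisted.internet.defer import ensureDeferred, succeed

    async def make_three():
        return 3

    d1 = ensureDeferred(make_three())  # coroutine -> Deferred firing with 3
    d2 = ensureDeferred(succeed(3))    # already a Deferred; returned as-is
    d1.addCallback(print)
    d2.addCallback(print)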
tox.ini (54 lines changed)
@@ -23,38 +23,34 @@ minversion = 2.4
 
 [testenv]
 passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
-# Get "certifi" to avoid bug #2913. Basically if a `setup_requires=...` causes
-# a package to be installed (with setuptools) then it'll fail on certain
-# platforms (travis's OX-X 10.12, Slackware 14.2) because PyPI's TLS
-# requirements (TLS >= 1.2) are incompatible with the old TLS clients
-# available to those systems. Installing it ahead of time (with pip) avoids
-# this problem.
 deps =
-    # Pin all of these versions for the same reason you ever want to pin
-    # anything: to prevent new releases with regressions from introducing
-    # spurious failures into CI runs for whatever development work is
-    # happening at the time. The versions selected here are just the current
-    # versions at the time. Bumping them to keep up with future releases is
-    # fine as long as those releases are known to actually work.
-    pip==22.0.3
-    setuptools==60.9.1
-    wheel==0.37.1
-    subunitreporter==22.2.0
-    # As an exception, we don't pin certifi because it contains CA
-    # certificates which necessarily change over time. Pinning this is
-    # guaranteed to cause things to break eventually as old certificates
-    # expire and as new ones are used in the wild that aren't present in
-    # whatever version we pin. Hopefully there won't be functionality
-    # regressions in new releases of this package that cause us the kind of
-    # suffering we're trying to avoid with the above pins.
-    certifi
+    # We pull in certifi *here* to avoid bug #2913. Basically if a
+    # `setup_requires=...` causes a package to be installed (with setuptools)
+    # then it'll fail on certain platforms (travis's OX-X 10.12, Slackware
+    # 14.2) because PyPI's TLS requirements (TLS >= 1.2) are incompatible with
+    # the old TLS clients available to those systems. Installing it ahead of
+    # time (with pip) avoids this problem.
+    #
+    # We don't pin an exact version of it because it contains CA certificates
+    # which necessarily change over time. Pinning this is guaranteed to cause
+    # things to break eventually as old certificates expire and as new ones
+    # are used in the wild that aren't present in whatever version we pin.
+    # Hopefully there won't be functionality regressions in new releases of
+    # this package that cause us the kind of suffering we're trying to avoid
+    # with the above pins.
+    certifi
 
 # We add usedevelop=False because testing against a true installation gives
 # more useful results.
 usedevelop = False
-# We use extras=test to get things like "mock" that are required for our unit
-# tests.
-extras = test
+
+extras =
+    # Get general testing environment dependencies so we can run the tests
+    # how we like.
+    testenv
+
+    # And get all of the test suite's actual direct Python dependencies.
+    test
 
 setenv =
     # Define TEST_SUITE in the environment as an aid to constructing the
@@ -101,7 +97,9 @@ commands =
 basepython = python3
 skip_install = true
 deps =
-    ruff
+    # Pin a specific version so we get consistent outcomes; update this
+    # occasionally:
+    ruff == 0.0.263
     towncrier
 # On macOS, git inside of towncrier needs $HOME.
 passenv = HOME