Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-24 15:16:41 +00:00)
Merge pull request #1271 from tahoe-lafs/3988-failing-test-http
Fix failing integration test. Fixes ticket:3988.
This commit is contained in: commit f12b78e199
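The whole change follows one pattern: `await_client_ready` is now decorated with `run_in_thread`, so its blocking polling loop runs in a thread and the call returns a Deferred instead of blocking the reactor, and every call site then has to wait on that Deferred explicitly (blockon in fixtures, yield in generator-style tests, await in async code). The sketch below illustrates that pattern only; it is not the project's code, and the helper name `check_ready` is made up.

    # Minimal sketch of the pattern this commit applies (illustrative names).
    from functools import wraps
    from twisted.internet.threads import deferToThread
    import pytest_twisted

    def run_in_thread(f):
        # Run the wrapped blocking function in a thread; callers get a Deferred.
        @wraps(f)
        def wrapper(*args, **kwargs):
            return deferToThread(lambda: f(*args, **kwargs))
        return wrapper

    @run_in_thread
    def check_ready(node):
        ...  # blocking HTTP polling is now safe: it runs off the reactor thread

    # Synchronous fixtures:   pytest_twisted.blockon(check_ready(node))
    # Generator-style tests:  yield check_ready(node)
    # Async code:             await check_ready(node)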
@@ -393,7 +393,7 @@ def alice(
            finalize=False,
        )
    )
    await_client_ready(process)
    pytest_twisted.blockon(await_client_ready(process))

    # 1. Create a new RW directory cap:
    cli(process, "create-alias", "test")
@@ -424,7 +424,7 @@ alice-key ssh-rsa {ssh_public_key} {rwcap}

    # 4. Restart the node with new SFTP config.
    pytest_twisted.blockon(process.restart_async(reactor, request))
    await_client_ready(process)
    pytest_twisted.blockon(await_client_ready(process))
    print(f"Alice pid: {process.transport.pid}")
    return process

@@ -439,7 +439,7 @@ def bob(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, reques
            storage=False,
        )
    )
    await_client_ready(process)
    pytest_twisted.blockon(await_client_ready(process))
    return process

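Because `await_client_ready` now returns a Deferred, the synchronous `alice` and `bob` fixtures above can no longer just call it; they drive the Deferred to completion with `pytest_twisted.blockon`, the same way they already wait for node creation. A rough sketch of that fixture shape (the `make_node` helper is hypothetical, not the real conftest code):

    # Illustrative only: a plain (non-async) fixture waiting on Deferreds.
    import pytest
    import pytest_twisted

    @pytest.fixture(scope="session")
    def alice(make_node):
        # make_node() is a stand-in for the fixture's node-creation call
        process = pytest_twisted.blockon(make_node("alice"))
        pytest_twisted.blockon(await_client_ready(process))
        return process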
@@ -4,7 +4,6 @@ and stdout.
"""

from subprocess import Popen, PIPE, check_output, check_call
import sys

import pytest
from pytest_twisted import ensureDeferred
@@ -50,6 +49,7 @@ def test_put_from_stdin(alice, get_put_alias, tmpdir):
    assert read_bytes(tempfile) == DATA


@run_in_thread
def test_get_to_stdout(alice, get_put_alias, tmpdir):
    """
    It's possible to upload a file, and then download it to stdout.
@@ -67,6 +67,7 @@ def test_get_to_stdout(alice, get_put_alias, tmpdir):
    assert p.wait() == 0


@run_in_thread
def test_large_file(alice, get_put_alias, tmp_path):
    """
    It's possible to upload and download a larger file.
@@ -85,10 +86,6 @@ def test_large_file(alice, get_put_alias, tmp_path):
    assert outfile.read_bytes() == tempfile.read_bytes()


@pytest.mark.skipif(
    sys.platform.startswith("win"),
    reason="reconfigure() has issues on Windows"
)
@ensureDeferred
async def test_upload_download_immutable_different_default_max_segment_size(alice, get_put_alias, tmpdir, request):
    """
@@ -31,7 +31,7 @@ def test_upload_immutable(reactor, temp_dir, introducer_furl, flog_gatherer, sto
        happy=7,
        total=10,
    )
    util.await_client_ready(edna)
    yield util.await_client_ready(edna)

    node_dir = join(temp_dir, 'edna')

@@ -42,8 +42,8 @@ if PY2:
def test_onion_service_storage(reactor, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl):
    carol = yield _create_anonymous_node(reactor, 'carol', 8008, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
    dave = yield _create_anonymous_node(reactor, 'dave', 8009, request, temp_dir, flog_gatherer, tor_network, tor_introducer_furl)
    util.await_client_ready(carol, minimum_number_of_servers=2)
    util.await_client_ready(dave, minimum_number_of_servers=2)
    yield util.await_client_ready(carol, minimum_number_of_servers=2)
    yield util.await_client_ready(dave, minimum_number_of_servers=2)

    # ensure both nodes are connected to "a grid" by uploading
    # something via carol, and retrieve it using dave.
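The tests in the two hunks above are generator-style, so they wait on the Deferred by yielding it rather than calling blockon. Roughly (the decorator shown is one common way pytest_twisted drives such generators; the test body is abbreviated):

    # Illustrative: a generator-based test yields Deferreds to wait on them.
    import pytest_twisted

    @pytest_twisted.inlineCallbacks
    def test_example(some_node):
        # waits until the node reports at least two connected servers
        yield util.await_client_ready(some_node, minimum_number_of_servers=2)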
@@ -18,6 +18,7 @@ import allmydata.uri
from allmydata.util import jsonbytes as json

from . import util
from .util import run_in_thread

import requests
import html5lib
@@ -25,6 +26,7 @@ from bs4 import BeautifulSoup

from pytest_twisted import ensureDeferred

@run_in_thread
def test_index(alice):
    """
    we can download the index file
@@ -32,6 +34,7 @@ def test_index(alice):
    util.web_get(alice, u"")


@run_in_thread
def test_index_json(alice):
    """
    we can download the index file as json
@@ -41,6 +44,7 @@ def test_index_json(alice):
    json.loads(data)


@run_in_thread
def test_upload_download(alice):
    """
    upload a file, then download it via readcap
@@ -70,6 +74,7 @@ def test_upload_download(alice):
    assert str(data, "utf-8") == FILE_CONTENTS


@run_in_thread
def test_put(alice):
    """
    use PUT to create a file
@@ -89,6 +94,7 @@ def test_put(alice):
    assert cap.needed_shares == int(cfg.get_config("client", "shares.needed"))


@run_in_thread
def test_helper_status(storage_nodes):
    """
    successfully GET the /helper_status page
@@ -101,6 +107,7 @@ def test_helper_status(storage_nodes):
    assert str(dom.h1.string) == u"Helper Status"


@run_in_thread
def test_deep_stats(alice):
    """
    create a directory, do deep-stats on it and prove the /operations/
@@ -417,6 +424,7 @@ async def test_directory_deep_check(reactor, request, alice):
    assert dom is not None, "Operation never completed"


@run_in_thread
def test_storage_info(storage_nodes):
    """
    retrieve and confirm /storage URI for one storage node
@@ -428,6 +436,7 @@ def test_storage_info(storage_nodes):
    )


@run_in_thread
def test_storage_info_json(storage_nodes):
    """
    retrieve and confirm /storage?t=json URI for one storage node
@@ -442,6 +451,7 @@ def test_storage_info_json(storage_nodes):
    assert data[u"stats"][u"storage_server.reserved_space"] == 1000000000


@run_in_thread
def test_introducer_info(introducer):
    """
    retrieve and confirm /introducer URI for the introducer
@@ -460,6 +470,7 @@ def test_introducer_info(introducer):
    assert "subscription_summary" in data


@run_in_thread
def test_mkdir_with_children(alice):
    """
    create a directory using ?t=mkdir-with-children
@@ -430,6 +430,31 @@ class FileShouldVanishException(Exception):
    )


def run_in_thread(f):
    """Decorator for integration tests that runs code in a thread.

    Because we're using pytest_twisted, tests that rely on the reactor are
    expected to return a Deferred and use async APIs so the reactor can run.

    In the case of the integration test suite, it launches nodes in the
    background using Twisted APIs. The nodes stdout and stderr is read via
    Twisted code. If the reactor doesn't run, reads don't happen, and
    eventually the buffers fill up, and the nodes block when they try to flush
    logs.

    We can switch to Twisted APIs (treq instead of requests etc.), but
    sometimes it's easier or expedient to just have a blocking test. So this
    decorator allows you to run the test in a thread, and the reactor can keep
    running in the main thread.

    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug.
    """
    @wraps(f)
    def test(*args, **kwargs):
        return deferToThread(lambda: f(*args, **kwargs))
    return test


def await_file_contents(path, contents, timeout=15, error_if=None):
    """
    wait up to `timeout` seconds for the file at `path` (any path-like
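The docstring above gives the reason the decorator exists: blocking test code would otherwise keep the reactor from running, and the reactor is what drains the nodes' stdout/stderr pipes. Usage is just decoration; a small hypothetical example:

    # Illustrative: the blocking body runs in a thread via deferToThread,
    # while the reactor keeps running in the main thread.
    @run_in_thread
    def test_something_blocking(alice):
        ...  # free to use blocking libraries such as `requests` here
    # Calling the decorated function returns a Deferred for pytest_twisted to wait on.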
@@ -555,6 +580,7 @@ def web_post(tahoe, uri_fragment, **kwargs):
    return resp.content


@run_in_thread
def await_client_ready(tahoe, timeout=10, liveness=60*2, minimum_number_of_servers=1):
    """
    Uses the status API to wait for a client-type node (in `tahoe`, a
@@ -622,30 +648,6 @@ def generate_ssh_key(path):
    f.write(s.encode("ascii"))


def run_in_thread(f):
    """Decorator for integration tests that runs code in a thread.

    Because we're using pytest_twisted, tests that rely on the reactor are
    expected to return a Deferred and use async APIs so the reactor can run.

    In the case of the integration test suite, it launches nodes in the
    background using Twisted APIs. The nodes stdout and stderr is read via
    Twisted code. If the reactor doesn't run, reads don't happen, and
    eventually the buffers fill up, and the nodes block when they try to flush
    logs.

    We can switch to Twisted APIs (treq instead of requests etc.), but
    sometimes it's easier or expedient to just have a blocking test. So this
    decorator allows you to run the test in a thread, and the reactor can keep
    running in the main thread.

    See https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3597 for tracking bug.
    """
    @wraps(f)
    def test(*args, **kwargs):
        return deferToThread(lambda: f(*args, **kwargs))
    return test

@frozen
class CHK:
    """
@@ -792,16 +794,11 @@ async def reconfigure(reactor, request, node: TahoeProcess,
    )

    if changed:
        # TODO reconfigure() seems to have issues on Windows. If you need to
        # use it there, delete this assert and try to figure out what's going
        # on...
        assert not sys.platform.startswith("win")

        # restart the node
        print(f"Restarting {node.node_dir} for ZFEC reconfiguration")
        await node.restart_async(reactor, request)
        print("Restarted. Waiting for ready state.")
        await_client_ready(node)
        await await_client_ready(node)
        print("Ready.")
    else:
        print("Config unchanged, not restarting.")
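Inside async code such as `reconfigure()` the Deferred from the threaded helper is simply awaited, since Deferreds are awaitable in coroutines driven by `ensureDeferred`. Schematically (hypothetical helper name):

    # Illustrative: an async helper awaiting the threaded readiness check.
    async def restart_and_wait(reactor, request, node):
        await node.restart_async(reactor, request)
        await await_client_ready(node)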
newsfragments/3988.minor (new, empty file)