Test this behavior using an integration test instead.

This commit is contained in:
Itamar Turner-Trauring 2023-02-07 09:44:51 -05:00
parent ea052b3c80
commit 3bc3cf39d0
2 changed files with 49 additions and 45 deletions

View File

@ -3,11 +3,13 @@ Integration tests for getting and putting files, including reading from stdin
and stdout.
"""
from subprocess import Popen, PIPE
from subprocess import Popen, PIPE, check_output
import pytest
from pytest_twisted import ensureDeferred
from twisted.internet import reactor
from .util import run_in_thread, cli
from .util import run_in_thread, cli, reconfigure
DATA = b"abc123 this is not utf-8 decodable \xff\x00\x33 \x11"
try:
@ -62,3 +64,47 @@ def test_get_to_stdout(alice, get_put_alias, tmpdir):
)
assert p.stdout.read() == DATA
assert p.wait() == 0
@ensureDeferred
async def test_upload_download_immutable_different_default_max_segment_size(alice, get_put_alias, tmpdir, request):
    """
    Tahoe-LAFS used to have a default max segment size of 128KB, and is now
    1MB.  Verify that an upload created under one default can be downloaded
    under the other, in both directions: old uploader with new downloader
    (128KB -> 1MB), and new uploader with old downloader (1MB -> 128KB).
    """
    source_file = tmpdir.join("file")
    payload = DATA * 100_000
    # Must span several segments at either segment size.
    assert len(payload) > 2 * 1024 * 1024
    with source_file.open("wb") as out:
        out.write(payload)

    async def reconfigure_segment_size(segment_size):
        # Restart alice with the given default max segment size.
        await reconfigure(
            reactor,
            request,
            alice,
            (1, 1, 1),
            None,
            max_segment_size=segment_size
        )

    def download(cap):
        # Read the cap back via the CLI, capturing stdout.
        return check_output(
            ["tahoe", "--node-directory", alice.node_dir, "get", cap, "-"]
        )

    # 1. Upload file 1 with default segment size set to 1MB
    await reconfigure_segment_size(1024 * 1024)
    cli(alice, "put", str(source_file), "getput:seg1024kb")

    # 2. Download file 1 with default segment size set to 128KB
    await reconfigure_segment_size(128 * 1024)
    assert payload == download("getput:seg1024kb")

    # 3. Upload file 2 with default segment size set to 128KB
    cli(alice, "put", str(source_file), "getput:seg128kb")

    # 4. Download file 2 with default segment size set to 1MB
    await reconfigure_segment_size(1024 * 1024)
    assert payload == download("getput:seg128kb")

View File

@ -28,16 +28,13 @@ from allmydata.storage.server import si_a2b
from allmydata.immutable import offloaded, upload
from allmydata.immutable.literal import LiteralFileNode
from allmydata.immutable.filenode import ImmutableFileNode
from allmydata.immutable.downloader.node import DownloadNode
from allmydata.util import idlib, mathutil
from allmydata.util import log, base32
from allmydata.util.encodingutil import quote_output, unicode_to_argv
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.consumer import MemoryConsumer, download_to_data
from allmydata.util.deferredutil import async_to_deferred
from allmydata.interfaces import IDirectoryNode, IFileNode, \
NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION, \
DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION
from allmydata.monitor import Monitor
from allmydata.mutable.common import NotWriteableError
from allmydata.mutable import layout as mutable_layout
@ -1814,45 +1811,6 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
d.addCallback(_got_lit_filenode)
return d
@async_to_deferred
async def test_upload_download_immutable_different_default_max_segment_size(self):
    """
    Tahoe-LAFS used to have a default max segment size of 128KB, and is now
    1MB. Test that an upload created when 128KB was the default can be
    downloaded with 1MB as the default (i.e. old uploader, new downloader),
    and vice versa, (new uploader, old downloader).
    """
    await self.set_up_nodes(2)

    # Restrict encoding to a single share so one storage server suffices.
    for client in self.clients:
        for param in ("k", "happy", "n"):
            client.encoding_params[param] = 1

    # Old uploader / new downloader, then the reverse.
    await self._upload_download_different_max_segment(128 * 1024, 1024 * 1024)
    await self._upload_download_different_max_segment(1024 * 1024, 128 * 1024)
async def _upload_download_different_max_segment(
    self, upload_segment_size, download_segment_size
):
    """
    Upload with one max segment size, download with another.

    :param upload_segment_size: max segment size (bytes) forced onto the
        uploadable before uploading.
    :param download_segment_size: default max segment size (bytes) patched
        onto ``DownloadNode`` for the duration of the download.
    """
    # 9MB of data, large enough to span multiple segments at either size.
    data = b"123456789" * 1_000_000

    uploader = self.clients[0].getServiceNamed("uploader")
    uploadable = upload.Data(data, convergence=None)
    # Identity check, not equality: `== None` was non-idiomatic (PEP 8).
    assert uploadable.max_segment_size is None
    uploadable.max_segment_size = upload_segment_size
    results = await uploader.upload(uploadable)

    # Sanity-check the current default before overriding it; the patch is
    # undone automatically when the test ends.
    assert DownloadNode.default_max_segment_size == DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
    self.patch(DownloadNode, "default_max_segment_size", download_segment_size)

    # Download from the *other* client with the patched default and verify
    # the round-tripped bytes match.
    uri = results.get_uri()
    node = self.clients[1].create_node_from_uri(uri)
    mc = await node.read(MemoryConsumer(), 0, None)
    self.assertEqual(b"".join(mc.chunks), data)
class Connections(SystemTestMixin, unittest.TestCase):
FORCE_FOOLSCAP_FOR_STORAGE = True