diff --git a/src/allmydata/immutable/downloader/node.py b/src/allmydata/immutable/downloader/node.py
index 10ce0e5c7..02153444a 100644
--- a/src/allmydata/immutable/downloader/node.py
+++ b/src/allmydata/immutable/downloader/node.py
@@ -49,6 +49,8 @@ class DownloadNode(object):
     """Internal class which manages downloads and holds state. External
     callers use CiphertextFileNode instead."""
 
+    default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE
+
     # Share._node points to me
     def __init__(self, verifycap, storage_broker, secret_holder, terminator,
                  history, download_status):
@@ -76,7 +78,7 @@ class DownloadNode(object):
         # .guessed_segment_size, .guessed_num_segments, and
         # .ciphertext_hash_tree (with a dummy, to let us guess which hashes
         # we'll need)
-        self._build_guessed_tables(DEFAULT_MAX_SEGMENT_SIZE)
+        self._build_guessed_tables(self.default_max_segment_size)
 
         # filled in when we parse a valid UEB
         self.have_UEB = False
diff --git a/src/allmydata/test/test_system.py b/src/allmydata/test/test_system.py
index 10a64c1fe..235565020 100644
--- a/src/allmydata/test/test_system.py
+++ b/src/allmydata/test/test_system.py
@@ -28,13 +28,16 @@ from allmydata.storage.server import si_a2b
 from allmydata.immutable import offloaded, upload
 from allmydata.immutable.literal import LiteralFileNode
 from allmydata.immutable.filenode import ImmutableFileNode
+from allmydata.immutable.downloader.node import DownloadNode
 from allmydata.util import idlib, mathutil
 from allmydata.util import log, base32
 from allmydata.util.encodingutil import quote_output, unicode_to_argv
 from allmydata.util.fileutil import abspath_expanduser_unicode
 from allmydata.util.consumer import MemoryConsumer, download_to_data
+from allmydata.util.deferredutil import async_to_deferred
 from allmydata.interfaces import IDirectoryNode, IFileNode, \
-     NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION
+     NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION, \
+     DEFAULT_MAX_SEGMENT_SIZE
 from allmydata.monitor import Monitor
 from allmydata.mutable.common import NotWriteableError
 from allmydata.mutable import layout as mutable_layout
@@ -1811,6 +1814,46 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_got_lit_filenode)
         return d
 
+    @async_to_deferred
+    async def test_upload_download_immutable_different_default_max_segment_size(self):
+        """
+        Tahoe-LAFS used to have a default max segment size of 128KB; it is now
+        1MB. Test that an upload created when 128KB was the default can be
+        downloaded with 1MB as the default (i.e. old uploader, new downloader),
+        and vice versa (new uploader, old downloader).
+        """
+        await self.set_up_nodes(2)
+
+        # Just 1 share:
+        for c in self.clients:
+            c.encoding_params["k"] = 1
+            c.encoding_params["happy"] = 1
+            c.encoding_params["n"] = 1
+
+        await self._upload_download_different_max_segment(128 * 1024, 1024 * 1024)
+
+        await self._upload_download_different_max_segment(1024 * 1024, 128 * 1024)
+
+
+    async def _upload_download_different_max_segment(
+        self, upload_segment_size, download_segment_size
+    ):
+        """Upload with one max segment size, download with another."""
+        data = b"123456789" * 1_000_000
+
+        uploader = self.clients[0].getServiceNamed("uploader")
+        uploadable = upload.Data(data, convergence=None)
+        assert uploadable.max_segment_size is None
+        uploadable.max_segment_size = upload_segment_size
+        results = await uploader.upload(uploadable)
+
+        assert DownloadNode.default_max_segment_size == DEFAULT_MAX_SEGMENT_SIZE
+        self.patch(DownloadNode, "default_max_segment_size", download_segment_size)
+        uri = results.get_uri()
+        node = self.clients[1].create_node_from_uri(uri)
+        mc = await node.read(MemoryConsumer(), 0, None)
+        self.assertEqual(b"".join(mc.chunks), data)
+
 
 class Connections(SystemTestMixin, unittest.TestCase):
     FORCE_FOOLSCAP_FOR_STORAGE = True