Merge remote-tracking branch 'origin/master' into replace-mach-nix

Jean-Paul Calderone 2023-03-15 15:33:03 -04:00
commit b665240523
3 changed files with 39 additions and 2 deletions


@@ -3,7 +3,7 @@ Integration tests for getting and putting files, including reading from stdin
and stdout.
"""
from subprocess import Popen, PIPE, check_output
from subprocess import Popen, PIPE, check_output, check_call
import sys
import pytest
@@ -67,6 +67,24 @@ def test_get_to_stdout(alice, get_put_alias, tmpdir):
    assert p.wait() == 0


def test_large_file(alice, get_put_alias, tmp_path):
    """
    It's possible to upload and download a larger file.

    We avoid stdin/stdout since that's flaky on Windows.
    """
    tempfile = tmp_path / "file"
    with tempfile.open("wb") as f:
        f.write(DATA * 1_000_000)
    cli(alice, "put", str(tempfile), "getput:largefile")

    outfile = tmp_path / "out"
    check_call(
        ["tahoe", "--node-directory", alice.node_dir, "get", "getput:largefile", str(outfile)],
    )
    assert outfile.read_bytes() == tempfile.read_bytes()
@pytest.mark.skipif(
    sys.platform.startswith("win"),
    reason="reconfigure() has issues on Windows"
)
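The new test drives the tahoe command line directly instead of going through stdin/stdout. As a rough sketch of the same put/get round-trip outside the integration fixtures (the node directory, file paths, and the pre-created "getput" alias below are assumptions for illustration, not part of this change):

# Sketch only: assumes a running Tahoe-LAFS client node in NODE_DIR and a
# "getput" alias already created with "tahoe create-alias getput".
from pathlib import Path
from subprocess import check_call

NODE_DIR = "/path/to/alice-node"   # hypothetical node directory
src = Path("/tmp/largefile")       # hypothetical file to upload
dst = Path("/tmp/largefile.out")   # hypothetical download target

# Upload under the alias, then fetch it back into a separate file.
check_call(["tahoe", "--node-directory", NODE_DIR, "put", str(src), "getput:largefile"])
check_call(["tahoe", "--node-directory", NODE_DIR, "get", "getput:largefile", str(dst)])

# The round-trip should be byte-for-byte identical.
assert dst.read_bytes() == src.read_bytes()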
newsfragments/3959.minor (normal file, 0 lines changed)


@@ -33,6 +33,7 @@ from allmydata.util import log, base32
from allmydata.util.encodingutil import quote_output, unicode_to_argv
from allmydata.util.fileutil import abspath_expanduser_unicode
from allmydata.util.consumer import MemoryConsumer, download_to_data
from allmydata.util.deferredutil import async_to_deferred
from allmydata.interfaces import IDirectoryNode, IFileNode, \
NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION
from allmydata.monitor import Monitor
@@ -657,7 +658,25 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
            self.failUnlessEqual(res, NEWERDATA)
        d.addCallback(_check_download_5)

        def _corrupt_shares(res):
        # The previous checks upload a complete replacement. This uses a
        # different API that is supposed to do a partial write at an offset.
        @async_to_deferred
        async def _check_write_at_offset(newnode):
            log.msg("writing at offset")
            start = b"abcdef"
            expected = b"abXYef"
            uri = self._mutable_node_1.get_uri()
            newnode = self.clients[0].create_node_from_uri(uri)
            await newnode.overwrite(MutableData(start))
            version = await newnode.get_mutable_version()
            await version.update(MutableData(b"XY"), 2)
            result = await newnode.download_best_version()
            self.assertEqual(result, expected)
            # Revert to previous version
            await newnode.overwrite(MutableData(NEWERDATA))
        d.addCallback(_check_write_at_offset)

        def _corrupt_shares(_res):
            # run around and flip bits in all but k of the shares, to test
            # the hash checks
            shares = self._find_all_shares(self.basedir)
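For reference, the key assertion above is that version.update(MutableData(b"XY"), 2) performs a partial write: it overlays the new bytes at offset 2 of the existing contents rather than replacing the whole file. A minimal pure-Python model of that behavior (a hypothetical splice helper, not the tahoe API) looks like this:

# Hypothetical model of a partial write at an offset, for illustration only.
def splice(original: bytes, new_data: bytes, offset: int) -> bytes:
    # Overwrite len(new_data) bytes starting at offset, keep everything else.
    return original[:offset] + new_data + original[offset + len(new_data):]

# Mirrors the test's expectation: writing b"XY" at offset 2 of b"abcdef".
assert splice(b"abcdef", b"XY", 2) == b"abXYef"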