Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-19 13:07:56 +00:00)

Merge remote-tracking branch 'origin/master' into 3832.backdated-leases

This commit is contained in commit ff511bbcf2.
.readthedocs.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
+version: 2
+
+python:
+  install:
+    - requirements: docs/requirements.txt
docs/requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
+sphinx
+docutils<0.18 # https://github.com/sphinx-doc/sphinx/issues/9788
+recommonmark
+sphinx_rtd_theme
newsfragments/3831.minor (new empty file)

newsfragments/3835.minor (new empty file)
@@ -25,6 +25,11 @@ if PY2:
 from past.builtins import unicode
 from six import ensure_text

+try:
+    from typing import Dict, Callable
+except ImportError:
+    pass
+
 import os
 from base64 import b32encode
 from functools import (
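The guarded import added here is the Python 2 compatibility idiom used throughout the diff: the names from typing are consumed only by comment-style annotations, so a missing typing module must not break the import at runtime. A minimal standalone sketch of the pattern (the read_all helper is hypothetical, not part of this change):

try:
    from typing import Dict  # only needed by static checkers such as mypy
except ImportError:
    pass  # Python 2 without the typing backport: annotations stay as comments


def read_all(shares):
    # type: (Dict[bytes, bytes]) -> int
    """Hypothetical helper: total size of a mapping of share names to share bytes."""
    return sum(len(data) for data in shares.values())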
@@ -479,6 +484,18 @@ class GridTestMixin(object):

     def set_up_grid(self, num_clients=1, num_servers=10,
                     client_config_hooks={}, oneshare=False):
+        """
+        Create a Tahoe-LAFS storage grid.
+
+        :param num_clients: See ``NoNetworkGrid``
+        :param num_servers: See ``NoNetworkGrid``
+        :param client_config_hooks: See ``NoNetworkGrid``
+
+        :param bool oneshare: If ``True`` then the first client node is
+            configured with ``n == k == happy == 1``.
+
+        :return: ``None``
+        """
         # self.basedir must be set
         port_assigner = SameProcessStreamEndpointAssigner()
         port_assigner.setUp()
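For orientation, here is a hedged sketch of how a test case might use the documented set_up_grid() signature; the test class name, the imported module path, and the self.g.clients assertion are assumptions about the surrounding test infrastructure, not guarantees from this diff:

from twisted.trial import unittest

from allmydata.test.no_network import GridTestMixin  # assumed module path


class GridSmokeTest(GridTestMixin, unittest.TestCase):
    """Hypothetical test exercising the documented set_up_grid() parameters."""

    def test_grid_starts(self):
        # set_up_grid() requires self.basedir to be set beforehand.
        self.basedir = self.mktemp()
        self.set_up_grid(num_clients=1, num_servers=10, oneshare=True)
        # With oneshare=True the first client node is configured with n == k == happy == 1.
        self.assertEqual(len(self.g.clients), 1)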
@@ -557,6 +574,15 @@ class GridTestMixin(object):
         return sorted(shares)

     def copy_shares(self, uri):
+        # type: (bytes) -> Dict[bytes, bytes]
+        """
+        Read all of the share files for the given capability from the storage area
+        of the storage servers created by ``set_up_grid``.
+
+        :param bytes uri: A Tahoe-LAFS data capability.
+
+        :return: A ``dict`` mapping share file names to share file contents.
+        """
         shares = {}
         for (shnum, serverid, sharefile) in self.find_uri_shares(uri):
             with open(sharefile, "rb") as f:
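Because copy_shares() returns a plain dict of share file names to share contents, it composes naturally with a capture-and-restore pattern around corruption tests. A hedged sketch, assuming a grid already created by set_up_grid(); the with_pristine_shares helper is illustrative and not part of the test suite:

def with_pristine_shares(grid, uri, body):
    """
    Hypothetical helper: run ``body()`` and afterwards restore every share
    file for ``uri`` to the contents captured beforehand.  ``grid`` is any
    test instance that mixes in GridTestMixin and has called set_up_grid().
    """
    pristine = grid.copy_shares(uri)   # {share file name: share file bytes}
    try:
        return body()
    finally:
        for sharefile, data in pristine.items():
            with open(sharefile, "wb") as f:
                f.write(data)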
@@ -601,10 +627,15 @@ class GridTestMixin(object):
                     f.write(corruptdata)

     def corrupt_all_shares(self, uri, corruptor, debug=False):
+        # type: (bytes, Callable[[bytes, bool], bytes], bool) -> None
+        """
+        Apply ``corruptor`` to the contents of all share files associated with a
+        given capability and replace the share file contents with its result.
+        """
         for (i_shnum, i_serverid, i_sharefile) in self.find_uri_shares(uri):
             with open(i_sharefile, "rb") as f:
                 sharedata = f.read()
-            corruptdata = corruptor(sharedata, debug=debug)
+            corruptdata = corruptor(sharedata, debug)
             with open(i_sharefile, "wb") as f:
                 f.write(corruptdata)

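The new type comment pins corruptors to the shape (bytes, bool) -> bytes, and the call site now passes the debug flag positionally, so a corruptor must accept it as a second positional parameter. A hedged sketch of a conforming corruptor; the function name and the particular damage it applies are illustrative only:

def zero_prefix_corruptor(sharedata, debug=False):
    # type: (bytes, bool) -> bytes
    """Illustrative corruptor: overwrite the first 32 bytes of a share with zeros."""
    if debug:
        print("corrupting a share of %d bytes" % len(sharedata))
    prefix_length = min(32, len(sharedata))
    return b"\x00" * prefix_length + sharedata[prefix_length:]

# Applied to every share file of a capability, e.g.:
#     self.corrupt_all_shares(imm_uri, zero_prefix_corruptor)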
@@ -14,6 +14,11 @@ if PY2:
 # a previous run. This asserts that the current code is capable of decoding
 # shares from a previous version.

+try:
+    from typing import Any
+except ImportError:
+    pass
+
 import six
 import os
 from twisted.trial import unittest
@@ -951,12 +956,52 @@ class Corruption(_Base, unittest.TestCase):
         self.corrupt_shares_numbered(imm_uri, [2], _corruptor)

     def _corrupt_set(self, ign, imm_uri, which, newvalue):
+        # type: (Any, bytes, int, int) -> None
+        """
+        Replace a single byte of share file number 2 for the given capability
+        with a new byte.
+
+        :param imm_uri: Corrupt share number 2 belonging to this capability.
+        :param which: The byte position to replace.
+        :param newvalue: The new byte value to set in the share.
+        """
         log.msg("corrupt %d" % which)
         def _corruptor(s, debug=False):
             return s[:which] + bchr(newvalue) + s[which+1:]
         self.corrupt_shares_numbered(imm_uri, [2], _corruptor)

     def test_each_byte(self):
+        """
+        Test share selection behavior of the downloader in the face of certain
+        kinds of data corruption.
+
+        1. upload a small share to the no-network grid
+        2. read all of the resulting share files out of the no-network storage servers
+        3. for each of
+
+           a. each byte of the share file version field
+           b. each byte of the immutable share version field
+           c. each byte of the immutable share data offset field
+           d. the most significant byte of the block_shares offset field
+           e. one of the bytes of one of the merkle trees
+           f. one of the bytes of the share hashes list
+
+           i. flip the least significant bit in all of the share files
+           ii. perform the download/check/restore process
+
+        4. add 2 ** 24 to the share file version number
+        5. perform the download/check/restore process
+
+        6. add 2 ** 24 to the share version number
+        7. perform the download/check/restore process
+
+        The download/check/restore process is:
+
+        1. attempt to download the data
+        2. assert that the recovered plaintext is correct
+        3. assert that only the "correct" share numbers were used to reconstruct the plaintext
+        4. restore all of the share files to their pristine condition
+        """
         # Setting catalog_detection=True performs an exhaustive test of the
         # Downloader's response to corruption in the lsb of each byte of the
         # 2070-byte share, with two goals: make sure we tolerate all forms of
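As a concrete illustration of the byte-replacement corruptor that _corrupt_set builds around bchr, here is a hedged standalone sketch with a worked example; the local bchr definition only mimics the project's helper rather than importing it:

def bchr(value):
    # type: (int) -> bytes
    # Single byte from an integer; behaves the same on Python 2 and Python 3.
    return bytes(bytearray([value]))


def set_byte(share, which, newvalue):
    # type: (bytes, int, int) -> bytes
    """Return ``share`` with the byte at position ``which`` replaced by ``newvalue``."""
    return share[:which] + bchr(newvalue) + share[which + 1:]


# Worked example: replacing byte 2 of a five-byte share.
assert set_byte(b"\x00\x01\x02\x03\x04", 2, 0xFF) == b"\x00\x01\xff\x03\x04"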
@@ -1145,8 +1190,18 @@ class Corruption(_Base, unittest.TestCase):
         return d

     def _corrupt_flip_all(self, ign, imm_uri, which):
+        # type: (Any, bytes, int) -> None
+        """
+        Flip the least significant bit at a given byte position in all share files
+        for the given capability.
+        """
         def _corruptor(s, debug=False):
-            return s[:which] + bchr(ord(s[which:which+1])^0x01) + s[which+1:]
+            # type: (bytes, bool) -> bytes
+            before_corruption = s[:which]
+            after_corruption = s[which+1:]
+            original_byte = s[which:which+1]
+            corrupt_byte = bchr(ord(original_byte) ^ 0x01)
+            return b"".join([before_corruption, corrupt_byte, after_corruption])
         self.corrupt_all_shares(imm_uri, _corruptor)

 class DownloadV2(_Base, unittest.TestCase):
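The refactored _corruptor is an expanded form of the original one-liner. The following hedged sketch restates the same least-significant-bit flip as a standalone function and shows that applying it twice restores the original byte, since the flip is its own inverse:

def flip_lsb(share, which):
    # type: (bytes, int) -> bytes
    """Return ``share`` with the least significant bit of byte ``which`` flipped."""
    original_byte = share[which:which + 1]   # slicing keeps the bytes type on Python 2 and 3
    corrupt_byte = bytes(bytearray([ord(original_byte) ^ 0x01]))
    return share[:which] + corrupt_byte + share[which + 1:]


# Flipping the LSB of 0x02 yields 0x03; flipping again restores the original.
assert flip_lsb(b"\x00\x02\x04", 1) == b"\x00\x03\x04"
assert flip_lsb(flip_lsb(b"\x00\x02\x04", 1), 1) == b"\x00\x02\x04"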
tox.ini (7 lines changed)
@@ -217,13 +217,8 @@ commands =
 # your web browser.

 [testenv:docs]
-# we pin docutils because of https://sourceforge.net/p/docutils/bugs/301/
-# which asserts when it reads links to .svg files (e.g. about.rst)
 deps =
-    sphinx
-    docutils==0.12
-    recommonmark
-    sphinx_rtd_theme
+    -r docs/requirements.txt
 # normal install is not needed for docs, and slows things down
 skip_install = True
 commands =