More accurate names.

Itamar Turner-Trauring 2023-02-07 09:03:45 -05:00
parent 921a2083dc
commit f4255cdaa3
9 changed files with 20 additions and 20 deletions

View File

@@ -46,7 +46,7 @@ from allmydata.util.configutil import (
write_config,
)
from allmydata import client
-from allmydata.interfaces import DEFAULT_MAX_SEGMENT_SIZE
+from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
import pytest_twisted
@@ -774,7 +774,7 @@ async def reconfigure(reactor, request, node: TahoeProcess,
config.write_private_config("convergence", base32.b2a(convergence))
if max_segment_size is not None:
-cur_segment_size = int(config.get_config("client", "shares._max_immutable_segment_size_for_testing", DEFAULT_MAX_SEGMENT_SIZE))
+cur_segment_size = int(config.get_config("client", "shares._max_immutable_segment_size_for_testing", DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE))
if cur_segment_size != max_segment_size:
changed = True
config.set_config(
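
# A minimal sketch (an assumption, not code from this commit) of the
# read-compare-write pattern above: read the test-only option with the
# renamed default as the fallback, and only flag the config as changed
# when the desired segment size differs.
def segment_size_changed(config, desired_size, default=1024 * 1024):
    current = int(config.get_config(
        "client", "shares._max_immutable_segment_size_for_testing", default))
    return current != desired_size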

View File

@@ -5,7 +5,7 @@ from __future__ import print_function
import sys, math
from allmydata import uri, storage
from allmydata.immutable import upload
-from allmydata.interfaces import DEFAULT_MAX_SEGMENT_SIZE
+from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
from allmydata.util import mathutil
def roundup(size, blocksize=4096):
@@ -26,7 +26,7 @@ class BigFakeString(object):
def tell(self):
return self.fp
-def calc(filesize, params=(3,7,10), segsize=DEFAULT_MAX_SEGMENT_SIZE):
+def calc(filesize, params=(3,7,10), segsize=DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE):
num_shares = params[2]
if filesize <= upload.Uploader.URI_LIT_SIZE_THRESHOLD:
urisize = len(uri.LiteralFileURI("A"*filesize).to_string())
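
# An illustrative sketch (simplified; segment_layout is a hypothetical
# helper, not part of this commit) of the arithmetic calc() builds on: a
# file is cut into segments of at most segsize bytes, and each segment is
# erasure-coded into n blocks of roughly segsize/k bytes each.
import math

def segment_layout(filesize, segsize=1024 * 1024, params=(3, 7, 10)):
    k, _happy, n = params
    num_segments = max(1, math.ceil(filesize / segsize))
    block_size = math.ceil(min(filesize, segsize) / k)  # bytes per share block
    return num_segments, n * block_size  # segments, stored bytes per segment

# e.g. a 10 MiB file with the 1 MiB immutable default gives 10 segments:
assert segment_layout(10 * 1024 * 1024)[0] == 10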

View File

@@ -50,7 +50,7 @@ from allmydata.interfaces import (
IStatsProducer,
SDMF_VERSION,
MDMF_VERSION,
-DEFAULT_MAX_SEGMENT_SIZE,
+DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE,
IFoolscapStoragePlugin,
IAnnounceableStorageServer,
)
@@ -607,7 +607,7 @@ class _Client(node.Node, pollmixin.PollMixin):
DEFAULT_ENCODING_PARAMETERS = {"k": 3,
"happy": 7,
"n": 10,
"max_segment_size": DEFAULT_MAX_SEGMENT_SIZE,
"max_segment_size": DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE,
}
def __init__(self, config, main_tub, i2p_provider, tor_provider, introducer_clients,
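
# A hedged aside (not in this commit): with the 3-of-10 defaults above,
# every segment is expanded by a factor of n/k on the grid, and the file
# survives the loss of any n - k shares.
k, n = 3, 10
expansion = n / k
assert round(expansion, 2) == 3.33  # ~3.3x raw storage per byte uploaded
assert n - k == 7                   # up to 7 shares may be lost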

View File

@@ -19,7 +19,7 @@ from foolscap.api import eventually
from allmydata import uri
from allmydata.codec import CRSDecoder
from allmydata.util import base32, log, hashutil, mathutil, observer
-from allmydata.interfaces import DEFAULT_MAX_SEGMENT_SIZE
+from allmydata.interfaces import DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
from allmydata.hashtree import IncompleteHashTree, BadHashError, \
NotEnoughHashesError
@@ -49,7 +49,7 @@ class DownloadNode(object):
"""Internal class which manages downloads and holds state. External
callers use CiphertextFileNode instead."""
-default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE
+default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
# Share._node points to me
def __init__(self, verifycap, storage_broker, secret_holder,

View File

@@ -48,7 +48,7 @@ from allmydata.util.rrefutil import add_version_to_remote_reference
from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
IEncryptedUploadable, RIEncryptedUploadable, IUploadStatus, \
NoServersError, InsufficientVersionError, UploadUnhappinessError, \
-DEFAULT_MAX_SEGMENT_SIZE, IPeerSelector
+DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE, IPeerSelector
from allmydata.immutable import layout
from io import BytesIO
@@ -1692,7 +1692,7 @@ class AssistedUploader(object):
class BaseUploadable(object):
# this is overridden by max_segment_size
-default_max_segment_size = DEFAULT_MAX_SEGMENT_SIZE
+default_max_segment_size = DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
default_params_set = False
max_segment_size = None
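
# A minimal sketch (hypothetical class, assuming the fallback that the
# "overridden by max_segment_size" comment describes): a class-level
# default that an instance-level override takes precedence over.
class UploadableSketch:
    default_max_segment_size = 1024 * 1024  # DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
    max_segment_size = None

    def effective_segment_size(self):
        # Use the override when set, otherwise fall back to the class default.
        if self.max_segment_size is not None:
            return self.max_segment_size
        return self.default_max_segment_size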

View File

@@ -41,7 +41,7 @@ URI = StringConstraint(300) # kind of arbitrary
MAX_BUCKETS = 256 # per peer -- zfec offers at most 256 shares per file
-DEFAULT_MAX_SEGMENT_SIZE = 1024*1024
+DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE = 1024*1024
ShareData = StringConstraint(None)
URIExtensionData = StringConstraint(1000)
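
# The point of the rename, spelled out (values as defined in this commit):
# the immutable default here is 1 MiB, while the mutable default defined
# alongside the Publish class below is 128 KiB, so a single
# DEFAULT_MAX_SEGMENT_SIZE name conflated two different numbers.
KiB = 1024
DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE = 1024 * 1024
DEFAULT_MUTABLE_MAX_SEGMENT_SIZE = 128 * KiB
assert DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE == 8 * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE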

View File

@@ -36,7 +36,7 @@ from allmydata.mutable.layout import get_version_from_checkstring,\
SDMFSlotWriteProxy
KiB = 1024
-DEFAULT_MAX_SEGMENT_SIZE = 128 * KiB
+DEFAULT_MUTABLE_MAX_SEGMENT_SIZE = 128 * KiB
PUSHING_BLOCKS_STATE = 0
PUSHING_EVERYTHING_ELSE_STATE = 1
DONE_STATE = 2
@@ -367,7 +367,7 @@ class Publish(object):
self.data = newdata
self.datalength = newdata.get_size()
-#if self.datalength >= DEFAULT_MAX_SEGMENT_SIZE:
+#if self.datalength >= DEFAULT_MUTABLE_MAX_SEGMENT_SIZE:
# self._version = MDMF_VERSION
#else:
# self._version = SDMF_VERSION
@@ -551,7 +551,7 @@ class Publish(object):
def setup_encoding_parameters(self, offset=0):
if self._version == MDMF_VERSION:
-segment_size = DEFAULT_MAX_SEGMENT_SIZE # 128 KiB by default
+segment_size = DEFAULT_MUTABLE_MAX_SEGMENT_SIZE # 128 KiB by default
else:
segment_size = self.datalength # SDMF is only one segment
# this must be a multiple of self.required_shares
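
# A small sketch (assumption: mirrors what a round-up helper such as
# mathutil.next_multiple does) of making the segment size a multiple of
# required_shares, as the comment above requires.
def next_multiple(n, k):
    """Smallest multiple of k that is >= n."""
    return ((n + k - 1) // k) * k

assert next_multiple(128 * 1024, 3) == 131073  # 128 KiB rounded up for k=3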

View File

@@ -20,7 +20,7 @@ from testtools.matchers import (
from twisted.internet import defer
from allmydata.interfaces import MDMF_VERSION
from allmydata.mutable.filenode import MutableFileNode
-from allmydata.mutable.publish import MutableData, DEFAULT_MAX_SEGMENT_SIZE
+from allmydata.mutable.publish import MutableData, DEFAULT_MUTABLE_MAX_SEGMENT_SIZE
from ..no_network import GridTestMixin
from .. import common_util as testutil
@@ -180,7 +180,7 @@ class Update(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
# long -- this is 7 segments in the default segment size. So we
# need to add 2 segments worth of data to push it over a
# power-of-two boundary.
segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE
segment = b"a" * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE
new_data = self.data + (segment * 2)
d0 = self.do_upload_mdmf()
def _run(ign):
@@ -232,9 +232,9 @@ class Update(GridTestMixin, AsyncTestCase, testutil.ShouldFailMixin):
return d0
def test_multiple_segment_replace(self):
-replace_offset = 2 * DEFAULT_MAX_SEGMENT_SIZE
+replace_offset = 2 * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE
new_data = self.data[:replace_offset]
new_segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE
new_segment = b"a" * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE
new_data += 2 * new_segment
new_data += b"replaced"
rest_offset = len(new_data)
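
# Hedged arithmetic for the offsets above (a sketch, not part of the test):
# with the 128 KiB mutable default, the replacement starts exactly two
# segments into the file.
DEFAULT_MUTABLE_MAX_SEGMENT_SIZE = 128 * 1024
replace_offset = 2 * DEFAULT_MUTABLE_MAX_SEGMENT_SIZE
assert replace_offset == 262144  # 256 KiB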

View File

@@ -37,7 +37,7 @@ from allmydata.util.consumer import MemoryConsumer, download_to_data
from allmydata.util.deferredutil import async_to_deferred
from allmydata.interfaces import IDirectoryNode, IFileNode, \
NoSuchChildError, NoSharesError, SDMF_VERSION, MDMF_VERSION, \
-DEFAULT_MAX_SEGMENT_SIZE
+DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
from allmydata.monitor import Monitor
from allmydata.mutable.common import NotWriteableError
from allmydata.mutable import layout as mutable_layout
@@ -1846,7 +1846,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
uploadable.max_segment_size = upload_segment_size
results = await uploader.upload(uploadable)
-assert DownloadNode.default_max_segment_size == DEFAULT_MAX_SEGMENT_SIZE
+assert DownloadNode.default_max_segment_size == DEFAULT_IMMUTABLE_MAX_SEGMENT_SIZE
self.patch(DownloadNode, "default_max_segment_size", download_segment_size)
uri = results.get_uri()
node = self.clients[1].create_node_from_uri(uri)
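
# A hedged sketch (using unittest.mock instead of trial's self.patch, with a
# hypothetical helper name) of temporarily overriding the class-level
# download segment size, as the test above does.
from unittest import mock

def with_download_segment_size(download_node_cls, size, run):
    # Restores the original default_max_segment_size when the block exits.
    with mock.patch.object(download_node_cls, "default_max_segment_size", size):
        return run()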