Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-21 03:55:27 +00:00)
Change direct accesses to an_uri.storage_index to calls to .get_storage_index() (fixes #948)
parent 6a7feea455
commit 973f0afdd3
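Every hunk below makes the same change: callers stop reaching into a cap's storage_index attribute and call its get_storage_index() accessor instead, so wrapper caps (such as a directory cap that holds a filenode cap) can answer for themselves. A minimal sketch of that accessor pattern, using illustrative class names rather than the real Tahoe-LAFS URI classes:

# Illustrative sketch only -- these are not the actual Tahoe-LAFS classes.

class FileCapSketch(object):
    def __init__(self, storage_index):
        self._storage_index = storage_index

    def get_storage_index(self):
        # accessor: callers never read the attribute directly
        return self._storage_index

class DirectoryCapSketch(object):
    """A wrapper cap that delegates to the file cap it contains."""
    def __init__(self, filenode_uri):
        self._filenode_uri = filenode_uri

    def get_storage_index(self):
        # before this change callers wrote cap._filenode_uri.storage_index;
        # now the wrapper answers through its own accessor
        return self._filenode_uri.get_storage_index()

filecap = FileCapSketch("\x00" * 16)
dircap = DirectoryCapSketch(filecap)
assert dircap.get_storage_index() == filecap.get_storage_index()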
@@ -346,7 +346,7 @@ class DirectoryNode:
         return self._uri

     def get_storage_index(self):
-        return self._uri._filenode_uri.storage_index
+        return self._uri.get_storage_index()

     def check(self, monitor, verify=False, add_lease=False):
         """Perform a file check. See IChecker.check for details."""
@@ -39,7 +39,7 @@ class Checker(log.PrefixingLogMixin):
         for (serverid, serverrref) in servers:
             assert precondition(isinstance(serverid, str))

-        prefix = "%s" % base32.b2a_l(verifycap.storage_index[:8], 60)
+        prefix = "%s" % base32.b2a_l(verifycap.get_storage_index()[:8], 60)
         log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)

         self._verifycap = verifycap
@@ -50,10 +50,10 @@ class Checker(log.PrefixingLogMixin):
         self._add_lease = add_lease

         frs = file_renewal_secret_hash(secret_holder.get_renewal_secret(),
-                                       self._verifycap.storage_index)
+                                       self._verifycap.get_storage_index())
         self.file_renewal_secret = frs
         fcs = file_cancel_secret_hash(secret_holder.get_cancel_secret(),
-                                      self._verifycap.storage_index)
+                                      self._verifycap.get_storage_index())
         self.file_cancel_secret = fcs

     def _get_renewal_secret(self, peerid):
@@ -145,7 +145,7 @@ class Checker(log.PrefixingLogMixin):
         results."""

         vcap = self._verifycap
-        b = layout.ReadBucketProxy(bucket, serverid, vcap.storage_index)
+        b = layout.ReadBucketProxy(bucket, serverid, vcap.get_storage_index())
         veup = download.ValidatedExtendedURIProxy(b, vcap)
         d = veup.start()

@@ -249,7 +249,7 @@ class Checker(log.PrefixingLogMixin):
         then disconnected and ceased responding, or returned a failure, it is
         still marked with the True flag for 'success'.
         """
-        d = self._get_buckets(ss, self._verifycap.storage_index, serverid)
+        d = self._get_buckets(ss, self._verifycap.get_storage_index(), serverid)

         def _got_buckets(result):
             bucketdict, serverid, success = result
@@ -296,12 +296,12 @@ class Checker(log.PrefixingLogMixin):
         def _curry_empty_corrupted(res):
             buckets, serverid, responded = res
             return (set(buckets), serverid, set(), set(), responded)
-        d = self._get_buckets(ss, self._verifycap.storage_index, serverid)
+        d = self._get_buckets(ss, self._verifycap.get_storage_index(), serverid)
         d.addCallback(_curry_empty_corrupted)
         return d

     def _format_results(self, results):
-        cr = CheckResults(self._verifycap, self._verifycap.storage_index)
+        cr = CheckResults(self._verifycap, self._verifycap.get_storage_index())
         d = {}
         d['count-shares-needed'] = self._verifycap.needed_shares
         d['count-shares-expected'] = self._verifycap.total_shares
@@ -316,9 +316,9 @@ class Checker(log.PrefixingLogMixin):
             for sharenum in theseverifiedshares:
                 verifiedshares.setdefault(sharenum, set()).add(thisserverid)
             for sharenum in thesecorruptshares:
-                corruptsharelocators.append((thisserverid, self._verifycap.storage_index, sharenum))
+                corruptsharelocators.append((thisserverid, self._verifycap.get_storage_index(), sharenum))
             for sharenum in theseincompatibleshares:
-                incompatiblesharelocators.append((thisserverid, self._verifycap.storage_index, sharenum))
+                incompatiblesharelocators.append((thisserverid, self._verifycap.get_storage_index(), sharenum))

         d['count-shares-good'] = len(verifiedshares)
         d['count-good-share-hosts'] = len([s for s in servers.keys() if servers[s]])
@@ -751,14 +751,14 @@ class CiphertextDownloader(log.PrefixingLogMixin):
         precondition(IVerifierURI.providedBy(v), v)
         precondition(IDownloadTarget.providedBy(target), target)

-        prefix=base32.b2a_l(v.storage_index[:8], 60)
-        log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.download", prefix=prefix)
         self._storage_broker = storage_broker

         self._verifycap = v
-        self._storage_index = v.storage_index
+        self._storage_index = v.get_storage_index()
         self._uri_extension_hash = v.uri_extension_hash

+        prefix=base32.b2a_l(self._storage_index[:8], 60)
+        log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.download", prefix=prefix)

         self._started = time.time()
         self._status = s = DownloadStatus()
         s.set_status("Starting")
@@ -225,7 +225,7 @@ class ImmutableFileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
         return self.u.to_string()

     def get_storage_index(self):
-        return self.u.storage_index
+        return self.u.get_storage_index()

     def check_and_repair(self, monitor, verify=False, add_lease=False):
         verifycap = self.get_verify_cap()
@@ -238,7 +238,7 @@ class ImmutableFileNode(_ImmutableFileNodeBase, log.PrefixingLogMixin):
                             monitor=monitor)
         d = c.start()
         def _maybe_repair(cr):
-            crr = CheckAndRepairResults(self.u.storage_index)
+            crr = CheckAndRepairResults(self.u.get_storage_index())
             crr.pre_repair_results = cr
             if cr.is_healthy():
                 crr.post_repair_results = cr
@@ -46,7 +46,7 @@ class Repairer(log.PrefixingLogMixin):
     def __init__(self, storage_broker, secret_holder, verifycap, monitor):
         assert precondition(isinstance(verifycap, CHKFileVerifierURI))

-        logprefix = si_b2a(verifycap.storage_index)[:5]
+        logprefix = si_b2a(verifycap.get_storage_index())[:5]
         log.PrefixingLogMixin.__init__(self, "allmydata.immutable.repairer",
                                        prefix=logprefix)

@@ -389,15 +389,15 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
         print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
         print >>out, " size:", u.size
         print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
-        print >>out, " storage index:", si_b2a(u.storage_index)
-        _dump_secrets(u.storage_index, secret, nodeid, out)
+        print >>out, " storage index:", si_b2a(u.get_storage_index())
+        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
     elif isinstance(u, uri.CHKFileVerifierURI):
         if show_header:
             print >>out, "CHK Verifier URI:"
         print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
         print >>out, " size:", u.size
         print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
-        print >>out, " storage index:", si_b2a(u.storage_index)
+        print >>out, " storage index:", si_b2a(u.get_storage_index())

     elif isinstance(u, uri.LiteralFileURI):
         if show_header:
@@ -409,25 +409,25 @@ def dump_uri_instance(u, nodeid, secret, out, show_header=True):
             print >>out, "SSK Writeable URI:"
         print >>out, " writekey:", base32.b2a(u.writekey)
         print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.storage_index)
+        print >>out, " storage index:", si_b2a(u.get_storage_index())
         print >>out, " fingerprint:", base32.b2a(u.fingerprint)
         print >>out
         if nodeid:
             we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
             print >>out, " write_enabler:", base32.b2a(we)
             print >>out
-        _dump_secrets(u.storage_index, secret, nodeid, out)
+        _dump_secrets(u.get_storage_index(), secret, nodeid, out)

     elif isinstance(u, uri.ReadonlySSKFileURI):
         if show_header:
             print >>out, "SSK Read-only URI:"
         print >>out, " readkey:", base32.b2a(u.readkey)
-        print >>out, " storage index:", si_b2a(u.storage_index)
+        print >>out, " storage index:", si_b2a(u.get_storage_index())
         print >>out, " fingerprint:", base32.b2a(u.fingerprint)
     elif isinstance(u, uri.SSKVerifierURI):
         if show_header:
             print >>out, "SSK Verifier URI:"
-        print >>out, " storage index:", si_b2a(u.storage_index)
+        print >>out, " storage index:", si_b2a(u.get_storage_index())
         print >>out, " fingerprint:", base32.b2a(u.fingerprint)

     elif isinstance(u, uri.DirectoryURI):
@@ -45,9 +45,9 @@ class FakeCHKFileNode:
     bad_shares = {}

     def __init__(self, filecap):
-        precondition(isinstance(filecap, uri.CHKFileURI), filecap)
+        precondition(isinstance(filecap, (uri.CHKFileURI, uri.LiteralFileURI)), filecap)
         self.my_uri = filecap
-        self.storage_index = self.my_uri.storage_index
+        self.storage_index = self.my_uri.get_storage_index()

     def get_uri(self):
         return self.my_uri.to_string()
@@ -190,7 +190,7 @@ class FakeMutableFileNode:
         assert isinstance(filecap, (uri.WriteableSSKFileURI,
                                     uri.ReadonlySSKFileURI))
         self.my_uri = filecap
-        self.storage_index = self.my_uri.storage_index
+        self.storage_index = self.my_uri.get_storage_index()
         return self
     def get_cap(self):
         return self.my_uri
@@ -1002,7 +1002,7 @@ class ShareManglingMixin(SystemTestMixin):
         else:
             k = random.choice(ks)
             del shares[k]
-        self.replace_shares(shares, storage_index=self.uri.storage_index)
+        self.replace_shares(shares, storage_index=self.uri.get_storage_index())

         return unused

@@ -1012,7 +1012,7 @@ class ShareManglingMixin(SystemTestMixin):
         assert ks, (shares.keys(), sharenum)
         k = ks[0]
         shares[k] = corruptor_func(shares[k])
-        self.replace_shares(shares, storage_index=self.uri.storage_index)
+        self.replace_shares(shares, storage_index=self.uri.get_storage_index())
         return corruptor_func

     def _corrupt_all_shares(self, unused, corruptor_func):
@@ -68,7 +68,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
         serverid_1 = "\x00"*20
         serverid_f = "\xff"*20
         u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
-        cr = check_results.CheckResults(u, u.storage_index)
+        cr = check_results.CheckResults(u, u.get_storage_index())
         cr.set_healthy(True)
         cr.set_needs_rebalancing(False)
         cr.set_summary("groovy")
@@ -108,7 +108,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
         cr.set_healthy(False)
         cr.set_recoverable(False)
         cr.set_summary("rather dead")
-        data["list-corrupt-shares"] = [(serverid_1, u.storage_index, 2)]
+        data["list-corrupt-shares"] = [(serverid_1, u.get_storage_index(), 2)]
         cr.set_data(data)
         html = self.render2(w)
         s = self.remove_tags(html)
@@ -167,7 +167,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
         serverid_f = "\xff"*20
         u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)

-        pre_cr = check_results.CheckResults(u, u.storage_index)
+        pre_cr = check_results.CheckResults(u, u.get_storage_index())
         pre_cr.set_healthy(False)
         pre_cr.set_recoverable(True)
         pre_cr.set_needs_rebalancing(False)
@@ -185,7 +185,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                 }
         pre_cr.set_data(data)

-        post_cr = check_results.CheckResults(u, u.storage_index)
+        post_cr = check_results.CheckResults(u, u.get_storage_index())
         post_cr.set_healthy(True)
         post_cr.set_recoverable(True)
         post_cr.set_needs_rebalancing(False)
@@ -203,7 +203,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
                 }
         post_cr.set_data(data)

-        crr = check_results.CheckAndRepairResults(u.storage_index)
+        crr = check_results.CheckAndRepairResults(u.get_storage_index())
         crr.pre_repair_results = pre_cr
         crr.post_repair_results = post_cr
         crr.repair_attempted = False
@@ -549,7 +549,7 @@ class Dirnode(GridTestMixin, unittest.TestCase,
             self.expected_verifycaps.add(u_v)
             si = n.get_storage_index()
             self.expected_storage_indexes.add(base32.b2a(si))
-            expected_si = n._uri._filenode_uri.storage_index
+            expected_si = n._uri.get_storage_index()
             self.failUnlessEqual(si, expected_si)

             d = n.list()
@@ -46,7 +46,7 @@ class Node(unittest.TestCase):
         self.failUnlessEqual(fn1.get_write_uri(), None)
         self.failUnlessEqual(fn1.get_readonly_uri(), u.to_string())
         self.failUnlessEqual(fn1.get_size(), 1000)
-        self.failUnlessEqual(fn1.get_storage_index(), u.storage_index)
+        self.failUnlessEqual(fn1.get_storage_index(), u.get_storage_index())
         fn1.raise_error()
         fn2.raise_error()
         d = {}
@@ -21,7 +21,7 @@ class Test(common.ShareManglingMixin, unittest.TestCase):
         # The following process of deleting 8 of the shares and asserting that you can't
         # download it is more to test this test code than to test the Tahoe code...
         def _then_delete_8(unused=None):
-            self.replace_shares(stash[0], storage_index=self.uri.storage_index)
+            self.replace_shares(stash[0], storage_index=self.uri.get_storage_index())
             for i in range(8):
                 self._delete_a_share()
         d.addCallback(_then_delete_8)
@@ -1923,7 +1923,7 @@ class Problems(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
             privkey_s = privkey.serialize()
             u = uri.WriteableSSKFileURI(ssk_writekey_hash(privkey_s),
                                         ssk_pubkey_fingerprint_hash(pubkey_s))
-            self._storage_index = u.storage_index
+            self._storage_index = u.get_storage_index()
         d.addCallback(_got_key)
         def _break_peer0(res):
             si = self._storage_index
@@ -657,7 +657,7 @@ class Repairer(GridTestMixin, unittest.TestCase, RepairTestMixin,
             return res
         d.addCallback(_stash_it)
         def _put_it_all_back(ignored):
-            self.replace_shares(stash[0], storage_index=self.uri.storage_index)
+            self.replace_shares(stash[0], storage_index=self.uri.get_storage_index())
             return ignored

         def _repair_from_corruption(shnum, corruptor_func):
@@ -257,8 +257,8 @@ class GoodServer(unittest.TestCase, ShouldFailMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.storage_index, str))
-        self.failUnlessEqual(len(u.storage_index), 16)
+        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnlessEqual(len(u.get_storage_index()), 16)
         self.failUnless(isinstance(u.key, str))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
@@ -382,8 +382,8 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.storage_index, str))
-        self.failUnlessEqual(len(u.storage_index), 16)
+        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnlessEqual(len(u.get_storage_index()), 16)
         self.failUnless(isinstance(u.key, str))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
@@ -470,8 +470,8 @@ class PeerSelection(unittest.TestCase):
     def _check_large(self, newuri, size):
         u = uri.from_string(newuri)
         self.failUnless(isinstance(u, uri.CHKFileURI))
-        self.failUnless(isinstance(u.storage_index, str))
-        self.failUnlessEqual(len(u.storage_index), 16)
+        self.failUnless(isinstance(u.get_storage_index(), str))
+        self.failUnlessEqual(len(u.get_storage_index()), 16)
         self.failUnless(isinstance(u.key, str))
         self.failUnlessEqual(len(u.key), 16)
         self.failUnlessEqual(u.size, size)
@@ -102,7 +102,7 @@ class CHKFile(unittest.TestCase):
                            needed_shares=needed_shares,
                            total_shares=total_shares,
                            size=size)
-        self.failUnlessEqual(u.storage_index, storage_index)
+        self.failUnlessEqual(u.get_storage_index(), storage_index)
         self.failUnlessEqual(u.key, key)
         self.failUnlessEqual(u.uri_extension_hash, uri_extension_hash)
         self.failUnlessEqual(u.needed_shares, needed_shares)
@@ -122,7 +122,7 @@ class CHKFile(unittest.TestCase):
         self.failUnlessEqual(uri.CHKFileURI.init_from_human_encoding(he), u)

         u2 = uri.from_string(u.to_string())
-        self.failUnlessEqual(u2.storage_index, storage_index)
+        self.failUnlessEqual(u2.get_storage_index(), storage_index)
         self.failUnlessEqual(u2.key, key)
         self.failUnlessEqual(u2.uri_extension_hash, uri_extension_hash)
         self.failUnlessEqual(u2.needed_shares, needed_shares)
@@ -319,10 +319,10 @@ class Mutable(unittest.TestCase):

         u5 = u4.get_verify_cap()
         self.failUnless(IVerifierURI.providedBy(u5))
-        self.failUnlessEqual(u5.storage_index, u.storage_index)
+        self.failUnlessEqual(u5.get_storage_index(), u.get_storage_index())
         u7 = u.get_verify_cap()
         self.failUnless(IVerifierURI.providedBy(u7))
-        self.failUnlessEqual(u7.storage_index, u.storage_index)
+        self.failUnlessEqual(u7.get_storage_index(), u.get_storage_index())

         he = u5.to_human_encoding()
         u5_h = uri.SSKVerifierURI.init_from_human_encoding(he)
@@ -1529,7 +1529,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
             self.failUnless(filecap.startswith("URI:SSK:"), filecap)
             self.filecap = filecap
             u = uri.WriteableSSKFileURI.init_from_string(filecap)
-            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
+            self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
             n = self.s.create_node_from_uri(filecap)
             return n.download_best_version()
         d.addCallback(_check)
@@ -2781,7 +2781,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, unittest.TestCase):
             self.failUnless(filecap.startswith("URI:SSK:"), filecap)
             self.filecap = filecap
             u = uri.WriteableSSKFileURI.init_from_string(filecap)
-            self.failUnless(u.storage_index in FakeMutableFileNode.all_contents)
+            self.failUnless(u.get_storage_index() in FakeMutableFileNode.all_contents)
             n = self.s.create_node_from_uri(filecap)
             return n.download_best_version()
         d.addCallback(_check1)
@@ -434,7 +434,7 @@ class _DirectoryBaseURI(_BaseURI):
         return self._filenode_uri.to_string().split(':')[2][:5]

     def abbrev_si(self):
-        return base32.b2a(self._filenode_uri.storage_index)[:5]
+        return base32.b2a(self._filenode_uri.get_storage_index())[:5]

     def is_mutable(self):
         return True