from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet import defer
from cStringIO import StringIO

from allmydata import upload, encode
from allmydata.uri import unpack_uri, unpack_lit
from allmydata.util.assertutil import precondition
from foolscap import eventual

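# The classes below are in-process fakes for the client node, its peers, and
# their storage servers, so the Uploader can be exercised without any network
# or disk I/O.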
class FakePeer:
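    # Stands in for a remote peer reference: callRemote dispatches to local
    # methods via maybeDeferred, and get_service hands back a FakeStorageServer.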
    def __init__(self, mode="good"):
        self.ss = FakeStorageServer(mode)

    def callRemote(self, methname, *args, **kwargs):
        def _call():
            meth = getattr(self, methname)
            return meth(*args, **kwargs)
        return defer.maybeDeferred(_call)

    def get_service(self, sname):
        assert sname == "storageserver"
        return self.ss

class FakeStorageServer:
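    # Local stand-in for a storage server. callRemote goes through foolscap's
    # eventual-send, so results arrive on a later turn of the reactor, as they
    # would over the wire.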
    def __init__(self, mode):
        self.mode = mode

    def callRemote(self, methname, *args, **kwargs):
        def _call():
            meth = getattr(self, methname)
            return meth(*args, **kwargs)
        d = eventual.fireEventually()
        d.addCallback(lambda res: _call())
        return d

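    # Returns the tuple the Uploader expects from allocate_buckets: (set of
    # sharenums already held, dict mapping sharenum to a bucket writer).
    # "full" mode allocates nothing; "already got them" claims to hold every
    # requested share.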
    def allocate_buckets(self, crypttext_hash, sharenums,
                         share_size, canary):
        #print "FakeStorageServer.allocate_buckets(num=%d, size=%d)" % (len(sharenums), share_size)
        if self.mode == "full":
            return (set(), {},)
        elif self.mode == "already got them":
            return (set(sharenums), {},)
        else:
            return (set(),
                    dict([( shnum, FakeBucketWriter(share_size) )
                          for shnum in sharenums]),
                    )

class FakeBucketWriter:
    # a diagnostic version of storageserver.BucketWriter
    def __init__(self, size):
        self.data = StringIO()
        self.closed = False
        self._size = size

    def callRemote(self, methname, *args, **kwargs):
        def _call():
            meth = getattr(self, "remote_" + methname)
            return meth(*args, **kwargs)
        d = eventual.fireEventually()
        d.addCallback(lambda res: _call())
        return d

    def remote_write(self, offset, data):
        precondition(not self.closed)
        precondition(offset >= 0)
        precondition(offset+len(data) <= self._size,
                     "offset=%d + data=%d > size=%d" %
                     (offset, len(data), self._size))
        self.data.seek(offset)
        self.data.write(data)

    def remote_close(self):
        precondition(not self.closed)
        self.closed = True

class FakeClient:
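    # Stands in for the client node: supplies the Uploader with fifty permuted
    # fake peers and no explicit encoding parameters.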
    def __init__(self, mode="good"):
        self.mode = mode

    def get_permuted_peers(self, storage_index):
        return [ ("%20d" % fakeid, "%20d" % fakeid, FakePeer(self.mode),)
                 for fakeid in range(50) ]

    def get_encoding_parameters(self):
        return None

DATA = """
Once upon a time, there was a beautiful princess named Buttercup. She lived
in a magical land where every file was stored securely among millions of
machines, and nobody ever worried about their data being lost ever again.
The End.
"""
assert len(DATA) > upload.Uploader.URI_LIT_SIZE_THRESHOLD

SIZE_ZERO = 0
SIZE_SMALL = 16
SIZE_LARGE = len(DATA)

class GoodServer(unittest.TestCase):
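    # Every peer accepts shares, so uploads of all sizes should succeed.
    # SIZE_ZERO and SIZE_SMALL uploads are expected to come back as LIT URIs;
    # SIZE_LARGE exceeds URI_LIT_SIZE_THRESHOLD and should produce a full URI
    # with a key and storage index.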
    def setUp(self):
        self.node = FakeClient(mode="good")
        self.u = upload.Uploader()
        self.u.running = True
        self.u.parent = self.node

    def _check_small(self, uri, size):
        self.failUnless(isinstance(uri, str))
        self.failUnless(uri.startswith("URI:LIT:"))
        d = unpack_lit(uri)
        self.failUnlessEqual(len(d), size)

    def _check_large(self, uri, size):
        self.failUnless(isinstance(uri, str))
        self.failUnless(uri.startswith("URI:"))
        d = unpack_uri(uri)
        self.failUnless(isinstance(d['storage_index'], str))
        self.failUnlessEqual(len(d['storage_index']), 32)
        self.failUnless(isinstance(d['key'], str))
        self.failUnlessEqual(len(d['key']), 16)
        self.failUnlessEqual(d['size'], size)

    def get_data(self, size):
        return DATA[:size]

    def test_data_zero(self):
        data = self.get_data(SIZE_ZERO)
        d = self.u.upload_data(data)
        d.addCallback(self._check_small, SIZE_ZERO)
        return d

    def test_data_small(self):
        data = self.get_data(SIZE_SMALL)
        d = self.u.upload_data(data)
        d.addCallback(self._check_small, SIZE_SMALL)
        return d

    def test_data_large(self):
        data = self.get_data(SIZE_LARGE)
        d = self.u.upload_data(data)
        d.addCallback(self._check_large, SIZE_LARGE)
        return d

    def test_filehandle_zero(self):
        data = self.get_data(SIZE_ZERO)
        d = self.u.upload_filehandle(StringIO(data))
        d.addCallback(self._check_small, SIZE_ZERO)
        return d

    def test_filehandle_small(self):
        data = self.get_data(SIZE_SMALL)
        d = self.u.upload_filehandle(StringIO(data))
        d.addCallback(self._check_small, SIZE_SMALL)
        return d

    def test_filehandle_large(self):
        data = self.get_data(SIZE_LARGE)
        d = self.u.upload_filehandle(StringIO(data))
        d.addCallback(self._check_large, SIZE_LARGE)
        return d

    def test_filename_zero(self):
        fn = "Uploader-test_filename_zero.data"
        f = open(fn, "wb")
        data = self.get_data(SIZE_ZERO)
        f.write(data)
        f.close()
        d = self.u.upload_filename(fn)
        d.addCallback(self._check_small, SIZE_ZERO)
        return d

    def test_filename_small(self):
        fn = "Uploader-test_filename_small.data"
        f = open(fn, "wb")
        data = self.get_data(SIZE_SMALL)
        f.write(data)
        f.close()
        d = self.u.upload_filename(fn)
        d.addCallback(self._check_small, SIZE_SMALL)
        return d

    def test_filename_large(self):
        fn = "Uploader-test_filename_large.data"
        f = open(fn, "wb")
        data = self.get_data(SIZE_LARGE)
        f.write(data)
        f.close()
        d = self.u.upload_filename(fn)
        d.addCallback(self._check_large, SIZE_LARGE)
        return d

class FullServer(unittest.TestCase):
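    # Every peer reports itself as full, so no shares can be placed and the
    # upload should fail with encode.NotEnoughPeersError.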
    def setUp(self):
        self.node = FakeClient(mode="full")
        self.u = upload.Uploader()
        self.u.running = True
        self.u.parent = self.node

    def _should_fail(self, f):
        self.failUnless(isinstance(f, Failure) and f.check(encode.NotEnoughPeersError))

    def test_data_large(self):
        data = DATA
        d = self.u.upload_data(data)
        d.addBoth(self._should_fail)
        return d

# TODO:
#  upload with exactly 75 peers (shares_of_happiness)
#  have a download fail
#  cancel a download (need to implement more cancel stuff)
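
# A possible shape for the first TODO item above. This is only a sketch: the
# SeventyFivePeerClient and ExactlyEnoughPeers names are made up here, and it
# assumes that an upload against exactly 75 "good" fake peers (the
# shares_of_happiness count the TODO mentions) should still succeed; it reuses
# the existing fakes rather than real storage servers.

class SeventyFivePeerClient(FakeClient):
    # hypothetical variant of FakeClient that offers exactly 75 peers
    def get_permuted_peers(self, storage_index):
        return [ ("%20d" % fakeid, "%20d" % fakeid, FakePeer(self.mode),)
                 for fakeid in range(75) ]

class ExactlyEnoughPeers(unittest.TestCase):
    def setUp(self):
        self.node = SeventyFivePeerClient(mode="good")
        self.u = upload.Uploader()
        self.u.running = True
        self.u.parent = self.node

    def test_data_large(self):
        d = self.u.upload_data(DATA)
        # only check that a full (non-LIT) URI comes back; the detailed
        # field checks live in GoodServer._check_large
        d.addCallback(lambda newuri: self.failUnless(newuri.startswith("URI:")))
        return d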