import os
from twisted.trial import unittest
from twisted.application import service
from foolscap.api import Tub, fireEventually, flushEventualQueue
from foolscap.logging import log
from allmydata.storage.server import si_b2a
from allmydata.immutable import offloaded, upload
from allmydata import uri
from allmydata.util import hashutil, fileutil, mathutil
from pycryptopp.cipher.aes import AES

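# These tests exercise the "assisted upload" helper (offloaded.Helper): a
# client's Uploader contacts a helper service over foolscap and hands it
# ciphertext, and the helper performs the erasure coding and pushes shares
# on the client's behalf. The fake CHKUploadHelper classes below stub out
# the expensive encode-and-push step so that only the client/helper
# protocol itself is exercised.
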
MiB = 1024*1024

DATA = "I need help\n" * 1000

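# This fake never encrypts or encodes anything: it asks the encrypted
# uploader only for its size and encoding parameters, then fabricates the
# upload results (including a verify-cap built from a dummy "x"*32 hash)
# from those values.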
class CHKUploadHelper_fake(offloaded.CHKUploadHelper):
    def start_encrypted(self, eu):
        d = eu.get_size()
        def _got_size(size):
            d2 = eu.get_all_encoding_parameters()
            def _got_parms(parms):
                needed_shares, happy, total_shares, segsize = parms
                ueb_data = {"needed_shares": needed_shares,
                            "total_shares": total_shares,
                            "segment_size": segsize,
                            "size": size,
                            }
                self._results.uri_extension_data = ueb_data
                self._results.verifycapstr = uri.CHKFileVerifierURI(self._storage_index, "x"*32,
                                                                    needed_shares, total_shares,
                                                                    size).to_string()
                return self._results
            d2.addCallback(_got_parms)
            return d2
        d.addCallback(_got_size)
        return d

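# This fake pretends that every file is already present in the grid:
# start() returns results immediately, with None in place of an upload
# object, so the client should skip the ciphertext transfer entirely.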
class CHKUploadHelper_already_uploaded(offloaded.CHKUploadHelper):
    def start(self):
        res = upload.UploadResults()
        res.uri_extension_hash = hashutil.uri_extension_hash("")

        # we're pretending that the file they're trying to upload was already
        # present in the grid. We return some information about the file, so
        # the client can decide if they like the way it looks. The parameters
        # used here are chosen to match the defaults.
        PARAMS = FakeClient.DEFAULT_ENCODING_PARAMETERS
        ueb_data = {"needed_shares": PARAMS["k"],
                    "total_shares": PARAMS["n"],
                    "segment_size": min(PARAMS["max_segment_size"], len(DATA)),
                    "size": len(DATA),
                    }
        res.uri_extension_data = ueb_data
        return (res, None)

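# A minimal stand-in for the real client node: it provides only the
# attributes and methods that the Uploader and Helper consult in these
# tests (encoding parameters, logging, and an empty peer list).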
class FakeClient(service.MultiService):
    DEFAULT_ENCODING_PARAMETERS = {"k": 25,
                                   "happy": 75,
                                   "n": 100,
                                   "max_segment_size": 1*MiB,
                                   }
    stats_provider = None

    def log(self, *args, **kwargs):
        return log.msg(*args, **kwargs)
    def get_encoding_parameters(self):
        return self.DEFAULT_ENCODING_PARAMETERS
    def get_permuted_peers(self, service_name, storage_index):
        return []

def flush_but_dont_ignore(res):
    # flush the foolscap eventual-send queue, then pass the original result
    # (success or failure) through unchanged
    d = flushEventualQueue()
    def _done(ignored):
        return res
    d.addCallback(_done)
    return d

def wait_a_few_turns(ignored=None):
    # give the eventual-send queue several turns to run, e.g. to let the
    # Uploader finish locating the helper
    d = fireEventually()
    d.addCallback(fireEventually)
    d.addCallback(fireEventually)
    d.addCallback(fireEventually)
    d.addCallback(fireEventually)
    d.addCallback(fireEventually)
    return d

def upload_data(uploader, data, convergence):
    u = upload.Data(data, convergence=convergence)
    return uploader.upload(u)

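# Each test attaches a Helper to a FakeClient's Tub, points an
# upload.Uploader at the helper's FURL, and drives an upload through the
# assisted path.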
class AssistedUpload(unittest.TestCase):
    def setUp(self):
        self.s = FakeClient()
        self.s.startService()

        self.tub = t = Tub()
        t.setServiceParent(self.s)
        self.s.tub = t
        # we never actually use this for network traffic, so it can use a
        # bogus host/port
        t.setLocation("bogus:1234")

    def setUpHelper(self, basedir):
        fileutil.make_dirs(basedir)
        self.helper = h = offloaded.Helper(basedir)
        h.chk_upload_helper_class = CHKUploadHelper_fake
        h.setServiceParent(self.s)
        # registering the Helper in the Tub yields the FURL that the
        # Uploader will use to contact it
        self.helper_furl = self.tub.registerReference(h)

    def tearDown(self):
        d = self.s.stopService()
        d.addCallback(fireEventually)
        d.addBoth(flush_but_dont_ignore)
        return d

    def test_one(self):
        self.basedir = "helper/AssistedUpload/test_one"
        self.setUpHelper(self.basedir)
        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        def _ready(res):
            assert u._helper
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
            the_uri = results.uri
            assert "CHK" in the_uri
        d.addCallback(_uploaded)

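        # once the upload is complete, the helper's working directories
        # should be empty again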
        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d

    def test_previous_upload_failed(self):
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        # storage index.

        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)

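        # rebuild the AES key and storage index that a real upload of DATA
        # (with this convergence secret) would derive, so the leftover
        # ciphertext file lands exactly where the helper will look for it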
        key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
        assert len(key) == 16
        encryptor = AES(key)
        SI = hashutil.storage_index_hash(key)
        SI_s = si_b2a(SI)
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        f = open(encfile, "wb")
        f.write(encryptor.process(DATA))
        f.close()

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        def _ready(res):
            assert u._helper
            return upload_data(u, DATA, convergence="test convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
            the_uri = results.uri
            assert "CHK" in the_uri
        d.addCallback(_uploaded)

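        # the ciphertext we planted must have been consumed: both working
        # directories should end up empty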
        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d

    def test_already_uploaded(self):
        self.basedir = "helper/AssistedUpload/test_already_uploaded"
        self.setUpHelper(self.basedir)
        # substitute the fake that reports every file as already present
        self.helper.chk_upload_helper_class = CHKUploadHelper_already_uploaded

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        def _ready(res):
            assert u._helper
            return upload_data(u, DATA, convergence="some convergence string")
        d.addCallback(_ready)
        def _uploaded(results):
            the_uri = results.uri
            assert "CHK" in the_uri
        d.addCallback(_uploaded)

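        # since the helper claimed the file was already uploaded, no
        # ciphertext should ever have reached its working directories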
        def _check_empty(res):
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])
        d.addCallback(_check_empty)

        return d