import os.path, re, urllib, time
import simplejson
from StringIO import StringIO
from twisted.application import service
from twisted.trial import unittest
from twisted.internet import defer, reactor
from twisted.internet.task import Clock
from twisted.web import client, error, http, html
from twisted.python import failure, log
from foolscap.api import fireEventually, flushEventualQueue
from nevow.util import escapeToXML
from nevow import rend
from allmydata import interfaces, uri, webish, dirnode
from allmydata.storage.shares import get_share_file
from allmydata.storage_client import StorageFarmBroker, StubServer
from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.unknown import UnknownNode
from allmydata.web import status, common
from allmydata.scripts.debug import CorruptShareOptions, corrupt_share
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
from allmydata.util.netstring import split_netstring
from allmydata.util.encodingutil import to_str
from allmydata.test.common import FakeCHKFileNode, FakeMutableFileNode, \
     create_chk_filenode, WebErrorMixin, ShouldFailMixin, \
     make_mutable_file_uri, create_mutable_filenode
from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable import servermap, publish, retrieve
import allmydata.test.common_util as testutil
from allmydata.test.no_network import GridTestMixin
from allmydata.test.common_web import HTTPClientGETFactory, \
     HTTPClientHEADFactory
from allmydata.client import Client, SecretHolder
from allmydata.introducer import IntroducerNode

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

timeout = 480 # Most of these take longer than 240 seconds on Francois's arm box.
unknown_rwcap = u"lafs://from_the_future_rw_\u263A".encode('utf-8') unknown_rocap = u"ro.lafs://readonly_from_the_future_ro_\u263A".encode('utf-8') unknown_immcap = u"imm.lafs://immutable_from_the_future_imm_\u263A".encode('utf-8') FAVICON_MARKUP = '' class FakeStatsProvider: def get_stats(self): stats = {'stats': {}, 'counters': {}} return stats class FakeNodeMaker(NodeMaker): encoding_params = { 'k': 3, 'n': 10, 'happy': 7, 'max_segment_size':128*1024 # 1024=KiB } def _create_lit(self, cap): return FakeCHKFileNode(cap, self.all_contents) def _create_immutable(self, cap): return FakeCHKFileNode(cap, self.all_contents) def _create_mutable(self, cap): return FakeMutableFileNode(None, None, self.encoding_params, None, self.all_contents).init_from_cap(cap) def create_mutable_file(self, contents="", keysize=None, version=SDMF_VERSION): n = FakeMutableFileNode(None, None, self.encoding_params, None, self.all_contents) return n.create(contents, version=version) class FakeUploader(service.Service): name = "uploader" def upload(self, uploadable): d = uploadable.get_size() d.addCallback(lambda size: uploadable.read(size)) def _got_data(datav): data = "".join(datav) n = create_chk_filenode(data, self.all_contents) ur = upload.UploadResults(file_size=len(data), ciphertext_fetched=0, preexisting_shares=0, pushed_shares=10, sharemap={}, servermap={}, timings={}, uri_extension_data={}, uri_extension_hash="fake", verifycapstr="fakevcap") ur.set_uri(n.get_uri()) return ur d.addCallback(_got_data) return d def get_helper_info(self): return (None, False) def build_one_ds(): ds = DownloadStatus("storage_index", 1234) now = time.time() serverA = StubServer(hashutil.tagged_hash("foo", "serverid_a")[:20]) serverB = StubServer(hashutil.tagged_hash("foo", "serverid_b")[:20]) storage_index = hashutil.storage_index_hash("SI") e0 = ds.add_segment_request(0, now) e0.activate(now+0.5) e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime e1 = ds.add_segment_request(1, now+2) e1.error(now+3) # two outstanding requests e2 = ds.add_segment_request(2, now+4) e3 = ds.add_segment_request(3, now+5) del e2,e3 # hush pyflakes # simulate a segment which gets delivered faster than a system clock tick (ticket #1166) e = ds.add_segment_request(4, now) e.activate(now) e.deliver(now, 0, 140, 0.5) e = ds.add_dyhb_request(serverA, now) e.finished([1,2], now+1) e = ds.add_dyhb_request(serverB, now+2) # left unfinished e = ds.add_read_event(0, 120, now) e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime e.finished(now+1) e = ds.add_read_event(120, 30, now+2) # left unfinished e = ds.add_block_request(serverA, 1, 100, 20, now) e.finished(20, now+1) e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished # make sure that add_read_event() can come first too ds1 = DownloadStatus(storage_index, 1234) e = ds1.add_read_event(0, 120, now) e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime e.finished(now+1) return ds class FakeHistory: _all_upload_status = [upload.UploadStatus()] _all_download_status = [build_one_ds()] _all_mapupdate_statuses = [servermap.UpdateStatus()] _all_publish_statuses = [publish.PublishStatus()] _all_retrieve_statuses = [retrieve.RetrieveStatus()] def list_all_upload_statuses(self): return self._all_upload_status def list_all_download_statuses(self): return self._all_download_status def list_all_mapupdate_statuses(self): return self._all_mapupdate_statuses def list_all_publish_statuses(self): return self._all_publish_statuses def list_all_retrieve_statuses(self): return 
self._all_retrieve_statuses def list_all_helper_statuses(self): return [] class FakeClient(Client): def __init__(self): # don't upcall to Client.__init__, since we only want to initialize a # minimal subset service.MultiService.__init__(self) self.all_contents = {} self.nodeid = "fake_nodeid" self.nickname = "fake_nickname" self.introducer_furl = "None" self.stats_provider = FakeStatsProvider() self._secret_holder = SecretHolder("lease secret", "convergence secret") self.helper = None self.convergence = "some random string" self.storage_broker = StorageFarmBroker(None, permute_peers=True) self.introducer_client = None self.history = FakeHistory() self.uploader = FakeUploader() self.uploader.all_contents = self.all_contents self.uploader.setServiceParent(self) self.blacklist = None self.nodemaker = FakeNodeMaker(None, self._secret_holder, None, self.uploader, None, None, None, None) self.nodemaker.all_contents = self.all_contents self.mutable_file_default = SDMF_VERSION def startService(self): return service.MultiService.startService(self) def stopService(self): return service.MultiService.stopService(self) MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT class WebMixin(object): def setUp(self): self.s = FakeClient() self.s.startService() self.staticdir = self.mktemp() self.clock = Clock() self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir, clock=self.clock) self.ws.setServiceParent(self.s) self.webish_port = self.ws.getPortnum() self.webish_url = self.ws.getURL() assert self.webish_url.endswith("/") self.webish_url = self.webish_url[:-1] # these tests add their own / l = [ self.s.create_dirnode() for x in range(6) ] d = defer.DeferredList(l) def _then(res): self.public_root = res[0][1] assert interfaces.IDirectoryNode.providedBy(self.public_root), res self.public_url = "/uri/" + self.public_root.get_uri() self.private_root = res[1][1] foo = res[2][1] self._foo_node = foo self._foo_uri = foo.get_uri() self._foo_readonly_uri = foo.get_readonly_uri() self._foo_verifycap = foo.get_verify_cap().to_string() # NOTE: we ignore the deferred on all set_uri() calls, because we # know the fake nodes do these synchronously self.public_root.set_uri(u"foo", foo.get_uri(), foo.get_readonly_uri()) self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0) foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri) self._bar_txt_verifycap = n.get_verify_cap().to_string() # sdmf # XXX: Do we ever use this? self.BAZ_CONTENTS, n, self._baz_txt_uri, self._baz_txt_readonly_uri = self.makefile_mutable(0) foo.set_uri(u"baz.txt", self._baz_txt_uri, self._baz_txt_readonly_uri) # mdmf self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True) assert self._quux_txt_uri.startswith("URI:MDMF") foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri) foo.set_uri(u"empty", res[3][1].get_uri(), res[3][1].get_readonly_uri()) sub_uri = res[4][1].get_uri() self._sub_uri = sub_uri foo.set_uri(u"sub", sub_uri, sub_uri) sub = self.s.create_node_from_uri(sub_uri) self._sub_node = sub _ign, n, blocking_uri = self.makefile(1) foo.set_uri(u"blockingfile", blocking_uri, blocking_uri) # filenode to test for html encoding issues self._htmlname_unicode = u"<&weirdly'named\"file>>>_