from __future__ import print_function

import os.path, re, urllib, time, cgi
import json

import treq
from bs4 import BeautifulSoup

from twisted.application import service
from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks, returnValue, maybeDeferred
from twisted.internet.task import Clock
from twisted.web import client, error, http
from twisted.python import failure, log

from nevow.context import WebContext
from nevow.inevow import (
    ICanHandleException,
    IRequest,
    IData,
)
from nevow.util import escapeToXML
from nevow.loaders import stan
from nevow.testutil import FakeRequest
from nevow.appserver import (
    processingFailed,
    DefaultExceptionHandler,
)

from allmydata import interfaces, uri, webish
from allmydata.storage_client import StorageFarmBroker, StubServer
from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
from allmydata.util.encodingutil import to_str
from ...util.connection_status import ConnectionStatus
from ..common import (
    EMPTY_CLIENT_CONFIG,
    FakeCHKFileNode,
    FakeMutableFileNode,
    create_chk_filenode,
    WebErrorMixin,
    make_mutable_file_uri,
    create_mutable_filenode,
    TrialTestCase,
)
from .common import (
    assert_soup_has_favicon,
    assert_soup_has_text,
    assert_soup_has_tag_with_attributes,
    assert_soup_has_tag_with_content,
    assert_soup_has_tag_with_attributes_and_content,
)

from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable import servermap, publish, retrieve
from .. import common_util as testutil
from ..common_web import (
    do_http,
    Error,
)
from allmydata.client import _Client, SecretHolder
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

class FakeStatsProvider(object):
    def get_stats(self):
        stats = {'stats': {}, 'counters': {}}
        return stats

class FakeNodeMaker(NodeMaker):
    encoding_params = {
        'k': 3,
        'n': 10,
        'happy': 7,
        'max_segment_size':128*1024 # 1024=KiB
    }
    def _create_lit(self, cap):
        return FakeCHKFileNode(cap, self.all_contents)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap, self.all_contents)
    def _create_mutable(self, cap):
        return FakeMutableFileNode(None, None,
                                   self.encoding_params, None,
                                   self.all_contents).init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None,
                            version=SDMF_VERSION):
        n = FakeMutableFileNode(None, None, self.encoding_params, None,
                                self.all_contents)
        return n.create(contents, version=version)

class FakeUploader(service.Service):
    name = "uploader"
    helper_furl = None
    helper_connected = False

    def upload(self, uploadable, **kw):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        def _got_data(datav):
            data = "".join(datav)
            n = create_chk_filenode(data, self.all_contents)
            ur = upload.UploadResults(file_size=len(data),
                                      ciphertext_fetched=0,
                                      preexisting_shares=0,
                                      pushed_shares=10,
                                      sharemap={},
                                      servermap={},
                                      timings={},
                                      uri_extension_data={},
                                      uri_extension_hash="fake",
                                      verifycapstr="fakevcap")
            ur.set_uri(n.get_uri())
            return ur
        d.addCallback(_got_data)
        return d

    def get_helper_info(self):
        return (self.helper_furl, self.helper_connected)


def build_one_ds():
    ds = DownloadStatus("storage_index", 1234)
    now = time.time()

    serverA = StubServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
    serverB = StubServer(hashutil.tagged_hash("foo", "serverid_b")[:20])
    storage_index = hashutil.storage_index_hash("SI")
    e0 = ds.add_segment_request(0, now)
    e0.activate(now+0.5)
    e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime
    e1 = ds.add_segment_request(1, now+2)
    e1.error(now+3)
    # two outstanding requests
    e2 = ds.add_segment_request(2, now+4)
    e3 = ds.add_segment_request(3, now+5)
    del e2,e3 # hush pyflakes

    # simulate a segment which gets delivered faster than a system clock tick
    # (ticket #1166)
    e = ds.add_segment_request(4, now)
    e.activate(now)
    e.deliver(now, 0, 140, 0.5)

    e = ds.add_dyhb_request(serverA, now)
    e.finished([1,2], now+1)
    e = ds.add_dyhb_request(serverB, now+2) # left unfinished

    e = ds.add_read_event(0, 120, now)
    e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
    e.finished(now+1)
    e = ds.add_read_event(120, 30, now+2) # left unfinished

    e = ds.add_block_request(serverA, 1, 100, 20, now)
    e.finished(20, now+1)
    e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished

    # make sure that add_read_event() can come first too
    ds1 = DownloadStatus(storage_index, 1234)
    e = ds1.add_read_event(0, 120, now)
    e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
    e.finished(now+1)

    return ds

class FakeHistory(object):
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [build_one_ds()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        return []

class FakeDisplayableServer(StubServer):
    def __init__(self, serverid, nickname, connected,
                 last_connect_time, last_loss_time, last_rx_time):
        StubServer.__init__(self, serverid)
        self.announcement = {"my-version": "tahoe-lafs-fake",
                             "service-name": "storage",
                             "nickname": nickname}
        self.connected = connected
        self.last_loss_time = last_loss_time
        self.last_rx_time = last_rx_time
        self.last_connect_time = last_connect_time
    def on_status_changed(self, cb): # TODO: try to remove me
        cb(self)
    def is_connected(self): # TODO: remove me
        return self.connected
    def get_version(self):
        return {
            "application-version": "1.0"
        }
    def get_permutation_seed(self):
        return ""
    def get_announcement(self):
        return self.announcement
    def get_nickname(self):
        return self.announcement["nickname"]
    def get_available_space(self):
        return 123456
    def get_connection_status(self):
        return ConnectionStatus(self.connected, "summary", {},
                                self.last_connect_time, self.last_rx_time)

class FakeBucketCounter(object):
    def get_state(self):
        return {"last-complete-bucket-count": 0}
    def get_progress(self):
        return {"estimated-time-per-cycle": 0,
                "cycle-in-progress": False,
                "remaining-wait-time": 0}

class FakeLeaseChecker(object):
    def __init__(self):
        self.expiration_enabled = False
        self.mode = "age"
        self.override_lease_duration = None
        self.sharetypes_to_expire = {}
    def get_state(self):
        return {"history": None}
    def get_progress(self):
        return {"estimated-time-per-cycle": 0,
                "cycle-in-progress": False,
                "remaining-wait-time": 0}

class FakeStorageServer(service.MultiService):
    name = 'storage'
    def __init__(self, nodeid, nickname):
        service.MultiService.__init__(self)
        self.my_nodeid = nodeid
        self.nickname = nickname
        self.bucket_counter = FakeBucketCounter()
        self.lease_checker = FakeLeaseChecker()
    def get_stats(self):
        return {"storage_server.accepting_immutable_shares": False}
    def on_status_changed(self, cb):
        cb(self)

class FakeClient(_Client):
    def __init__(self):
        # don't upcall to Client.__init__, since we only want to initialize a
        # minimal subset
        service.MultiService.__init__(self)
        self.all_contents = {}
        self.nodeid = "fake_nodeid"
        self.nickname = u"fake_nickname \u263A"
        self.introducer_furls = []
        self.introducer_clients = []
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        self.helper = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(
            permute_peers=True,
            tub_maker=None,
            node_config=EMPTY_CLIENT_CONFIG,
        )
        # fake knowledge of another server
        self.storage_broker.test_add_server("other_nodeid",
            FakeDisplayableServer(
                serverid="other_nodeid", nickname=u"other_nickname \u263B",
                connected = True,
                last_connect_time = 10, last_loss_time = 20, last_rx_time = 30))
        self.storage_broker.test_add_server("disconnected_nodeid",
            FakeDisplayableServer(
                serverid="disconnected_nodeid", nickname=u"disconnected_nickname \u263B",
                connected = False,
                last_connect_time = None, last_loss_time = 25, last_rx_time = 35))
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.all_contents = self.all_contents
        self.uploader.setServiceParent(self)
        self.blacklist = None
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                       self.uploader, None,
                                       None, None, None)
        self.nodemaker.all_contents = self.all_contents
        self.mutable_file_default = SDMF_VERSION
        self.addService(FakeStorageServer(self.nodeid, self.nickname))

    def get_long_nodeid(self):
        return "v0-nodeid"
    def get_long_tubid(self):
        return "tubid"

    def get_auth_token(self):
        return 'a fake debug auth token'

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)

    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT


class WebMixin(testutil.TimezoneMixin):
    def setUp(self):
        self.setTimezone('UTC-13:00')
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.clock = Clock()
        self.fakeTime = 86460 # 1d 0h 1m 0s
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
                                      clock=self.clock,
                                      now_fn=lambda:self.fakeTime)
        self.ws.setServiceParent(self.s)
        self.webish_port = self.ws.getPortnum()
        self.webish_url = self.ws.getURL()
        assert self.webish_url.endswith("/")
        self.webish_url = self.webish_url[:-1] # these tests add their own /

        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        def _then(res):
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]

            foo = res[2][1]
            self._foo_node = foo
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            # sdmf
            # XXX: Do we ever use this?
            self.BAZ_CONTENTS, n, self._baz_txt_uri, self._baz_txt_readonly_uri = self.makefile_mutable(0)

            foo.set_uri(u"baz.txt", self._baz_txt_uri, self._baz_txt_readonly_uri)

            # mdmf
            self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True)
            assert self._quux_txt_uri.startswith("URI:MDMF")
            foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri)

            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            self._sub_node = sub

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

            # filenode to test for html encoding issues
            self._htmlname_unicode = u"<&weirdly'named\"file>>>_