Merge pull request #672 from meejah/ticket3252-port-web-directory.remaining.1

Ticket3252: port web/directory.* from nevow to twisted.web
This commit is contained in:
meejah 2020-04-23 13:04:41 -06:00 committed by GitHub
commit 4dc3702ead
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 849 additions and 610 deletions
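For orientation, the changes below repeatedly apply the same nevow-to-twisted.web recipe: renderHTTP(ctx) methods become per-HTTP-method render_GET/render_PUT/render_POST(req) methods, childFactory(ctx, name) becomes getChild(name, req), and rend.Page subclasses become twisted.web.resource.Resource subclasses. A rough sketch of that target shape (names invented for illustration, not taken from this diff):

```python
from twisted.web.resource import Resource


class ExampleHandler(Resource):
    """
    What a ported handler roughly looks like: nevow's renderHTTP(ctx)
    becomes render_GET(req) (plus render_PUT/render_POST/...), and
    childFactory(ctx, name) becomes getChild(name, req).
    """
    def getChild(self, name, req):
        # path segment first, then the request (the reverse of nevow's order)
        return ExampleChild(name)

    def render_GET(self, req):
        req.setHeader(b"content-type", b"text/plain")
        return b"parent resource\n"


class ExampleChild(Resource):
    isLeaf = True

    def __init__(self, name):
        Resource.__init__(self)  # Resource is an old-style class on Python 2
        self.name = name

    def render_GET(self, req):
        req.setHeader(b"content-type", b"text/plain")
        return b"child: " + self.name
```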

newsfragments/3263.other Normal file
View File

View File

@ -51,8 +51,8 @@ def mkdir(contents, options):
return dircap
def put_child(dirurl, childname, childcap):
assert dirurl[-1] == "/"
url = dirurl + urllib.quote(unicode_to_url(childname)) + "?t=uri"
assert dirurl[-1] != "/"
url = dirurl + "/" + urllib.quote(unicode_to_url(childname)) + "?t=uri"
resp = do_http("PUT", url, childcap)
if resp.status not in (200, 201):
raise HTTPError("Error during put_child", resp)
@ -105,6 +105,9 @@ class BackerUpper(object):
archives_url = to_url + "Archives/"
archives_url = archives_url.rstrip("/")
to_url = to_url.rstrip("/")
# first step: make sure the target directory exists, as well as the
# Archives/ subdirectory.
resp = do_http("GET", archives_url + "?t=json")

View File

@ -911,8 +911,14 @@ class Mkdir(GridTestMixin, CLITestMixin, unittest.TestCase):
self.failUnlessReallyEqual(err, "")
self.failUnlessIn(st, out)
return out
def _mkdir(ign, mutable_type, uri_prefix, dirname):
d2 = self.do_cli("mkdir", "--format="+mutable_type, dirname)
"""
:param str mutable_type: 'sdmf' or 'mdmf' (or uppercase versions)
:param str uri_prefix: the expected prefix of the resulting directory URI (checked against the command output)
:param str dirname: the directory alias
"""
d2 = self.do_cli("mkdir", "--format={}".format(mutable_type), dirname)
d2.addCallback(_check, uri_prefix)
def _stash_filecap(cap):
u = uri.from_string(cap)

View File

@ -4,6 +4,7 @@ __all__ = [
"SyncTestCase",
"AsyncTestCase",
"AsyncBrokenTestCase",
"TrialTestCase",
"flush_logged_errors",
"skip",
@ -11,6 +12,7 @@ __all__ = [
]
import os, random, struct
import six
import tempfile
from tempfile import mktemp
from functools import partial
@ -57,6 +59,7 @@ from twisted.internet.interfaces import (
IReactorSocket,
)
from twisted.internet.endpoints import AdoptedStreamServerEndpoint
from twisted.trial.unittest import TestCase as _TrialTestCase
from allmydata import uri
from allmydata.interfaces import IMutableFileNode, IImmutableFileNode,\
@ -1242,3 +1245,29 @@ class AsyncBrokenTestCase(_TestCaseMixin, TestCase):
run_tests_with = EliotLoggedRunTest.make_factory(
AsynchronousDeferredRunTestForBrokenTwisted.make_factory(timeout=60.0),
)
class TrialTestCase(_TrialTestCase):
"""
A twisted.trial.unittest.TestCase with Tahoe-required fixes
applied. Currently these are:
- ensure that .fail() passes a bytes msg on Python2
"""
def fail(self, msg):
"""
Ensure our msg is a native string on Python2. If it was Unicode,
we encode it as utf8 and hope for the best. On Python3 we take
no action.
This is necessary because Twisted passes the 'msg' argument
along to the constructor of an exception; on Python2,
Exception will accept a `unicode` instance but str() on the
resulting Exception fails if the message contains non-ASCII
characters.
"""
if six.PY2:
if isinstance(msg, six.text_type):
return super(TrialTestCase, self).fail(msg.encode("utf8"))
return super(TrialTestCase, self).fail(msg)
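For context, a minimal Python 2 illustration (assumed, not part of the diff) of the failure mode this override avoids:

```python
# -*- coding: utf-8 -*-
# Python 2 only: Exception accepts a unicode message, but str() on the
# resulting instance raises for non-ASCII text, which is what breaks
# trial's failure reporting when .fail() is handed such a message.
e = Exception(u"sn\u00f6")
try:
    str(e)
except UnicodeEncodeError:
    print("str() of an Exception built from non-ASCII unicode fails")
```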

View File

@ -9,6 +9,7 @@ from twisted.internet.interfaces import IConsumer
from allmydata import uri, dirnode
from allmydata.client import _Client
from allmydata.immutable import upload
from allmydata.immutable.literal import LiteralFileNode
from allmydata.interfaces import IImmutableFileNode, IMutableFileNode, \
ExistingChildError, NoSuchChildError, MustNotBeUnknownRWError, \
MustBeDeepImmutableError, MustBeReadonlyError, \
@ -27,6 +28,9 @@ from allmydata.nodemaker import NodeMaker
from base64 import b32decode
import allmydata.test.common_util as testutil
from hypothesis import given
from hypothesis.strategies import text
if six.PY3:
long = int
@ -1460,6 +1464,33 @@ class Packing(testutil.ReallyEqualMixin, unittest.TestCase):
kids[unicode(name)] = (nm.create_from_cap(caps[name]), {})
return kids
@given(text(min_size=1, max_size=20))
def test_pack_unpack_unicode_hypothesis(self, name):
"""
pack -> unpack results in the same objects (with a unicode name)
"""
nm = NodeMaker(None, None, None, None, None, {"k": 3, "n": 10}, None, None)
fn = MinimalFakeMutableFile()
# FIXME TODO: we shouldn't have to do this out here, but
# Hypothesis found that a name with "\x2000" does not make the
# round-trip properly .. so for now we'll only give the packer
# normalized names.
# See also:
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/2606
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1076
name = unicodedata.normalize('NFC', name)
kids = {
name: (LiteralFileNode(uri.from_string(one_uri)), {}),
}
packed = dirnode.pack_children(kids, fn.get_writekey(), deep_immutable=False)
write_uri = "URI:SSK-RO:e3mdrzfwhoq42hy5ubcz6rp3o4:ybyibhnp3vvwuq2vaw2ckjmesgkklfs6ghxleztqidihjyofgw7q"
filenode = nm.create_from_cap(write_uri)
dn = dirnode.DirectoryNode(filenode, nm, None)
unkids = dn._unpack_contents(packed)
self.assertEqual(kids, unkids)
def test_deep_immutable(self):
nm = NodeMaker(None, None, None, None, None, {"k": 3, "n": 10}, None, None)
fn = MinimalFakeMutableFile()
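As a standalone illustration (assumption, not from the diff) of the normalization caveat noted in test_pack_unpack_unicode_hypothesis above:

```python
import unicodedata

# A decomposed name and its NFC form are different unicode strings, so only
# pre-normalized names survive the pack/unpack round-trip unchanged.
decomposed = u"a\u0300"                              # "a" + combining grave accent
composed = unicodedata.normalize("NFC", decomposed)  # u"\u00e0", one code point
assert composed != decomposed
assert unicodedata.normalize("NFC", composed) == composed
```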

View File

@ -18,6 +18,64 @@ def assert_soup_has_favicon(testcase, soup):
any(t[u'href'] == u'/icon.png' for t in links), soup)
def assert_soup_has_tag_with_attributes(testcase, soup, tag_name, attrs):
"""
Using a ``TestCase`` object ``testcase``, assert that the passed
in ``BeautifulSoup`` object ``soup`` contains a tag ``tag_name``
(unicode) which has all the attributes in ``attrs`` (dict).
"""
tags = soup.find_all(tag_name)
for tag in tags:
if all(v in tag.attrs.get(k, []) for k, v in attrs.items()):
return # we found every attr in this tag; done
testcase.fail(
u"No <{}> tags contain attributes: {}".format(tag_name, attrs)
)
def assert_soup_has_tag_with_attributes_and_content(testcase, soup, tag_name, content, attrs):
"""
Using a ``TestCase`` object ``testcase``, assert that the passed
in ``BeautifulSoup`` object ``soup`` contains a tag ``tag_name``
(unicode) which has all the attributes in ``attrs`` (dict) and
contains the string ``content`` (unicode).
"""
assert_soup_has_tag_with_attributes(testcase, soup, tag_name, attrs)
assert_soup_has_tag_with_content(testcase, soup, tag_name, content)
def _normalized_contents(tag):
"""
:returns: all the text contents of the tag with whitespace
normalized: all newlines removed and at most one space between
words.
"""
return u" ".join(tag.text.split())
def assert_soup_has_tag_with_content(testcase, soup, tag_name, content):
"""
Using a ``TestCase`` object ``testcase``, assert that the passed
in ``BeautifulSoup`` object ``soup`` contains a tag ``tag_name``
(unicode) which contains the string ``content`` (unicode).
"""
tags = soup.find_all(tag_name)
for tag in tags:
if content in tag.contents:
return
# make these "fuzzy" options?
for c in tag.contents:
if content in c:
return
if content in _normalized_contents(tag):
return
testcase.fail(
u"No <{}> tag contains the text '{}'".format(tag_name, content)
)
def assert_soup_has_text(testcase, soup, text):
"""
Using a ``TestCase`` object ``testcase``, assert that the passed in

View File

@ -3,6 +3,9 @@ from __future__ import print_function
import os.path, re, urllib
import json
from six.moves import StringIO
from bs4 import BeautifulSoup
from nevow import rend
from twisted.trial import unittest
from allmydata import uri, dirnode
@ -325,8 +328,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _stash_root_and_create_file(n):
self.rootnode = n
self.rooturl = "uri/" + urllib.quote(n.get_uri()) + "/"
self.rourl = "uri/" + urllib.quote(n.get_readonly_uri()) + "/"
self.rooturl = "uri/" + urllib.quote(n.get_uri())
self.rourl = "uri/" + urllib.quote(n.get_readonly_uri())
if not immutable:
return self.rootnode.set_node(name, future_node)
d.addCallback(_stash_root_and_create_file)
@ -386,7 +389,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda ign: self.GET(expected_info_url))
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=False)
d.addCallback(lambda ign: self.GET("%s%s?t=info" % (self.rooturl, str(name))))
d.addCallback(lambda ign: self.GET("%s/%s?t=info" % (self.rooturl, str(name))))
d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)
def _check_json(res, expect_rw_uri):
@ -410,7 +413,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
# TODO: check metadata contents
self.failUnlessIn("metadata", data[1])
d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rooturl, str(name))))
d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rooturl, str(name))))
d.addCallback(_check_json, expect_rw_uri=not immutable)
# and make sure that a read-only version of the directory can be
@ -425,7 +428,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda ign: self.GET(self.rourl+"?t=json"))
d.addCallback(_check_directory_json, expect_rw_uri=False)
d.addCallback(lambda ign: self.GET("%s%s?t=json" % (self.rourl, str(name))))
d.addCallback(lambda ign: self.GET("%s/%s?t=json" % (self.rourl, str(name))))
d.addCallback(_check_json, expect_rw_uri=False)
# TODO: check that getting t=info from the Info link in the ro directory
@ -492,7 +495,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.failUnlessIn("CHK", cap.to_string())
self.cap = cap
self.rootnode = dn
self.rooturl = "uri/" + urllib.quote(dn.get_uri()) + "/"
self.rooturl = "uri/" + urllib.quote(dn.get_uri())
return download_to_data(dn._node)
d.addCallback(_created)
@ -534,19 +537,28 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
# Make sure the lonely child can be listed in HTML...
d.addCallback(lambda ign: self.GET(self.rooturl))
def _check_html(res):
soup = BeautifulSoup(res, 'html5lib')
self.failIfIn("URI:SSK", res)
get_lonely = "".join([r'<td>FILE</td>',
r'\s+<td>',
r'<a href="[^"]+%s[^"]+" rel="noreferrer">lonely</a>' % (urllib.quote(lonely_uri),),
r'</td>',
r'\s+<td align="right">%d</td>' % len("one"),
])
self.failUnless(re.search(get_lonely, res), res)
found = False
for td in soup.find_all(u"td"):
if td.text != u"FILE":
continue
a = td.findNextSibling()(u"a")[0]
self.assertIn(urllib.quote(lonely_uri), a[u"href"])
self.assertEqual(u"lonely", a.text)
self.assertEqual(a[u"rel"], [u"noreferrer"])
self.assertEqual(u"{}".format(len("one")), td.findNextSibling().findNextSibling().text)
found = True
break
self.assertTrue(found)
# find the More Info link for name, should be relative
mo = re.search(r'<a href="([^"]+)">More Info</a>', res)
info_url = mo.group(1)
self.failUnless(info_url.endswith(urllib.quote(lonely_uri) + "?t=info"), info_url)
infos = list(
a[u"href"]
for a in soup.find_all(u"a")
if a.text == u"More Info"
)
self.assertEqual(1, len(infos))
self.assertTrue(infos[0].endswith(urllib.quote(lonely_uri) + "?t=info"))
d.addCallback(_check_html)
# ... and in JSON.
@ -573,7 +585,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
@ -747,7 +759,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d = c0.create_dirnode()
def _stash_root_and_create_file(n):
self.rootnode = n
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"good", upload.Data(DATA, convergence=""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
@ -960,7 +972,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
def _stash_root_and_create_file(n):
self.rootnode = n
self.uris["root"] = n.get_uri()
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
return n.add_file(u"one", upload.Data(DATA, convergence=""))
d.addCallback(_stash_root_and_create_file)
def _stash_uri(fn, which):
@ -1027,8 +1039,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
DATA = "data" * 100
d = c0.create_dirnode()
def _stash_root(n):
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri()) + "/"
self.fileurls["imaginary"] = self.fileurls["root"] + "imaginary"
self.fileurls["root"] = "uri/" + urllib.quote(n.get_uri())
self.fileurls["imaginary"] = self.fileurls["root"] + "/imaginary"
return n
d.addCallback(_stash_root)
d.addCallback(lambda ign: c0.upload(upload.Data(DATA, convergence="")))
@ -1044,14 +1056,14 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
d.addCallback(lambda ign: c0.create_dirnode())
def _mangle_dirnode_1share(n):
u = n.get_uri()
url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u) + "/"
url = self.fileurls["dir-1share"] = "uri/" + urllib.quote(u)
self.fileurls["dir-1share-json"] = url + "?t=json"
self.delete_shares_numbered(u, range(1,10))
d.addCallback(_mangle_dirnode_1share)
d.addCallback(lambda ign: c0.create_dirnode())
def _mangle_dirnode_0share(n):
u = n.get_uri()
url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u) + "/"
url = self.fileurls["dir-0share"] = "uri/" + urllib.quote(u)
self.fileurls["dir-0share-json"] = url + "?t=json"
self.delete_shares_numbered(u, range(0,10))
d.addCallback(_mangle_dirnode_0share)
@ -1330,8 +1342,8 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
self.dir_si_b32 = base32.b2a(dn.get_storage_index())
self.dir_url_base = "uri/"+dn.get_write_uri()
self.dir_url_json1 = "uri/"+dn.get_write_uri()+"?t=json"
self.dir_url_json2 = "uri/"+dn.get_write_uri()+"/?t=json"
self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"/?t=json"
self.dir_url_json2 = "uri/"+dn.get_write_uri()+"?t=json"
self.dir_url_json_ro = "uri/"+dn.get_readonly_uri()+"?t=json"
self.child_url = "uri/"+dn.get_readonly_uri()+"/child"
d.addCallback(_get_dircap)
d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))

View File

@ -7,7 +7,6 @@ import treq
from bs4 import BeautifulSoup
from twisted.application import service
from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks, returnValue, maybeDeferred
from twisted.internet.task import Clock
@ -48,11 +47,14 @@ from ..common import (
WebErrorMixin,
make_mutable_file_uri,
create_mutable_filenode,
TrialTestCase,
)
from .common import (
assert_soup_has_favicon,
assert_soup_has_text,
assert_soup_has_tag_with_attributes,
)
from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable import servermap, publish, retrieve
from .. import common_util as testutil
@ -652,7 +654,7 @@ class WebMixin(testutil.TimezoneMixin):
class MultiFormatPageTests(unittest.TestCase):
class MultiFormatPageTests(TrialTestCase):
"""
Tests for ``MultiFormatPage``.
"""
@ -761,7 +763,7 @@ class MultiFormatPageTests(unittest.TestCase):
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, unittest.TestCase):
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, TrialTestCase):
maxDiff = None
def test_create(self):
@ -814,64 +816,6 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
d.addCallback(_check)
return d
def test_welcome(self):
d = self.GET("/")
def _check(res):
# TODO: replace this with a parser
self.failUnlessIn('<title>Tahoe-LAFS - Welcome</title>', res)
self.failUnlessIn(FAVICON_MARKUP, res)
self.failUnlessIn('<a href="status">Recent and Active Operations</a>', res)
self.failUnlessIn('<a href="statistics">Operational Statistics</a>', res)
self.failUnless(re.search('<input (type="hidden" |name="t" |value="report-incident" ){3}/>',res), res)
self.failUnlessIn('Page rendered at', res)
self.failUnlessIn('Tahoe-LAFS code imported from:', res)
res_u = res.decode('utf-8')
self.failUnlessIn(u'<td>fake_nickname \u263A</td>', res_u)
self.failUnlessIn(u'<div class="nickname">other_nickname \u263B</div>', res_u)
self.failUnlessIn(u'Connected to <span>1</span>\n of <span>2</span> known storage servers', res_u)
def timestamp(t):
return (u'"%s"' % (t,)) if self.have_working_tzset() else u'"[^"]*"'
# TODO: use a real parser to make sure these two nodes are siblings
self.failUnless(re.search(
u'<div class="status-indicator"><img (src="img/connected-yes.png" |alt="Connected" ){2}/></div>'
u'\s+'
u'<div class="nickname">other_nickname \u263B</div>',
res_u), repr(res_u))
self.failUnless(re.search(
u'<a( class="timestamp"| title=%s){2}>\s+1d\u00A00h\u00A00m\u00A050s\s+</a>'
% timestamp(u'1970-01-01 13:00:10'), res_u), repr(res_u))
# same for these two nodes
self.failUnless(re.search(
u'<div class="status-indicator"><img (src="img/connected-no.png" |alt="Disconnected" ){2}/></div>'
u'\s+'
u'<div class="nickname">disconnected_nickname \u263B</div>',
res_u), repr(res_u))
self.failUnless(re.search(
u'<a( class="timestamp"| title="N/A"){2}>\s+N/A\s+</a>',
res_u), repr(res_u))
self.failUnless(re.search(
u'<td class="service-last-received-data"><a( class="timestamp"| title=%s){2}>'
u'1d\u00A00h\u00A00m\u00A030s</a></td>'
% timestamp(u'1970-01-01 13:00:30'), res_u), repr(res_u))
self.failUnless(re.search(
u'<td class="service-last-received-data"><a( class="timestamp"| title=%s){2}>'
u'1d\u00A00h\u00A00m\u00A025s</a></td>'
% timestamp(u'1970-01-01 13:00:35'), res_u), repr(res_u))
self.failUnlessIn(u'\u00A9 <a href="https://tahoe-lafs.org/">Tahoe-LAFS Software Foundation', res_u)
self.failUnlessIn('<td><h3>Available</h3></td>', res)
self.failUnlessIn('123.5kB', res)
self.s.basedir = 'web/test_welcome'
fileutil.make_dirs("web/test_welcome")
fileutil.make_dirs("web/test_welcome/private")
return self.GET("/")
d.addCallback(_check)
return d
def test_introducer_status(self):
class MockIntroducerClient(object):
def __init__(self, connected):
@ -1291,7 +1235,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
def test_GET_FILE_URI_badchild(self):
base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
errmsg = "Files have no children, certainly not named 'boguschild'"
errmsg = "Files have no children named 'boguschild'"
d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
"400 Bad Request", errmsg,
self.GET, base)
@ -1301,7 +1245,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
"400 Bad Request", errmsg,
"409 Conflict", errmsg,
self.PUT, base, "")
return d
@ -1756,134 +1700,171 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
return d
def _check_upload_and_mkdir_forms(self, html):
# We should have a form to create a file, with radio buttons that allow
# the user to toggle whether it is a CHK/LIT (default), SDMF, or MDMF file.
self.failUnless(re.search('<input (name="t" |value="upload" |type="hidden" ){3}/>', html), html)
self.failUnless(re.search('<input [^/]*id="upload-chk"', html), html)
self.failUnless(re.search('<input [^/]*id="upload-sdmf"', html), html)
self.failUnless(re.search('<input [^/]*id="upload-mdmf"', html), html)
# We should also have the ability to create a mutable directory, with
# radio buttons that allow the user to toggle whether it is an SDMF (default)
# or MDMF directory.
self.failUnless(re.search('<input (name="t" |value="mkdir" |type="hidden" ){3}/>', html), html)
self.failUnless(re.search('<input [^/]*id="mkdir-sdmf"', html), html)
self.failUnless(re.search('<input [^/]*id="mkdir-mdmf"', html), html)
self.failUnlessIn(FAVICON_MARKUP, html)
def _check_upload_and_mkdir_forms(self, soup):
"""
Confirm `soup` contains a form to create a file, with radio
buttons that allow the user to toggle whether it is a CHK/LIT
(default), SDMF, or MDMF file, as well as a form to create a
mutable directory with radio buttons to choose SDMF (default)
or MDMF.
"""
found = []
desired_ids = (
u"upload-chk",
u"upload-sdmf",
u"upload-mdmf",
u"mkdir-sdmf",
u"mkdir-mdmf",
)
for input_tag in soup.find_all(u"input"):
if input_tag.get(u"id", u"") in desired_ids:
found.append(input_tag)
else:
if input_tag.get(u"name", u"") == u"t" and input_tag.get(u"type", u"") == u"hidden":
if input_tag[u"value"] == u"upload":
found.append(input_tag)
elif input_tag[u"value"] == u"mkdir":
found.append(input_tag)
self.assertEqual(len(found), 7, u"Failed to find all 7 <input> tags")
assert_soup_has_favicon(self, soup)
@inlineCallbacks
def test_GET_DIRECTORY_html(self):
d = self.GET(self.public_url + "/foo", followRedirect=True)
def _check(html):
self.failUnlessIn('<li class="toolbar-item"><a href="../../..">Return to Welcome page</a></li>', html)
self._check_upload_and_mkdir_forms(html)
self.failUnlessIn("quux", html)
d.addCallback(_check)
return d
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
soup = BeautifulSoup(data, 'html5lib')
self._check_upload_and_mkdir_forms(soup)
toolbars = soup.find_all(u"li", {u"class": u"toolbar-item"})
self.assertTrue(any(li.text == u"Return to Welcome page" for li in toolbars))
self.failUnlessIn("quux", data)
@inlineCallbacks
def test_GET_DIRECTORY_html_filenode_encoding(self):
d = self.GET(self.public_url + "/foo", followRedirect=True)
def _check(html):
# Check if encoded entries are there
self.failUnlessIn('@@named=/' + self._htmlname_urlencoded + '" rel="noreferrer">'
+ self._htmlname_escaped + '</a>', html)
self.failUnlessIn('value="' + self._htmlname_escaped_attr + '"', html)
self.failIfIn(self._htmlname_escaped_double, html)
# Make sure that Nevow escaping actually works by checking for unsafe characters
# and that '&' is escaped.
for entity in '<>':
self.failUnlessIn(entity, self._htmlname_raw)
self.failIfIn(entity, self._htmlname_escaped)
self.failUnlessIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_raw))
self.failIfIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_escaped))
d.addCallback(_check)
return d
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
soup = BeautifulSoup(data, 'html5lib')
# Check if encoded entries are there
target_ref = u'@@named=/{}'.format(self._htmlname_urlencoded)
# at least one <a> tag has our weirdly-named file properly
# encoded (or else BeautifulSoup would produce an error)
self.assertTrue(
any(
a.text == self._htmlname_unicode and a[u"href"].endswith(target_ref)
for a in soup.find_all(u"a", {u"rel": u"noreferrer"})
)
)
# XXX leaving this as-is, but consider using BeautifulSoup here too?
# Make sure that Nevow escaping actually works by checking for unsafe characters
# and that '&' is escaped.
for entity in '<>':
self.failUnlessIn(entity, self._htmlname_raw)
self.failIfIn(entity, self._htmlname_escaped)
self.failUnlessIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_raw))
self.failIfIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_escaped))
@inlineCallbacks
def test_GET_root_html(self):
d = self.GET("/")
d.addCallback(self._check_upload_and_mkdir_forms)
return d
data = yield self.GET("/")
soup = BeautifulSoup(data, 'html5lib')
self._check_upload_and_mkdir_forms(soup)
@inlineCallbacks
def test_GET_DIRURL(self):
# the addSlash means we get a redirect here
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
soup = BeautifulSoup(data, 'html5lib')
# from /uri/$URI/foo/ , we need ../../../ to get back to the root
ROOT = "../../.."
d = self.GET(self.public_url + "/foo", followRedirect=True)
def _check(res):
self.failUnlessIn('<a href="%s">Return to Welcome page' % ROOT, res)
root = u"../../.."
self.assertTrue(
any(
a.text == u"Return to Welcome page"
for a in soup.find_all(u"a", {u"href": root})
)
)
# the FILE reference points to a URI, but it should end in bar.txt
bar_url = ("%s/file/%s/@@named=/bar.txt" %
(ROOT, urllib.quote(self._bar_txt_uri)))
get_bar = "".join([r'<td>FILE</td>',
r'\s+<td>',
r'<a href="%s" rel="noreferrer">bar.txt</a>' % bar_url,
r'</td>',
r'\s+<td align="right">%d</td>' % len(self.BAR_CONTENTS),
])
self.failUnless(re.search(get_bar, res), res)
for label in ['unlink', 'rename/relink']:
for line in res.split("\n"):
# find the line that contains the relevant button for bar.txt
if ("form action" in line and
('value="%s"' % (label,)) in line and
'value="bar.txt"' in line):
# the form target should use a relative URL
foo_url = urllib.quote("%s/uri/%s/" % (ROOT, self._foo_uri))
self.failUnlessIn('action="%s"' % foo_url, line)
# and the when_done= should too
#done_url = urllib.quote(???)
#self.failUnlessIn('name="when_done" value="%s"' % done_url, line)
# the FILE reference points to a URI, but it should end in bar.txt
bar_url = "{}/file/{}/@@named=/bar.txt".format(root, urllib.quote(self._bar_txt_uri))
self.assertTrue(
any(
a.text == u"bar.txt"
for a in soup.find_all(u"a", {u"href": bar_url})
)
)
self.assertTrue(
any(
td.text == u"{}".format(len(self.BAR_CONTENTS))
for td in soup.find_all(u"td", {u"align": u"right"})
)
)
foo_url = urllib.quote("{}/uri/{}/".format(root, self._foo_uri))
forms = soup.find_all(u"form", {u"action": foo_url})
found = []
for form in forms:
if form.find_all(u"input", {u"name": u"name", u"value": u"bar.txt"}):
kind = form.find_all(u"input", {u"type": u"submit"})[0][u"value"]
found.append(kind)
if kind == u"unlink":
self.assertTrue(form[u"method"] == u"post")
self.assertEqual(
set(found),
{u"unlink", u"rename/relink"}
)
# 'unlink' needs to use POST because it directly has a side effect
if label == 'unlink':
self.failUnlessIn('method="post"', line)
break
else:
self.fail("unable to find '%s bar.txt' line" % (label,))
# the DIR reference just points to a URI
sub_url = ("%s/uri/%s/" % (ROOT, urllib.quote(self._sub_uri)))
get_sub = ((r'<td>DIR</td>')
+r'\s+<td><a href="%s">sub</a></td>' % sub_url)
self.failUnless(re.search(get_sub, res), res)
d.addCallback(_check)
sub_url = "{}/uri/{}/".format(root, urllib.quote(self._sub_uri))
self.assertTrue(
any(
td.findNextSibling()(u"a")[0][u"href"] == sub_url
for td in soup.find_all(u"td")
if td.text == u"DIR"
)
)
@inlineCallbacks
def test_GET_DIRURL_readonly(self):
# look at a readonly directory
d.addCallback(lambda res:
self.GET(self.public_url + "/reedownlee", followRedirect=True))
def _check2(res):
self.failUnlessIn("(read-only)", res)
self.failIfIn("Upload a file", res)
d.addCallback(_check2)
data = yield self.GET(self.public_url + "/reedownlee", followRedirect=True)
self.failUnlessIn("(read-only)", data)
self.failIfIn("Upload a file", data)
# and at a directory that contains a readonly directory
d.addCallback(lambda res:
self.GET(self.public_url, followRedirect=True))
def _check3(res):
self.failUnless(re.search('<td>DIR-RO</td>'
r'\s+<td><a href="[\.\/]+/uri/URI%3ADIR2-RO%3A[^"]+">reedownlee</a></td>', res), res)
d.addCallback(_check3)
@inlineCallbacks
def test_GET_DIRURL_readonly_dir(self):
# look at a directory that contains a readonly directory
data = yield self.GET(self.public_url, followRedirect=True)
soup = BeautifulSoup(data, 'html5lib')
ro_links = list(
td.findNextSibling()(u"a")[0]
for td in soup.find_all(u"td")
if td.text == u"DIR-RO"
)
self.assertEqual(1, len(ro_links))
self.assertEqual(u"reedownlee", ro_links[0].text)
self.assertTrue(u"URI%3ADIR2-RO%3A" in ro_links[0][u"href"])
# and an empty directory
d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty/"))
def _check4(res):
self.failUnlessIn("directory is empty", res)
MKDIR_BUTTON_RE=re.compile('<input (type="hidden" |name="t" |value="mkdir" ){3}/>.*<legend class="freeform-form-label">Create a new directory in this directory</legend>.*<input (type="submit" |class="btn" |value="Create" ){3}/>', re.I)
self.failUnless(MKDIR_BUTTON_RE.search(res), res)
d.addCallback(_check4)
@inlineCallbacks
def test_GET_DIRURL_empty(self):
# look at an empty directory
data = yield self.GET(self.public_url + "/foo/empty")
soup = BeautifulSoup(data, 'html5lib')
self.failUnlessIn("directory is empty", data)
mkdir_inputs = soup.find_all(u"input", {u"type": u"hidden", u"name": u"t", u"value": u"mkdir"})
self.assertEqual(1, len(mkdir_inputs))
self.assertEqual(
u"Create a new directory in this directory",
mkdir_inputs[0].parent(u"legend")[0].text
)
# and at a literal directory
@inlineCallbacks
def test_GET_DIRURL_literal(self):
# look at a literal directory
tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
d.addCallback(lambda res:
self.GET("/uri/" + tiny_litdir_uri + "/", followRedirect=True))
def _check5(res):
self.failUnlessIn('(immutable)', res)
self.failUnless(re.search('<td>FILE</td>'
r'\s+<td><a href="[\.\/]+/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short" rel="noreferrer">short</a></td>', res), res)
d.addCallback(_check5)
return d
data = yield self.GET("/uri/" + tiny_litdir_uri, followRedirect=True)
soup = BeautifulSoup(data, 'html5lib')
self.failUnlessIn('(immutable)', data)
file_links = list(
td.findNextSibling()(u"a")[0]
for td in soup.find_all(u"td")
if td.text == u"FILE"
)
self.assertEqual(1, len(file_links))
self.assertEqual(u"short", file_links[0].text)
self.assertTrue(file_links[0][u"href"].endswith(u"/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short"))
@inlineCallbacks
def test_GET_DIRURL_badtype(self):
@ -1936,7 +1917,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
def test_POST_DIRURL_manifest(self):
d = defer.succeed(None)
def getman(ignored, output):
url = self.webish_url + self.public_url + "/foo/?t=start-manifest&ophandle=125"
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=125"
d = do_http("post", url, allow_redirects=True,
browser_like_redirects=True)
d.addCallback(self.wait_for_operation, "125")
@ -1988,7 +1969,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
return d
def test_POST_DIRURL_deepsize(self):
url = self.webish_url + self.public_url + "/foo/?t=start-deep-size&ophandle=126"
url = self.webish_url + self.public_url + "/foo?t=start-deep-size&ophandle=126"
d = do_http("post", url, allow_redirects=True,
browser_like_redirects=True)
d.addCallback(self.wait_for_operation, "126")
@ -2017,7 +1998,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
return d
def test_POST_DIRURL_deepstats(self):
url = self.webish_url + self.public_url + "/foo/?t=start-deep-stats&ophandle=127"
url = self.webish_url + self.public_url + "/foo?t=start-deep-stats&ophandle=127"
d = do_http("post", url,
allow_redirects=True, browser_like_redirects=True)
d.addCallback(self.wait_for_operation, "127")
@ -2046,7 +2027,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
return d
def test_POST_DIRURL_stream_manifest(self):
d = self.POST(self.public_url + "/foo/?t=stream-manifest")
d = self.POST(self.public_url + "/foo?t=stream-manifest")
def _check(res):
self.failUnless(res.endswith("\n"))
units = [json.loads(t) for t in res[:-1].split("\n")]
@ -2708,7 +2689,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
# slightly differently
d.addCallback(lambda res:
self.GET(self.public_url + "/foo/",
self.GET(self.public_url + "/foo",
followRedirect=True))
def _check_page(res):
# TODO: assert more about the contents
@ -2726,7 +2707,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
# look at the JSON form of the enclosing directory
d.addCallback(lambda res:
self.GET(self.public_url + "/foo/?t=json",
self.GET(self.public_url + "/foo?t=json",
followRedirect=True))
def _check_page_json(res):
parsed = json.loads(res)
@ -2864,7 +2845,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
body, headers = self.build_form(t="upload", when_done="/THERE",
file=("new.txt", self.NEWFILE_CONTENTS))
yield self.shouldRedirectTo(self.webish_url + self.public_url + "/foo",
self.webish_url + "/THERE",
"/THERE",
method="post", data=body, headers=headers,
code=http.FOUND)
fn = self._foo_node
@ -2940,7 +2921,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
@inlineCallbacks
def test_POST_DIRURL_check(self):
foo_url = self.public_url + "/foo/"
foo_url = self.public_url + "/foo"
res = yield self.POST(foo_url, t="check")
self.failUnlessIn("Healthy :", res)
@ -2962,7 +2943,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
@inlineCallbacks
def test_POST_DIRURL_check_and_repair(self):
foo_url = self.public_url + "/foo/"
foo_url = self.public_url + "/foo"
res = yield self.POST(foo_url, t="check", repair="true")
self.failUnlessIn("Healthy :", res)
@ -3518,7 +3499,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
body, headers = self.build_form(t="mkdir", name="newdir",
when_done="/THERE")
yield self.shouldRedirectTo(self.webish_url + self.public_url + "/foo",
self.webish_url + "/THERE",
"/THERE",
method="post", data=body, headers=headers,
code=http.FOUND)
res = yield self._foo_node.get(u"newdir")
@ -3528,7 +3509,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
def test_POST_mkdir_whendone_queryarg(self):
body, headers = self.build_form(t="mkdir", name="newdir")
url = self.webish_url + self.public_url + "/foo?when_done=/THERE"
yield self.shouldRedirectTo(url, self.webish_url + "/THERE",
yield self.shouldRedirectTo(url, "/THERE",
method="post", data=body, headers=headers,
code=http.FOUND)
res = yield self._foo_node.get(u"newdir")
@ -4077,15 +4058,22 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
self.GET, "/uri")
return d
@inlineCallbacks
def test_GET_rename_form(self):
d = self.GET(self.public_url + "/foo?t=rename-form&name=bar.txt",
followRedirect=True)
def _check(res):
self.failUnless(re.search('<input (name="when_done" |value="." |type="hidden" ){3}/>', res), res)
self.failUnless(re.search(r'<input (readonly="true" |type="text" |name="from_name" |value="bar\.txt" ){4}/>', res), res)
self.failUnlessIn(FAVICON_MARKUP, res)
d.addCallback(_check)
return d
data = yield self.GET(
self.public_url + "/foo?t=rename-form&name=bar.txt",
followRedirect=True
)
soup = BeautifulSoup(data, 'html5lib')
assert_soup_has_favicon(self, soup)
assert_soup_has_tag_with_attributes(
self, soup, u"input",
{u"name": u"when_done", u"value": u".", u"type": u"hidden"},
)
assert_soup_has_tag_with_attributes(
self, soup, u"input",
{u"readonly": u"true", u"name": u"from_name", u"value": u"bar.txt", u"type": u"text"},
)
def log(self, res, msg):
#print "MSG: %s RES: %s" % (msg, res)
@ -4432,7 +4420,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
@inlineCallbacks
def test_ophandle_cancel(self):
url = self.webish_url + self.public_url + "/foo/?t=start-manifest&ophandle=128"
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=128"
yield do_http("post", url,
allow_redirects=True, browser_like_redirects=True)
res = yield self.GET("/operations/128?t=status&output=JSON")
@ -4451,7 +4439,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
@inlineCallbacks
def test_ophandle_retainfor(self):
url = self.webish_url + self.public_url + "/foo/?t=start-manifest&ophandle=129&retain-for=60"
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=129&retain-for=60"
yield do_http("post", url,
allow_redirects=True, browser_like_redirects=True)
res = yield self.GET("/operations/129?t=status&output=JSON&retain-for=0")
@ -4465,7 +4453,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
@inlineCallbacks
def test_ophandle_release_after_complete(self):
url = self.webish_url + self.public_url + "/foo/?t=start-manifest&ophandle=130"
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=130"
yield do_http("post", url,
allow_redirects=True, browser_like_redirects=True)
yield self.wait_for_operation(None, "130")
@ -4479,7 +4467,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
# uncollected ophandles should expire after 4 days
def _make_uncollected_ophandle(ophandle):
url = (self.webish_url + self.public_url +
"/foo/?t=start-manifest&ophandle=%d" % ophandle)
"/foo?t=start-manifest&ophandle=%d" % ophandle)
# When we start the operation, the webapi server will want to
# redirect us to the page for the ophandle, so we get
# confirmation that the operation has started. If the manifest
@ -4517,7 +4505,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
# collected ophandles should expire after 1 day
def _make_collected_ophandle(ophandle):
url = (self.webish_url + self.public_url +
"/foo/?t=start-manifest&ophandle=%d" % ophandle)
"/foo?t=start-manifest&ophandle=%d" % ophandle)
# By following the initial redirect, we collect the ophandle
# we've just created.
return do_http("post", url,

View File

@ -100,8 +100,7 @@ def parse_offset_arg(offset):
def get_root(ctx_or_req):
req = IRequest(ctx_or_req)
# the addSlash=True gives us one extra (empty) segment
depth = len(req.prepath) + len(req.postpath) - 1
depth = len(req.prepath) + len(req.postpath)
link = "/".join([".."] * depth)
return link
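A worked example (request values assumed for illustration) of the relative-root computation above once the -1 adjustment is dropped:

```python
# For a request that has resolved /uri/$DIRCAP/sub with nothing left to
# traverse, prepath holds three segments and postpath is empty, so the
# link back to the root is "../../..".
prepath = [b"uri", b"DIRCAP", b"sub"]
postpath = []
depth = len(prepath) + len(postpath)   # 3
assert "/".join([".."] * depth) == "../../.."
```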
@ -203,8 +202,7 @@ def plural(sequence_or_length):
return ""
return "s"
def text_plain(text, ctx):
req = IRequest(ctx)
def text_plain(text, req):
req.setHeader("content-type", "text/plain")
req.setHeader("content-length", b"%d" % len(text))
return text
@ -367,6 +365,9 @@ class NeedOperationHandleError(WebError):
pass
# XXX should be phased out by the nevow -> twisted.web port (that is,
# this whole class should have no users and can be deleted once the
# port away from nevow is complete)
class RenderMixin(object):
def renderHTTP(self, ctx):
@ -525,6 +526,10 @@ class SlotsSequenceElement(template.Element):
self.loader = template.TagLoader(tag)
self.seq = seq
@template.renderer
def header(self, request, tag):
return tag
@template.renderer
def item(self, request, tag):
"""

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,8 @@
<!DOCTYPE html>
<html lang="en" xmlns:n="http://nevow.com/ns/nevow/0.1">
<html lang="en" xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<meta charset="utf-8"/>
<title n:render="title"></title>
<title t:render="title"></title>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<meta name="description" content="Tahoe-LAFS is a free and open cloud storage engine"/>
<meta name="author" content="Tahoe-LAFS"/>
@ -30,19 +30,19 @@
<div class="span3">
<div class="well sidebar-nav">
<ul class="nav nav-list">
<li class="toolbar-item" n:render="welcome" />
<li class="toolbar-item" t:render="welcome" />
<li class="toolbar-item"><a href=".">Refresh</a></li>
<li class="toolbar-item"><a href="?t=info">More info on this directory</a></li>
<li class="toolbar-item" n:render="show_readonly" />
<li class="toolbar-item" t:render="show_readonly" />
</ul>
</div>
</div>
<div class="span9">
<h1 n:render="header"></h1>
<h1 t:render="header"></h1>
<div n:render="try_children">
<table class="table table-striped tahoe-directory" n:render="sequence" n:data="children">
<tr n:pattern="header">
<div t:render="try_children">
<table class="table table-striped tahoe-directory" t:render="children">
<tr t:render="header">
<th>Type</th>
<th>Filename</th>
<th>Size</th>
@ -51,25 +51,25 @@
<th></th>
<th></th>
</tr>
<tr n:pattern="item" n:render="row">
<td><n:slot name="type"/></td>
<td><n:slot name="filename"/></td>
<td align="right"><n:slot name="size"/></td>
<td><n:slot name="times"/></td>
<td><n:slot name="unlink"/></td>
<td><n:slot name="rename"/></td>
<td><n:slot name="info"/></td>
<tr t:render="item">
<td><t:slot name="type"/></td>
<td><t:slot name="filename"/></td>
<td align="right"><t:slot name="size"/></td>
<td><t:slot name="times"/></td>
<td><t:slot name="unlink"/></td>
<td><t:slot name="rename"/></td>
<td><t:slot name="info"/></td>
</tr>
<tr n:pattern="empty"><td colspan="9" class="empty-marker">This directory is empty.</td></tr>
<tr t:render="empty"><td colspan="9" class="empty-marker">This directory is empty.</td></tr>
</table>
</div>
<div class="tahoe-directory-footer">
<div n:render="forms"/>
<div t:render="forms"/>
<div class="results" n:render="results"/>
<div class="results" t:render="results"/>
</div>
</div><!--/span-->
</div><!--/row-->
@ -82,4 +82,4 @@
</div><!--/.fluid-container-->
</body>
</html>
</html>
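As a rough sketch (invented names, not from this diff) of how a twisted.web.template renderer fills the t:slot placeholders the template above now declares:

```python
from twisted.web.template import Element, XMLString, renderer

ROW_TEMPLATE = XMLString(
    u'<tr xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1" '
    u't:render="row"><td><t:slot name="type"/></td>'
    u'<td><t:slot name="filename"/></td></tr>'
)

class RowElement(Element):
    loader = ROW_TEMPLATE

    @renderer
    def row(self, request, tag):
        # fillSlots() substitutes each <t:slot name="..."/> with the given value
        return tag.fillSlots(type=u"FILE", filename=u"bar.txt")
```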

View File

@ -3,8 +3,12 @@ import json
from twisted.web import http, static
from twisted.internet import defer
from nevow import url, rend
from nevow.inevow import IRequest
from twisted.web.resource import (
Resource, # note: Resource is an old-style class
ErrorPage,
)
from nevow import url
from allmydata.interfaces import ExistingChildError
from allmydata.monitor import Monitor
@ -15,7 +19,7 @@ from allmydata.util import log, base32
from allmydata.util.encodingutil import quote_output
from allmydata.blacklist import FileProhibited, ProhibitedNode
from allmydata.web.common import text_plain, WebError, RenderMixin, \
from allmydata.web.common import text_plain, WebError, \
boolean_of_arg, get_arg, should_create_intermediate_directories, \
MyExceptionHandler, parse_replace_arg, parse_offset_arg, \
get_format, get_mutable_type, get_filenode_metadata
@ -87,17 +91,16 @@ class ReplaceMeMixin(object):
return d
class PlaceHolderNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
def __init__(self, client, parentnode, name):
rend.Page.__init__(self)
super(PlaceHolderNodeHandler, self).__init__()
self.client = client
assert parentnode
self.parentnode = parentnode
self.name = name
self.node = None
def render_PUT(self, ctx):
req = IRequest(ctx)
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
@ -112,8 +115,7 @@ class PlaceHolderNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
raise WebError("PUT to a file: bad t=%s" % t)
def render_POST(self, ctx):
req = IRequest(ctx)
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
if t == "upload":
@ -131,31 +133,36 @@ class PlaceHolderNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
when_done = get_arg(req, "when_done", None)
if when_done:
d.addCallback(lambda res: url.URL.fromString(when_done))
d.addCallback(lambda res: when_done)
return d
class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
class FileNodeHandler(Resource, ReplaceMeMixin, object):
def __init__(self, client, node, parentnode=None, name=None):
rend.Page.__init__(self)
super(FileNodeHandler, self).__init__()
self.client = client
assert node
self.node = node
self.parentnode = parentnode
self.name = name
def childFactory(self, ctx, name):
req = IRequest(ctx)
def getChild(self, name, req):
if isinstance(self.node, ProhibitedNode):
raise FileProhibited(self.node.reason)
if should_create_intermediate_directories(req):
raise WebError("Cannot create directory %s, because its "
"parent is a file, not a directory" % quote_output(name, encoding='utf-8'))
raise WebError("Files have no children, certainly not named %s"
% quote_output(name, encoding='utf-8'))
return ErrorPage(
http.CONFLICT,
u"Cannot create directory %s, because its parent is a file, "
u"not a directory" % quote_output(name, encoding='utf-8'),
"no details"
)
return ErrorPage(
http.BAD_REQUEST,
u"Files have no children named %s" % quote_output(name, encoding='utf-8'),
"no details",
)
def render_GET(self, ctx):
req = IRequest(ctx)
def render_GET(self, req):
t = get_arg(req, "t", "").strip()
# t=info contains variable ophandles, so is not allowed an ETag.
@ -202,18 +209,17 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
self.parentnode.get_metadata_for(self.name))
else:
d.addCallback(lambda ignored: None)
d.addCallback(lambda md: FileJSONMetadata(ctx, self.node, md))
d.addCallback(lambda md: _file_json_metadata(req, self.node, md))
return d
if t == "info":
return MoreInfo(self.node)
if t == "uri":
return FileURI(ctx, self.node)
return _file_uri(req, self.node)
if t == "readonly-uri":
return FileReadOnlyURI(ctx, self.node)
return _file_read_only_uri(req, self.node)
raise WebError("GET file: bad t=%s" % t)
def render_HEAD(self, ctx):
req = IRequest(ctx)
def render_HEAD(self, req):
t = get_arg(req, "t", "").strip()
if t:
raise WebError("HEAD file: bad t=%s" % t)
@ -222,8 +228,7 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
d.addCallback(lambda dn: FileDownloader(dn, filename))
return d
def render_PUT(self, ctx):
req = IRequest(ctx)
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
offset = parse_offset_arg(get_arg(req, "offset", None))
@ -265,8 +270,7 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
raise WebError("PUT to a file: bad t=%s" % t)
def render_POST(self, ctx):
req = IRequest(ctx)
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
if t == "check":
@ -309,7 +313,7 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
d.addCallback(self._maybe_literal, CheckResultsRenderer)
return d
def render_DELETE(self, ctx):
def render_DELETE(self, req):
assert self.parentnode and self.name
d = self.parentnode.delete(self.name)
d.addCallback(lambda res: self.node.get_uri())
@ -346,9 +350,9 @@ class FileNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
return d
class FileDownloader(rend.Page):
class FileDownloader(Resource, object):
def __init__(self, filenode, filename):
rend.Page.__init__(self)
super(FileDownloader, self).__init__()
self.filenode = filenode
self.filename = filename
@ -400,8 +404,7 @@ class FileDownloader(rend.Page):
except ValueError:
return None
def renderHTTP(self, ctx):
req = IRequest(ctx)
def render(self, req):
gte = static.getTypeAndEncoding
ctype, encoding = gte(self.filename,
static.File.contentTypes,
@ -490,12 +493,12 @@ class FileDownloader(rend.Page):
# We haven't written anything yet, so we can provide a
# sensible error message.
eh = MyExceptionHandler()
eh.renderHTTP_exception(ctx, f)
eh.renderHTTP_exception(req, f)
d.addCallbacks(_finished, _error)
return req.deferred
def FileJSONMetadata(ctx, filenode, edge_metadata):
def _file_json_metadata(req, filenode, edge_metadata):
rw_uri = filenode.get_write_uri()
ro_uri = filenode.get_readonly_uri()
data = ("filenode", get_filenode_metadata(filenode))
@ -509,16 +512,19 @@ def FileJSONMetadata(ctx, filenode, edge_metadata):
if edge_metadata is not None:
data[1]['metadata'] = edge_metadata
return text_plain(json.dumps(data, indent=1) + "\n", ctx)
return text_plain(json.dumps(data, indent=1) + "\n", req)
def FileURI(ctx, filenode):
return text_plain(filenode.get_uri(), ctx)
def FileReadOnlyURI(ctx, filenode):
def _file_uri(req, filenode):
return text_plain(filenode.get_uri(), req)
def _file_read_only_uri(req, filenode):
if filenode.is_readonly():
return text_plain(filenode.get_uri(), ctx)
return text_plain(filenode.get_readonly_uri(), ctx)
return text_plain(filenode.get_uri(), req)
return text_plain(filenode.get_readonly_uri(), req)
class FileNodeDownloadHandler(FileNodeHandler):
def childFactory(self, ctx, name):
def getChild(self, name, req):
return FileNodeDownloadHandler(self.client, self.node, name=name)

View File

@ -1,6 +1,6 @@
<html xmlns:n="http://nevow.com/ns/nevow/0.1">
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title n:render="title"></title>
<title t:render="title"></title>
<link href="/tahoe.css" rel="stylesheet" type="text/css"/>
<link href="/icon.png" rel="shortcut icon" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
@ -8,43 +8,23 @@
<body>
<h2 n:render="header" />
<h2 t:render="header" />
<div class="freeform-form">
<form action="." method="post" enctype="multipart/form-data">
<fieldset>
<legend class="freeform-form-label">Rename child</legend>
<input type="hidden" name="t" value="rename" />
<input n:render="when_done" />
<input t:render="when_done" />
Rename child:
<input type="text" name="from_name" readonly="true" n:render="get_name" /><br />
<input type="text" name="from_name" readonly="true" t:render="get_name" /><br />
to name:
<input type="text" name="to_name" /><br />
<input type="submit" value="rename" />
</fieldset>
</form>
<form action="." method="post" enctype="multipart/form-data">
<fieldset>
<legend class="freeform-form-label">Move Child to Different Directory</legend>
<input type="hidden" name="t" value="move" />
<input n:render="when_done" />
Move child: <input type="text" name="from_name" readonly="true"
n:render="get_name" /><br />
to name: <input type="text" name="to_name" n:render="get_name"/><br />
in directory: <input type="text" name="to_dir" />
<input checked="checked" type="radio" id="tt-name"
value="name" name="target_type" />
<label for="tt-name"> Subdirectory</label>
<input type="radio" id="tt-uri" value="uri" name="target_type"/>
<label for="tt-uri"> URI</label>
<br />
<input type="submit" value="move" /><br />
</fieldset>
</form>
</div>
</body></html>

View File

@ -9,7 +9,7 @@ from twisted.web import (
)
from twisted.web.util import redirectTo
from hyperlink import URL
from hyperlink import DecodedURL, URL
from nevow import rend, tags as T
from nevow.inevow import IRequest
@ -133,6 +133,15 @@ class URIHandler(resource.Resource, object):
and creates an appropriate handler (depending on the kind of
capability it was passed).
"""
# this is in case a URI like "/uri/?cap=<valid capability>" is
# passed -- we redirect to the non-trailing-slash version so
# that there is just one valid URI for the "uri" resource.
if not name:
u = DecodedURL.from_text(req.uri.decode('utf8'))
u = u.replace(
path=(s for s in u.path if s), # remove empty segments
)
return redirectTo(u.to_uri().to_text().encode('utf8'), req)
try:
node = self.client.create_node_from_uri(name)
return directory.make_handler_for(node, self.client)
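A standalone illustration (URL assumed for illustration) of the empty-segment stripping performed by the redirect above, using the same hyperlink calls:

```python
from hyperlink import DecodedURL

u = DecodedURL.from_text(u"http://127.0.0.1:3456/uri/")
u = u.replace(path=[s for s in u.path if s])   # drop the empty trailing segment
assert u.to_uri().to_text() == u"http://127.0.0.1:3456/uri"
```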
@ -142,16 +151,15 @@ class URIHandler(resource.Resource, object):
)
class FileHandler(rend.Page):
class FileHandler(resource.Resource, object):
# I handle /file/$FILECAP[/IGNORED] , which provides a URL from which a
# file can be downloaded correctly by tools like "wget".
def __init__(self, client):
rend.Page.__init__(self, client)
super(FileHandler, self).__init__()
self.client = client
def childFactory(self, ctx, name):
req = IRequest(ctx)
def getChild(self, name, req):
if req.method not in ("GET", "HEAD"):
raise WebError("/file can only be used with GET or HEAD")
# 'name' must be a file URI
@ -165,7 +173,7 @@ class FileHandler(rend.Page):
raise WebError("'%s' is not a file-cap" % name)
return filenode.FileNodeDownloadHandler(self.client, node)
def renderHTTP(self, ctx):
def render_GET(self, ctx):
raise WebError("/file must be followed by a file-cap and a name",
http.NOT_FOUND)