"""
|
|
|
|
These tests were originally written to achieve some level of
|
|
|
|
coverage for the WebAPI functionality during Python3 porting (there
|
|
|
|
aren't many tests of the Web API period).
|
|
|
|
|
|
|
|
Most of the tests have cursory asserts and encode 'what the WebAPI did
|
|
|
|
at the time of testing' -- not necessarily a cohesive idea of what the
|
|
|
|
WebAPI *should* do in every situation. It's not clear the latter
|
|
|
|
exists anywhere, however.
|
|
|
|
"""
|
|
|
|
|
2023-01-18 18:28:24 +00:00
|
|
|
from __future__ import annotations

import time
from base64 import urlsafe_b64encode
from urllib.parse import unquote as url_unquote, quote as url_quote

from cryptography.hazmat.primitives.serialization import load_pem_private_key
from twisted.internet.threads import deferToThread

import allmydata.uri
from allmydata.crypto.rsa import (
    create_signing_keypair,
    der_string_from_signing_key,
)
from allmydata.mutable.common import derive_mutable_keys
from allmydata.util import jsonbytes as json

from . import util
from .util import run_in_thread

import requests
import html5lib
from bs4 import BeautifulSoup

import pytest_twisted


@run_in_thread
def test_index(alice):
    """
    we can download the index file
    """
    util.web_get(alice.process, u"")


@run_in_thread
def test_index_json(alice):
    """
    we can download the index file as json
    """
    data = util.web_get(alice.process, u"", params={u"t": u"json"})
    # it should be valid json
    json.loads(data)


@run_in_thread
def test_upload_download(alice):
    """
    upload a file, then download it via readcap
    """

    FILE_CONTENTS = u"some contents"
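
    # "format=mdmf" below selects MDMF, one of Tahoe's two mutable-file
    # formats (the other being SDMF), so the cap returned by the upload
    # refers to a mutable file rather than an immutable CHK file.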

    readcap = util.web_post(
        alice.process, u"uri",
        data={
            u"t": u"upload",
            u"format": u"mdmf",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    readcap = readcap.strip()

    data = util.web_get(
        alice.process, u"uri",
        params={
            u"uri": readcap,
            u"filename": u"boom",
        }
    )
    assert str(data, "utf-8") == FILE_CONTENTS


@run_in_thread
def test_put(alice):
    """
    use PUT to create a file
    """

    FILE_CONTENTS = b"added via PUT" * 20

    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    cap = allmydata.uri.from_string(resp.text.strip().encode('ascii'))
    cfg = alice.process.get_config()
    assert isinstance(cap, allmydata.uri.CHKFileURI)
    assert cap.size == len(FILE_CONTENTS)
    assert cap.total_shares == int(cfg.get_config("client", "shares.total"))
    assert cap.needed_shares == int(cfg.get_config("client", "shares.needed"))
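    # (For orientation: a CHK cap has the shape
    # "URI:CHK:<key>:<hash>:<needed>:<total>:<size>", so the erasure-coding
    # parameters asserted above appear directly in the cap string.)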


@run_in_thread
def test_helper_status(storage_nodes):
    """
    successfully GET the /helper_status page
    """

    url = util.node_url(storage_nodes[0].process.node_dir, "helper_status")
    resp = requests.get(url)
    assert resp.status_code >= 200 and resp.status_code < 300
    dom = BeautifulSoup(resp.content, "html5lib")
    assert str(dom.h1.string) == u"Helper Status"


@run_in_thread
def test_deep_stats(alice):
    """
    create a directory, do deep-stats on it and prove the /operations/
    URIs work
    """
    resp = requests.post(
        util.node_url(alice.process.node_dir, "uri"),
        params={
            "format": "sdmf",
            "t": "mkdir",
            "redirect_to_result": "true",
        },
    )
    assert resp.status_code >= 200 and resp.status_code < 300

    # when creating a directory, we'll be re-directed to a URL
    # containing our writecap..
    uri = url_unquote(resp.url)
    assert 'URI:DIR2:' in uri
    dircap = uri[uri.find("URI:DIR2:"):].rstrip('/')
    dircap_uri = util.node_url(alice.process.node_dir, "uri/{}".format(url_quote(dircap)))
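    # (the cap was percent-decoded via url_unquote when we pulled it out of
    # resp.url, so it must be re-quoted before going back into a URL path)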

    # POST a file into this directory
    FILE_CONTENTS = u"a file in a directory"

    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"upload",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    resp.raise_for_status()

    # confirm the file is in the directory
    resp = requests.get(
        dircap_uri,
        params={
            u"t": u"json",
        },
    )
    d = json.loads(resp.content)
    k, data = d
    assert k == u"dirnode"
    assert len(data['children']) == 1
    k, child = list(data['children'].values())[0]
    assert k == u"filenode"
    assert child['size'] == len(FILE_CONTENTS)

    # perform deep-stats on it...
    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"start-deep-stats",
            u"ophandle": u"something_random",
        },
    )
    assert resp.status_code >= 200 and resp.status_code < 300

    # confirm we get information from the op .. after it's done
    tries = 10
    while tries > 0:
        tries -= 1
        resp = requests.get(
            util.node_url(alice.process.node_dir, u"operations/something_random"),
        )
        d = json.loads(resp.content)
        if d['size-literal-files'] == len(FILE_CONTENTS):
            print("stats completed successfully")
            break
        else:
            print("{} != {}; waiting".format(d['size-literal-files'], len(FILE_CONTENTS)))
            time.sleep(.5)


@run_in_thread
def test_status(alice):
    """
    confirm we get something sensible from /status and the various sub-types
    """

    # upload a file
    # (because of the nature of the integration-tests, we can only
    # assert things about "our" file because we don't know what other
    # operations may have happened in the grid before our test runs).

    FILE_CONTENTS = u"all the Important Data of alice\n" * 1200

    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    cap = resp.text.strip()

    print("Uploaded data, cap={}".format(cap))
    resp = requests.get(
        util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap))),
    )

    print("Downloaded {} bytes of data".format(len(resp.content)))
    assert str(resp.content, "ascii") == FILE_CONTENTS

    resp = requests.get(
        util.node_url(alice.process.node_dir, "status"),
    )
    dom = html5lib.parse(resp.content)

    hrefs = [
        a.get('href')
        for a in dom.iter(u'{http://www.w3.org/1999/xhtml}a')
    ]
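    # (html5lib.parse returns an ElementTree whose tag names carry the XHTML
    # namespace, hence the "{http://www.w3.org/1999/xhtml}a" form above)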

    found_upload = False
    found_download = False
    for href in hrefs:
        if href == u"/" or not href:
            continue
        resp = requests.get(util.node_url(alice.process.node_dir, href))
        if href.startswith(u"/status/up"):
            assert b"File Upload Status" in resp.content
            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                found_upload = True
        elif href.startswith(u"/status/down"):
            assert b"File Download Status" in resp.content
            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                found_download = True

            # download the specialized event information
            resp = requests.get(
                util.node_url(alice.process.node_dir, u"{}/event_json".format(href)),
            )
            js = json.loads(resp.content)
            # there's usually just one "read" operation, but this can handle many ..
            total_bytes = sum([st['bytes_returned'] for st in js['read']], 0)
            assert total_bytes == len(FILE_CONTENTS)

    assert found_upload, "Failed to find the file we uploaded in the status-page"
    assert found_download, "Failed to find the file we downloaded in the status-page"


@pytest_twisted.ensureDeferred
async def test_directory_deep_check(reactor, request, alice):
    """
    use deep-check and confirm the result pages work
    """
    # Make sure the node is configured compatibly with expectations of this
    # test.
    happy = 3
    required = 2
    total = 4
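    # That is: split every file into 4 shares such that any 2 suffice to
    # reconstruct it, and consider an upload healthy only when shares land
    # on at least 3 distinct servers.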

    await alice.reconfigure_zfec(reactor, (happy, required, total), convergence=None)
    await deferToThread(_test_directory_deep_check_blocking, alice)


def _test_directory_deep_check_blocking(alice):
    # create a directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
            u"redirect_to_result": u"true",
        }
    )

    # get json information about our directory
    dircap_url = resp.url
    resp = requests.get(
        dircap_url,
        params={u"t": u"json"},
    )
    # Just verify it is valid JSON.
    json.loads(resp.content)

    # upload a file of pangrams into the directory
    FILE_CONTENTS = u"Sphinx of black quartz, judge my vow.\n" * (2048*10)

    resp = requests.post(
        dircap_url,
        params={
            u"t": u"upload",
            u"upload-chk": u"upload-chk",
        },
        files={
            u"file": FILE_CONTENTS,
        }
    )
    cap0 = resp.content
    print("Uploaded data0, cap={}".format(cap0))

    # a different pangram
    FILE_CONTENTS = u"The five boxing wizards jump quickly.\n" * (2048*10)

    resp = requests.post(
        dircap_url,
        params={
            u"t": u"upload",
            u"upload-chk": u"upload-chk",
        },
        files={
            u"file": FILE_CONTENTS,
        }
    )
    cap1 = resp.content
    print("Uploaded data1, cap={}".format(cap1))

    resp = requests.get(
        util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap0))),
        params={u"t": u"info"},
    )

    def check_repair_data(checkdata):
        assert checkdata["healthy"]
        assert checkdata["count-happiness"] == 4
        assert checkdata["count-good-share-hosts"] == 4
        assert checkdata["count-shares-good"] == 4
        assert checkdata["count-corrupt-shares"] == 0
        assert checkdata["list-corrupt-shares"] == []

    # do a "check" (once for HTML, then with JSON for easier asserts)
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
        }
    )
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"output": u"JSON",
        }
    )
    check_repair_data(json.loads(resp.content)["results"])

    # "check and repair"
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"repair": u"true",
        }
    )
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"repair": u"true",
            u"output": u"JSON",
        }
    )
    check_repair_data(json.loads(resp.content)["post-repair-results"]["results"])

    # start a "deep check and repair"
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"start-deep-check",
            u"return_to": u".",
            u"verify": u"on",
            u"repair": u"on",
            u"output": u"JSON",
            u"ophandle": u"deadbeef",
        }
    )
    deepcheck_uri = resp.url

    data = json.loads(resp.content)
    tries = 10
    while not data['finished'] and tries > 0:
        tries -= 1
        time.sleep(0.5)
        print("deep-check not finished, reloading")
        resp = requests.get(deepcheck_uri, params={u"output": "JSON"})
        data = json.loads(resp.content)
    print("deep-check finished")
    assert data[u"stats"][u"count-immutable-files"] == 1
    assert data[u"stats"][u"count-literal-files"] == 0
    assert data[u"stats"][u"largest-immutable-file"] == 778240
    assert data[u"count-objects-checked"] == 2

    # also get the HTML version
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"start-deep-check",
            u"return_to": u".",
            u"verify": u"on",
            u"repair": u"on",
            u"ophandle": u"definitely_random",
        }
    )
    deepcheck_uri = resp.url

    # if the operation isn't done, there's an <H2> tag with the
    # reload link; otherwise there's only an <H1> tag .. wait up to 5
    # seconds for this to respond properly.
    for _ in range(5):
        resp = requests.get(deepcheck_uri)
        dom = BeautifulSoup(resp.content, "html5lib")
        if dom.h1 and u'Results' in str(dom.h1.string):
            break
        if dom.h2 and dom.h2.a and u"Reload" in str(dom.h2.a.string):
            dom = None
            time.sleep(1)
    assert dom is not None, "Operation never completed"


@run_in_thread
def test_storage_info(storage_nodes):
    """
    retrieve and confirm /storage URI for one storage node
    """
    storage0 = storage_nodes[0]

    requests.get(
        util.node_url(storage0.process.node_dir, u"storage"),
    )


@run_in_thread
def test_storage_info_json(storage_nodes):
    """
    retrieve and confirm /storage?t=json URI for one storage node
    """
    storage0 = storage_nodes[0]

    resp = requests.get(
        util.node_url(storage0.process.node_dir, u"storage"),
        params={u"t": u"json"},
    )
    data = json.loads(resp.content)
    assert data[u"stats"][u"storage_server.reserved_space"] == 1000000000


@run_in_thread
def test_introducer_info(introducer):
    """
    retrieve and confirm /introducer URI for the introducer
    """
    resp = requests.get(
        util.node_url(introducer.process.node_dir, u""),
    )
    assert b"Introducer" in resp.content

    resp = requests.get(
        util.node_url(introducer.process.node_dir, u""),
        params={u"t": u"json"},
    )
    data = json.loads(resp.content)
    assert "announcement_summary" in data
    assert "subscription_summary" in data


@run_in_thread
def test_mkdir_with_children(alice):
    """
    create a directory using ?t=mkdir-with-children
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (we need both the read-write and read-only URIs I guess)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    # create a new directory with one file and one sub-dir (all-at-once)
    resp = util.web_post(
        alice.process, u"uri",
        params={u"t": "mkdir-with-children"},
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")
    cap = allmydata.uri.from_string(resp)
    assert isinstance(cap, allmydata.uri.DirectoryURI)


@run_in_thread
def test_mkdir_with_random_private_key(alice):
    """
    Create a new directory with ?t=mkdir&private-key=... using a
    randomly-generated RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    """

    privkey, pubkey = create_signing_keypair(2048)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")
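    # (roughly the output of, e.g.:
    #   openssl rsa -in privkey.pem -outform DER | basenc --base64url -w0
    # -- an illustrative sketch only; here it is derived in-process)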

    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir",
            u"private-key": privkey_encoded,
        },
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)


@run_in_thread
def test_mkdir_with_known_private_key(alice):
    """
    Create a new directory with ?t=mkdir&private-key=... using a
    known-in-advance RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    In addition, because the writekey and fingerprint are derived
    deterministically, given the same RSA private key, the resultant
    directory capability should always be the same.
    """
    # Randomly generated with `openssl genrsa -out privkey.pem 2048`
    privkey_pem = """-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAoa9i8v9YIzb+3yRHyXLm4j1eWK9lQc6lFwoQhik8y+joD+5A
v73OlDZAcn6vzlU72vwrJ1f4o54nEVm0rhNrhwCsiHCdxxEDEoqZ8w/19vc4hWj4
SYwGirhcnyb2ysZSV8v9Lm5HiFe5zZM4jzCzf2rzt0YRlZZj9nhSglaiHZ9BE2e0
vzOl6GePDz6yS4jbh2RsPsDQtqXNOqZwfGUd+iTsbSxXcm8+rNrT1VAbx6+1Sr0r
aDyc/jp8S1JwJ0ofJLsU3Pb6DYazFf12CNTsrKF1L0hAsbN8v2DSunZIQqQLQGfp
0hnNO9V8q9FjvVu8XY/HhgoTvtESU3vuq+BnIwIDAQABAoIBAGpWDP+/y9mtK8bZ
95SXyx10Ov6crD2xiIY0ilWR/XgmP6lqio8QaDK104D5rOpIyErnmgIQK2iAdTVG
CDyMbSWm3dIGLt5jY9/n5AQltSCtyzCCrvi/7PWC9vd9Csal1DYF5QeKY+VZvMtl
Tcduwj7EunEI1jvJYwkQbUNncsuDi+88/JNwa8DJp1IrR4goxNflGl7mNzfq49re
lhSyezfLSTZKDa3A6sYnNFAAOy82iXZuLXCqKuwRuaiFFilB0R0/egzBSUeBwMJk
sS+SvHHXwv9HsYt4pYiiZFm8HxB4NKYtdpHpvJVJcG9vOXjewnA5YHWVDJsrBfu6
0kPgbcECgYEA0bqfX2Vc6DizwjWVn9yVlckjQNGTnwf/B9eGW2MgTn6YADe0yjFm
KCtr34hEZc/hv3kBnoLOqSvZJiser8ve3SmwxfmpjEfJdIgA5J5DbCEGBiDm9PMy
0lYsfjykzYykehdasb8f4xd+SPMuTC/CFb1MCTlohex7qn7Xt9IskBECgYEAxVtF
iXwFJPQUil2bSFGnxtaI/8ijypLOkP3CyuVnEcbMt74jDt1hdooRxjQ9VVlg7r7i
EvebPKMukWxdVcQ/38i97oB/oN7MIH0QBCDWTdTQokuNQSEknGLouj6YtLAWRcyJ
9DDENSaGtP42le5dD60hZc732jN09fGxNa6gN/MCgYB5ux98CGJ3q0mzBNUW17q/
GOLsYXiUitidHZyveIas6M+i+LJn1WpdEG7pbLd+fL2kHEEzVutKx9efTtHd6bAu
oF8pWfLuKFCm4bXa/H1XyocrkXdcX7h0222xy9NAN0zUTK/okW2Zqu4yu2t47xNw
+NGkXPztFsjkugDNgiE5cQKBgQDDy/BqHPORnOIAACw9jF1SpKcYdPsiz5FGQawO
1ZbzCPMzW9y2M6YtD3/gzxUGZv0G/7OUs7h8aTybJBJZM7FXGHZud2ent0J2/Px1
zAow/3DZgvEp63LCAFL5635ezM/cAbff3r3aKVW9nPOUvf3vvokC01oMTb68/kMc
ihoERwKBgFsoRUrgGPSfG1UZt8BpIXbG/8qfoy/Vy77BRqvJ6ZpdM9RPqdAl7Sih
cdqfxs8w0NVvj+gvM/1CGO0J9lZW2f1J81haIoyUpiITFdoyzLKXLhMSbaF4Y7Hn
yC/N5w3cCLa2LLKoLG8hagFDlXBGSmpT1zgKBk4YxNn6CLdMSzPR
-----END RSA PRIVATE KEY-----
"""

    privkey = load_pem_private_key(
        privkey_pem.encode("ascii"), password=None
    )
    pubkey = privkey.public_key()

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir",
            u"private-key": privkey_encoded,
        },
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)

    assert resp == b"URI:DIR2:3oo7j7f7qqxnet2z2lf57ucup4:cpktmsxlqnd5yeekytxjxvff5e6d6fv7py6rftugcndvss7tzd2a"


@run_in_thread
def test_mkdir_with_children_and_random_private_key(alice):
    """
    Create a new directory with ?t=mkdir-with-children&private-key=...
    using a randomly-generated RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (we need both the read-write and read-only URIs I guess)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    privkey, pubkey = create_signing_keypair(2048)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    # create a new directory with one file and one sub-dir (all-at-once)
    # with the supplied RSA private key
    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir-with-children",
            u"private-key": privkey_encoded,
        },
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)


@run_in_thread
def test_mkdir_with_children_and_known_private_key(alice):
    """
    Create a new directory with ?t=mkdir-with-children&private-key=...
    using a known-in-advance RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    In addition, because the writekey and fingerprint are derived
    deterministically, given the same RSA private key, the resultant
    directory capability should always be the same.
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (we need both the read-write and read-only URIs I guess)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    # Randomly generated with `openssl genrsa -out privkey.pem 2048`
    privkey_pem = """-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEA2PL5Ry2BGuuUtRJa20WS0fwBOqVIVSXDVuSvZFYTT1Xji19J
q+ohHcFnIIYHAq0zQG+NgNjK5rogY/5TfbwIhfwLufleeAdL9jXTfxan0o/wwFA1
DAIHcYsTEYI2dfQe4acOLFY6/Hh6iXCbHvSzzUnEmYkgwCAZvc0v/lD8pMnz/6gQ
2nJnAASfFovcAvfr1T+MZzLJGQem3f2IFp1frurQyFmzFRtZMO5B9PDSsFG4yJVf
cz0iSP8wlc9QydImmJGRvu4xEOkx/55B/XaUdb6CIGpCTkLsDOlImvZt9UHDSgXq
qcE/T7SYMIXqbep64tJw9enjomH+n1KVh9UA2wIDAQABAoIBABCSTrQ/J5N010EV
i9cf810S0M03/tRyM/+ZLESPxp3Sw7TLrIbzNWBee5AibLqpnDaZzsc+yBDjusGo
lZwPFt+VJxgnki288PJ3nhYhFuSglhU6izLFnOfxZZ16wsozwYAfEJgWZh8O3N1O
uqqcqndN4TSRIu1KBm1XFQlqCkJT/stzYjO4k1vhgZT4pqhYRdx7q7FAap4v+sNs
Svhm1blvOXlyeumAbFBdGFttpTxIOGRzI1bp00jcLK4rgssTTxNyEiVu4oJhQY/k
0CptSUzpGio8DZ0/8bNnKCkw8YATUWJZQgSmKraRwAYMMR/SZa7WqjEc2KRTj6xQ
pHmYwZECgYEA700a/7ur8+EwTSulLgDveAOtTV0xEbhuq6cJQgNrEp2rbFqie6FX
g/YJKzEpEnUvj/yOzhEcw3CdQDUaxndlqY87QIhUWMcsnfMPsM1FjhmfksR8s3TF
WZNqa0RAKmcRoLohGclSvRV2OVU8+10mLUwJfR86Nl5+auR3LxWLyB8CgYEA6BaR
r+Z7oTlgkdEDVhnQ58Msktv58y28N+VIbYS79bV01jqUUlogm5uTvdvq5nyENXHx
gnK88mVzWYBMk83D01HlOC5DhpspTVEQQG2V/If6KZa56mxiHP3Mab9jLew9w/kA
g6l/04ATSA8g4i2H/Bz0eEyPEBt6o/+SO0Xv38UCgYEAyTTLvrrNmgF922UXPdcL
gp2U2bfBymSIqUuJPTgij0SDHlgWxlyieRImI2ryXdKqayav7BP3W10U2yfLm5RI
pokICPqX8Q2HNkdoqf/uu8xPn9gWAc3tIaQRlp+MVBrVd48IxeXA67tf7FT/MVrg
/rUwRUQ8bfqF0NrIW46COYECgYAYDJamGoT/DNoD4hutZVlvWpsY0LCS0U9qn1ik
+Jcde+MSe9l4uxwb48AocUxi+84bV6ZF9Su9FmQghxnoSu8ay6ar7qdSoGtkNp0v
f+uF0nVKr/Kt5vM3u9jdsFZPoOY5k2jJO9wiB2h4FBE9PqiTqFBw0sYUTjSkH8yA
VdvoXQKBgFqCC8Y82eVf0/ORGTgG/KhZ72WFQKHyAeryvoLuadZ6JAI6qW9U1l9P
18SMnCO+opGN5GH2Qx7gdg17KzWzTW1gnbv0QUPNnnYEJU8VYMelNuKa8tmNgFH7
inAwsxbbWoR08ai4exzbJrNrLpDRg5ih2wMtknN6D8m+EAvBC/Gj
-----END RSA PRIVATE KEY-----
"""

    privkey = load_pem_private_key(
        privkey_pem.encode("ascii"), password=None
    )
    pubkey = privkey.public_key()

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    # create a new directory with one file and one sub-dir (all-at-once)
    # with the supplied RSA private key
    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir-with-children",
            u"private-key": privkey_encoded,
        },
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)

    assert resp == b"URI:DIR2:ppwzpwrd37xi7tpribxyaa25uy:imdws47wwpzfkc5vfllo4ugspb36iit4cqps6ttuhaouc66jb2da"