import sys
import time
import shutil
import json
import urllib2
from os import mkdir, unlink, utime
from os.path import join, exists, getmtime

import allmydata.uri

import util

import requests
import pytest_twisted
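
# NOTE: ``alice`` and ``storage_nodes`` are pytest fixtures, presumably
# provided by this integration suite's conftest.py; each wraps a running
# Tahoe-LAFS node and exposes (at least) a ``node_dir`` attribute that
# the ``util`` helpers below use to reach the node's web API.
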
def test_index(alice):
    """
    we can download the index file
    """
    util.web_get(alice.node_dir, u"")

    # ...and json mode is valid json
    json.loads(util.web_get(alice.node_dir, u"?t=json"))

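# NOTE: ``util.web_get`` / ``util.web_post`` are assumed here to be thin
# wrappers that read the node's root URL out of its node directory and
# return the response body.  A minimal sketch of that assumption:
#
#     def web_get(node_dir, uri_fragment, **kwargs):
#         with open(join(node_dir, "node.url")) as f:
#             base = f.read().strip()
#         return requests.get(base + uri_fragment, **kwargs).content
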
def test_upload_download(alice):
    """
    upload a file, then download it via readcap
    """
    FILE_CONTENTS = b"some contents"

    # POST the file to /uri as an MDMF (mutable) file; the response
    # body is the new file's cap
    readcap = util.web_post(
        alice.node_dir,
        u"uri",
        data={
            u"t": u"upload",
            u"format": u"mdmf",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    readcap = readcap.strip()

    # download the same file back via GET /uri?uri=<cap>
    data = util.web_get(
        alice.node_dir, u"uri",
        params={
            u"uri": readcap,
            u"filename": u"boom",
        },
    )
    assert data == FILE_CONTENTS

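# NOTE: in the GET above, ``uri`` selects which cap to read while
# ``filename`` presumably only influences the suggested filename in the
# Content-Disposition header -- it does not affect which bytes come back.
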
def test_put(alice):
    """
    use PUT to create a file
    """
    FILE_CONTENTS = b"added via PUT"

    # Note: PUT /uri stores the raw request body, and ``files=`` makes
    # requests send a multipart/form-data body -- so what actually gets
    # stored is the multipart encoding *around* FILE_CONTENTS.  That is
    # why the size asserted below is a fixed 153 bytes, and why the body
    # is big enough to yield a CHK cap rather than a tiny-file literal cap.
    resp = requests.put(
        util.node_url(alice.node_dir, u"uri"),
        files={
            u"file": FILE_CONTENTS,
        },
    )
    cap = allmydata.uri.from_string(resp.text.strip().encode('ascii'))
    assert isinstance(cap, allmydata.uri.CHKFileURI)
    assert cap.size == 153
    assert cap.total_shares == 4
    assert cap.needed_shares == 2

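# NOTE: ``needed_shares``/``total_shares`` are the erasure-coding
# parameters encoded in the CHK cap -- presumably the 2-of-4 encoding
# this integration grid is configured with, rather than Tahoe's
# 3-of-10 default.
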
def test_helper_status(storage_nodes):
    """
    successfully GET the /helper_status page
    """
    url = util.node_url(storage_nodes[0].node_dir, "helper_status")
    resp = requests.get(url)
    assert 200 <= resp.status_code < 300

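# NOTE: /helper_status reports on a node acting as an upload "helper";
# this test presumably relies on the integration grid configuring
# storage_nodes[0] so that the page is served successfully.
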
def test_deep_stats(alice):
    """
    create a directory, do deep-stats on it and prove the /operations/
    URIs work
    """
    resp = requests.post(
        util.node_url(alice.node_dir, "uri"),
        params={
            "format": "sdmf",
            "t": "mkdir",
            "redirect_to_result": "true",
        },
    )
    assert 200 <= resp.status_code < 300

    # when creating a directory, we'll be re-directed to a URL
    # containing our writecap.. (XXX doesn't this violate the "URLs
    # leak" maxim?)
    uri = urllib2.unquote(resp.url)
    assert 'URI:DIR2:' in uri
    dircap = uri[uri.find("URI:DIR2:"):].rstrip('/')
    dircap_uri = util.node_url(alice.node_dir, "uri/{}".format(urllib2.quote(dircap)))

    # POST a file into this directory
    FILE_CONTENTS = b"a file in a directory"

    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"upload",
            u"when_done": u".",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )

    # confirm the file is in the directory; t=json returns a
    # [node-type, metadata] pair for the dirnode
    resp = requests.get(
        dircap_uri,
        params={
            u"t": u"json",
        },
    )
    d = json.loads(resp.content)
    k, data = d
    assert k == u"dirnode"
    assert len(data['children']) == 1
    k, child = data['children'].values()[0]
    assert k == u"filenode"
    assert child['size'] == len(FILE_CONTENTS)

    # perform deep-stats on it...
    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"start-deep-stats",
            u"ophandle": u"something_random",
        },
    )
    assert 200 <= resp.status_code < 300

    # confirm we get information from the op .. after it's done
    while True:
        resp = requests.get(
            util.node_url(alice.node_dir, u"operations/something_random"),
        )
        d = json.loads(resp.content)
        if d['size-literal-files'] == len(FILE_CONTENTS):
            print("stats completed successfully")
            break
        else:
            print("{} != {}; waiting".format(d['size-literal-files'], len(FILE_CONTENTS)))
            time.sleep(0.5)

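# NOTE: long-running webapi operations use a *client-chosen* ophandle:
# you POST t=start-deep-stats&ophandle=X and then poll /operations/X
# until the result reflects the finished run.  A hardened version of
# this test would presumably pick a random handle and bound the polling
# loop with a timeout rather than ``while True``.
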
def test_status(alice):
    """
    confirm we get something sensible from /status and the various sub-types
    """

    # upload a file
    # (because of the nature of the integration-tests, we can only
    # assert things about "our" file because we don't know what other
    # operations may have happened in the grid before our test runs).
    FILE_CONTENTS = b"all the Important Data of alice\n" * 1200

    resp = requests.put(
        util.node_url(alice.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    cap = resp.text.strip()
    print("Uploaded data, cap={}".format(cap))

    # download it back (creating a download-status entry as a side-effect)
    resp = requests.get(
        util.node_url(alice.node_dir, u"uri/{}".format(urllib2.quote(cap))),
    )
    print("Downloaded {} bytes of data".format(len(resp.content)))
    assert resp.content == FILE_CONTENTS

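    # NOTE: twisted.web.microdom is a small DOM parser that ships with
    # Twisted; scraping the HTML /status page for its anchors is a
    # pragmatic way to find our upload/download status sub-pages.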
    # find our upload and download status pages
    from twisted.web import microdom

    resp = requests.get(
        util.node_url(alice.node_dir, "status"),
    )
    dom = microdom.parseString(resp.content)

    # so, we *could* descend the DOM "more properly" .. or just look
    # at the URIs
    hrefs = set()
    for link in dom.getElementsByTagName('a'):
        hrefs.add(link.getAttribute('href'))

    found_upload = False
    found_download = False
    for href in hrefs:
        print("href: {}".format(href))
        if href.startswith(u"/") or not href:
            continue
        resp = requests.get(
            util.node_url(alice.node_dir, u"status/{}".format(href)),
        )
        if href.startswith(u'up'):
            assert "File Upload Status" in resp.content
            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                found_upload = True
        elif href.startswith(u'down'):
            print(href)
            assert "File Download Status" in resp.content
            if "Total Size: {}".format(len(FILE_CONTENTS)) in resp.content:
                found_download = True

                # download the specialized event information
                resp = requests.get(
                    util.node_url(alice.node_dir, u"status/{}/event_json".format(href)),
                )
                js = json.loads(resp.content)
                # there's usually just one "read" operation, but this can handle many ..
                total_bytes = sum(st['bytes_returned'] for st in js['read'])
                assert total_bytes == len(FILE_CONTENTS)

    assert found_upload, "Failed to find the file we uploaded in the status-page"
    assert found_download, "Failed to find the file we downloaded in the status-page"