Merge pull request #787 from tahoe-lafs/3387.expirer-python-3

Port allmydata.storage.expirer to Python 3

Fixes ticket:3387
Itamar Turner-Trauring, 2020-08-26 10:31:44 -04:00 (committed via GitHub)
commit 2d8851e4cc
9 changed files with 266 additions and 195 deletions

.gitignore

@@ -1,4 +1,4 @@
venv
venv*
# vim swap files
*.swp

newsfragments/3387.minor (new empty file)

allmydata/storage/expirer.py

@@ -1,3 +1,13 @@
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# We omit anything that might end up in pickle, just in case.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min # noqa: F401
import time, os, pickle, struct
from allmydata.storage.crawler import ShareCrawler
from allmydata.storage.shares import get_share_file
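The omission noted in the comment above matters because the expirer's crawler state is serialized with pickle: on Python 2, future's backported builtins are distinct types, and an instance that sneaks into the state file pickles as a reference to future.types rather than to a plain builtin. A minimal sketch of the hazard (editorial, not part of the patch; assumes Python 2 with the future library installed):

from future.builtins import dict as newdict  # on Python 2 this is future.types.newdict
import pickle

blob = pickle.dumps(newdict(cycle=1))
# The pickle references future.types.newdict, so it only loads where
# `future` is importable; a native dict would load anywhere.
assert b"future" in blob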

allmydata/storage/mutable.py

@@ -48,8 +48,9 @@ class MutableShareFile(object):
# our sharefiles share with a recognizable string, plus some random
# binary data to reduce the chance that a regular text file will look
# like a sharefile.
MAGIC = "Tahoe mutable container v1\n" + "\x75\x09\x44\x03\x8e"
MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
assert len(MAGIC) == 32
assert isinstance(MAGIC, bytes)
MAX_SIZE = MAX_MUTABLE_SHARE_SIZE
# TODO: decide upon a policy for max share size
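The new isinstance assertion guards the str-to-bytes change: on Python 3 the old mixed concatenation would fail outright rather than produce the 32-byte header. A quick illustration (editorial sketch, not part of the patch):

try:
    "Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
except TypeError:
    pass  # Python 3 refuses to mix str and bytes

MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
assert len(MAGIC) == 32          # the container layout depends on this
assert isinstance(MAGIC, bytes)  # what a binary-mode file.write() needs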
@@ -86,7 +87,7 @@ class MutableShareFile(object):
self.MAGIC, my_nodeid, write_enabler,
data_length, extra_lease_offset,
)
leases = ("\x00" * self.LEASE_SIZE) * 4
leases = (b"\x00" * self.LEASE_SIZE) * 4
f.write(header + leases)
# data goes here, empty after creation
f.write(struct.pack(">L", num_extra_leases))
@@ -154,7 +155,7 @@ class MutableShareFile(object):
# Zero out the old lease info (in order to minimize the chance that
# it could accidentally be exposed to a reader later, re #1528).
f.seek(old_extra_lease_offset)
f.write('\x00' * leases_size)
f.write(b'\x00' * leases_size)
f.flush()
# An interrupt here will corrupt the leases.
@@ -193,7 +194,7 @@ class MutableShareFile(object):
# Fill any newly exposed empty space with 0's.
if offset > data_length:
f.seek(self.DATA_OFFSET+data_length)
f.write('\x00'*(offset - data_length))
f.write(b'\x00'*(offset - data_length))
f.flush()
new_data_length = offset+length
@@ -325,10 +326,10 @@ class MutableShareFile(object):
modified = 0
remaining = 0
blank_lease = LeaseInfo(owner_num=0,
renew_secret="\x00"*32,
cancel_secret="\x00"*32,
renew_secret=b"\x00"*32,
cancel_secret=b"\x00"*32,
expiration_time=0,
nodeid="\x00"*20)
nodeid=b"\x00"*20)
with open(self.home, 'rb+') as f:
for (leasenum,lease) in self._enumerate_leases(f):
accepting_nodeids.add(lease.nodeid)

allmydata/test/test_storage_web.py

@@ -1,13 +1,25 @@
"""
Tests for allmydata.storage that use Web APIs.
Partially ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2, PY3
if PY2:
# Omitted `list` since it broke a test on Python 2. Shouldn't require further
# work; when we switch to Python 3 we'll be dropping this anyway.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401
import time
import os.path
import re
import json
from unittest import skipIf
from twisted.trial import unittest
@@ -18,7 +30,10 @@ from twisted.web.template import flattenString
# We need to use `nevow.inevow.IRequest` for now for compatibility
# with the code in web/common.py. Once nevow bits are gone from
# web/common.py, we can use `twisted.web.iweb.IRequest` here.
from nevow.inevow import IRequest
if PY2:
from nevow.inevow import IRequest
else:
from twisted.web.iweb import IRequest
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
@@ -36,11 +51,11 @@ from allmydata.web.storage import (
StorageStatusElement,
remove_prefix
)
from .test_storage import FakeCanary
from .common_py3 import FakeCanary
def remove_tags(s):
s = re.sub(r'<[^>]*>', ' ', s)
s = re.sub(r'\s+', ' ', s)
s = re.sub(br'<[^>]*>', b' ', s)
s = re.sub(br'\s+', b' ', s)
return s
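remove_tags() now works on bytes end to end, since flattened Twisted pages arrive as bytes on Python 3. For example (editorial sketch): tags collapse to a single space, and so do whitespace runs:

assert remove_tags(b"<p>2 shares,\n 2 buckets</p>") == b" 2 shares, 2 buckets "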
def renderSynchronously(ss):
@@ -89,8 +104,10 @@ class MyStorageServer(StorageServer):
self.bucket_counter = MyBucketCountingCrawler(self, statefile)
self.bucket_counter.setServiceParent(self)
class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
@skipIf(PY3, "Not ported yet.")
def setUp(self):
self.s = service.MultiService()
self.s.startService()
@@ -100,7 +117,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def test_bucket_counter(self):
basedir = "storage/BucketCounter/bucket_counter"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
# to make sure we capture the bucket-counting-crawler in the middle
# of a cycle, we reach in and reduce its maximum slice time to 0. We
# also make it start sooner than usual.
@@ -157,7 +174,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def test_bucket_counter_cleanup(self):
basedir = "storage/BucketCounter/bucket_counter_cleanup"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
# to make sure we capture the bucket-counting-crawler in the middle
# of a cycle, we reach in and reduce its maximum slice time to 0.
ss.bucket_counter.slow_start = 0
@@ -190,16 +207,16 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def _check2(ignored):
ss.bucket_counter.cpu_slice = orig_cpu_slice
s = ss.bucket_counter.get_state()
self.failIf(-12 in s["bucket-counts"], s["bucket-counts"].keys())
self.failIf(-12 in s["bucket-counts"], list(s["bucket-counts"].keys()))
self.failIf("bogusprefix!" in s["storage-index-samples"],
s["storage-index-samples"].keys())
list(s["storage-index-samples"].keys()))
d.addCallback(_check2)
return d
def test_bucket_counter_eta(self):
basedir = "storage/BucketCounter/bucket_counter_eta"
fileutil.make_dirs(basedir)
ss = MyStorageServer(basedir, "\x00" * 20)
ss = MyStorageServer(basedir, b"\x00" * 20)
ss.bucket_counter.slow_start = 0
# these will be fired inside finished_prefix()
hooks = ss.bucket_counter.hook_ds = [defer.Deferred() for i in range(3)]
@@ -275,27 +292,27 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def make_shares(self, ss):
def make(si):
return (si, hashutil.tagged_hash("renew", si),
hashutil.tagged_hash("cancel", si))
return (si, hashutil.tagged_hash(b"renew", si),
hashutil.tagged_hash(b"cancel", si))
def make_mutable(si):
return (si, hashutil.tagged_hash("renew", si),
hashutil.tagged_hash("cancel", si),
hashutil.tagged_hash("write-enabler", si))
return (si, hashutil.tagged_hash(b"renew", si),
hashutil.tagged_hash(b"cancel", si),
hashutil.tagged_hash(b"write-enabler", si))
def make_extra_lease(si, num):
return (hashutil.tagged_hash("renew-%d" % num, si),
hashutil.tagged_hash("cancel-%d" % num, si))
return (hashutil.tagged_hash(b"renew-%d" % num, si),
hashutil.tagged_hash(b"cancel-%d" % num, si))
immutable_si_0, rs0, cs0 = make("\x00" * 16)
immutable_si_1, rs1, cs1 = make("\x01" * 16)
immutable_si_0, rs0, cs0 = make(b"\x00" * 16)
immutable_si_1, rs1, cs1 = make(b"\x01" * 16)
rs1a, cs1a = make_extra_lease(immutable_si_1, 1)
mutable_si_2, rs2, cs2, we2 = make_mutable("\x02" * 16)
mutable_si_3, rs3, cs3, we3 = make_mutable("\x03" * 16)
mutable_si_2, rs2, cs2, we2 = make_mutable(b"\x02" * 16)
mutable_si_3, rs3, cs3, we3 = make_mutable(b"\x03" * 16)
rs3a, cs3a = make_extra_lease(mutable_si_3, 1)
sharenums = [0]
canary = FakeCanary()
# note: 'tahoe debug dump-share' will not handle this file, since the
# inner contents are not a valid CHK share
data = "\xff" * 1000
data = b"\xff" * 1000
a,w = ss.remote_allocate_buckets(immutable_si_0, rs0, cs0, sharenums,
1000, canary)
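One subtlety in make_extra_lease() above: b"renew-%d" % num relies on bytes %-formatting, which Python 3 only regained in 3.5 (PEP 461); on 3.0-3.4 it raises TypeError. A one-line check of the assumption (editorial):

assert b"renew-%d" % 1 == b"renew-1"  # needs Python 2.x or >= 3.5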
@@ -322,7 +339,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_basic(self):
basedir = "storage/LeaseCrawler/basic"
fileutil.make_dirs(basedir)
ss = InstrumentedStorageServer(basedir, "\x00" * 20)
ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -339,7 +356,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
storage_index_to_dir(immutable_si_0),
"not-a-share")
f = open(fn, "wb")
f.write("I am not a share.\n")
f.write(b"I am not a share.\n")
f.close()
# this is before the crawl has started, so we're not in a cycle yet
@@ -398,25 +415,25 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html_in_cycle(html):
s = remove_tags(html)
self.failUnlessIn("So far, this cycle has examined "
"1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
self.failUnlessIn("and has recovered: "
"0 shares, 0 buckets (0 mutable / 0 immutable), "
"0 B (0 B / 0 B)", s)
self.failUnlessIn("If expiration were enabled, "
"we would have recovered: "
"0 shares, 0 buckets (0 mutable / 0 immutable),"
" 0 B (0 B / 0 B) by now", s)
self.failUnlessIn("and the remainder of this cycle "
"would probably recover: "
"0 shares, 0 buckets (0 mutable / 0 immutable),"
" 0 B (0 B / 0 B)", s)
self.failUnlessIn("and the whole cycle would probably recover: "
"0 shares, 0 buckets (0 mutable / 0 immutable),"
" 0 B (0 B / 0 B)", s)
self.failUnlessIn("if we were strictly using each lease's default "
"31-day lease lifetime", s)
self.failUnlessIn("this cycle would be expected to recover: ", s)
self.failUnlessIn(b"So far, this cycle has examined "
b"1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
self.failUnlessIn(b"and has recovered: "
b"0 shares, 0 buckets (0 mutable / 0 immutable), "
b"0 B (0 B / 0 B)", s)
self.failUnlessIn(b"If expiration were enabled, "
b"we would have recovered: "
b"0 shares, 0 buckets (0 mutable / 0 immutable),"
b" 0 B (0 B / 0 B) by now", s)
self.failUnlessIn(b"and the remainder of this cycle "
b"would probably recover: "
b"0 shares, 0 buckets (0 mutable / 0 immutable),"
b" 0 B (0 B / 0 B)", s)
self.failUnlessIn(b"and the whole cycle would probably recover: "
b"0 shares, 0 buckets (0 mutable / 0 immutable),"
b" 0 B (0 B / 0 B)", s)
self.failUnlessIn(b"if we were strictly using each lease's default "
b"31-day lease lifetime", s)
self.failUnlessIn(b"this cycle would be expected to recover: ", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Nothing should have
@@ -473,11 +490,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("recovered: 0 shares, 0 buckets "
"(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
self.failUnlessIn("and saw a total of 4 shares, 4 buckets "
"(2 mutable / 2 immutable),", s)
self.failUnlessIn("but expiration was not enabled", s)
self.failUnlessIn(b"recovered: 0 shares, 0 buckets "
b"(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
self.failUnlessIn(b"and saw a total of 4 shares, 4 buckets "
b"(2 mutable / 2 immutable),", s)
self.failUnlessIn(b"but expiration was not enabled", s)
d.addCallback(_check_html)
d.addCallback(lambda ign: renderJSON(webstatus))
def _check_json(raw):
@@ -505,7 +522,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
# setting expiration_time to 2000 means that any lease which is more
# than 2000s old will be expired.
ss = InstrumentedStorageServer(basedir, "\x00" * 20,
ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="age",
expiration_override_lease_duration=2000)
@@ -578,11 +595,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# predictor thinks we'll have 5 shares and that we'll delete them
# all. This part of the test depends upon the SIs landing right
# where they do now.
self.failUnlessIn("The remainder of this cycle is expected to "
"recover: 4 shares, 4 buckets", s)
self.failUnlessIn("The whole cycle is expected to examine "
"5 shares in 5 buckets and to recover: "
"5 shares, 5 buckets", s)
self.failUnlessIn(b"The remainder of this cycle is expected to "
b"recover: 4 shares, 4 buckets", s)
self.failUnlessIn(b"The whole cycle is expected to examine "
b"5 shares in 5 buckets and to recover: "
b"5 shares, 5 buckets", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Two shares should
@@ -632,9 +649,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Expiration Enabled: expired leases will be removed", s)
self.failUnlessIn("Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
self.failUnlessIn(b"Expiration Enabled: expired leases will be removed", s)
self.failUnlessIn(b"Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
d.addCallback(_check_html)
return d
@@ -645,7 +662,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# is more than 2000s old will be expired.
now = time.time()
then = int(now - 2000)
ss = InstrumentedStorageServer(basedir, "\x00" * 20,
ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then)
@@ -722,11 +739,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# predictor thinks we'll have 5 shares and that we'll delete them
# all. This part of the test depends upon the SIs landing right
# where they do now.
self.failUnlessIn("The remainder of this cycle is expected to "
"recover: 4 shares, 4 buckets", s)
self.failUnlessIn("The whole cycle is expected to examine "
"5 shares in 5 buckets and to recover: "
"5 shares, 5 buckets", s)
self.failUnlessIn(b"The remainder of this cycle is expected to "
b"recover: 4 shares, 4 buckets", s)
self.failUnlessIn(b"The whole cycle is expected to examine "
b"5 shares in 5 buckets and to recover: "
b"5 shares, 5 buckets", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Two shares should
@@ -778,12 +795,13 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Expiration Enabled:"
" expired leases will be removed", s)
date = time.strftime("%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then))
substr = "Leases created or last renewed before %s will be considered expired." % date
self.failUnlessIn(b"Expiration Enabled:"
b" expired leases will be removed", s)
date = time.strftime(
u"%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then)).encode("ascii")
substr = b"Leases created or last renewed before %s will be considered expired." % date
self.failUnlessIn(substr, s)
self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
d.addCallback(_check_html)
return d
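The .encode("ascii") in _check_html() is needed because time.strftime() returns text on Python 3, while the rendered page being searched is bytes. Editorial sketch of the conversion (assumes the C locale for %b):

import time
date = time.strftime("%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(0)).encode("ascii")
assert date == b"1970-01-01 (01-Jan-1970) UTC"  # bytes, so it can be searched in bytes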
@@ -792,7 +810,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
now = time.time()
then = int(now - 2000)
ss = StorageServer(basedir, "\x00" * 20,
ss = StorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then,
@@ -840,7 +858,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("The following sharetypes will be expired: immutable.", s)
self.failUnlessIn(b"The following sharetypes will be expired: immutable.", s)
d.addCallback(_check_html)
return d
@@ -849,7 +867,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
now = time.time()
then = int(now - 2000)
ss = StorageServer(basedir, "\x00" * 20,
ss = StorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then,
@@ -897,7 +915,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("The following sharetypes will be expired: mutable.", s)
self.failUnlessIn(b"The following sharetypes will be expired: mutable.", s)
d.addCallback(_check_html)
return d
@@ -905,14 +923,14 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
basedir = "storage/LeaseCrawler/bad_mode"
fileutil.make_dirs(basedir)
e = self.failUnlessRaises(ValueError,
StorageServer, basedir, "\x00" * 20,
StorageServer, basedir, b"\x00" * 20,
expiration_mode="bogus")
self.failUnlessIn("GC mode 'bogus' must be 'age' or 'cutoff-date'", str(e))
def test_limited_history(self):
basedir = "storage/LeaseCrawler/limited_history"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -944,7 +962,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_unpredictable_future(self):
basedir = "storage/LeaseCrawler/unpredictable_future"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -1007,7 +1025,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_no_st_blocks(self):
basedir = "storage/LeaseCrawler/no_st_blocks"
fileutil.make_dirs(basedir)
ss = No_ST_BLOCKS_StorageServer(basedir, "\x00" * 20,
ss = No_ST_BLOCKS_StorageServer(basedir, b"\x00" * 20,
expiration_mode="age",
expiration_override_lease_duration=-1000)
# a negative expiration_time= means the "configured-"
@@ -1046,7 +1064,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
]
basedir = "storage/LeaseCrawler/share_corruption"
fileutil.make_dirs(basedir)
ss = InstrumentedStorageServer(basedir, "\x00" * 20)
ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
w = StorageStatus(ss)
# make it start sooner than usual.
lc = ss.lease_checker
@@ -1064,7 +1082,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fn = os.path.join(ss.sharedir, storage_index_to_dir(first), "0")
f = open(fn, "rb+")
f.seek(0)
f.write("BAD MAGIC")
f.write(b"BAD MAGIC")
f.close()
# if get_share_file() doesn't see the correct mutable magic, it
# assumes the file is an immutable share, and then
@@ -1073,7 +1091,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# UnknownImmutableContainerVersionError.
# also create an empty bucket
empty_si = base32.b2a("\x04"*16)
empty_si = base32.b2a(b"\x04"*16)
empty_bucket_dir = os.path.join(ss.sharedir,
storage_index_to_dir(empty_si))
fileutil.make_dirs(empty_bucket_dir)
@@ -1094,7 +1112,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
rec = so_far["space-recovered"]
self.failUnlessEqual(rec["examined-buckets"], 1)
self.failUnlessEqual(rec["examined-shares"], 0)
self.failUnlessEqual(so_far["corrupt-shares"], [(first_b32, 0)])
[(actual_b32, i)] = so_far["corrupt-shares"]
actual_b32 = actual_b32.encode("ascii")
self.failUnlessEqual((actual_b32, i), (first_b32, 0))
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: renderJSON(w))
@@ -1103,13 +1123,15 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# grr. json turns all dict keys into strings.
so_far = data["lease-checker"]["cycle-to-date"]
corrupt_shares = so_far["corrupt-shares"]
# it also turns all tuples into lists
self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
# it also turns all tuples into lists, and the result is unicode:
[(actual_b32, i)] = corrupt_shares
actual_b32 = actual_b32.encode("ascii")
self.failUnlessEqual([actual_b32, i], [first_b32, 0])
d.addCallback(_check_json)
d.addCallback(lambda ign: renderDeferred(w))
def _check_html(html):
s = remove_tags(html)
self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
d.addCallback(_check_html)
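The decode/encode shuffle in these checks reflects what JSON does to the crawler state: tuples come back as lists, text comes back as unicode, and bytes would not serialize at all. A minimal illustration (editorial sketch):

import json
restored = json.loads(json.dumps({"corrupt-shares": [("si-as-text", 0)]}))
assert restored["corrupt-shares"] == [["si-as-text", 0]]  # tuple -> list
# json.dumps(b"...") raises TypeError on Python 3, which is why the
# storage index ends up stored as text and must be re-encoded to compare.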
def _wait():
@@ -1122,19 +1144,22 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
rec = last["space-recovered"]
self.failUnlessEqual(rec["examined-buckets"], 5)
self.failUnlessEqual(rec["examined-shares"], 3)
self.failUnlessEqual(last["corrupt-shares"], [(first_b32, 0)])
[(actual_b32, i)] = last["corrupt-shares"]
actual_b32 = actual_b32.encode("ascii")
self.failUnlessEqual((actual_b32, i), (first_b32, 0))
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: renderJSON(w))
def _check_json_history(raw):
data = json.loads(raw)
last = data["lease-checker"]["history"]["0"]
corrupt_shares = last["corrupt-shares"]
self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
[(actual_b32, i)] = last["corrupt-shares"]
actual_b32 = actual_b32.encode("ascii")
self.failUnlessEqual([actual_b32, i], [first_b32, 0])
d.addCallback(_check_json_history)
d.addCallback(lambda ign: renderDeferred(w))
def _check_html_history(html):
s = remove_tags(html)
self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
d.addCallback(_check_html_history)
def _cleanup(res):
@@ -1147,6 +1172,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
class WebStatus(unittest.TestCase, pollmixin.PollMixin):
@skipIf(PY3, "Not ported yet.")
def setUp(self):
self.s = service.MultiService()
self.s.startService()
@@ -1161,7 +1187,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def test_status(self):
basedir = "storage/WebStatus/status"
fileutil.make_dirs(basedir)
nodeid = "\x00" * 20
nodeid = b"\x00" * 20
ss = StorageServer(basedir, nodeid)
ss.setServiceParent(self.s)
w = StorageStatus(ss, "nickname")
@@ -1195,7 +1221,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
# (test runs on all platforms).
basedir = "storage/WebStatus/status_no_disk_stats"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
@@ -1215,7 +1241,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
# show that no shares will be accepted, and get_available_space() should be 0.
basedir = "storage/WebStatus/status_bad_disk_stats"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20)
ss = StorageServer(basedir, b"\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
@@ -1235,7 +1261,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
basedir = "storage/WebStatus/status_right_disk_stats"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20, reserved_space=reserved)
ss = StorageServer(basedir, b"\x00" * 20, reserved_space=reserved)
expecteddir = ss.sharedir
def call_get_disk_stats(whichdir, reserved_space=0):
@@ -1269,7 +1295,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def test_readonly(self):
basedir = "storage/WebStatus/readonly"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20, readonly_storage=True)
ss = StorageServer(basedir, b"\x00" * 20, readonly_storage=True)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
@@ -1280,7 +1306,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def test_reserved(self):
basedir = "storage/WebStatus/reserved"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
@@ -1291,7 +1317,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def test_huge_reserved(self):
basedir = "storage/WebStatus/reserved"
fileutil.make_dirs(basedir)
ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)

allmydata/util/_python3.py

@@ -33,6 +33,7 @@ PORTED_MODULES = [
"allmydata.hashtree",
"allmydata.immutable.happiness_upload",
"allmydata.storage.crawler",
"allmydata.storage.expirer",
"allmydata.test.common_py3",
"allmydata.uri",
"allmydata.util._python3",
@@ -82,6 +83,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_python3",
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
"allmydata.test.test_storage_web", # partial, WIP
"allmydata.test.test_time_format",
"allmydata.test.test_uri",
"allmydata.test.test_util",

allmydata/web/common.py

@@ -15,11 +15,15 @@ from allmydata.interfaces import ExistingChildError, NoSuchChildError, \
EmptyPathnameComponentError, MustBeDeepImmutableError, \
MustBeReadonlyError, MustNotBeUnknownRWError, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable.common import UnrecoverableFileError
from allmydata.util import abbreviate
from allmydata.util.hashutil import timing_safe_compare
from allmydata.util.time_format import format_time, format_delta
from allmydata.util.encodingutil import to_bytes, quote_output
# Originally part of this module, so still part of its API:
from .common_py3 import ( # noqa: F401
get_arg, abbreviate_time, MultiFormatResource, WebError
)
def get_filenode_metadata(filenode):
metadata = {'mutable': filenode.is_mutable()}
@@ -104,24 +108,6 @@ def get_root(ctx_or_req):
link = "/".join([".."] * depth)
return link
def get_arg(ctx_or_req, argname, default=None, multiple=False):
"""Extract an argument from either the query args (req.args) or the form
body fields (req.fields). If multiple=False, this returns a single value
(or the default, which defaults to None), and the query args take
precedence. If multiple=True, this returns a tuple of arguments (possibly
empty), starting with all those in the query args.
"""
req = IRequest(ctx_or_req)
results = []
if argname in req.args:
results.extend(req.args[argname])
if req.fields and argname in req.fields:
results.append(req.fields[argname].value)
if multiple:
return tuple(results)
if results:
return results[0]
return default
def convert_children_json(nodemaker, children_json):
"""I convert the JSON output of GET?t=json into the dict-of-nodes input
@@ -141,20 +127,6 @@ def convert_children_json(nodemaker, children_json):
children[namex] = (childnode, metadata)
return children
def abbreviate_time(data):
# 1.23s, 790ms, 132us
if data is None:
return ""
s = float(data)
if s >= 10:
return abbreviate.abbreviate_time(data)
if s >= 1.0:
return "%.2fs" % s
if s >= 0.01:
return "%.0fms" % (1000*s)
if s >= 0.001:
return "%.1fms" % (1000*s)
return "%.0fus" % (1000000*s)
def compute_rate(bytes, seconds):
if bytes is None:
@@ -219,10 +191,6 @@ def render_time(t):
def render_time_attr(t):
return format_time(time.localtime(t))
class WebError(Exception):
def __init__(self, text, code=http.BAD_REQUEST):
self.text = text
self.code = code
# XXX: to make UnsupportedMethod return 501 NOT_IMPLEMENTED instead of 500
# Internal Server Error, we either need to do that ICanHandleException trick,
@@ -421,62 +389,6 @@ class MultiFormatPage(Page):
return lambda ctx: renderer(IRequest(ctx))
class MultiFormatResource(resource.Resource, object):
"""
``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
a number of different formats.
Rendered format is controlled by a query argument (given by
``self.formatArgument``). Different resources may support different
formats but ``json`` is a pretty common one. ``html`` is the default
format if nothing else is given as the ``formatDefault``.
"""
formatArgument = "t"
formatDefault = None
def render(self, req):
"""
Dispatch to a renderer for a particular format, as selected by a query
argument.
A renderer for the format given by the query argument matching
``formatArgument`` will be selected and invoked. render_HTML will be
used as a default if no format is selected (either by query arguments
or by ``formatDefault``).
:return: The result of the selected renderer.
"""
t = get_arg(req, self.formatArgument, self.formatDefault)
renderer = self._get_renderer(t)
return renderer(req)
def _get_renderer(self, fmt):
"""
Get the renderer for the indicated format.
:param str fmt: The format. If a method with a prefix of ``render_``
and a suffix of this format (upper-cased) is found, it will be
used.
:return: A callable which takes a twisted.web Request and renders a
response.
"""
renderer = None
if fmt is not None:
try:
renderer = getattr(self, "render_{}".format(fmt.upper()))
except AttributeError:
raise WebError(
"Unknown {} value: {!r}".format(self.formatArgument, fmt),
)
if renderer is None:
renderer = self.render_HTML
return renderer
class SlotsSequenceElement(template.Element):
"""
``SlotsSequenceElement` is a minimal port of nevow's sequence renderer for

allmydata/web/common_py3.py (new file)

@@ -0,0 +1,120 @@
"""
Common utilities that are available from Python 3.
Can eventually be merged back into allmydata.web.common.
"""
from future.utils import PY2
if PY2:
from nevow.inevow import IRequest as INevowRequest
else:
INevowRequest = None
from twisted.web import resource, http
from twisted.web.iweb import IRequest
from allmydata.util import abbreviate
class WebError(Exception):
def __init__(self, text, code=http.BAD_REQUEST):
self.text = text
self.code = code
def get_arg(ctx_or_req, argname, default=None, multiple=False):
"""Extract an argument from either the query args (req.args) or the form
body fields (req.fields). If multiple=False, this returns a single value
(or the default, which defaults to None), and the query args take
precedence. If multiple=True, this returns a tuple of arguments (possibly
empty), starting with all those in the query args.
"""
results = []
if PY2:
req = INevowRequest(ctx_or_req)
if argname in req.args:
results.extend(req.args[argname])
if req.fields and argname in req.fields:
results.append(req.fields[argname].value)
else:
req = IRequest(ctx_or_req)
if argname in req.args:
results.extend(req.args[argname])
if multiple:
return tuple(results)
if results:
return results[0]
return default
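A sketch of calling get_arg() on the Python 3 branch, using a hypothetical stub (StubRequest is not from the patch) that provides twisted.web.iweb.IRequest so the adaptation succeeds:

from zope.interface import implementer
from twisted.web.iweb import IRequest

@implementer(IRequest)
class StubRequest(object):
    args = {b"t": [b"json"]}  # Twisted request args: bytes keys, list-of-bytes values

assert get_arg(StubRequest(), b"t", default=b"html") == b"json"
assert get_arg(StubRequest(), b"missing", default=b"html") == b"html"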
class MultiFormatResource(resource.Resource, object):
"""
``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
a number of different formats.
Rendered format is controlled by a query argument (given by
``self.formatArgument``). Different resources may support different
formats but ``json`` is a pretty common one. ``html`` is the default
format if nothing else is given as the ``formatDefault``.
"""
formatArgument = "t"
formatDefault = None
def render(self, req):
"""
Dispatch to a renderer for a particular format, as selected by a query
argument.
A renderer for the format given by the query argument matching
``formatArgument`` will be selected and invoked. render_HTML will be
used as a default if no format is selected (either by query arguments
or by ``formatDefault``).
:return: The result of the selected renderer.
"""
t = get_arg(req, self.formatArgument, self.formatDefault)
renderer = self._get_renderer(t)
return renderer(req)
def _get_renderer(self, fmt):
"""
Get the renderer for the indicated format.
:param str fmt: The format. If a method with a prefix of ``render_``
and a suffix of this format (upper-cased) is found, it will be
used.
:return: A callable which takes a twisted.web Request and renders a
response.
"""
renderer = None
if fmt is not None:
try:
renderer = getattr(self, "render_{}".format(fmt.upper()))
except AttributeError:
raise WebError(
"Unknown {} value: {!r}".format(self.formatArgument, fmt),
)
if renderer is None:
renderer = self.render_HTML
return renderer
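How a resource plugs into this dispatch, as an editorial sketch (StatusResource and its bodies are hypothetical): assuming the t argument is extracted as "json", _get_renderer() resolves it to render_JSON via getattr(self, "render_JSON"); with no t argument it falls back to render_HTML.

class StatusResource(MultiFormatResource):
    def render_HTML(self, req):        # the default renderer
        return b"<html>status</html>"
    def render_JSON(self, req):        # selected by ?t=json
        return b'{"status": "ok"}'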
def abbreviate_time(data):
# 1.23s, 790ms, 132us
if data is None:
return ""
s = float(data)
if s >= 10:
return abbreviate.abbreviate_time(data)
if s >= 1.0:
return "%.2fs" % s
if s >= 0.01:
return "%.0fms" % (1000*s)
if s >= 0.001:
return "%.1fms" % (1000*s)
return "%.0fus" % (1000000*s)

allmydata/web/storage.py

@@ -8,7 +8,7 @@ from twisted.web.template import (
renderer,
renderElement
)
from allmydata.web.common import (
from allmydata.web.common_py3 import (
abbreviate_time,
MultiFormatResource
)