Merge remote-tracking branch 'origin/master' into 3382.remove-multi-format-page

Jean-Paul Calderone 2020-09-25 09:37:02 -04:00
commit dc15f19ee0
83 changed files with 874 additions and 485 deletions

.gitignore

@@ -37,6 +37,7 @@ zope.interface-*.egg
/tahoe-deps/
/tahoe-deps.tar.gz
/.coverage
/.coverage.*
/.coverage.el
/coverage-html/
/miscaptures.txt

View File

@@ -1,37 +1,53 @@
# Tahoe-LAFS development and maintenance tasks
#
# NOTE: this Makefile requires GNU make
default:
@echo "no default target"
### Defensive settings for make:
# https://tech.davis-hansson.com/p/make/
SHELL := bash
.ONESHELL:
.SHELLFLAGS := -xeu -o pipefail -c
.SILENT:
.DELETE_ON_ERROR:
MAKEFLAGS += --warn-undefined-variables
MAKEFLAGS += --no-builtin-rules
# Local target variables
PYTHON=python
export PYTHON
PYFLAKES=flake8
export PYFLAKES
SOURCES=src/allmydata static misc setup.py
APPNAME=tahoe-lafs
# Top-level, phony targets
.PHONY: default
default:
@echo "no default target"
.PHONY: test
## Run all tests and code reports
test: .tox
tox -p auto
# This is necessary only if you want to automatically produce a new
# _version.py file from the current git history (without doing a build).
.PHONY: make-version
make-version:
$(PYTHON) ./setup.py update_version
.built:
$(MAKE) build
src/allmydata/_version.py:
$(MAKE) make-version
# Build OS X pkg packages.
.PHONY: build-osx-pkg test-osx-pkg upload-osx-pkg
.PHONY: build-osx-pkg
build-osx-pkg:
misc/build_helpers/build-osx-pkg.sh $(APPNAME)
.PHONY: test-osx-pkg
test-osx-pkg:
$(PYTHON) misc/build_helpers/test-osx-pkg.py
.PHONY: upload-osx-pkg
upload-osx-pkg:
# [Failure instance: Traceback: <class 'OpenSSL.SSL.Error'>: [('SSL routines', 'ssl3_read_bytes', 'tlsv1 alert unknown ca'), ('SSL routines', 'ssl3_write_bytes', 'ssl handshake failure')]
#
@@ -42,29 +58,12 @@ upload-osx-pkg:
# echo not uploading tahoe-lafs-osx-pkg because this is not trunk but is branch \"${BB_BRANCH}\" ; \
# fi
# code coverage-based testing is disabled temporarily, as we switch to tox.
# This will eventually be added to a tox environment. The following comments
# and variable settings are retained as notes for that future effort.
## # code coverage: install the "coverage" package from PyPI, do "make
## # test-coverage" to do a unit test run with coverage-gathering enabled, then
## # use "make coverage-output" to generate an HTML report. Also see "make
## # .coverage.el" and misc/coding_tools/coverage.el for Emacs integration.
##
## # This might need to be python-coverage on Debian-based distros.
## COVERAGE=coverage
##
## COVERAGEARGS=--branch --source=src/allmydata
##
## # --include appeared in coverage-3.4
## COVERAGE_OMIT=--include '$(CURDIR)/src/allmydata/*' --omit '$(CURDIR)/src/allmydata/test/*'
.PHONY: code-checks
#code-checks: build version-and-path check-interfaces check-miscaptures -find-trailing-spaces -check-umids pyflakes
code-checks: check-interfaces check-debugging check-miscaptures -find-trailing-spaces -check-umids pyflakes
.PHONY: check-interfaces
check-interfaces:
$(PYTHON) misc/coding_tools/check-interfaces.py 2>&1 |tee violations.txt
@echo
@@ -188,6 +187,7 @@ distclean: clean
rm -rf src/*.egg-info
rm -f src/allmydata/_version.py
rm -f src/allmydata/_appname.py
rm -rf ./.tox/
.PHONY: find-trailing-spaces
@@ -220,3 +220,12 @@ tarballs: # delegated to tox, so setup.py can update setuptools if needed
.PHONY: upload-tarballs
upload-tarballs:
@if [ "X${BB_BRANCH}" = "Xmaster" ] || [ "X${BB_BRANCH}" = "X" ]; then for f in dist/*; do flappclient --furlfile ~/.tahoe-tarball-upload.furl upload-file $$f; done ; else echo not uploading tarballs because this is not trunk but is branch \"${BB_BRANCH}\" ; fi
# Real targets
src/allmydata/_version.py:
$(MAKE) make-version
.tox: tox.ini setup.py
tox --notest -p all

View File

@@ -156,6 +156,6 @@ for pkg in sorted(platform_independent_pkgs):
print('</table>')
# The document does validate, but not when it is included at the bottom of a directory listing.
#print '<hr>'
#print '<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>'
#print('<hr>')
#print('<a href="http://validator.w3.org/check?uri=referer" target="_blank"><img border="0" src="http://www.w3.org/Icons/valid-html401-blue" alt="Valid HTML 4.01 Transitional" height="31" width="88"></a>')
print('</body></html>')

View File

@@ -1,5 +1,7 @@
#! /usr/bin/python
from __future__ import print_function
import math
from allmydata.util import statistics
from numpy import array, matrix, dot
@@ -72,11 +74,11 @@ class ReliabilityModel(object):
repair = self.build_repair_matrix(k, N, R)
#print "DECAY:", decay
#print "OLD-POST-REPAIR:", old_post_repair
#print "NEW-POST-REPAIR:", decay * repair
#print "REPAIR:", repair
#print "DIFF:", (old_post_repair - decay * repair)
#print("DECAY:", decay)
#print("OLD-POST-REPAIR:", old_post_repair)
#print("NEW-POST-REPAIR:", decay * repair)
#print("REPAIR:", repair)
#print("DIFF:", (old_post_repair - decay * repair))
START = array([0]*N + [1])
DEAD = array([1]*k + [0]*(1+N-k))
@@ -85,9 +87,9 @@ class ReliabilityModel(object):
[N-i for i in range(k, R)] +
[0]*(1+N-R))
assert REPAIR_newshares.shape[0] == N+1
#print "START", START
#print "REPAIRp", REPAIRp
#print "REPAIR_newshares", REPAIR_newshares
#print("START", START)
#print("REPAIRp", REPAIRp)
#print("REPAIR_newshares", REPAIR_newshares)
unmaintained_state = START
maintained_state = START
@@ -141,15 +143,15 @@ class ReliabilityModel(object):
# return "%dy.%dm" % (int(seconds/YEAR), int( (seconds%YEAR)/MONTH))
#needed_repairs_total = sum(needed_repairs)
#needed_new_shares_total = sum(needed_new_shares)
#print "at 2y:"
#print " unmaintained", unmaintained_state
#print " maintained", maintained_state
#print " number of repairs", needed_repairs_total
#print " new shares generated", needed_new_shares_total
#print("at 2y:")
#print(" unmaintained", unmaintained_state)
#print(" maintained", maintained_state)
#print(" number of repairs", needed_repairs_total)
#print(" new shares generated", needed_new_shares_total)
#repair_rate_inv = report_span / needed_repairs_total
#print " avg repair rate: once every %s" % yandm(repair_rate_inv)
#print " avg repair download: one share every %s" % yandm(repair_rate_inv/k)
#print " avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total)
#print(" avg repair rate: once every %s" % yandm(repair_rate_inv))
#print(" avg repair download: one share every %s" % yandm(repair_rate_inv/k))
#print(" avg repair upload: one share every %s" % yandm(report_span / needed_new_shares_total))
return report
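
# A minimal sketch of the pattern above: with print_function imported, the
# Python 2 statement form no longer compiles, so even commented-out prints
# are rewritten as calls, which behave the same on Python 2 and 3:
from __future__ import print_function
print("DECAY:", 0.5)  # fine on both; `print "DECAY:", 0.5` would be a SyntaxError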

View File

@@ -1,3 +1,4 @@
from __future__ import print_function
import unittest
from allmydata import provisioning
@@ -99,7 +100,7 @@ class Reliability(unittest.TestCase):
self.failUnlessEqual(len(r.samples), 20)
last_row = r.samples[-1]
#print last_row
#print(last_row)
(when, unmaintained_shareprobs, maintained_shareprobs,
P_repaired_last_check_period,
cumulative_number_of_repairs,

View File

@@ -74,7 +74,7 @@ class B(object):
count += 1
inline = self.inf.readline()
# print self.stats
# print(self.stats)
benchutil.print_bench_footer(UNITS_PER_SECOND=1000000)
print("(microseconds)")

View File

@@ -89,9 +89,9 @@ def scan(root):
num_files = 0
num_dirs = 0
for absroot, dirs, files in os.walk(root):
#print absroot
#print " %d files" % len(files)
#print " %d subdirs" % len(dirs)
#print(absroot)
#print(" %d files" % len(files))
#print(" %d subdirs" % len(dirs))
num_files += len(files)
num_dirs += len(dirs)
stringsize = len(''.join(files) + ''.join(dirs))

View File

@@ -146,8 +146,8 @@ def calculate(K, K1, K2, q_max, L_hash, trees):
lg_q = lg(q_cand)
lg_pforge = [lg_px[x] + (lg_q*x - lg_K2)*q_cand for x in xrange(1, j)]
if max(lg_pforge) < -L_hash + lg(j) and lg_px[j-1] + 1.0 < -L_hash:
#print "K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f" \
# % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3)
#print("K = %d, K1 = %d, K2 = %d, L_hash = %d, lg_K2 = %.3f, q = %d, lg_pforge_1 = %.3f, lg_pforge_2 = %.3f, lg_pforge_3 = %.3f"
# % (K, K1, K2, L_hash, lg_K2, q, lg_pforge_1, lg_pforge_2, lg_pforge_3))
q = q_cand
break
@@ -268,7 +268,7 @@ def search():
trees[y] = (h, c_y, (dau, tri))
#for x in xrange(1, K_max+1):
# print x, trees[x]
# print(x, trees[x])
candidates = []
progress = 0

View File

@@ -130,8 +130,8 @@ class Ring(object):
# used is actual per-server ciphertext
usedpf = [1.0*u/numfiles for u in used]
# usedpf is actual per-server-per-file ciphertext
#print "min/max usage: %s/%s" % (abbreviate_space(used[-1]),
# abbreviate_space(used[0]))
#print("min/max usage: %s/%s" % (abbreviate_space(used[-1]),
# abbreviate_space(used[0])))
avg_usage_per_file = avg_space_per_file/len(self.servers)
# avg_usage_per_file is expected per-server-per-file ciphertext
spreadpf = usedpf[0] - usedpf[-1]
@@ -146,7 +146,7 @@ class Ring(object):
abbreviate_space(avg_usage_per_file) ), end=' ')
print("spread-pf: %s (%.2f%%)" % (
abbreviate_space(spreadpf), 100.0*spreadpf/avg_usage_per_file), end=' ')
#print "average_usage:", abbreviate_space(average_usagepf)
#print("average_usage:", abbreviate_space(average_usagepf))
print("stddev: %s (%.2f%%)" % (abbreviate_space(std_deviation),
100.0*sd_of_total))
if self.SHOW_MINMAX:
@@ -176,14 +176,14 @@ def do_run(ring, opts):
for filenum in count(0):
#used = list(reversed(sorted([s.used for s in ring.servers])))
#used = [s.used for s in ring.servers]
#print used
#print(used)
si = myhash(fileseed+str(filenum)).hexdigest()
filesize = make_up_a_file_size(si)
sharesize = filesize / opts["k"]
if filenum%4000==0 and filenum > 1:
ring.dump_usage(filenum, avg_space_per_file)
servers = ring.servers_for_si(si)
#print ring.show_servers(servers[:opts["N"]])
#print(ring.show_servers(servers[:opts["N"]]))
remaining_shares = opts["N"]
index = 0
server_was_full = False

View File

@@ -59,7 +59,7 @@ def go(permutedpeerlist):
server.full_at_tick = tick
fullservers += 1
if fullservers == len(servers):
# print "Couldn't place share -- all servers full. Stopping."
# print("Couldn't place share -- all servers full. Stopping.")
return (servers, doubled_up_shares)
i += 1

View File

@@ -96,9 +96,9 @@ class Sizes(object):
# means storing (and eventually transmitting) more hashes. This
# count includes all the low-level share hashes and the root.
hash_nodes = (num_leaves*k - 1) / (k - 1)
#print "hash_depth", d
#print "num_leaves", num_leaves
#print "hash_nodes", hash_nodes
#print("hash_depth", d)
#print("num_leaves", num_leaves)
#print("hash_nodes", hash_nodes)
# the storage overhead is this
self.share_storage_overhead = 32 * (hash_nodes - 1)
# the transmission overhead is smaller: if we actually transmit

newsfragments/3408.minor (new empty file)

newsfragments/3409.minor (new empty file)

newsfragments/3415.minor (new empty file)

newsfragments/3416.minor (new empty file)

newsfragments/3421.minor (new file)

@@ -0,0 +1 @@
Various minor cleanups and improvements to the development `./Makefile`.

newsfragments/3422.minor (new empty file)

newsfragments/3423.minor (new empty file)

newsfragments/3424.minor (new empty file)

newsfragments/3425.minor (new empty file)

newsfragments/3426.minor (new empty file)

newsfragments/3429.minor (new empty file)

newsfragments/3430.minor (new empty file)

newsfragments/3440.minor (new empty file)

View File

@@ -147,7 +147,7 @@ def _make_secret():
Returns a base32-encoded random secret of hashutil.CRYPTO_VAL_SIZE
bytes.
"""
return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + "\n"
return base32.b2a(os.urandom(hashutil.CRYPTO_VAL_SIZE)) + b"\n"
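# A sketch of why the b"\n" suffix above matters on Python 3, where bytes
# and text no longer mix:
#     b"secret" + "\n"   # TypeError on Python 3
#     b"secret" + b"\n"  # works on Python 2 and 3
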
class SecretHolder(object):
@@ -739,12 +739,12 @@ class _Client(node.Node, pollmixin.PollMixin):
# existing key
def _make_key():
private_key, _ = ed25519.create_signing_keypair()
return ed25519.string_from_signing_key(private_key) + "\n"
return ed25519.string_from_signing_key(private_key) + b"\n"
private_key_str = self.config.get_or_create_private_config("node.privkey", _make_key)
private_key, public_key = ed25519.signing_keypair_from_string(private_key_str)
public_key_str = ed25519.string_from_verifying_key(public_key)
self.config.write_config_file("node.pubkey", public_key_str + "\n", "w")
self.config.write_config_file("node.pubkey", public_key_str + b"\n", "wb")
self._node_private_key = private_key
self._node_public_key = public_key
@@ -971,7 +971,7 @@ class _Client(node.Node, pollmixin.PollMixin):
"""
self.config.write_private_config(
'api_auth_token',
urlsafe_b64encode(os.urandom(32)) + '\n',
urlsafe_b64encode(os.urandom(32)) + b'\n',
)
def get_storage_broker(self):
@@ -1021,7 +1021,7 @@ class _Client(node.Node, pollmixin.PollMixin):
c = ControlServer()
c.setServiceParent(self)
control_url = self.control_tub.registerReference(c)
self.config.write_private_config("control.furl", control_url + "\n")
self.config.write_private_config("control.furl", control_url + b"\n")
def init_helper(self):
self.helper = Helper(self.config.get_config_path("helper"),

View File

@@ -117,7 +117,7 @@ class ValidatedExtendedURIProxy(object):
# Next: things that are optional and not redundant: crypttext_hash
if d.has_key('crypttext_hash'):
if 'crypttext_hash' in d:
self.crypttext_hash = d['crypttext_hash']
if len(self.crypttext_hash) != CRYPTO_VAL_SIZE:
raise BadURIExtension('crypttext_hash is required to be hashutil.CRYPTO_VAL_SIZE bytes, not %s bytes' % (len(self.crypttext_hash),))
@@ -126,11 +126,11 @@ class ValidatedExtendedURIProxy(object):
# Next: things that are optional, redundant, and required to be
# consistent: codec_name, codec_params, tail_codec_params,
# num_segments, size, needed_shares, total_shares
if d.has_key('codec_name'):
if d['codec_name'] != "crs":
if 'codec_name' in d:
if d['codec_name'] != b"crs":
raise UnsupportedErasureCodec(d['codec_name'])
if d.has_key('codec_params'):
if 'codec_params' in d:
ucpss, ucpns, ucpts = codec.parse_params(d['codec_params'])
if ucpss != self.segment_size:
raise BadURIExtension("inconsistent erasure code params: "
@@ -145,7 +145,7 @@ class ValidatedExtendedURIProxy(object):
"self._verifycap.total_shares: %s" %
(ucpts, self._verifycap.total_shares))
if d.has_key('tail_codec_params'):
if 'tail_codec_params' in d:
utcpss, utcpns, utcpts = codec.parse_params(d['tail_codec_params'])
if utcpss != self.tail_segment_size:
raise BadURIExtension("inconsistent erasure code params: utcpss: %s != "
@@ -162,7 +162,7 @@ class ValidatedExtendedURIProxy(object):
"self._verifycap.total_shares: %s" % (utcpts,
self._verifycap.total_shares))
if d.has_key('num_segments'):
if 'num_segments' in d:
if d['num_segments'] != self.num_segments:
raise BadURIExtension("inconsistent num_segments: size: %s, "
"segment_size: %s, computed_num_segments: %s, "
@@ -170,18 +170,18 @@ class ValidatedExtendedURIProxy(object):
self.segment_size,
self.num_segments, d['num_segments']))
if d.has_key('size'):
if 'size' in d:
if d['size'] != self._verifycap.size:
raise BadURIExtension("inconsistent size: URI size: %s, UEB size: %s" %
(self._verifycap.size, d['size']))
if d.has_key('needed_shares'):
if 'needed_shares' in d:
if d['needed_shares'] != self._verifycap.needed_shares:
raise BadURIExtension("inconsistent needed shares: URI needed shares: %s, UEB "
"needed shares: %s" % (self._verifycap.total_shares,
d['needed_shares']))
if d.has_key('total_shares'):
if 'total_shares' in d:
if d['total_shares'] != self._verifycap.total_shares:
raise BadURIExtension("inconsistent total shares: URI total shares: %s, UEB "
"total shares: %s" % (self._verifycap.total_shares,
@@ -428,7 +428,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
self.log(" sharehashes:\n" + "\n".join(lines) + "\n")
lines = []
for i,h in blockhashes.items():
for i,h in list(blockhashes.items()):
lines.append("%3d: %s" % (i, base32.b2a_or_none(h)))
log.msg(" blockhashes:\n" + "\n".join(lines) + "\n")
raise BadOrMissingHash(le)
@@ -695,7 +695,7 @@ class Checker(log.PrefixingLogMixin):
bucketdict, success = result
shareverds = []
for (sharenum, bucket) in bucketdict.items():
for (sharenum, bucket) in list(bucketdict.items()):
d = self._download_and_verify(s, sharenum, bucket)
shareverds.append(d)
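
# A minimal sketch of the two changes above: dict.has_key() is gone in
# Python 3, while the "in" operator works on both, and values parsed off the
# wire now compare equal only to bytes literals:
d = {'codec_name': b"crs"}
if 'codec_name' in d:                 # replaces d.has_key('codec_name')
    assert d['codec_name'] == b"crs"  # == "crs" would be False on Python 3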

View File

@@ -1,3 +1,4 @@
from __future__ import print_function
from twisted.python.failure import Failure
from foolscap.api import eventually
@@ -100,7 +101,7 @@ class SegmentFetcher(object):
self._node.fetch_failed(self, f)
return
#print "LOOP", self._blocks.keys(), "active:", self._active_share_map, "overdue:", self._overdue_share_map, "unused:", self._shares
#print("LOOP", self._blocks.keys(), "active:", self._active_share_map, "overdue:", self._overdue_share_map, "unused:", self._shares)
# Should we send out more requests?
while len(set(self._blocks.keys())
| set(self._active_share_map.keys())

View File

@@ -106,7 +106,7 @@ class ShareFinder(object):
server = None
try:
if self._servers:
server = self._servers.next()
server = next(self._servers)
except StopIteration:
self._servers = None
@@ -175,7 +175,7 @@
shnums=shnums_s, name=server.get_name(),
level=log.NOISY, parent=lp, umid="0fcEZw")
shares = []
for shnum, bucket in buckets.iteritems():
for shnum, bucket in buckets.items():
s = self._create_share(shnum, bucket, server, dyhb_rtt)
shares.append(s)
self._deliver_shares(shares)
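
# A minimal sketch of the iterator change above: Python 3 renamed the
# .next() method to .__next__(); the builtin next() works on both versions:
servers = iter(["server-a", "server-b"])
server = next(servers)  # replaces servers.next()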

View File

@@ -353,14 +353,14 @@ class DownloadNode(object):
# each segment is turned into N blocks. All but the last are of size
# block_size, and the last is of size tail_block_size
block_size = segment_size / k
tail_block_size = tail_segment_padded / k
block_size = segment_size // k
tail_block_size = tail_segment_padded // k
return { "tail_segment_size": tail_segment_size,
"tail_segment_padded": tail_segment_padded,
"num_segments": num_segments,
"block_size": block_size,
"tail_block_size": tail_block_size,
"tail_block_size": tail_block_size
}
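# A sketch of the "//" change above: on Python 3, "/" between two ints
# yields a float, so block sizes would stop being usable as byte counts;
# "//" floors to an int on both versions:
#     1000 / 3   # 333.333... on Python 3
#     1000 // 3  # 333 on Python 2 and 3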
@@ -455,7 +455,7 @@ class DownloadNode(object):
shares = []
shareids = []
for (shareid, share) in blocks.iteritems():
for (shareid, share) in blocks.items():
assert len(share) == block_size
shareids.append(shareid)
shares.append(share)
@@ -465,7 +465,7 @@ class DownloadNode(object):
del shares
def _process(buffers):
decodetime = now() - start
segment = "".join(buffers)
segment = b"".join(buffers)
assert len(segment) == decoded_size
del buffers
if tail:

View File

@@ -85,8 +85,8 @@ class Share(object):
self._requested_blocks = [] # (segnum, set(observer2..))
v = server.get_version()
ver = v["http://allmydata.org/tahoe/protocols/storage/v1"]
self._overrun_ok = ver["tolerates-immutable-read-overrun"]
ver = v[b"http://allmydata.org/tahoe/protocols/storage/v1"]
self._overrun_ok = ver[b"tolerates-immutable-read-overrun"]
# If _overrun_ok and we guess the offsets correctly, we can get
# everything in one RTT. If _overrun_ok and we guess wrong, we might
# need two RTT (but we could get lucky and do it in one). If overrun

View File

@@ -89,7 +89,7 @@ class DownloadStatus(object):
def __init__(self, storage_index, size):
self.storage_index = storage_index
self.size = size
self.counter = self.statusid_counter.next()
self.counter = next(self.statusid_counter)
self.helper = False
self.first_timestamp = None

View File

@@ -205,7 +205,7 @@ class Encoder(object):
assert IStorageBucketWriter.providedBy(landlords[k])
self.landlords = landlords.copy()
assert isinstance(servermap, dict)
for v in servermap.itervalues():
for v in servermap.values():
assert isinstance(v, set)
self.servermap = servermap.copy()
@@ -410,7 +410,7 @@ class Encoder(object):
assert isinstance(data, (list,tuple))
if self._aborted:
raise UploadAborted()
data = "".join(data)
data = b"".join(data)
precondition(len(data) <= read_size, len(data), read_size)
if not allow_short:
precondition(len(data) == read_size, len(data), read_size)
@@ -418,7 +418,7 @@ class Encoder(object):
self._crypttext_hasher.update(data)
if allow_short and len(data) < read_size:
# padding
data += "\x00" * (read_size - len(data))
data += b"\x00" * (read_size - len(data))
encrypted_pieces = [data[i:i+input_chunk_size]
for i in range(0, len(data), input_chunk_size)]
return encrypted_pieces

View File

@@ -198,7 +198,7 @@ def _distribute_homeless_shares(mappings, homeless_shares, peers_to_shares):
available peers. If possible a share will be placed on the server it was
originally on, signifying the lease should be renewed instead.
"""
#print "mappings, homeless_shares, peers_to_shares %s %s %s" % (mappings, homeless_shares, peers_to_shares)
#print("mappings, homeless_shares, peers_to_shares %s %s %s" % (mappings, homeless_shares, peers_to_shares))
servermap_peerids = set([key for key in peers_to_shares])
servermap_shareids = set()
for key in sorted(peers_to_shares.keys()):
@@ -272,8 +272,8 @@ def _servermap_flow_graph(peers, shares, servermap):
indexedShares = []
sink_num = len(peers) + len(shares) + 1
graph.append([peer_to_index[peer] for peer in peers])
#print "share_to_index %s" % share_to_index
#print "servermap %s" % servermap
#print("share_to_index %s" % share_to_index)
#print("servermap %s" % servermap)
for peer in peers:
if peer in servermap:
for s in servermap[peer]:
@@ -386,8 +386,8 @@ def share_placement(peers, readonly_peers, shares, peers_to_shares):
new_shares = new_shares - existing_shares - used_shares
new_mappings = _calculate_mappings(new_peers, new_shares)
#print "new_peers %s" % new_peers
#print "new_mappings %s" % new_mappings
#print("new_peers %s" % new_peers)
#print("new_mappings %s" % new_mappings)
mappings = dict(list(readonly_mappings.items()) + list(existing_mappings.items()) + list(new_mappings.items()))
homeless_shares = set()
for share in mappings:

View File

@@ -1,4 +1,5 @@
from six.moves import cStringIO as StringIO
from io import BytesIO
from zope.interface import implementer
from twisted.internet import defer
from twisted.internet.interfaces import IPushProducer
@@ -104,7 +105,7 @@ class LiteralFileNode(_ImmutableFileNodeBase):
# vfs.adapters.ftp._FileToConsumerAdapter), neither of which is
# likely to be used as the target for a Tahoe download.
d = basic.FileSender().beginFileTransfer(StringIO(data), consumer)
d = basic.FileSender().beginFileTransfer(BytesIO(data), consumer)
d.addCallback(lambda lastSent: consumer)
return d

View File

@@ -1,4 +1,4 @@
from past.builtins import long
from past.builtins import long, unicode
import os, time, weakref, itertools
from zope.interface import implementer
@@ -27,7 +27,7 @@ from allmydata.interfaces import IUploadable, IUploader, IUploadResults, \
DEFAULT_MAX_SEGMENT_SIZE, IProgress, IPeerSelector
from allmydata.immutable import layout
from six.moves import cStringIO as StringIO
from io import BytesIO
from .happiness_upload import share_placement, calculate_happiness
from ..util.eliotutil import (
@@ -226,7 +226,7 @@ EXTENSION_SIZE = 1000
# this.
def pretty_print_shnum_to_servers(s):
return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.iteritems() ])
return ', '.join([ "sh%s: %s" % (k, '+'.join([idlib.shortnodeid_b2a(x) for x in v])) for k, v in s.items() ])
class ServerTracker(object):
def __init__(self, server,
@@ -283,7 +283,7 @@ class ServerTracker(object):
#log.msg("%s._got_reply(%s)" % (self, (alreadygot, buckets)))
(alreadygot, buckets) = alreadygot_and_buckets
b = {}
for sharenum, rref in buckets.iteritems():
for sharenum, rref in buckets.items():
bp = self.wbp_class(rref, self._server, self.sharesize,
self.blocksize,
self.num_segments,
@@ -299,7 +299,7 @@
I abort the remote bucket writers for all shares. This is a good idea
to conserve space on the storage server.
"""
self.abort_some_buckets(self.buckets.keys())
self.abort_some_buckets(list(self.buckets.keys()))
def abort_some_buckets(self, sharenums):
"""
@@ -352,7 +352,7 @@ class PeerSelector(object):
def get_sharemap_of_preexisting_shares(self):
preexisting = dictutil.DictOfSets()
for server, shares in self.existing_shares.iteritems():
for server, shares in self.existing_shares.items():
for share in shares:
preexisting.add(share, server)
return preexisting
@@ -419,8 +419,8 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
# 12GiB). See #439 for details.
def _get_maxsize(server):
v0 = server.get_version()
v1 = v0["http://allmydata.org/tahoe/protocols/storage/v1"]
return v1["maximum-immutable-share-size"]
v1 = v0[b"http://allmydata.org/tahoe/protocols/storage/v1"]
return v1[b"maximum-immutable-share-size"]
for server in candidate_servers:
self.peer_selector.add_peer(server.get_serverid())
@@ -700,7 +700,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
% (self, self._get_progress_message(),
pretty_print_shnum_to_servers(merged),
[', '.join([str_shareloc(k,v)
for k,v in st.buckets.iteritems()])
for k,v in st.buckets.items()])
for st in self.use_trackers],
pretty_print_shnum_to_servers(self.preexisting_shares))
self.log(msg, level=log.OPERATIONAL)
@@ -951,7 +951,7 @@ class EncryptAnUploadable(object):
self._encryptor = aes.create_encryptor(key)
storage_index = storage_index_hash(key)
assert isinstance(storage_index, str)
assert isinstance(storage_index, bytes)
# There's no point to having the SI be longer than the key, so we
# specify that it is truncated to the same 128 bits as the AES key.
assert len(storage_index) == 16 # SHA-256 truncated to 128b
@@ -1120,7 +1120,7 @@ class UploadStatus(object):
self.progress = [0.0, 0.0, 0.0]
self.active = True
self.results = None
self.counter = self.statusid_counter.next()
self.counter = next(self.statusid_counter)
self.started = time.time()
def get_started(self):
@@ -1281,7 +1281,7 @@ class CHKUploader(object):
"""
msgtempl = "set_shareholders; upload_trackers is %s, already_serverids is %s"
values = ([', '.join([str_shareloc(k,v)
for k,v in st.buckets.iteritems()])
for k,v in st.buckets.items()])
for st in upload_trackers], already_serverids)
self.log(msgtempl % values, level=log.OPERATIONAL)
# record already-present shares in self._results
@@ -1377,7 +1377,7 @@ class LiteralUploader(object):
self._progress.set_progress_total(size)
return read_this_many_bytes(uploadable, size)
d.addCallback(_got_size)
d.addCallback(lambda data: uri.LiteralFileURI("".join(data)))
d.addCallback(lambda data: uri.LiteralFileURI(b"".join(data)))
d.addCallback(lambda u: u.to_string())
d.addCallback(self._build_results)
return d
@@ -1500,7 +1500,7 @@ class AssistedUploader(object):
Returns a Deferred that will fire with the UploadResults instance.
"""
precondition(isinstance(storage_index, str), storage_index)
precondition(isinstance(storage_index, bytes), storage_index)
self._started = time.time()
eu = IEncryptedUploadable(encrypted_uploadable)
eu.set_upload_status(self._upload_status)
@@ -1653,7 +1653,7 @@ class BaseUploadable(object):
def set_default_encoding_parameters(self, default_params):
assert isinstance(default_params, dict)
for k,v in default_params.items():
precondition(isinstance(k, str), k, v)
precondition(isinstance(k, (bytes, unicode)), k, v)
precondition(isinstance(v, int), k, v)
if "k" in default_params:
self.default_encoding_param_k = default_params["k"]
@@ -1697,7 +1697,7 @@ class FileHandle(BaseUploadable):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
self._filehandle = filehandle
self._key = None
self.convergence = convergence
@@ -1773,7 +1773,7 @@ class FileName(FileHandle):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
FileHandle.__init__(self, open(filename, "rb"), convergence=convergence)
def close(self):
FileHandle.close(self)
@@ -1787,8 +1787,8 @@ class Data(FileHandle):
then the hash will be hashed together with the string in the
"convergence" argument to form the encryption key.
"""
assert convergence is None or isinstance(convergence, str), (convergence, type(convergence))
FileHandle.__init__(self, StringIO(data), convergence=convergence)
assert convergence is None or isinstance(convergence, bytes), (convergence, type(convergence))
FileHandle.__init__(self, BytesIO(data), convergence=convergence)
@implementer(IUploader)
class Uploader(service.MultiService, log.PrefixingLogMixin):
@@ -1818,7 +1818,7 @@ class Uploader(service.MultiService, log.PrefixingLogMixin):
self.log("got helper connection, getting versions")
default = { "http://allmydata.org/tahoe/protocols/helper/v1" :
{ },
"application-version": "unknown: no get_version()",
"application-version": b"unknown: no get_version()",
}
d = add_version_to_remote_reference(helper, default)
d.addCallback(self._got_versioned_helper)
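
# A minimal sketch of the dict-iteration changes above: iteritems(),
# itervalues(), and iterkeys() are gone in Python 3; items()/values()/keys()
# exist on both (lists on 2, views on 3), so they port cleanly:
buckets = {0: "writer-0", 1: "writer-1"}
for sharenum, writer in buckets.items():  # replaces buckets.iteritems()
    pass
share_list = list(buckets.keys())  # materialize when a real list is needed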

View File

@@ -362,7 +362,7 @@ class _Config(object):
if default is _None:
raise MissingConfigEntry("The required configuration file %s is missing."
% (quote_output(privname),))
if isinstance(default, basestring):
if isinstance(default, (bytes, unicode)):
value = default
else:
value = default()
@@ -375,7 +375,7 @@
return it.
"""
privname = os.path.join(self._basedir, "private", name)
with open(privname, "w") as f:
with open(privname, "wb") as f:
f.write(value)
def get_private_config(self, name, default=_None):
@@ -759,7 +759,9 @@ class Node(service.MultiService):
"""
Initialize/create a directory for temporary files.
"""
tempdir_config = self.config.get_config("node", "tempdir", "tmp").decode('utf-8')
tempdir_config = self.config.get_config("node", "tempdir", "tmp")
if isinstance(tempdir_config, bytes):
tempdir_config = tempdir_config.decode('utf-8')
tempdir = self.config.get_config_path(tempdir_config)
if not os.path.exists(tempdir):
fileutil.make_dirs(tempdir)

View File

@@ -50,8 +50,8 @@ class NodeMaker(object):
def create_from_cap(self, writecap, readcap=None, deep_immutable=False, name=u"<unknown name>"):
# this returns synchronously. It starts with a "cap string".
assert isinstance(writecap, (str, type(None))), type(writecap)
assert isinstance(readcap, (str, type(None))), type(readcap)
assert isinstance(writecap, (bytes, type(None))), type(writecap)
assert isinstance(readcap, (bytes, type(None))), type(readcap)
bigcap = writecap or readcap
if not bigcap:
@@ -63,9 +63,9 @@
# The name doesn't matter for caching since it's only used in the error
# attribute of an UnknownNode, and we don't cache those.
if deep_immutable:
memokey = "I" + bigcap
memokey = b"I" + bigcap
else:
memokey = "M" + bigcap
memokey = b"M" + bigcap
if memokey in self._node_cache:
node = self._node_cache[memokey]
else:

View File

@@ -346,7 +346,7 @@ def dump_MDMF_share(m, length, options):
print(" MDMF contents:", file=out)
print(" seqnum: %d" % seqnum, file=out)
print(" root_hash: %s" % base32.b2a(root_hash), file=out)
#print >>out, " IV: %s" % base32.b2a(IV)
#print(" IV: %s" % base32.b2a(IV), file=out)
print(" required_shares: %d" % k, file=out)
print(" total_shares: %d" % N, file=out)
print(" segsize: %d" % segsize, file=out)

View File

@@ -858,7 +858,7 @@ class Copier(object):
def progress(self, message):
#print message
#print(message)
if self.progressfunc:
self.progressfunc(message)

View File

@@ -44,7 +44,7 @@ class ManifestStreamer(LineOnlyReceiver, object):
if resp.status not in (200, 302):
print(format_http_error("ERROR", resp), file=stderr)
return 1
#print "RESP", dir(resp)
#print("RESP", dir(resp))
# use Twisted to split this into lines
self.in_error = False
while True:

View File

@@ -1,5 +1,7 @@
from __future__ import print_function
from past.builtins import unicode
import json
import os
import pprint
@@ -155,6 +157,8 @@ class StatsProvider(Referenceable, service.MultiService):
service.MultiService.startService(self)
def count(self, name, delta=1):
if isinstance(name, unicode):
name = name.encode("utf-8")
val = self.counters.setdefault(name, 0)
self.counters[name] = val + delta
@@ -170,7 +174,18 @@
return ret
def remote_get_stats(self):
return self.get_stats()
# The remote API expects keys to be bytes:
def to_bytes(d):
result = {}
for (k, v) in d.items():
if isinstance(k, unicode):
k = k.encode("utf-8")
result[k] = v
return result
stats = self.get_stats()
return {b"counters": to_bytes(stats["counters"]),
b"stats": to_bytes(stats["stats"])}
def _connected(self, gatherer, nickname):
gatherer.callRemoteOnly('provide', self, nickname or '')

View File

@@ -1,4 +1,15 @@
from future.utils import bytes_to_native_str
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2, bytes_to_native_str
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, stat, struct, time
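
# A rough sketch of what this boilerplate buys: on Python 2, future rebinds
# the named builtins so bytes, str, dict, and friends behave like their
# Python 3 counterparts, letting one codebase run on both:
from future.utils import PY2
if PY2:
    from future.builtins import bytes  # noqa: F401
assert bytes(b"abc")[0] == 97  # indexing bytes yields an int on both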

View File

@@ -1,4 +1,18 @@
from future.utils import bytes_to_native_str
"""
Ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import bytes_to_native_str, PY2
if PY2:
# Omit open() to get native behavior where open("w") always accepts native
# strings. Omit bytes so we don't leak future's custom bytes.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
import os, re, struct, time
import weakref
import six
@@ -228,16 +242,18 @@ class StorageServer(service.MultiService, Referenceable):
# We're on a platform that has no API to get disk stats.
remaining_space = 2**64
version = { "http://allmydata.org/tahoe/protocols/storage/v1" :
{ "maximum-immutable-share-size": remaining_space,
"maximum-mutable-share-size": MAX_MUTABLE_SHARE_SIZE,
"available-space": remaining_space,
"tolerates-immutable-read-overrun": True,
"delete-mutable-shares-with-zero-length-writev": True,
"fills-holes-with-zero-bytes": True,
"prevents-read-past-end-of-share-data": True,
# Unicode strings might be nicer, but for now sticking to bytes since
# this is what the wire protocol has always been.
version = { b"http://allmydata.org/tahoe/protocols/storage/v1" :
{ b"maximum-immutable-share-size": remaining_space,
b"maximum-mutable-share-size": MAX_MUTABLE_SHARE_SIZE,
b"available-space": remaining_space,
b"tolerates-immutable-read-overrun": True,
b"delete-mutable-shares-with-zero-length-writev": True,
b"fills-holes-with-zero-bytes": True,
b"prevents-read-past-end-of-share-data": True,
},
"application-version": str(allmydata.__full_version__),
b"application-version": allmydata.__full_version__.encode("utf-8"),
}
return version
@@ -671,7 +687,7 @@ class StorageServer(service.MultiService, Referenceable):
filename = os.path.join(bucketdir, sharenum_s)
msf = MutableShareFile(filename, self)
datavs[sharenum] = msf.readv(readv)
log.msg("returning shares %s" % (datavs.keys(),),
log.msg("returning shares %s" % (list(datavs.keys()),),
facility="tahoe.storage", level=log.NOISY, parent=lp)
self.add_latency("readv", time.time() - start)
return datavs
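
# A short sketch of the bytes-keyed version dict above: the wire protocol
# has always exchanged bytes, so lookups must use bytes keys as well:
v1 = {b"maximum-immutable-share-size": 2**32 - 1}
assert b"maximum-immutable-share-size" in v1  # a text key would not match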

View File

@@ -28,6 +28,7 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
#
# 6: implement other sorts of IStorageClient classes: S3, etc
from past.builtins import unicode
import re, time, hashlib
try:
@@ -489,12 +490,15 @@ class _FoolscapStorage(object):
*nickname* is optional.
"""
m = re.match(r'pb://(\w+)@', furl)
m = re.match(br'pb://(\w+)@', furl)
assert m, furl
tubid_s = m.group(1).lower()
tubid = base32.a2b(tubid_s)
if "permutation-seed-base32" in ann:
ps = base32.a2b(str(ann["permutation-seed-base32"]))
seed = ann["permutation-seed-base32"]
if isinstance(seed, unicode):
seed = seed.encode("utf-8")
ps = base32.a2b(seed)
elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
ps = base32.a2b(server_id[3:])
else:
@@ -509,7 +513,7 @@ class _FoolscapStorage(object):
assert server_id
long_description = server_id
if server_id.startswith("v0-"):
if server_id.startswith(b"v0-"):
# remove v0- prefix from abbreviated name
short_description = server_id[3:3+8]
else:
@@ -621,19 +625,19 @@
"""
VERSION_DEFAULTS = {
"http://allmydata.org/tahoe/protocols/storage/v1" :
{ "maximum-immutable-share-size": 2**32 - 1,
"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
"tolerates-immutable-read-overrun": False,
"delete-mutable-shares-with-zero-length-writev": False,
"available-space": None,
b"http://allmydata.org/tahoe/protocols/storage/v1" :
{ b"maximum-immutable-share-size": 2**32 - 1,
b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
b"tolerates-immutable-read-overrun": False,
b"delete-mutable-shares-with-zero-length-writev": False,
b"available-space": None,
},
"application-version": "unknown: no get_version()",
b"application-version": "unknown: no get_version()",
}
def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
service.MultiService.__init__(self)
assert isinstance(server_id, str)
assert isinstance(server_id, bytes)
self._server_id = server_id
self.announcement = ann
self._tub_maker = tub_maker
@@ -694,12 +698,14 @@
# Nope
pass
else:
if isinstance(furl, unicode):
furl = furl.encode("utf-8")
# See comment above for the _storage_from_foolscap_plugin case
# about passing in get_rref.
storage_server = _StorageServer(get_rref=self.get_rref)
return _FoolscapStorage.from_announcement(
self._server_id,
furl.encode("utf-8"),
furl,
ann,
storage_server,
)
@@ -767,7 +773,7 @@
version = self.get_version()
if version is None:
return None
protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', {})
protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {})
available_space = protocol_v1_version.get('available-space')
if available_space is None:
available_space = protocol_v1_version.get('maximum-immutable-share-size', None)

View File

@@ -115,21 +115,21 @@ class SystemFramework(pollmixin.PollMixin):
self.failed.raiseException()
def setUp(self):
#print "STARTING"
#print("STARTING")
self.stats = {}
self.statsfile = open(os.path.join(self.basedir, "stats.out"), "a")
self.make_introducer()
d = self.start_client()
def _record_control_furl(control_furl):
self.control_furl = control_furl
#print "OBTAINING '%s'" % (control_furl,)
#print("OBTAINING '%s'" % (control_furl,))
return self.tub.getReference(self.control_furl)
d.addCallback(_record_control_furl)
def _record_control(control_rref):
self.control_rref = control_rref
d.addCallback(_record_control)
def _ready(res):
#print "CLIENT READY"
#print("CLIENT READY")
pass
d.addCallback(_ready)
return d
@@ -430,10 +430,10 @@ this file are ignored.
return d
def do_test(self):
#print "CLIENT STARTED"
#print "FURL", self.control_furl
#print "RREF", self.control_rref
#print
#print("CLIENT STARTED")
#print("FURL", self.control_furl)
#print("RREF", self.control_rref)
#print()
kB = 1000; MB = 1000*1000
files = {}
uris = {}

View File

@@ -362,7 +362,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
# self.do_cli("deep-check", "--repair", self.rooturi))
#def _deep_check_repair_failed((rc, out, err)):
# self.failIfEqual(rc, 0)
# print err
# print(err)
# self.failUnlessIn("ERROR: UnrecoverableFileError", err)
# self.failIf("done:" in out, out)
#d.addCallback(_deep_check_repair_failed)

View File

@@ -983,7 +983,7 @@ class CopyOut(GridTestMixin, CLITestMixin, unittest.TestCase):
def do_one_test(self, case, orig_expected):
expected = set(orig_expected)
printable_expected = ",".join(sorted(expected))
#print "---", case, ":", printable_expected
#print("---", case, ":", printable_expected)
for f in orig_expected:
# f is "dir/file" or "dir/sub/file" or "dir/" or "dir/sub/"
@@ -1010,7 +1010,7 @@
# then we run various forms of "cp [-r] TAHOETHING to[/missing]"
# and see what happens.
d = defer.succeed(None)
#print
#print()
for line in COPYOUT_TESTCASES.splitlines():
if "#" in line:

View File

@@ -123,7 +123,7 @@ class ShouldFailMixin(object):
class ReallyEqualMixin(object):
def failUnlessReallyEqual(self, a, b, msg=None):
self.assertEqual(a, b, msg)
self.assertEqual(type(a), type(b), "a :: %r, b :: %r, %r" % (a, b, msg))
self.assertEqual(type(a), type(b), "a :: %r (%s), b :: %r (%s), %r" % (a, type(a), b, type(b), msg))
def skip_if_cannot_represent_filename(u):

View File

@@ -1,9 +1,26 @@
import treq
from twisted.internet import defer
from twisted.internet.defer import (
maybeDeferred,
inlineCallbacks,
returnValue,
)
from twisted.web.error import Error
@defer.inlineCallbacks
from nevow.context import WebContext
from nevow.testutil import FakeRequest
from nevow.appserver import (
processingFailed,
DefaultExceptionHandler,
)
from nevow.inevow import (
ICanHandleException,
IRequest,
IResource as INevowResource,
IData,
)
@inlineCallbacks
def do_http(method, url, **kwargs):
response = yield treq.request(method, url, persistent=False, **kwargs)
body = yield treq.content(response)
@@ -11,4 +28,35 @@ def do_http(method, url, **kwargs):
# https://github.com/twisted/treq/pull/159 has landed
if 400 <= response.code < 600:
raise Error(response.code, response=body)
defer.returnValue(body)
returnValue(body)
def render(resource, query_args):
"""
Render (in the manner of the Nevow appserver) a Nevow ``Page`` or a
Twisted ``Resource`` against a request with the given query arguments.
:param resource: The page or resource to render.
:param query_args: The query arguments to put into the request being
rendered. A mapping from ``bytes`` to ``list`` of ``bytes``.
:return Deferred: A Deferred that fires with the rendered response body as
``bytes``.
"""
ctx = WebContext(tag=resource)
req = FakeRequest(args=query_args)
ctx.remember(DefaultExceptionHandler(), ICanHandleException)
ctx.remember(req, IRequest)
ctx.remember(None, IData)
def maybe_concat(res):
if isinstance(res, bytes):
return req.v + res
return req.v
resource = INevowResource(resource)
d = maybeDeferred(resource.renderHTTP, ctx)
d.addErrback(processingFailed, req, ctx)
d.addCallback(maybe_concat)
return d
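# Example use (a sketch; status_page is hypothetical):
#     d = render(status_page, {b"t": [b"json"]})
#     d.addCallback(lambda body: json.loads(body))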

View File

@@ -108,7 +108,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
for (start,end) in gotmods]
expspans = ["%d:%d=%s" % (start,end,expected[start:end])
for (start,end) in expmods]
#print "expecting: %s" % expspans
#print("expecting: %s" % expspans)
if got != expected:
print("differences:")

View File

@@ -1,3 +1,10 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# This contains a test harness that creates a full Tahoe grid in a single
# process (actually in a single MultiService) which does not use the network.
@@ -13,6 +20,11 @@
# Tubs, so it is not useful for tests that involve a Helper or the
# control.furl .
from future.utils import PY2, PY3
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import unicode
import os
from zope.interface import implementer
from twisted.application import service
@@ -257,6 +269,11 @@ class _NoNetworkClient(_Client):
pass
#._servers will be set by the NoNetworkGrid which creates us
if PY3:
def init_web(self, *args, **kwargs):
print("Web service is temporarily disabled until nevow is gone.")
class SimpleStats(object):
def __init__(self):
self.counters = {}
@@ -323,7 +340,7 @@ class NoNetworkGrid(service.MultiService):
@defer.inlineCallbacks
def make_client(self, i, write_config=True):
clientid = hashutil.tagged_hash("clientid", str(i))[:20]
clientid = hashutil.tagged_hash(b"clientid", b"%d" % i)[:20]
clientdir = os.path.join(self.basedir, "clients",
idlib.shortnodeid_b2a(clientid))
fileutil.make_dirs(clientdir)
@@ -358,7 +375,7 @@
defer.returnValue(c)
def make_server(self, i, readonly=False):
serverid = hashutil.tagged_hash("serverid", str(i))[:20]
serverid = hashutil.tagged_hash(b"serverid", b"%d" % i)[:20]
serverdir = os.path.join(self.basedir, "servers",
idlib.shortnodeid_b2a(serverid), "storage")
fileutil.make_dirs(serverdir)
@@ -381,18 +398,18 @@
self.rebuild_serverlist()
def get_all_serverids(self):
return self.proxies_by_id.keys()
return list(self.proxies_by_id.keys())
def rebuild_serverlist(self):
self._check_clients()
self.all_servers = frozenset(self.proxies_by_id.values())
self.all_servers = frozenset(list(self.proxies_by_id.values()))
for c in self.clients:
c._servers = self.all_servers
def remove_server(self, serverid):
# it's enough to remove the server from c._servers (we don't actually
# have to detach and stopService it)
for i,ss in self.servers_by_number.items():
for i,ss in list(self.servers_by_number.items()):
if ss.my_nodeid == serverid:
del self.servers_by_number[i]
break
@@ -422,7 +439,7 @@
def nuke_from_orbit(self):
""" Empty all share directories in this grid. It's the only way to be sure ;-) """
for server in self.servers_by_number.values():
for server in list(self.servers_by_number.values()):
for prefixdir in os.listdir(server.sharedir):
if prefixdir != 'incoming':
fileutil.rm_dir(os.path.join(server.sharedir, prefixdir))
@@ -462,10 +479,12 @@ class GridTestMixin(object):
def _record_webports_and_baseurls(self):
self.g._check_clients()
self.client_webports = [c.getServiceNamed("webish").getPortnum()
for c in self.g.clients]
self.client_baseurls = [c.getServiceNamed("webish").getURL()
for c in self.g.clients]
if PY2:
# Temporarily disabled on Python 3 until Nevow is gone:
self.client_webports = [c.getServiceNamed("webish").getPortnum()
for c in self.g.clients]
self.client_baseurls = [c.getServiceNamed("webish").getURL()
for c in self.g.clients]
def get_client_config(self, i=0):
self.g._check_clients()
@@ -506,7 +525,7 @@
si = tahoe_uri.from_string(uri).get_storage_index()
prefixdir = storage_index_to_dir(si)
shares = []
for i,ss in self.g.servers_by_number.items():
for i,ss in list(self.g.servers_by_number.items()):
serverid = ss.my_nodeid
basedir = os.path.join(ss.sharedir, prefixdir)
if not os.path.exists(basedir):
@@ -527,7 +546,7 @@
return shares
def restore_all_shares(self, shares):
for sharefile, data in shares.items():
for sharefile, data in list(shares.items()):
with open(sharefile, "wb") as f:
f.write(data)

View File

@@ -1,3 +1,5 @@
from __future__ import print_function
import sys
import os.path, time
from six.moves import cStringIO as StringIO
@@ -229,7 +231,7 @@ class BackupDB(unittest.TestCase):
files = [fn for fn in listdir_unicode(unicode(basedir)) if fn.endswith(".txt")]
self.failUnlessEqual(len(files), 1)
foo_fn = os.path.join(basedir, files[0])
#print foo_fn, type(foo_fn)
#print(foo_fn, type(foo_fn))
r = bdb.check_file(foo_fn)
self.failUnlessEqual(r.was_uploaded(), False)
@@ -240,7 +242,7 @@
self.failUnlessEqual(r.should_check(), False)
bar_fn = self.writeto(u"b\u00e5r.txt", "bar.txt")
#print bar_fn, type(bar_fn)
#print(bar_fn, type(bar_fn))
r = bdb.check_file(bar_fn)
self.failUnlessEqual(r.was_uploaded(), False)

View File

@@ -691,7 +691,7 @@ class BalancingAct(GridTestMixin, unittest.TestCase):
def add_three(_, i):
# Add a new server with just share 3
self.add_server_with_share(i, self.uri, 3)
#print self._pretty_shares_chart(self.uri)
#print(self._pretty_shares_chart(self.uri))
for i in range(1,5):
d.addCallback(add_three, i)

View File

@@ -1,3 +1,4 @@
from future.utils import native_str
import os, json, urllib
from twisted.trial import unittest
@@ -945,7 +946,7 @@ class DeepCheckWebBad(DeepCheckBase, unittest.TestCase):
def _corrupt_some_shares(self, node):
for (shnum, serverid, sharefile) in self.find_uri_shares(node.get_uri()):
if shnum in (0,1):
yield run_cli("debug", "corrupt-share", sharefile)
yield run_cli("debug", "corrupt-share", native_str(sharefile))
def _delete_most_shares(self, node):
self.delete_shares_numbered(node.get_uri(), range(1,10))

View File

@@ -67,7 +67,7 @@ class DeferredUtilTests(unittest.TestCase, deferredutil.WaitForDelayedCallsMixin
Trial would report an unclean reactor error for this test.
"""
def _trigger():
#print "trigger"
#print("trigger")
pass
reactor.callLater(0.1, _trigger)

View File

@@ -1,3 +1,16 @@
"""
Ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from past.builtins import chr as byteschr, long
from zope.interface import implementer
from twisted.trial import unittest
from twisted.internet import defer
@@ -15,7 +28,7 @@ class LostPeerError(Exception):
pass
def flip_bit(good): # flips the last bit
return good[:-1] + chr(ord(good[-1]) ^ 0x01)
return good[:-1] + byteschr(ord(good[-1]) ^ 0x01)
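# A sketch of the byteschr helper above: indexing bytes yields a one-byte
# string on Python 2 but an int on Python 3, and past.builtins.chr (aliased
# to byteschr) always produces a single byte:
#     b"abc"[-1]    # b"c" on Python 2, 99 on Python 3
#     byteschr(65)  # b"A" on both; builtin chr(65) is the text "A" on 3
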
@implementer(IStorageBucketWriter, IStorageBucketReader)
class FakeBucketReaderWriterProxy(object):
@@ -158,7 +171,7 @@ class FakeBucketReaderWriterProxy(object):
def make_data(length):
data = "happy happy joy joy" * 100
data = b"happy happy joy joy" * 100
assert length <= len(data)
return data[:length]
@@ -173,32 +186,32 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
if _TMP % K != 0:
_TMP += (K - (_TMP % K))
TAIL_SEGSIZE = _TMP
_TMP = SIZE / SEGSIZE
_TMP = SIZE // SEGSIZE
if SIZE % SEGSIZE != 0:
_TMP += 1
NUM_SEGMENTS = _TMP
mindict = { 'segment_size': SEGSIZE,
'crypttext_root_hash': '0'*hashutil.CRYPTO_VAL_SIZE,
'share_root_hash': '1'*hashutil.CRYPTO_VAL_SIZE }
optional_consistent = { 'crypttext_hash': '2'*hashutil.CRYPTO_VAL_SIZE,
'codec_name': "crs",
'codec_params': "%d-%d-%d" % (SEGSIZE, K, M),
'tail_codec_params': "%d-%d-%d" % (TAIL_SEGSIZE, K, M),
'crypttext_root_hash': b'0'*hashutil.CRYPTO_VAL_SIZE,
'share_root_hash': b'1'*hashutil.CRYPTO_VAL_SIZE }
optional_consistent = { 'crypttext_hash': b'2'*hashutil.CRYPTO_VAL_SIZE,
'codec_name': b"crs",
'codec_params': b"%d-%d-%d" % (SEGSIZE, K, M),
'tail_codec_params': b"%d-%d-%d" % (TAIL_SEGSIZE, K, M),
'num_segments': NUM_SEGMENTS,
'size': SIZE,
'needed_shares': K,
'total_shares': M,
'plaintext_hash': "anything",
'plaintext_root_hash': "anything", }
'plaintext_hash': b"anything",
'plaintext_root_hash': b"anything", }
# optional_inconsistent = { 'crypttext_hash': ('2'*(hashutil.CRYPTO_VAL_SIZE-1), "", 77),
optional_inconsistent = { 'crypttext_hash': (77,),
'codec_name': ("digital fountain", ""),
'codec_params': ("%d-%d-%d" % (SEGSIZE, K-1, M),
"%d-%d-%d" % (SEGSIZE-1, K, M),
"%d-%d-%d" % (SEGSIZE, K, M-1)),
'tail_codec_params': ("%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
"%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
"%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
'codec_name': (b"digital fountain", b""),
'codec_params': (b"%d-%d-%d" % (SEGSIZE, K-1, M),
b"%d-%d-%d" % (SEGSIZE-1, K, M),
b"%d-%d-%d" % (SEGSIZE, K, M-1)),
'tail_codec_params': (b"%d-%d-%d" % (TAIL_SEGSIZE, K-1, M),
b"%d-%d-%d" % (TAIL_SEGSIZE-1, K, M),
b"%d-%d-%d" % (TAIL_SEGSIZE, K, M-1)),
'num_segments': (NUM_SEGMENTS-1,),
'size': (SIZE-1,),
'needed_shares': (K-1,),
@@ -209,7 +222,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
uebhash = hashutil.uri_extension_hash(uebstring)
fb = FakeBucketReaderWriterProxy()
fb.put_uri_extension(uebstring)
verifycap = uri.CHKFileVerifierURI(storage_index='x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
verifycap = uri.CHKFileVerifierURI(storage_index=b'x'*16, uri_extension_hash=uebhash, needed_shares=self.K, total_shares=self.M, size=self.SIZE)
vup = checker.ValidatedExtendedURIProxy(fb, verifycap)
return vup.start()
@@ -232,7 +245,7 @@ class ValidatedExtendedURIProxy(unittest.TestCase):
def test_reject_insufficient(self):
dl = []
for k in self.mindict.iterkeys():
for k in self.mindict.keys():
insuffdict = self.mindict.copy()
del insuffdict[k]
d = self._test_reject(insuffdict)
@@ -241,7 +254,7 @@
def test_accept_optional(self):
dl = []
for k in self.optional_consistent.iterkeys():
for k in self.optional_consistent.keys():
mydict = self.mindict.copy()
mydict[k] = self.optional_consistent[k]
d = self._test_accept(mydict)
@@ -250,7 +263,7 @@
def test_reject_optional(self):
dl = []
for k in self.optional_inconsistent.iterkeys():
for k in self.optional_inconsistent.keys():
for v in self.optional_inconsistent[k]:
mydict = self.mindict.copy()
mydict[k] = v
@@ -264,7 +277,7 @@ class Encode(unittest.TestCase):
data = make_data(datalen)
# force use of multiple segments
e = encode.Encoder()
u = upload.Data(data, convergence="some convergence string")
u = upload.Data(data, convergence=b"some convergence string")
u.set_default_encoding_parameters({'max_segment_size': max_segment_size,
'k': 25, 'happy': 75, 'n': 100})
eu = upload.EncryptAnUploadable(u)
@@ -294,7 +307,7 @@ class Encode(unittest.TestCase):
def _check(res):
verifycap = res
self.failUnless(isinstance(verifycap.uri_extension_hash, str))
self.failUnless(isinstance(verifycap.uri_extension_hash, bytes))
self.failUnlessEqual(len(verifycap.uri_extension_hash), 32)
for i,peer in enumerate(all_shareholders):
self.failUnless(peer.closed)
@@ -398,7 +411,7 @@ class Roundtrip(GridTestMixin, unittest.TestCase):
self.basedir = self.mktemp()
self.set_up_grid()
self.c0 = self.g.clients[0]
DATA = "p"*size
DATA = b"p"*size
d = self.upload(DATA)
d.addCallback(lambda n: download_to_data(n))
def _downloaded(newdata):

View File

@@ -182,9 +182,9 @@ class Happiness(unittest.TestCase):
# we can achieve more happiness by moving "2" or "3" to server "d"
places = happiness_upload.share_placement(peers, readonly_peers, shares, peers_to_shares)
#print "places %s" % places
#print("places %s" % places)
#places = happiness_upload.slow_share_placement(peers, readonly_peers, shares, peers_to_shares)
#print "places %s" % places
#print("places %s" % places)
happiness = happiness_upload.calculate_happiness(places)
self.assertEqual(4, happiness)

View File

@@ -57,7 +57,7 @@ class Complete(unittest.TestCase):
]
self.failUnlessEqual(list(ht.depth_first()), expected)
d = "\n" + ht.dump()
#print d
#print(d)
self.failUnless("\n 0:" in d)
self.failUnless("\n 1:" in d)
self.failUnless("\n 3:" in d)

View File

@@ -224,7 +224,7 @@ class Test(GridTestMixin, unittest.TestCase, common.ShouldFailMixin):
d.addCallback(self._download_and_check_plaintext)
def _after_download(ign):
num_reads = self._count_reads()
#print num_reads
#print(num_reads)
self.failIf(num_reads > 41, num_reads)
d.addCallback(_after_download)
return d
@@ -237,7 +237,7 @@
self.delete_shares_numbered(self.uri, range(7)))
d.addCallback(self._download_and_check_plaintext)
def _after_download(num_reads):
#print num_reads
#print(num_reads)
self.failIf(num_reads > 41, num_reads)
d.addCallback(_after_download)
return d
@@ -284,7 +284,7 @@
download_to_data, self.filenode)
def _check_numreads(ign):
num_reads = self._count_reads() - start_reads
#print num_reads
#print(num_reads)
# To pass this test, you are required to give up before
# reading all of the share data. Actually, we could give up

View File

@@ -1,5 +1,16 @@
"""
Test the NoNetworkGrid test harness.
# Test the NoNetworkGrid test harness
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from twisted.trial import unittest
from twisted.application import service
@@ -41,8 +52,8 @@ class Harness(unittest.TestCase):
g.setServiceParent(self.s)
c0 = g.clients[0]
DATA = "Data to upload" * 100
data = Data(DATA, "")
DATA = b"Data to upload" * 100
data = Data(DATA, b"")
d = c0.upload(data)
def _uploaded(res):
n = c0.create_node_from_uri(res.get_uri())
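Upload payloads and convergence secrets are now handed to upload.Data as bytes. A hypothetical guard, not part of the patch, that makes the new contract explicit:

    # Hypothetical helper, shown only to spell out the contract: uploadable
    # payloads must be bytes, not text.
    def require_bytes(value):
        if not isinstance(value, bytes):
            raise TypeError("expected bytes, got %s" % type(value).__name__)
        return value

    require_bytes(b"Data to upload" * 100)   # fine
    # require_bytes(u"Data to upload")       # would raise TypeError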

View File

@ -29,8 +29,8 @@ class Pipeline(unittest.TestCase):
return d
def failUnlessCallsAre(self, expected):
#print self.calls
#print expected
#print(self.calls)
#print(expected)
self.failUnlessEqual(len(self.calls), len(expected), self.calls)
for i,c in enumerate(self.calls):
self.failUnlessEqual(c[1:], expected[i], str(i))

View File

@ -335,7 +335,7 @@ class Verifier(GridTestMixin, unittest.TestCase, RepairTestMixin):
self.corrupt_shares_numbered(self.uri, [0], _corruptor)
results = {}
def _did_check(vr, i):
#print "corrupt %d: healthy=%s" % (i, vr.is_healthy())
#print("corrupt %d: healthy=%s" % (i, vr.is_healthy()))
results[i] = vr.is_healthy()
def _start(ign):
d = defer.succeed(None)

View File

@ -246,14 +246,14 @@ class ByteSpans(unittest.TestCase):
ns1.add(start, length); ns2.add(start, length)
return ns1, ns2
#print
#print()
for i in range(1000):
what = sha256(seed+bytes(i))
op = what[0:1]
subop = what[1:2]
start = int(what[2:4], 16)
length = max(1,int(what[5:6], 16))
#print what
#print(what)
if op in b"0":
if subop in b"01234":
s1 = S1(); s2 = S2()
@ -261,34 +261,34 @@ class ByteSpans(unittest.TestCase):
s1 = S1(start, length); s2 = S2(start, length)
else:
s1 = S1(s1); s2 = S2(s2)
#print "s2 = %s" % s2.dump()
#print("s2 = %s" % s2.dump())
elif op in b"123":
#print "s2.add(%d,%d)" % (start, length)
#print("s2.add(%d,%d)" % (start, length))
s1.add(start, length); s2.add(start, length)
elif op in b"456":
#print "s2.remove(%d,%d)" % (start, length)
#print("s2.remove(%d,%d)" % (start, length))
s1.remove(start, length); s2.remove(start, length)
elif op in b"78":
ns1, ns2 = _create(what[7:11])
#print "s2 + %s" % ns2.dump()
#print("s2 + %s" % ns2.dump())
s1 = s1 + ns1; s2 = s2 + ns2
elif op in b"9a":
ns1, ns2 = _create(what[7:11])
#print "%s - %s" % (s2.dump(), ns2.dump())
#print("%s - %s" % (s2.dump(), ns2.dump()))
s1 = s1 - ns1; s2 = s2 - ns2
elif op in b"bc":
ns1, ns2 = _create(what[7:11])
#print "s2 += %s" % ns2.dump()
#print("s2 += %s" % ns2.dump())
s1 += ns1; s2 += ns2
elif op in b"de":
ns1, ns2 = _create(what[7:11])
#print "%s -= %s" % (s2.dump(), ns2.dump())
#print("%s -= %s" % (s2.dump(), ns2.dump()))
s1 -= ns1; s2 -= ns2
else:
ns1, ns2 = _create(what[7:11])
#print "%s &= %s" % (s2.dump(), ns2.dump())
#print("%s &= %s" % (s2.dump(), ns2.dump()))
s1 = s1 & ns1; s2 = s2 & ns2
#print "s2 now %s" % s2.dump()
#print("s2 now %s" % s2.dump())
self.failUnlessEqual(list(s1.each()), list(s2.each()))
self.failUnlessEqual(s1.len(), s2.len())
self.failUnlessEqual(bool(s1), bool(s2))
@ -324,7 +324,7 @@ class ByteSpans(unittest.TestCase):
def _test_overlap(self, a, b, c, d):
s1 = set(range(a,a+b))
s2 = set(range(c,c+d))
#print "---"
#print("---")
#self._show_overlap(s1, "1")
#self._show_overlap(s2, "2")
o = overlap(a,b,c,d)
@ -580,33 +580,33 @@ class StringSpans(unittest.TestCase):
ns2.add(start, _randstr(length, what[7:9]))
return ns1, ns2
#print
#print()
for i in range(1000):
what = sha256(seed+bytes(i))
op = what[0:1]
subop = what[1:2]
start = int(what[2:4], 16)
length = max(1,int(what[5:6], 16))
#print what
#print(what)
if op in b"0":
if subop in b"0123456":
s1 = S1(); s2 = S2()
else:
s1, s2 = _create(what[7:11])
#print "s2 = %s" % list(s2._dump())
#print("s2 = %s" % list(s2._dump()))
elif op in b"123456":
#print "s2.add(%d,%d)" % (start, length)
#print("s2.add(%d,%d)" % (start, length))
s1.add(start, _randstr(length, what[7:9]));
s2.add(start, _randstr(length, what[7:9]))
elif op in b"789abc":
#print "s2.remove(%d,%d)" % (start, length)
#print("s2.remove(%d,%d)" % (start, length))
s1.remove(start, length); s2.remove(start, length)
else:
#print "s2.pop(%d,%d)" % (start, length)
#print("s2.pop(%d,%d)" % (start, length))
d1 = s1.pop(start, length); d2 = s2.pop(start, length)
self.failUnlessEqual(d1, d2)
#print "s1 now %s" % list(s1._dump())
#print "s2 now %s" % list(s2._dump())
#print("s1 now %s" % list(s1._dump()))
#print("s2 now %s" % list(s2._dump()))
self.failUnlessEqual(s1.len(), s2.len())
self.failUnlessEqual(list(s1._dump()), list(s2._dump()))
for j in range(100):

View File

@ -366,21 +366,21 @@ class Server(unittest.TestCase):
def test_declares_fixed_1528(self):
ss = self.create("test_declares_fixed_1528")
ver = ss.remote_get_version()
sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnless(sv1.get('prevents-read-past-end-of-share-data'), sv1)
sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnless(sv1.get(b'prevents-read-past-end-of-share-data'), sv1)
def test_declares_maximum_share_sizes(self):
ss = self.create("test_declares_maximum_share_sizes")
ver = ss.remote_get_version()
sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnlessIn('maximum-immutable-share-size', sv1)
self.failUnlessIn('maximum-mutable-share-size', sv1)
sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnlessIn(b'maximum-immutable-share-size', sv1)
self.failUnlessIn(b'maximum-mutable-share-size', sv1)
def test_declares_available_space(self):
ss = self.create("test_declares_available_space")
ver = ss.remote_get_version()
sv1 = ver['http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnlessIn('available-space', sv1)
sv1 = ver[b'http://allmydata.org/tahoe/protocols/storage/v1']
self.failUnlessIn(b'available-space', sv1)
def allocate(self, ss, storage_index, sharenums, size, canary=None):
renew_secret = hashutil.tagged_hash(b"blah", b"%d" % next(self._lease_secret))
@ -740,6 +740,12 @@ class Server(unittest.TestCase):
leases = list(ss.get_leases(b"si3"))
self.failUnlessEqual(len(leases), 2)
def test_have_shares(self):
"""By default the StorageServer has no shares."""
workdir = self.workdir("test_have_shares")
ss = StorageServer(workdir, b"\x00" * 20, readonly_storage=True)
self.assertFalse(ss.have_shares())
def test_readonly(self):
workdir = self.workdir("test_readonly")
ss = StorageServer(workdir, b"\x00" * 20, readonly_storage=True)
@ -974,8 +980,8 @@ class MutableServer(unittest.TestCase):
# Also see if the server explicitly declares that it supports this
# feature.
ver = ss.remote_get_version()
storage_v1_ver = ver["http://allmydata.org/tahoe/protocols/storage/v1"]
self.failUnless(storage_v1_ver.get("fills-holes-with-zero-bytes"))
storage_v1_ver = ver[b"http://allmydata.org/tahoe/protocols/storage/v1"]
self.failUnless(storage_v1_ver.get(b"fills-holes-with-zero-bytes"))
# If the size is dropped to zero the share is deleted.
answer = rstaraw(b"si1", secrets,
@ -3006,3 +3012,38 @@ class Stats(unittest.TestCase):
self.failUnless(output["get"]["95_0_percentile"] is None, output)
self.failUnless(output["get"]["99_0_percentile"] is None, output)
self.failUnless(output["get"]["99_9_percentile"] is None, output)
class ShareFileTests(unittest.TestCase):
"""Tests for allmydata.storage.immutable.ShareFile."""
def get_sharefile(self):
sf = ShareFile(self.mktemp(), max_size=1000, create=True)
sf.write_share_data(0, b"abc")
sf.write_share_data(2, b"DEF")
# Should be b'abDEF' now.
return sf
def test_read_write(self):
"""Basic writes can be read."""
sf = self.get_sharefile()
self.assertEqual(sf.read_share_data(0, 3), b"abD")
self.assertEqual(sf.read_share_data(1, 4), b"bDEF")
def test_reads_beyond_file_end(self):
"""Reads beyond the file size are truncated."""
sf = self.get_sharefile()
self.assertEqual(sf.read_share_data(0, 10), b"abDEF")
self.assertEqual(sf.read_share_data(5, 10), b"")
def test_too_large_write(self):
"""Can't do write larger than file size."""
sf = self.get_sharefile()
with self.assertRaises(DataTooLargeError):
sf.write_share_data(0, b"x" * 3000)
def test_no_leases_cancelled(self):
"""If no leases were cancelled, IndexError is raised."""
sf = self.get_sharefile()
with self.assertRaises(IndexError):
sf.cancel_lease(b"garbage")
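The new ShareFileTests pin down the read/write semantics: overlapping writes land at their offsets, and reads past the end are truncated rather than zero-padded. A rough stand-alone sketch of that behavior using a plain buffer, not the real ShareFile (which also carries a header and lease records):

    import io

    # Plain in-memory stand-in for the data region of a share file.
    f = io.BytesIO()
    f.write(b"abc")
    f.seek(2)
    f.write(b"DEF")               # overlapping write; buffer is now b"abDEF"

    def read_share_data(f, offset, length):
        f.seek(offset)
        return f.read(length)     # short read past EOF, never padded

    assert read_share_data(f, 0, 3) == b"abD"
    assert read_share_data(f, 0, 10) == b"abDEF"   # truncated at EOF
    assert read_share_data(f, 5, 10) == b""        # entirely past EOF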

View File

@ -1308,7 +1308,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
d = self.clients[1].tub.getReference(sp_furl)
d.addCallback(lambda sp_rref: sp_rref.callRemote("get_stats"))
def _got_stats(stats):
#print "STATS"
#print("STATS")
#from pprint import pprint
#pprint(stats)
s = stats["stats"]
@ -1748,7 +1748,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
return d
def log(self, res, *args, **kwargs):
# print "MSG: %s RES: %s" % (msg, args)
# print("MSG: %s RES: %s" % (msg, args))
log.msg(*args, **kwargs)
return res
@ -2647,8 +2647,8 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
## return self._run_cli(argv)
## d.addCallback(_ls_missing)
## def _check_ls_missing((out,err)):
## print "OUT", out
## print "ERR", err
## print("OUT", out)
## print("ERR", err)
## self.failUnlessEqual(err, "")
## d.addCallback(_check_ls_missing)

View File

@ -1,7 +1,20 @@
# -*- coding: utf-8 -*-
"""
Ported to Python 3.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import os, shutil
from six.moves import cStringIO as StringIO
from io import BytesIO
from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet import defer, task
@ -22,6 +35,7 @@ from allmydata.client import _Client
from .common import (
EMPTY_CLIENT_CONFIG,
)
from functools import reduce
MiB = 1024*1024
@ -33,25 +47,25 @@ class Uploadable(unittest.TestCase):
def shouldEqual(self, data, expected):
self.failUnless(isinstance(data, list))
for e in data:
self.failUnless(isinstance(e, str))
s = "".join(data)
self.failUnless(isinstance(e, bytes))
s = b"".join(data)
self.failUnlessEqual(s, expected)
def test_filehandle_random_key(self):
return self._test_filehandle(convergence=None)
def test_filehandle_convergent_encryption(self):
return self._test_filehandle(convergence="some convergence string")
return self._test_filehandle(convergence=b"some convergence string")
def _test_filehandle(self, convergence):
s = StringIO("a"*41)
s = BytesIO(b"a"*41)
u = upload.FileHandle(s, convergence=convergence)
d = u.get_size()
d.addCallback(self.failUnlessEqual, 41)
d.addCallback(lambda res: u.read(1))
d.addCallback(self.shouldEqual, "a")
d.addCallback(self.shouldEqual, b"a")
d.addCallback(lambda res: u.read(80))
d.addCallback(self.shouldEqual, "a"*40)
d.addCallback(self.shouldEqual, b"a"*40)
d.addCallback(lambda res: u.close()) # this doesn't close the filehandle
d.addCallback(lambda res: s.close()) # that privilege is reserved for us
return d
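BytesIO replaces the Python 2 cStringIO here because an uploadable's read() must yield bytes. The smallest possible demonstration of the reads this test performs:

    from io import BytesIO

    s = BytesIO(b"a" * 41)
    assert s.read(1) == b"a"        # read() returns bytes, not str
    assert s.read(80) == b"a" * 40  # short read: only 40 bytes remain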
@ -60,28 +74,28 @@ class Uploadable(unittest.TestCase):
basedir = "upload/Uploadable/test_filename"
os.makedirs(basedir)
fn = os.path.join(basedir, "file")
f = open(fn, "w")
f.write("a"*41)
f = open(fn, "wb")
f.write(b"a"*41)
f.close()
u = upload.FileName(fn, convergence=None)
d = u.get_size()
d.addCallback(self.failUnlessEqual, 41)
d.addCallback(lambda res: u.read(1))
d.addCallback(self.shouldEqual, "a")
d.addCallback(self.shouldEqual, b"a")
d.addCallback(lambda res: u.read(80))
d.addCallback(self.shouldEqual, "a"*40)
d.addCallback(self.shouldEqual, b"a"*40)
d.addCallback(lambda res: u.close())
return d
def test_data(self):
s = "a"*41
s = b"a"*41
u = upload.Data(s, convergence=None)
d = u.get_size()
d.addCallback(self.failUnlessEqual, 41)
d.addCallback(lambda res: u.read(1))
d.addCallback(self.shouldEqual, "a")
d.addCallback(self.shouldEqual, b"a")
d.addCallback(lambda res: u.read(80))
d.addCallback(self.shouldEqual, "a"*40)
d.addCallback(self.shouldEqual, b"a"*40)
d.addCallback(lambda res: u.close())
return d
@ -104,19 +118,19 @@ class FakeStorageServer(object):
self._alloc_queries = 0
self._get_queries = 0
self.version = {
"http://allmydata.org/tahoe/protocols/storage/v1" :
b"http://allmydata.org/tahoe/protocols/storage/v1" :
{
"maximum-immutable-share-size": 2**32 - 1,
b"maximum-immutable-share-size": 2**32 - 1,
},
"application-version": str(allmydata.__full_version__),
b"application-version": bytes(allmydata.__full_version__, "ascii"),
}
if mode == "small":
self.version = {
"http://allmydata.org/tahoe/protocols/storage/v1" :
b"http://allmydata.org/tahoe/protocols/storage/v1" :
{
"maximum-immutable-share-size": 10,
b"maximum-immutable-share-size": 10,
},
"application-version": str(allmydata.__full_version__),
b"application-version": bytes(allmydata.__full_version__, "ascii"),
}
@ -130,7 +144,7 @@ class FakeStorageServer(object):
def allocate_buckets(self, storage_index, renew_secret, cancel_secret,
sharenums, share_size, canary):
# print "FakeStorageServer.allocate_buckets(num=%d, size=%d, mode=%s, queries=%d)" % (len(sharenums), share_size, self.mode, self._alloc_queries)
# print("FakeStorageServer.allocate_buckets(num=%d, size=%d, mode=%s, queries=%d)" % (len(sharenums), share_size, self.mode, self._alloc_queries))
if self.mode == "timeout":
return defer.Deferred()
if self.mode == "first-fail":
@ -167,7 +181,7 @@ class FakeStorageServer(object):
class FakeBucketWriter(object):
# a diagnostic version of storageserver.BucketWriter
def __init__(self, size):
self.data = StringIO()
self.data = BytesIO()
self.closed = False
self._size = size
@ -213,10 +227,10 @@ class FakeClient(object):
def __init__(self, mode="good", num_servers=50, reactor=None):
self.num_servers = num_servers
self.encoding_params = self.DEFAULT_ENCODING_PARAMETERS.copy()
if type(mode) is str:
if isinstance(mode, str):
mode = dict([i,mode] for i in range(num_servers))
servers = [
("%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
(b"%20d" % fakeid, FakeStorageServer(mode[fakeid], reactor=reactor))
for fakeid in range(self.num_servers)
]
self.storage_broker = StorageFarmBroker(
@ -225,7 +239,7 @@ class FakeClient(object):
node_config=EMPTY_CLIENT_CONFIG,
)
for (serverid, rref) in servers:
ann = {"anonymous-storage-FURL": "pb://%s@nowhere/fake" % base32.b2a(serverid),
ann = {"anonymous-storage-FURL": b"pb://%s@nowhere/fake" % base32.b2a(serverid),
"permutation-seed-base32": base32.b2a(serverid) }
self.storage_broker.test_add_rref(serverid, rref, ann)
self.last_servers = [s[1] for s in servers]
@ -236,7 +250,7 @@ class FakeClient(object):
return self.encoding_params
def get_storage_broker(self):
return self.storage_broker
_secret_holder = client.SecretHolder("lease secret", "convergence secret")
_secret_holder = client.SecretHolder(b"lease secret", b"convergence secret")
class GotTooFarError(Exception):
pass
@ -247,7 +261,7 @@ class GiganticUploadable(upload.FileHandle):
self._fp = 0
def get_encryption_key(self):
return defer.succeed("\x00" * 16)
return defer.succeed(b"\x00" * 16)
def get_size(self):
return defer.succeed(self._size)
def read(self, length):
@ -257,11 +271,11 @@ class GiganticUploadable(upload.FileHandle):
if self._fp > 1000000:
# terminate the test early.
raise GotTooFarError("we shouldn't be allowed to get this far")
return defer.succeed(["\x00" * length])
return defer.succeed([b"\x00" * length])
def close(self):
pass
DATA = """
DATA = b"""
Once upon a time, there was a beautiful princess named Buttercup. She lived
in a magical land where every file was stored securely among millions of
machines, and nobody ever worried about their data being lost ever again.
@ -304,9 +318,9 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
def _check_large(self, newuri, size):
u = uri.from_string(newuri)
self.failUnless(isinstance(u, uri.CHKFileURI))
self.failUnless(isinstance(u.get_storage_index(), str))
self.failUnless(isinstance(u.get_storage_index(), bytes))
self.failUnlessEqual(len(u.get_storage_index()), 16)
self.failUnless(isinstance(u.key, str))
self.failUnless(isinstance(u.key, bytes))
self.failUnlessEqual(len(u.key), 16)
self.failUnlessEqual(u.size, size)
@ -367,21 +381,21 @@ class GoodServer(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
def test_filehandle_zero(self):
data = self.get_data(SIZE_ZERO)
d = upload_filehandle(self.u, StringIO(data))
d = upload_filehandle(self.u, BytesIO(data))
d.addCallback(extract_uri)
d.addCallback(self._check_small, SIZE_ZERO)
return d
def test_filehandle_small(self):
data = self.get_data(SIZE_SMALL)
d = upload_filehandle(self.u, StringIO(data))
d = upload_filehandle(self.u, BytesIO(data))
d.addCallback(extract_uri)
d.addCallback(self._check_small, SIZE_SMALL)
return d
def test_filehandle_large(self):
data = self.get_data(SIZE_LARGE)
d = upload_filehandle(self.u, StringIO(data))
d = upload_filehandle(self.u, BytesIO(data))
d.addCallback(extract_uri)
d.addCallback(self._check_large, SIZE_LARGE)
return d
@ -429,9 +443,9 @@ class ServerErrors(unittest.TestCase, ShouldFailMixin, SetDEPMixin):
def _check_large(self, newuri, size):
u = uri.from_string(newuri)
self.failUnless(isinstance(u, uri.CHKFileURI))
self.failUnless(isinstance(u.get_storage_index(), str))
self.failUnless(isinstance(u.get_storage_index(), bytes))
self.failUnlessEqual(len(u.get_storage_index()), 16)
self.failUnless(isinstance(u.key, str))
self.failUnless(isinstance(u.key, bytes))
self.failUnlessEqual(len(u.key), 16)
self.failUnlessEqual(u.size, size)
@ -599,9 +613,9 @@ class ServerSelection(unittest.TestCase):
def _check_large(self, newuri, size):
u = uri.from_string(newuri)
self.failUnless(isinstance(u, uri.CHKFileURI))
self.failUnless(isinstance(u.get_storage_index(), str))
self.failUnless(isinstance(u.get_storage_index(), bytes))
self.failUnlessEqual(len(u.get_storage_index()), 16)
self.failUnless(isinstance(u.key, str))
self.failUnless(isinstance(u.key, bytes))
self.failUnlessEqual(len(u.key), 16)
self.failUnlessEqual(u.size, size)
@ -764,40 +778,40 @@ class ServerSelection(unittest.TestCase):
class StorageIndex(unittest.TestCase):
def test_params_must_matter(self):
DATA = "I am some data"
DATA = b"I am some data"
PARAMS = _Client.DEFAULT_ENCODING_PARAMETERS
u = upload.Data(DATA, convergence="")
u = upload.Data(DATA, convergence=b"")
u.set_default_encoding_parameters(PARAMS)
eu = upload.EncryptAnUploadable(u)
d1 = eu.get_storage_index()
# CHK means the same data should encrypt the same way
u = upload.Data(DATA, convergence="")
u = upload.Data(DATA, convergence=b"")
u.set_default_encoding_parameters(PARAMS)
eu = upload.EncryptAnUploadable(u)
d1a = eu.get_storage_index()
# but if we use a different convergence string it should be different
u = upload.Data(DATA, convergence="wheee!")
u = upload.Data(DATA, convergence=b"wheee!")
u.set_default_encoding_parameters(PARAMS)
eu = upload.EncryptAnUploadable(u)
d1salt1 = eu.get_storage_index()
# and if we add yet a different convergence it should be different again
u = upload.Data(DATA, convergence="NOT wheee!")
u = upload.Data(DATA, convergence=b"NOT wheee!")
u.set_default_encoding_parameters(PARAMS)
eu = upload.EncryptAnUploadable(u)
d1salt2 = eu.get_storage_index()
# and if we use the first string again it should be the same as last time
u = upload.Data(DATA, convergence="wheee!")
u = upload.Data(DATA, convergence=b"wheee!")
u.set_default_encoding_parameters(PARAMS)
eu = upload.EncryptAnUploadable(u)
d1salt1a = eu.get_storage_index()
# and if we change the encoding parameters, it should be different (from the same convergence string with different encoding parameters)
u = upload.Data(DATA, convergence="")
u = upload.Data(DATA, convergence=b"")
u.set_default_encoding_parameters(PARAMS)
u.encoding_param_k = u.default_encoding_param_k + 1
eu = upload.EncryptAnUploadable(u)
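This test encodes the contract of convergent encryption: the storage index is a function of the plaintext, the convergence secret, and the encoding parameters, so changing any one of them changes the index. A toy derivation, an assumption for illustration only and not Tahoe's real construction:

    import hashlib

    def toy_storage_index(convergence, params, data):
        # Toy construction: hash the secret, the parameters, and the
        # plaintext together, then truncate to 16 bytes like a storage index.
        h = hashlib.sha256()
        h.update(convergence)
        h.update(repr(sorted(params.items())).encode("ascii"))
        h.update(data)
        return h.digest()[:16]

    PARAMS = {"k": 3, "happy": 7, "n": 10}
    si_a = toy_storage_index(b"", PARAMS, b"data" * 10000)
    si_b = toy_storage_index(b"", PARAMS, b"data" * 10000)
    si_c = toy_storage_index(b"wheee!", PARAMS, b"data" * 10000)
    assert si_a == si_b      # same inputs converge
    assert si_a != si_c      # a different secret diverges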
@ -838,10 +852,10 @@ def combinations(iterable, r):
n = len(pool)
if r > n:
return
indices = range(r)
indices = list(range(r))
yield tuple(pool[i] for i in indices)
while True:
for i in reversed(range(r)):
for i in reversed(list(range(r))):
if indices[i] != i + n - r:
break
else:
@ -855,7 +869,7 @@ def is_happy_enough(servertoshnums, h, k):
""" I calculate whether servertoshnums achieves happiness level h. I do this with a naïve "brute force search" approach. (See src/allmydata/util/happinessutil.py for a better algorithm.) """
if len(servertoshnums) < h:
return False
for happysetcombo in combinations(servertoshnums.iterkeys(), h):
for happysetcombo in combinations(iter(servertoshnums.keys()), h):
for subsetcombo in combinations(happysetcombo, k):
shnums = reduce(set.union, [ servertoshnums[s] for s in subsetcombo ])
if len(shnums) < k:
@ -886,7 +900,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
assert self.g, "I tried to find a grid at self.g, but failed"
servertoshnums = {} # k: server, v: set(shnum)
for i, c in self.g.servers_by_number.iteritems():
for i, c in self.g.servers_by_number.items():
for (dirp, dirns, fns) in os.walk(c.sharedir):
for fn in fns:
try:
@ -909,7 +923,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
assert self.g, "I tried to find a grid at self.g, but failed"
broker = self.g.clients[0].storage_broker
sh = self.g.clients[0]._secret_holder
data = upload.Data("data" * 10000, convergence="")
data = upload.Data(b"data" * 10000, convergence=b"")
data.set_default_encoding_parameters({'k': 3, 'happy': 4, 'n': 10})
uploadable = upload.EncryptAnUploadable(data)
encoder = encode.Encoder()
@ -926,9 +940,9 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
def _have_shareholders(upload_trackers_and_already_servers):
(upload_trackers, already_servers) = upload_trackers_and_already_servers
assert servers_to_break <= len(upload_trackers)
for index in xrange(servers_to_break):
for index in range(servers_to_break):
tracker = list(upload_trackers)[index]
for share in tracker.buckets.keys():
for share in list(tracker.buckets.keys()):
tracker.buckets[share].abort()
buckets = {}
servermap = already_servers.copy()
@ -1002,7 +1016,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
if "n" in kwargs and "k" in kwargs:
client.encoding_params['k'] = kwargs['k']
client.encoding_params['n'] = kwargs['n']
data = upload.Data("data" * 10000, convergence="")
data = upload.Data(b"data" * 10000, convergence=b"")
self.data = data
d = client.upload(data)
def _store_uri(ur):
@ -1021,8 +1035,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.set_up_grid(client_config_hooks=hooks)
c0 = self.g.clients[0]
DATA = "data" * 100
u = upload.Data(DATA, convergence="")
DATA = b"data" * 100
u = upload.Data(DATA, convergence=b"")
d = c0.upload(u)
d.addCallback(lambda ur: c0.create_node_from_uri(ur.get_uri()))
m = monitor.Monitor()
@ -1045,7 +1059,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
def test_happy_semantics(self):
self._setUp(2)
DATA = upload.Data("kittens" * 10000, convergence="")
DATA = upload.Data(b"kittens" * 10000, convergence=b"")
# These parameters are unsatisfiable with only 2 servers.
self.set_encoding_parameters(k=3, happy=5, n=10)
d = self.shouldFail(UploadUnhappinessError, "test_happy_semantics",
@ -1077,7 +1091,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.basedir = "upload/EncodingParameters/aborted_shares"
self.set_up_grid(num_servers=4)
c = self.g.clients[0]
DATA = upload.Data(100 * "kittens", convergence="")
DATA = upload.Data(100 * b"kittens", convergence=b"")
# These parameters are unsatisfiable with only 4 servers, but should
# work with 5, as long as the original 4 are not stuck in the open
# BucketWriter state (open() but not
@ -1155,8 +1169,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"We were asked to place shares on at "
"least 4 servers such that any 3 of them have "
"enough shares to recover the file",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# Do comment:52, but like this:
# server 2: empty
@ -1188,8 +1202,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"that any 3 of them have enough shares to recover "
"the file, but we were asked to place shares on "
"at least 4 such servers.",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
return d
@ -1230,7 +1244,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return client
d.addCallback(_reset_encoding_parameters)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
@ -1259,7 +1273,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self._add_server_with_share(server_number=1, share_number=2))
# Copy all of the other shares to server number 2
def _copy_shares(ign):
for i in xrange(0, 10):
for i in range(0, 10):
self._copy_share_to_server(i, 2)
d.addCallback(_copy_shares)
# Remove the first server, and add a placeholder with share 0
@ -1270,7 +1284,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
# Now try uploading.
d.addCallback(_reset_encoding_parameters)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
@ -1299,7 +1313,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.g.remove_server(self.g.servers_by_number[0].my_nodeid))
d.addCallback(_reset_encoding_parameters)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
# Make sure that only as many shares as necessary to satisfy
# servers of happiness were pushed.
d.addCallback(lambda results:
@ -1330,7 +1344,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(_setup)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1353,7 +1367,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
readonly=True))
# Copy all of the other shares to server number 2
def _copy_shares(ign):
for i in xrange(1, 10):
for i in range(1, 10):
self._copy_share_to_server(i, 2)
d.addCallback(_copy_shares)
# Remove server 0, and add another in its place
@ -1368,7 +1382,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return client
d.addCallback(_reset_encoding_parameters)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1396,7 +1410,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self._add_server_with_share(server_number=2, share_number=0,
readonly=True))
def _copy_shares(ign):
for i in xrange(1, 10):
for i in range(1, 10):
self._copy_share_to_server(i, 2)
d.addCallback(_copy_shares)
d.addCallback(lambda ign:
@ -1407,7 +1421,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return client
d.addCallback(_reset_encoding_parameters)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1512,7 +1526,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(lambda ign:
self._add_server(4))
def _copy_shares(ign):
for i in xrange(1, 10):
for i in range(1, 10):
self._copy_share_to_server(i, 1)
d.addCallback(_copy_shares)
d.addCallback(lambda ign:
@ -1523,7 +1537,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return client
d.addCallback(_prepare_client)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1536,7 +1550,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.basedir = self.mktemp()
d = self._setup_and_upload()
def _setup(ign):
for i in xrange(1, 11):
for i in range(1, 11):
self._add_server(server_number=i)
self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
c = self.g.clients[0]
@ -1550,8 +1564,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(lambda c:
self.shouldFail(UploadUnhappinessError, "test_query_counting",
"0 queries placed some shares",
c.upload, upload.Data("data" * 10000,
convergence="")))
c.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# Now try with some readonly servers. We want to make sure that
# the readonly server share discovery phase is counted correctly.
def _reset(ign):
@ -1561,7 +1575,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(lambda ign:
self._setup_and_upload())
def _then(ign):
for i in xrange(1, 11):
for i in range(1, 11):
self._add_server(server_number=i)
self._add_server(server_number=11, readonly=True)
self._add_server(server_number=12, readonly=True)
@ -1574,8 +1588,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.shouldFail(UploadUnhappinessError, "test_query_counting",
"4 placed none (of which 4 placed none due to "
"the server being full",
c.upload, upload.Data("data" * 10000,
convergence="")))
c.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# Now try the case where the upload process finds a bunch of the
# shares that it wants to place on the first server, including
# the one that it wanted to allocate there. Though no shares will
@ -1587,11 +1601,11 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self._setup_and_upload())
def _next(ign):
for i in xrange(1, 11):
for i in range(1, 11):
self._add_server(server_number=i)
# Copy all of the shares to server 9, since that will be
# the first one that the selector sees.
for i in xrange(10):
for i in range(10):
self._copy_share_to_server(i, 9)
# Remove server 0, and its contents
self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
@ -1603,8 +1617,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(lambda c:
self.shouldFail(UploadUnhappinessError, "test_query_counting",
"0 queries placed some shares",
c.upload, upload.Data("data" * 10000,
convergence="")))
c.upload, upload.Data(b"data" * 10000,
convergence=b"")))
return d
@ -1612,7 +1626,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.basedir = self.mktemp()
d = self._setup_and_upload()
def _then(ign):
for i in xrange(1, 11):
for i in range(1, 11):
self._add_server(server_number=i, readonly=True)
self.g.remove_server(self.g.servers_by_number[0].my_nodeid)
c = self.g.clients[0]
@ -1626,7 +1640,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"test_upper_limit_on_readonly_queries",
"sent 8 queries to 8 servers",
client.upload,
upload.Data('data' * 10000, convergence="")))
upload.Data(b'data' * 10000, convergence=b"")))
return d
@ -1668,7 +1682,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"(of which 5 placed none due to the server being "
"full and 0 placed none due to an error)",
client.upload,
upload.Data("data" * 10000, convergence="")))
upload.Data(b"data" * 10000, convergence=b"")))
# server 1: read-only, no shares
@ -1709,7 +1723,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"(of which 4 placed none due to the server being "
"full and 1 placed none due to an error)",
client.upload,
upload.Data("data" * 10000, convergence="")))
upload.Data(b"data" * 10000, convergence=b"")))
# server 0, server 1 = empty, accepting shares
# This should place all of the shares, but still fail with happy=4.
# We want to make sure that the exception message is worded correctly.
@ -1725,8 +1739,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"server(s). We were asked to place shares on at "
"least 4 server(s) such that any 3 of them have "
"enough shares to recover the file.",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# servers 0 - 4 = empty, accepting shares
# This too should place all the shares, and this too should fail,
# but since the effective happiness is more than the k encoding
@ -1750,8 +1764,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"that any 3 of them have enough shares to recover "
"the file, but we were asked to place shares on "
"at least 7 such servers.",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# server 0: shares 0 - 9
# server 1: share 0, read-only
# server 2: share 0, read-only
@ -1782,8 +1796,8 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
"to place shares on at least 7 servers such that "
"any 3 of them have enough shares to recover the "
"file",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
return d
@ -1815,7 +1829,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(_setup)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1873,7 +1887,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
d.addCallback(_setup)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1911,7 +1925,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return c
d.addCallback(_server_setup)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1935,12 +1949,12 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self._add_server_with_share(server_number=8, share_number=4)
self._add_server_with_share(server_number=5, share_number=5)
self._add_server_with_share(server_number=10, share_number=7)
for i in xrange(4):
for i in range(4):
self._copy_share_to_server(i, 2)
return self.g.clients[0]
d.addCallback(_server_setup)
d.addCallback(lambda client:
client.upload(upload.Data("data" * 10000, convergence="")))
client.upload(upload.Data(b"data" * 10000, convergence=b"")))
d.addCallback(lambda ign:
self.failUnless(self._has_happy_share_distribution()))
return d
@ -1963,14 +1977,14 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
self.shouldFail(UploadUnhappinessError,
"test_server_selection_bucket_abort",
"",
client.upload, upload.Data("data" * 10000,
convergence="")))
client.upload, upload.Data(b"data" * 10000,
convergence=b"")))
# wait for the abort messages to get there.
def _turn_barrier(res):
return fireEventually(res)
d.addCallback(_turn_barrier)
def _then(ignored):
for server in self.g.servers_by_number.values():
for server in list(self.g.servers_by_number.values()):
self.failUnlessEqual(server.allocated_size(), 0)
d.addCallback(_then)
return d
@ -1996,7 +2010,7 @@ class EncodingParameters(GridTestMixin, unittest.TestCase, SetDEPMixin,
return fireEventually(res)
d.addCallback(_turn_barrier)
def _then(ignored):
for server in self.g.servers_by_number.values():
for server in list(self.g.servers_by_number.values()):
self.failUnlessEqual(server.allocated_size(), 0)
d.addCallback(_then)
return d
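fireEventually gives the reactor a turn so the bucket-abort messages above are actually delivered before allocated_size() is checked. A sketch of that idiom (the real helper is the one these tests import):

    from twisted.internet import defer, reactor

    def fire_eventually(value=None):
        # Fire a Deferred on a later reactor turn, letting queued messages
        # (such as the bucket aborts above) be processed first.
        d = defer.Deferred()
        reactor.callLater(0, d.callback, value)
        return d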

View File

@ -210,17 +210,17 @@ class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
}
ext = uri.pack_extension(data)
d = uri.unpack_extension(ext)
self.failUnlessReallyEqual(d[b"stuff"], b"value")
self.failUnlessReallyEqual(d[b"size"], 12)
self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
self.failUnlessReallyEqual(d["stuff"], b"value")
self.failUnlessReallyEqual(d["size"], 12)
self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
readable = uri.unpack_extension_readable(ext)
self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
self.failUnlessReallyEqual(readable[b"stuff"], b"value")
self.failUnlessReallyEqual(readable[b"size"], 12)
self.failUnlessReallyEqual(readable[b"big_hash"],
self.failUnlessReallyEqual(readable["needed_shares"], 3)
self.failUnlessReallyEqual(readable["stuff"], b"value")
self.failUnlessReallyEqual(readable["size"], 12)
self.failUnlessReallyEqual(readable["big_hash"],
base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
self.failUnlessReallyEqual(readable[b"UEB_hash"],
self.failUnlessReallyEqual(readable["UEB_hash"],
base32.b2a(hashutil.uri_extension_hash(ext)))
class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):

View File

@ -260,7 +260,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
#
#d.addCallback(self.CHECK, "dead", "t=check&repair=true")
#def _got_html_dead(res):
# print res
# print(res)
# self.failUnlessIn("Healthy : healthy", res)
# self.failIfIn("Not Healthy", res)
# self.failUnlessIn("No repair necessary", res)

View File

@ -2,6 +2,8 @@ from mock import Mock
import time
from bs4 import BeautifulSoup
from twisted.trial import unittest
from twisted.web.template import Tag
from twisted.web.test.requesthelper import DummyRequest
@ -14,12 +16,14 @@ from ...storage_client import (
from ...web.root import RootElement
from ...util.connection_status import ConnectionStatus
from allmydata.web.root import URIHandler
from allmydata.web.common import WebError
from allmydata.client import _Client
from hypothesis import given
from hypothesis.strategies import text
from .common import (
assert_soup_has_tag_with_content,
)
from ..common import (
EMPTY_CLIENT_CONFIG,
@ -57,8 +61,19 @@ class RenderSlashUri(unittest.TestCase):
A (trivially) invalid capability is an error
"""
self.request.args[b"uri"] = [b"not a capability"]
with self.assertRaises(WebError):
self.res.render_GET(self.request)
response_body = self.res.render_GET(self.request)
soup = BeautifulSoup(response_body, 'html5lib')
assert_soup_has_tag_with_content(
self, soup, "title", "400 - Error",
)
assert_soup_has_tag_with_content(
self, soup, "h1", "Error",
)
assert_soup_has_tag_with_content(
self, soup, "p", "Invalid capability",
)
@given(
text()
@ -68,8 +83,19 @@ class RenderSlashUri(unittest.TestCase):
Let hypothesis try a bunch of invalid capabilities
"""
self.request.args[b"uri"] = [cap.encode('utf8')]
with self.assertRaises(WebError):
self.res.render_GET(self.request)
response_body = self.res.render_GET(self.request)
soup = BeautifulSoup(response_body, 'html5lib')
assert_soup_has_tag_with_content(
self, soup, "title", "400 - Error",
)
assert_soup_has_tag_with_content(
self, soup, "h1", "Error",
)
assert_soup_has_tag_with_content(
self, soup, "p", "Invalid capability",
)
class RenderServiceRow(unittest.TestCase):
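With this change an invalid capability renders a 400 error page instead of letting WebError escape render_GET, so the tests parse the response body with BeautifulSoup. The assertion style, in miniature with a hand-written body:

    from bs4 import BeautifulSoup

    body = (b"<html><head><title>400 - Error</title></head>"
            b"<body><h1>Error</h1><p>Invalid capability</p></body></html>")
    soup = BeautifulSoup(body, "html5lib")
    assert soup.find("title").text == "400 - Error"
    assert soup.find("p").text == "Invalid capability"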

View File

@ -46,14 +46,23 @@ from .common import (
unknown_immcap,
)
from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.interfaces import (
IMutableFileNode, SDMF_VERSION, MDMF_VERSION,
FileTooLargeError,
MustBeReadonlyError,
)
from allmydata.mutable import servermap, publish, retrieve
from .. import common_util as testutil
from ..common_py3 import TimezoneMixin
from ..common_web import (
do_http,
Error,
render,
)
from ...web.common import (
humanize_exception,
)
from allmydata.client import _Client, SecretHolder
# create a fake uploader/downloader, and a couple of fake dirnodes, then
@ -648,6 +657,8 @@ class MultiFormatResourceTests(TrialTestCase):
"""
Tests for ``MultiFormatResource``.
"""
def render(self, resource, **queryargs):
return self.successResultOf(render(resource, queryargs))
def resource(self):
"""
@ -668,38 +679,6 @@ class MultiFormatResourceTests(TrialTestCase):
return Content()
def render(self, resource, **query_args):
"""
Render a ``Resource`` against a request with the given query arguments.
:param resource: The Nevow resource to render.
:param query_args: The query arguments to put into the request being
rendered. A mapping from ``bytes`` to ``list`` of ``bytes``.
:return: The rendered response body as ``bytes``.
"""
# TODO: probably should: (1) refactor this out of here to a
# common module (test.common_web maybe?), and (2) replace
# nevow.inevow.IRequest with twisted.web.iweb.IRequest. For
# (2) to happen, we will have to update web.common.get_arg()
# etc first.
from zope.interface import implementer
from nevow.inevow import IRequest
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
@implementer(IRequest)
class FakeRequest(Request):
def __init__(self, args):
Request.__init__(self, DummyChannel())
self.args = args
self.fields = dict()
return resource.render(FakeRequest(args=query_args))
def test_select_format(self):
"""
The ``formatArgument`` attribute of a ``MultiFormatResource`` subclass
@ -4308,7 +4287,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
)
def log(self, res, msg):
#print "MSG: %s RES: %s" % (msg, res)
#print("MSG: %s RES: %s" % (msg, res))
log.msg(msg)
return res
@ -4788,3 +4767,33 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
# doesn't reveal anything. This addresses #1720.
d.addCallback(lambda e: self.assertEquals(str(e), "404 Not Found"))
return d
class HumanizeExceptionTests(TrialTestCase):
"""
Tests for ``humanize_exception``.
"""
def test_mustbereadonly(self):
"""
``humanize_exception`` describes ``MustBeReadonlyError``.
"""
text, code = humanize_exception(
MustBeReadonlyError(
"URI:DIR2 directory writecap used in a read-only context",
"<unknown name>",
),
)
self.assertIn("MustBeReadonlyError", text)
self.assertEqual(code, http.BAD_REQUEST)
def test_filetoolarge(self):
"""
``humanize_exception`` describes ``FileTooLargeError``.
"""
text, code = humanize_exception(
FileTooLargeError(
"This file is too large to be uploaded (data_size).",
),
)
self.assertIn("FileTooLargeError", text)
self.assertEqual(code, http.REQUEST_ENTITY_TOO_LARGE)
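These tests call humanize_exception directly with exception instances. A plausible relationship to the Failure-based humanize_failure, an assumption about the refactoring shown only for orientation, is a one-line unwrap:

    def humanize_failure(f):
        # Assumed delegation: unwrap the Twisted Failure and describe the
        # underlying exception via humanize_exception (defined below).
        return humanize_exception(f.value)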

View File

@ -13,8 +13,10 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
# Don't import bytes, to prevent leaks.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
# Don't import bytes or str, to prevent future's newbytes leaking and
# breaking code that only expects normal bytes.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, max, min # noqa: F401
str = unicode
from past.builtins import unicode, long
@ -928,11 +930,11 @@ def unpack_extension(data):
assert data[length:length+1] == b','
data = data[length+1:]
d[key] = value
d[str(key, "utf-8")] = value
# convert certain things to numbers
for intkey in (b'size', b'segment_size', b'num_segments',
b'needed_shares', b'total_shares'):
for intkey in ('size', 'segment_size', 'num_segments',
'needed_shares', 'total_shares'):
if intkey in d:
d[intkey] = int(d[intkey])
return d
@ -940,9 +942,9 @@ def unpack_extension(data):
def unpack_extension_readable(data):
unpacked = unpack_extension(data)
unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
unpacked["UEB_hash"] = hashutil.uri_extension_hash(data)
for k in sorted(unpacked.keys()):
if b'hash' in k:
if 'hash' in k:
unpacked[k] = base32.b2a(unpacked[k])
return unpacked
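After this change the unpacked extension dict is keyed by text while the values remain bytes, with the known numeric fields coerced to int. The effect in isolation, starting from an already-parsed mapping (assumed shape, for illustration):

    raw = {b"size": b"12", b"stuff": b"value"}
    d = {str(k, "utf-8"): v for k, v in raw.items()}
    for intkey in ("size", "segment_size", "num_segments",
                   "needed_shares", "total_shares"):
        if intkey in d:
            d[intkey] = int(d[intkey])
    assert d == {"size": 12, "stuff": b"value"}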

View File

@ -38,10 +38,13 @@ PORTED_MODULES = [
"allmydata.storage.common",
"allmydata.storage.crawler",
"allmydata.storage.expirer",
"allmydata.storage.immutable",
"allmydata.storage.lease",
"allmydata.storage.mutable",
"allmydata.storage.server",
"allmydata.storage.shares",
"allmydata.test.common_py3",
"allmydata.test.no_network",
"allmydata.uri",
"allmydata.util._python3",
"allmydata.util.abbreviate",
@ -82,6 +85,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_crypto",
"allmydata.test.test_deferredutil",
"allmydata.test.test_dictutil",
"allmydata.test.test_encode",
"allmydata.test.test_encodingutil",
"allmydata.test.test_happiness",
"allmydata.test.test_hashtree",
@ -91,6 +95,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_log",
"allmydata.test.test_monitor",
"allmydata.test.test_netstring",
"allmydata.test.test_no_network",
"allmydata.test.test_observer",
"allmydata.test.test_pipeline",
"allmydata.test.test_python3",
@ -99,6 +104,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_storage",
"allmydata.test.test_storage_web",
"allmydata.test.test_time_format",
"allmydata.test.test_upload",
"allmydata.test.test_uri",
"allmydata.test.test_util",
"allmydata.test.test_version",

View File

@ -36,5 +36,5 @@ def download_to_data(n, offset=0, size=None, progress=None):
:param progress: None or an IProgress implementer
"""
d = n.read(MemoryConsumer(progress=progress), offset, size)
d.addCallback(lambda mc: "".join(mc.chunks))
d.addCallback(lambda mc: b"".join(mc.chunks))
return d
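The separator in the join must itself be bytes once the downloaded chunks are bytes; mixing the two raises TypeError on Python 3:

    chunks = [b"p" * 4, b"p" * 4]
    assert b"".join(chunks) == b"p" * 8
    # "".join(chunks) would raise TypeError: str separator, bytes items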

View File

@ -10,7 +10,10 @@ from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# Don't import bytes to prevent leaking future's bytes.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min, bytes as future_bytes # noqa: F401
else:
future_bytes = bytes
from past.builtins import chr as byteschr
@ -213,7 +216,7 @@ def bucket_cancel_secret_hash(file_cancel_secret, peerid):
def _xor(a, b):
return b"".join([byteschr(c ^ b) for c in a])
return b"".join([byteschr(c ^ b) for c in future_bytes(a)])
def hmac(tag, data):
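future_bytes(a) guarantees that iterating the operand yields integers on both Python versions, which is what the xor needs. On Python 3 alone, the helper reduces to:

    def xor(a, b):
        # Iterating bytes on Python 3 yields ints, so c ^ b is well-defined.
        return bytes(c ^ b for c in a)

    assert xor(b"\x01\x02", 0xff) == b"\xfe\xfd"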

View File

@ -55,10 +55,10 @@ class Spans(object):
def add(self, start, length):
assert start >= 0
assert length > 0
#print " ADD [%d+%d -%d) to %s" % (start, length, start+length, self.dump())
#print(" ADD [%d+%d -%d) to %s" % (start, length, start+length, self.dump()))
first_overlap = last_overlap = None
for i,(s_start,s_length) in enumerate(self._spans):
#print " (%d+%d)-> overlap=%s adjacent=%s" % (s_start,s_length, overlap(s_start, s_length, start, length), adjacent(s_start, s_length, start, length))
#print(" (%d+%d)-> overlap=%s adjacent=%s" % (s_start,s_length, overlap(s_start, s_length, start, length), adjacent(s_start, s_length, start, length)))
if (overlap(s_start, s_length, start, length)
or adjacent(s_start, s_length, start, length)):
last_overlap = i
@ -68,7 +68,7 @@ class Spans(object):
# no overlap
if first_overlap is not None:
break
#print " first_overlap", first_overlap, last_overlap
#print(" first_overlap", first_overlap, last_overlap)
if first_overlap is None:
# no overlap, so just insert the span and sort by starting
# position.
@ -83,7 +83,7 @@ class Spans(object):
newspan_length = newspan_end - newspan_start
newspan = (newspan_start, newspan_length)
self._spans[first_overlap:last_overlap+1] = [newspan]
#print " ADD done: %s" % self.dump()
#print(" ADD done: %s" % self.dump())
self._check()
return self
@ -91,7 +91,7 @@ class Spans(object):
def remove(self, start, length):
assert start >= 0
assert length > 0
#print " REMOVE [%d+%d -%d) from %s" % (start, length, start+length, self.dump())
#print(" REMOVE [%d+%d -%d) from %s" % (start, length, start+length, self.dump()))
first_complete_overlap = last_complete_overlap = None
for i,(s_start,s_length) in enumerate(self._spans):
s_end = s_start + s_length
@ -144,7 +144,7 @@ class Spans(object):
break
if first_complete_overlap is not None:
del self._spans[first_complete_overlap:last_complete_overlap+1]
#print " REMOVE done: %s" % self.dump()
#print(" REMOVE done: %s" % self.dump())
self._check()
return self
@ -282,26 +282,26 @@ class DataSpans(object):
def get(self, start, length):
# returns a string of LENGTH, or None
#print "get", start, length, self.spans
#print("get", start, length, self.spans)
end = start+length
for (s_start,s_data) in self.spans:
s_end = s_start+len(s_data)
#print " ",s_start,s_end
#print(" ",s_start,s_end)
if s_start <= start < s_end:
# we want some data from this span. Because we maintain
# strictly merged and non-overlapping spans, everything we
# want must be in this span.
offset = start - s_start
if offset + length > len(s_data):
#print " None, span falls short"
#print(" None, span falls short")
return None # span falls short
#print " some", s_data[offset:offset+length]
#print(" some", s_data[offset:offset+length])
return s_data[offset:offset+length]
if s_start >= end:
# we've gone too far: no further spans will overlap
#print " None, gone too far"
#print(" None, gone too far")
return None
#print " None, ran out of spans"
#print(" None, ran out of spans")
return None
def add(self, start, data):
@ -310,13 +310,13 @@ class DataSpans(object):
# add new spans
# sort
# merge adjacent spans
#print "add", start, data, self.spans
#print("add", start, data, self.spans)
end = start + len(data)
i = 0
while len(data):
#print " loop", start, data, i, len(self.spans), self.spans
#print(" loop", start, data, i, len(self.spans), self.spans)
if i >= len(self.spans):
#print " append and done"
#print(" append and done")
# append a last span
self.spans.append( (start, data) )
break
@ -333,7 +333,7 @@ class DataSpans(object):
# A). We handle E by replacing the middle and terminating.
if start < s_start:
# case A: insert a new span, then loop with the remainder
#print " insert new span"
#print(" insert new span")
s_len = s_start-start
self.spans.insert(i, (start, data[:s_len]))
i += 1
@ -343,12 +343,12 @@ class DataSpans(object):
s_len = len(s_data)
s_end = s_start+s_len
if s_start <= start < s_end:
#print " modify this span", s_start, start, s_end
#print(" modify this span", s_start, start, s_end)
# we want to modify some data in this span: a prefix, a
# suffix, or the whole thing
if s_start == start:
if s_end <= end:
#print " replace whole segment"
#print(" replace whole segment")
# case C: replace this segment
self.spans[i] = (s_start, data[:s_len])
i += 1
@ -357,36 +357,36 @@ class DataSpans(object):
# C2 is where len(data)>0
continue
# case B: modify the prefix, retain the suffix
#print " modify prefix"
#print(" modify prefix")
self.spans[i] = (s_start, data + s_data[len(data):])
break
if start > s_start and end < s_end:
# case E: modify the middle
#print " modify middle"
#print(" modify middle")
prefix_len = start - s_start # we retain this much
suffix_len = s_end - end # and retain this much
newdata = s_data[:prefix_len] + data + s_data[-suffix_len:]
self.spans[i] = (s_start, newdata)
break
# case D: retain the prefix, modify the suffix
#print " modify suffix"
#print(" modify suffix")
prefix_len = start - s_start # we retain this much
suffix_len = s_len - prefix_len # we replace this much
#print " ", s_data, prefix_len, suffix_len, s_len, data
#print(" ", s_data, prefix_len, suffix_len, s_len, data)
self.spans[i] = (s_start,
s_data[:prefix_len] + data[:suffix_len])
i += 1
start += suffix_len
data = data[suffix_len:]
#print " now", start, data
#print(" now", start, data)
# D2 is where len(data)>0
continue
# else we're not there yet
#print " still looking"
#print(" still looking")
i += 1
continue
# now merge adjacent spans
#print " merging", self.spans
#print(" merging", self.spans)
newspans = []
for (s_start,s_data) in self.spans:
if newspans and adjacent(newspans[-1][0], len(newspans[-1][1]),
@ -396,12 +396,12 @@ class DataSpans(object):
newspans.append( (s_start, s_data) )
self.spans = newspans
self.assert_invariants()
#print " done", self.spans
#print(" done", self.spans)
def remove(self, start, length):
i = 0
end = start + length
#print "remove", start, length, self.spans
#print("remove", start, length, self.spans)
while i < len(self.spans):
(s_start,s_data) = self.spans[i]
if s_start >= end:
@ -441,7 +441,7 @@ class DataSpans(object):
self.spans[i] = (s_start, left)
self.spans.insert(i+1, (o_end, right))
break
#print " done", self.spans
#print(" done", self.spans)
def pop(self, start, length):
data = self.get(start, length)

View File

@ -15,8 +15,10 @@ from twisted.web.template import (
tags,
)
from allmydata.web.common import (
exception_to_child,
get_arg,
get_root,
render_exception,
WebError,
MultiFormatResource,
SlotsSequenceElement,
@ -224,9 +226,11 @@ class LiteralCheckResultsRenderer(MultiFormatResource, ResultsBase):
super(LiteralCheckResultsRenderer, self).__init__()
self._client = client
@render_exception
def render_HTML(self, req):
return renderElement(req, LiteralCheckResultsRendererElement())
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_results(None)
@ -275,9 +279,11 @@ class CheckResultsRenderer(MultiFormatResource):
self._client = client
self._results = ICheckResults(results)
@render_exception
def render_HTML(self, req):
return renderElement(req, CheckResultsRendererElement(self._client, self._results))
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_results(self._results)
@ -343,10 +349,12 @@ class CheckAndRepairResultsRenderer(MultiFormatResource):
if results:
self._results = ICheckAndRepairResults(results)
@render_exception
def render_HTML(self, req):
elem = CheckAndRepairResultsRendererElement(self._client, self._results)
return renderElement(req, elem)
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = json_check_and_repair_results(self._results)
@ -411,6 +419,7 @@ class DeepCheckResultsRenderer(MultiFormatResource):
self._client = client
self.monitor = monitor
@exception_to_child
def getChild(self, name, req):
if not name:
return self
@ -425,10 +434,12 @@ class DeepCheckResultsRenderer(MultiFormatResource):
raise WebError("No detailed results for SI %s" % html.escape(name),
http.NOT_FOUND)
@render_exception
def render_HTML(self, req):
elem = DeepCheckResultsRendererElement(self.monitor)
return renderElement(req, elem)
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
data = {}
@ -615,6 +626,7 @@ class DeepCheckAndRepairResultsRenderer(MultiFormatResource):
self._client = client
self.monitor = monitor
@exception_to_child
def getChild(self, name, req):
if not name:
return self
@ -629,10 +641,12 @@ class DeepCheckAndRepairResultsRenderer(MultiFormatResource):
raise WebError("No detailed results for SI %s" % html.escape(name),
http.NOT_FOUND)
@render_exception
def render_HTML(self, req):
elem = DeepCheckAndRepairResultsRendererElement(self.monitor)
return renderElement(req, elem)
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
res = self.monitor.get_status()

View File

@ -1,10 +1,10 @@
import time
import json
from functools import wraps
from twisted.web import http, server, resource, template
from twisted.python import log
from twisted.python.failure import Failure
from nevow import loaders, appserver
from nevow.inevow import IRequest
from nevow.util import resource_filename
@ -20,7 +20,7 @@ from allmydata.util.encodingutil import to_bytes, quote_output
# Originally part of this module, so still part of its API:
from .common_py3 import ( # noqa: F401
get_arg, abbreviate_time, MultiFormatResource, WebError
get_arg, abbreviate_time, MultiFormatResource, WebError,
)
@ -201,34 +201,40 @@ def should_create_intermediate_directories(req):
return bool(req.method in ("PUT", "POST") and
t not in ("delete", "rename", "rename-form", "check"))
def humanize_failure(f):
# return text, responsecode
if f.check(EmptyPathnameComponentError):
def humanize_exception(exc):
"""
Like ``humanize_failure`` but for an exception.
:param Exception exc: The exception to describe.
:return: See ``humanize_failure``.
"""
if isinstance(exc, EmptyPathnameComponentError):
return ("The webapi does not allow empty pathname components, "
"i.e. a double slash", http.BAD_REQUEST)
if f.check(ExistingChildError):
if isinstance(exc, ExistingChildError):
return ("There was already a child by that name, and you asked me "
"to not replace it.", http.CONFLICT)
if f.check(NoSuchChildError):
quoted_name = quote_output(f.value.args[0], encoding="utf-8", quotemarks=False)
if isinstance(exc, NoSuchChildError):
quoted_name = quote_output(exc.args[0], encoding="utf-8", quotemarks=False)
return ("No such child: %s" % quoted_name, http.NOT_FOUND)
if f.check(NotEnoughSharesError):
if isinstance(exc, NotEnoughSharesError):
t = ("NotEnoughSharesError: This indicates that some "
"servers were unavailable, or that shares have been "
"lost to server departure, hard drive failure, or disk "
"corruption. You should perform a filecheck on "
"this object to learn more.\n\nThe full error message is:\n"
"%s") % str(f.value)
"%s") % str(exc)
return (t, http.GONE)
if f.check(NoSharesError):
if isinstance(exc, NoSharesError):
t = ("NoSharesError: no shares could be found. "
"Zero shares usually indicates a corrupt URI, or that "
"no servers were connected, but it might also indicate "
"severe corruption. You should perform a filecheck on "
"this object to learn more.\n\nThe full error message is:\n"
"%s") % str(f.value)
"%s") % str(exc)
return (t, http.GONE)
if f.check(UnrecoverableFileError):
if isinstance(exc, UnrecoverableFileError):
t = ("UnrecoverableFileError: the directory (or mutable file) could "
"not be retrieved, because there were insufficient good shares. "
"This might indicate that no servers were connected, "
@ -237,9 +243,9 @@ def humanize_failure(f):
"failure, or disk corruption. You should perform a filecheck on "
"this object to learn more.")
return (t, http.GONE)
if f.check(MustNotBeUnknownRWError):
quoted_name = quote_output(f.value.args[1], encoding="utf-8")
immutable = f.value.args[2]
if isinstance(exc, MustNotBeUnknownRWError):
quoted_name = quote_output(exc.args[1], encoding="utf-8")
immutable = exc.args[2]
if immutable:
t = ("MustNotBeUnknownRWError: an operation to add a child named "
"%s to a directory was given an unknown cap in a write slot.\n"
@ -259,29 +265,43 @@ def humanize_failure(f):
"writecap in the write slot if desired, would also work in this "
"case.") % quoted_name
return (t, http.BAD_REQUEST)
if f.check(MustBeDeepImmutableError):
quoted_name = quote_output(f.value.args[1], encoding="utf-8")
if isinstance(exc, MustBeDeepImmutableError):
quoted_name = quote_output(exc.args[1], encoding="utf-8")
t = ("MustBeDeepImmutableError: a cap passed to this operation for "
"the child named %s, needed to be immutable but was not. Either "
"the cap is being added to an immutable directory, or it was "
"originally retrieved from an immutable directory as an unknown "
"cap.") % quoted_name
return (t, http.BAD_REQUEST)
if f.check(MustBeReadonlyError):
quoted_name = quote_output(f.value.args[1], encoding="utf-8")
if isinstance(exc, MustBeReadonlyError):
quoted_name = quote_output(exc.args[1], encoding="utf-8")
t = ("MustBeReadonlyError: a cap passed to this operation for "
"the child named '%s', needed to be read-only but was not. "
"The cap is being passed in a read slot (ro_uri), or was retrieved "
"from a read slot as an unknown cap.") % quoted_name
return (t, http.BAD_REQUEST)
if f.check(blacklist.FileProhibited):
t = "Access Prohibited: %s" % quote_output(f.value.reason, encoding="utf-8", quotemarks=False)
if isinstance(exc, blacklist.FileProhibited):
t = "Access Prohibited: %s" % quote_output(exc.reason, encoding="utf-8", quotemarks=False)
return (t, http.FORBIDDEN)
if f.check(WebError):
return (f.value.text, f.value.code)
if f.check(FileTooLargeError):
return (f.getTraceback(), http.REQUEST_ENTITY_TOO_LARGE)
return (str(f), None)
if isinstance(exc, WebError):
return (exc.text, exc.code)
if isinstance(exc, FileTooLargeError):
return ("FileTooLargeError: %s" % (exc,), http.REQUEST_ENTITY_TOO_LARGE)
return (str(exc), None)
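
The mapping is direct enough to exercise by hand; for example (not from the
diff):

    from twisted.web import http

    from allmydata.web.common import WebError, humanize_exception

    # A recognized exception supplies its own message and status code ...
    text, code = humanize_exception(WebError("bad t= argument", http.BAD_REQUEST))
    assert (text, code) == ("bad t= argument", http.BAD_REQUEST)

    # ... while anything unrecognized falls through to (str(exc), None);
    # callers such as TokenOnlyWebApi below substitute 500 for None.
    text, code = humanize_exception(ValueError("unexpected"))
    assert code is None
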
def humanize_failure(f):
"""
    Create a human-oriented description of a failure along with some HTTP
metadata.
:param Failure f: The failure to describe.
:return (bytes, int): A tuple of some prose and an HTTP code describing
the failure.
"""
return humanize_exception(f.value)
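
``humanize_failure`` keeps its old signature for use in Deferred errbacks,
which receive a Failure rather than a bare exception; for instance:

    from twisted.python.failure import Failure

    from allmydata.web.common import humanize_failure

    try:
        raise ValueError("unexpected")
    except ValueError:
        # Failure() with no arguments captures the exception being handled.
        text, code = humanize_failure(Failure())
    assert code is None
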
class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
def simple(self, ctx, text, code=http.BAD_REQUEST):
@ -423,8 +443,38 @@ class TokenOnlyWebApi(resource.Resource, object):
req.setResponseCode(e.code)
return json.dumps({"error": e.text})
except Exception as e:
message, code = humanize_failure(Failure())
message, code = humanize_exception(e)
req.setResponseCode(500 if code is None else code)
return json.dumps({"error": message})
else:
raise WebError("'%s' invalid type for 't' arg" % (t,), http.BAD_REQUEST)
def exception_to_child(f):
"""
    Decorate a ``getChild`` method with exception handling behavior to render an
error page reflecting the exception.
"""
@wraps(f)
def g(self, name, req):
try:
return f(self, name, req)
except Exception as e:
description, status = humanize_exception(e)
return resource.ErrorPage(status, "Error", description)
return g
def render_exception(f):
"""
Decorate a ``render_*`` method with exception handling behavior to render
an error page reflecting the exception.
"""
@wraps(f)
def g(self, request):
try:
return f(self, request)
except Exception as e:
description, status = humanize_exception(e)
return resource.ErrorPage(status, "Error", description).render(request)
return g
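
Together the two decorators give a resource uniform error handling for both
child lookup and rendering. A sketch of their intended use (the handler and
its errors are invented for the example):

    from twisted.web import http
    from twisted.web.resource import Resource

    from allmydata.web.common import (
        WebError,
        exception_to_child,
        render_exception,
    )

    class ExampleHandler(Resource, object):
        """Hypothetical resource; not part of the diff."""

        @exception_to_child
        def getChild(self, name, req):
            # Raising here now returns a rendered ErrorPage child ...
            raise WebError("no such child: %r" % (name,), http.NOT_FOUND)

        @render_exception
        def render_GET(self, req):
            # ... and raising here returns the error page's rendered body.
            raise WebError("cannot render", http.INTERNAL_SERVER_ERROR)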

View File

@ -48,6 +48,7 @@ from allmydata.web.common import (
parse_replace_arg,
should_create_intermediate_directories,
humanize_failure,
humanize_exception,
convert_children_json,
get_format,
get_mutable_type,
@ -55,6 +56,8 @@ from allmydata.web.common import (
render_time,
MultiFormatResource,
SlotsSequenceElement,
exception_to_child,
render_exception,
)
from allmydata.web.filenode import ReplaceMeMixin, \
FileNodeHandler, PlaceHolderNodeHandler
@ -94,6 +97,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
self.name = name
self._operations = client.get_web_service().get_operations()
@exception_to_child
def getChild(self, name, req):
"""
Dynamically create a child for the given request and name
@ -113,9 +117,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
# we will follow suit.
for segment in req.prepath:
if not segment:
raise EmptyPathnameComponentError(
u"The webapi does not allow empty pathname components",
)
raise EmptyPathnameComponentError()
d = self.node.get(name)
d.addBoth(self._got_child, req, name)
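
The explanatory message now lives only in ``humanize_exception``, and the
raise site keeps just the detection: a double slash in the request path shows
up as an empty prepath segment. For instance (hypothetical path):

    prepath = b"uri//file".split(b"/")              # [b'uri', b'', b'file']
    assert any(not segment for segment in prepath)  # what the loop above catches
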
@ -210,6 +212,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
d.addCallback(lambda res: self.node.get_uri())
return d
@render_exception
def render_GET(self, req):
# This is where all of the directory-related ?t=* code goes.
t = get_arg(req, "t", "").strip()
@ -248,6 +251,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
raise WebError("GET directory: bad t=%s" % t)
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
@ -267,6 +271,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
raise WebError("PUT to a directory")
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
@ -674,7 +679,7 @@ class DirectoryAsHTML(Element):
try:
children = yield self.node.list()
except Exception as e:
text, code = humanize_failure(Failure(e))
text, code = humanize_exception(e)
children = None
self.dirnode_children_error = text
@ -1458,6 +1463,7 @@ class UnknownNodeHandler(Resource, object):
self.parentnode = parentnode
self.name = name
@render_exception
def render_GET(self, req):
t = get_arg(req, "t", "").strip()
if t == "info":

View File

@ -17,14 +17,31 @@ from allmydata.mutable.publish import MutableFileHandle
from allmydata.mutable.common import MODE_READ
from allmydata.util import log, base32
from allmydata.util.encodingutil import quote_output
from allmydata.blacklist import FileProhibited, ProhibitedNode
from allmydata.blacklist import (
FileProhibited,
ProhibitedNode,
)
from allmydata.web.common import text_plain, WebError, \
boolean_of_arg, get_arg, should_create_intermediate_directories, \
MyExceptionHandler, parse_replace_arg, parse_offset_arg, \
get_format, get_mutable_type, get_filenode_metadata
from allmydata.web.check_results import CheckResultsRenderer, \
CheckAndRepairResultsRenderer, LiteralCheckResultsRenderer
from allmydata.web.common import (
boolean_of_arg,
exception_to_child,
get_arg,
get_filenode_metadata,
get_format,
get_mutable_type,
parse_offset_arg,
parse_replace_arg,
render_exception,
should_create_intermediate_directories,
text_plain,
MyExceptionHandler,
WebError,
)
from allmydata.web.check_results import (
CheckResultsRenderer,
CheckAndRepairResultsRenderer,
LiteralCheckResultsRenderer,
)
from allmydata.web.info import MoreInfo
class ReplaceMeMixin(object):
@ -100,6 +117,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
self.name = name
self.node = None
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
@ -115,6 +133,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
raise WebError("PUT to a file: bad t=%s" % t)
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
@ -146,6 +165,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
self.parentnode = parentnode
self.name = name
@exception_to_child
def getChild(self, name, req):
if isinstance(self.node, ProhibitedNode):
raise FileProhibited(self.node.reason)
@ -162,6 +182,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
"no details",
)
@render_exception
def render_GET(self, req):
t = get_arg(req, "t", "").strip()
@ -219,6 +240,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
return _file_read_only_uri(req, self.node)
raise WebError("GET file: bad t=%s" % t)
@render_exception
def render_HEAD(self, req):
t = get_arg(req, "t", "").strip()
if t:
@ -228,6 +250,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
d.addCallback(lambda dn: FileDownloader(dn, filename))
return d
@render_exception
def render_PUT(self, req):
t = get_arg(req, "t", "").strip()
replace = parse_replace_arg(get_arg(req, "replace", "true"))
@ -270,6 +293,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
raise WebError("PUT to a file: bad t=%s" % t)
@render_exception
def render_POST(self, req):
t = get_arg(req, "t", "").strip()
replace = boolean_of_arg(get_arg(req, "replace", "true"))
@ -313,6 +337,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
d.addCallback(self._maybe_literal, CheckResultsRenderer)
return d
@render_exception
def render_DELETE(self, req):
assert self.parentnode and self.name
d = self.parentnode.delete(self.name)
@ -404,6 +429,7 @@ class FileDownloader(Resource, object):
except ValueError:
return None
@render_exception
def render(self, req):
gte = static.getTypeAndEncoding
ctype, encoding = gte(self.filename,
@ -526,5 +552,7 @@ def _file_read_only_uri(req, filenode):
class FileNodeDownloadHandler(FileNodeHandler):
@exception_to_child
def getChild(self, name, req):
return FileNodeDownloadHandler(self.client, self.node, name=name)
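
Each path segment after the cap simply re-wraps the same node, so the final
segment supplies the filename that ``render`` (shown above) hands to Twisted's
content-type guesser. A standalone illustration with a made-up filename:

    from twisted.web import static

    ctype, encoding = static.getTypeAndEncoding(
        "mydoc.pdf",
        static.File.contentTypes,
        static.File.contentEncodings,
        defaultType="text/plain",
    )
    # ctype is typically "application/pdf"; encoding is None here.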

View File

@ -17,6 +17,7 @@ from allmydata.web.common import (
get_root,
get_arg,
boolean_of_arg,
exception_to_child,
)
MINUTE = 60
@ -95,6 +96,7 @@ class OphandleTable(resource.Resource, service.Service):
# https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3314
return url.URL.fromString(target)
@exception_to_child
def getChild(self, name, req):
ophandle = name
if ophandle not in self.handles:

View File

@ -34,11 +34,13 @@ from allmydata.web import storage
from allmydata.web.common import (
abbreviate_size,
WebError,
exception_to_child,
get_arg,
MultiFormatResource,
SlotsSequenceElement,
get_format,
get_mutable_type,
render_exception,
render_time_delta,
render_time,
render_time_attr,
@ -58,6 +60,7 @@ class URIHandler(resource.Resource, object):
super(URIHandler, self).__init__()
self.client = client
@render_exception
def render_GET(self, req):
"""
        Historically, accessing this via "GET /uri?uri=<capability>"
@ -88,6 +91,7 @@ class URIHandler(resource.Resource, object):
redir_uri = redir_uri.add(k.decode('utf8'), v.decode('utf8'))
return redirectTo(redir_uri.to_text().encode('utf8'), req)
@render_exception
def render_PUT(self, req):
"""
either "PUT /uri" to create an unlinked file, or
@ -109,6 +113,7 @@ class URIHandler(resource.Resource, object):
)
raise WebError(errmsg, http.BAD_REQUEST)
@render_exception
def render_POST(self, req):
"""
"POST /uri?t=upload&file=newfile" to upload an
@ -135,6 +140,7 @@ class URIHandler(resource.Resource, object):
"and POST?t=mkdir")
raise WebError(errmsg, http.BAD_REQUEST)
@exception_to_child
def getChild(self, name, req):
"""
Most requests look like /uri/<cap> so this fetches the capability
@ -167,6 +173,7 @@ class FileHandler(resource.Resource, object):
super(FileHandler, self).__init__()
self.client = client
@exception_to_child
def getChild(self, name, req):
if req.method not in ("GET", "HEAD"):
raise WebError("/file can only be used with GET or HEAD")
@ -181,6 +188,7 @@ class FileHandler(resource.Resource, object):
raise WebError("'%s' is not a file-cap" % name)
return filenode.FileNodeDownloadHandler(self.client, node)
@render_exception
def render_GET(self, ctx):
raise WebError("/file must be followed by a file-cap and a name",
http.NOT_FOUND)
@ -188,6 +196,7 @@ class FileHandler(resource.Resource, object):
class IncidentReporter(MultiFormatResource):
"""Handler for /report_incident POST request"""
@render_exception
def render(self, req):
if req.method != "POST":
raise WebError("/report_incident can only be used with POST")
@ -236,6 +245,7 @@ class Root(MultiFormatResource):
self.putChild("report_incident", IncidentReporter())
@exception_to_child
def getChild(self, path, request):
if not path:
# Render "/" path.
@ -254,9 +264,11 @@ class Root(MultiFormatResource):
storage_server = None
return storage.StorageStatus(storage_server, self._client.nickname)
@render_exception
def render_HTML(self, req):
return renderElement(req, RootElement(self._client, self._now_fn))
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "application/json; charset=utf-8")
intro_summaries = [s.summary for s in self._client.introducer_connection_statuses()]
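
The redirect assembled in ``URIHandler.render_GET`` above uses hyperlink-style
URL objects: query parameters are re-attached with ``add()`` and the result is
flattened with ``to_text()``. A sketch under that assumption, with a
placeholder capability:

    from hyperlink import DecodedURL

    redir_uri = DecodedURL.from_text(u"/uri/URI:CHK:placeholder")
    redir_uri = redir_uri.add(u"t", u"json")
    location = redir_uri.to_text().encode("utf8")  # ready for redirectTo()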

View File

@ -18,8 +18,10 @@ from allmydata.web.common import (
abbreviate_time,
abbreviate_rate,
abbreviate_size,
exception_to_child,
plural,
compute_rate,
render_exception,
render_time,
MultiFormatResource,
SlotsSequenceElement,
@ -192,6 +194,7 @@ class UploadStatusPage(Resource, object):
super(UploadStatusPage, self).__init__()
self._upload_status = upload_status
@render_exception
def render_GET(self, req):
elem = UploadStatusElement(self._upload_status)
return renderElement(req, elem)
@ -367,6 +370,7 @@ class _EventJson(Resource, object):
def __init__(self, download_status):
self._download_status = download_status
@render_exception
def render(self, request):
request.setHeader("content-type", "text/plain")
data = { } # this will be returned to the GET
@ -424,6 +428,7 @@ class DownloadStatusPage(Resource, object):
self._download_status = download_status
self.putChild("event_json", _EventJson(self._download_status))
@render_exception
def render_GET(self, req):
elem = DownloadStatusElement(self._download_status)
return renderElement(req, elem)
@ -806,6 +811,7 @@ class RetrieveStatusPage(MultiFormatResource):
super(RetrieveStatusPage, self).__init__()
self._retrieve_status = retrieve_status
@render_exception
def render_HTML(self, req):
elem = RetrieveStatusElement(self._retrieve_status)
return renderElement(req, elem)
@ -929,6 +935,7 @@ class PublishStatusPage(MultiFormatResource):
super(PublishStatusPage, self).__init__()
self._publish_status = publish_status
@render_exception
def render_HTML(self, req):
        elem = PublishStatusElement(self._publish_status)
return renderElement(req, elem)
@ -1087,6 +1094,7 @@ class MapupdateStatusPage(MultiFormatResource):
super(MapupdateStatusPage, self).__init__()
self._update_status = update_status
@render_exception
def render_HTML(self, req):
        elem = MapupdateStatusElement(self._update_status)
return renderElement(req, elem)
@ -1248,11 +1256,13 @@ class Status(MultiFormatResource):
super(Status, self).__init__()
self.history = history
@render_exception
def render_HTML(self, req):
elem = StatusElement(self._get_active_operations(),
self._get_recent_operations())
return renderElement(req, elem)
@render_exception
def render_JSON(self, req):
# modern browsers now render this instead of forcing downloads
req.setHeader("content-type", "application/json")
@ -1268,6 +1278,7 @@ class Status(MultiFormatResource):
return json.dumps(data, indent=1) + "\n"
@exception_to_child
def getChild(self, path, request):
# The "if (path is empty) return self" line should handle
# trailing slash in request path.
@ -1420,9 +1431,11 @@ class HelperStatus(MultiFormatResource):
super(HelperStatus, self).__init__()
self._helper = helper
@render_exception
def render_HTML(self, req):
return renderElement(req, HelperStatusElement(self._helper))
@render_exception
def render_JSON(self, req):
req.setHeader("content-type", "text/plain")
if self._helper:
@ -1512,9 +1525,11 @@ class Statistics(MultiFormatResource):
super(Statistics, self).__init__()
self._provider = provider
@render_exception
def render_HTML(self, req):
return renderElement(req, StatisticsElement(self._provider))
@render_exception
def render_JSON(self, req):
stats = self._provider.get_stats()
req.setHeader("content-type", "text/plain")

View File

@ -80,6 +80,7 @@ commands =
[testenv:codechecks]
basepython = python2.7
# On macOS, git inside of towncrier needs $HOME.
passenv = HOME
whitelist_externals =