diff --git a/.appveyor.yml b/.appveyor.yml
deleted file mode 100644
index f6efe785a..000000000
--- a/.appveyor.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-# adapted from https://packaging.python.org/en/latest/appveyor/
-
-environment:
-
- matrix:
-
- # For Python versions available on Appveyor, see
- # http://www.appveyor.com/docs/installed-software#python
- - PYTHON: "C:\\Python27"
- - PYTHON: "C:\\Python27-x64"
- # DISTUTILS_USE_SDK: "1"
- # TOX_TESTENV_PASSENV: "DISTUTILS_USE_SDK INCLUDE LIB"
-
-install:
- - |
- %PYTHON%\python.exe -m pip install -U pip
- %PYTHON%\python.exe -m pip install wheel tox==3.9.0 virtualenv
-
-# note:
-# %PYTHON% has: python.exe
-# %PYTHON%\Scripts has: pip.exe, tox.exe (and others installed by bare pip)
-
-# We have a custom "build" system. We don't need MSBuild or whatever.
-build: off
-
-# Do not build feature branch with open pull requests. This is documented but
-# it's not clear it does anything.
-skip_branch_with_pr: true
-
-# This, perhaps, is effective.
-branches:
- # whitelist
- only:
- - 'master'
-
-skip_commits:
- files:
- # The Windows builds are unaffected by news fragments.
- - 'newsfragments/*'
- # Also, all this build junk.
- - '.circleci/*'
- - '.lgtm.yml'
- - '.travis.yml'
-
-# we run from C:\projects\tahoe-lafs
-
-test_script:
- # Put your test command here.
- # Note that you must use the environment variable %PYTHON% to refer to
- # the interpreter you're using - Appveyor does not do anything special
- # to put the Python version you want to use on PATH.
- - |
- %PYTHON%\Scripts\tox.exe -e coverage
- %PYTHON%\Scripts\tox.exe -e pyinstaller
- # To verify that the resultant PyInstaller-generated binary executes
- # cleanly (i.e., that it terminates with an exit code of 0 and isn't
- # failing due to import/packaging-related errors, etc.).
- - dist\Tahoe-LAFS\tahoe.exe --version
-
-after_test:
- # This builds the main tahoe wheel, and wheels for all dependencies.
- # Again, you only need build.cmd if you're building C extensions for
- # 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct
- # interpreter. If _trial_temp still exists, the "pip wheel" fails on
- # _trial_temp\local_dir (not sure why).
- - |
- copy _trial_temp\test.log trial_test_log.txt
- rd /s /q _trial_temp
- %PYTHON%\python.exe setup.py bdist_wheel
- %PYTHON%\python.exe -m pip wheel -w dist .
- - |
- %PYTHON%\python.exe -m pip install codecov "coverage ~= 4.5"
- %PYTHON%\python.exe -m coverage xml -o coverage.xml -i
- %PYTHON%\python.exe -m codecov -X search -X gcov -f coverage.xml
-
-artifacts:
- # bdist_wheel puts your built wheel in the dist directory
- # "pip wheel -w dist ." puts all the dependency wheels there too
- # this gives us a zipfile with everything
- - path: 'dist\*'
- - path: trial_test_log.txt
- name: Trial test.log
- - path: eliot.log
- name: Eliot test log
-
-on_failure:
- # Artifacts are not normally uploaded when the job fails. To get the test
- # logs, we have to push them ourselves.
- - ps: Push-AppveyorArtifact _trial_temp\test.log -Filename trial.log
- - ps: Push-AppveyorArtifact eliot.log -Filename eliot.log
-
-#on_success:
-# You can use this step to upload your artifacts to a public website.
-# See Appveyor's documentation for more details. Or you can simply
-# access your wheels from the Appveyor "artifacts" tab for your build.
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9f7381f33..df181f058 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -285,7 +285,7 @@ jobs:
# this reporter on Python 3. So drop that and just specify the
# reporter.
TAHOE_LAFS_TRIAL_ARGS: "--reporter=subunitv2-file"
- TAHOE_LAFS_TOX_ENVIRONMENT: "py36"
+ TAHOE_LAFS_TOX_ENVIRONMENT: "py36-coverage"
ubuntu-20.04:
@@ -508,6 +508,7 @@ jobs:
environment:
DISTRO: "ubuntu"
TAG: "20.04"
+ PYTHON_VERSION: "2.7"
build-image-centos-8:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7cd97dcca..34a4e0875 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -49,8 +49,8 @@ jobs:
- name: Display tool versions
run: python misc/build_helpers/show-tool-versions.py
- - name: Run "tox -e coverage"
- run: tox -e coverage
+ - name: Run "tox -e py27-coverage"
+ run: tox -e py27-coverage
- name: Upload eliot.log in case of failure
uses: actions/upload-artifact@v1
diff --git a/.gitignore b/.gitignore
index 8191c173b..99f905526 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-venv
+venv*
# vim swap files
*.swp
diff --git a/docs/how_to_make_a_tahoe-lafs_release.org b/docs/how_to_make_a_tahoe-lafs_release.org
index 44b9e3dd1..b3f2a84d7 100644
--- a/docs/how_to_make_a_tahoe-lafs_release.org
+++ b/docs/how_to_make_a_tahoe-lafs_release.org
@@ -36,7 +36,7 @@ people are Release Maintainers:
- [ ] documentation is ready (see above)
- [ ] (Release Maintainer): git tag -s -u 0xE34E62D06D0E69CFCA4179FFBDE0D31D68666A7A -m "release Tahoe-LAFS-X.Y.Z" tahoe-lafs-X.Y.Z
- [ ] build code locally:
- tox -e py27,codechecks,coverage,deprecations,docs,integration,upcoming-deprecations
+ tox -e py27,codechecks,deprecations,docs,integration,upcoming-deprecations
- [ ] created tarballs (they'll be in dist/ for later comparison)
tox -e tarballs
- [ ] release version is reporting itself as intended version
diff --git a/newsfragments/3355.other b/newsfragments/3355.other
new file mode 100644
index 000000000..4e854e4dd
--- /dev/null
+++ b/newsfragments/3355.other
@@ -0,0 +1 @@
+The "coverage" tox environment has been replaced by the "py27-coverage" and "py36-coverage" environments.
diff --git a/newsfragments/3367.minor b/newsfragments/3367.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3377.minor b/newsfragments/3377.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3381.minor b/newsfragments/3381.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3387.minor b/newsfragments/3387.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3388.minor b/newsfragments/3388.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/newsfragments/3395.minor b/newsfragments/3395.minor
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/allmydata/interfaces.py b/src/allmydata/interfaces.py
index 081d9a33d..c93c1d81d 100644
--- a/src/allmydata/interfaces.py
+++ b/src/allmydata/interfaces.py
@@ -1,4 +1,3 @@
-
from past.builtins import long
from zope.interface import Interface, Attribute
diff --git a/src/allmydata/storage/expirer.py b/src/allmydata/storage/expirer.py
index a13c188bd..ffe2bf774 100644
--- a/src/allmydata/storage/expirer.py
+++ b/src/allmydata/storage/expirer.py
@@ -1,3 +1,13 @@
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ # We omit anything that might end up in pickle, just in case.
+ from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min # noqa: F401
+
import time, os, pickle, struct
from allmydata.storage.crawler import ShareCrawler
from allmydata.storage.shares import get_share_file
diff --git a/src/allmydata/storage/mutable.py b/src/allmydata/storage/mutable.py
index 287ed8fb9..c108dfe32 100644
--- a/src/allmydata/storage/mutable.py
+++ b/src/allmydata/storage/mutable.py
@@ -48,8 +48,9 @@ class MutableShareFile(object):
# our sharefiles share with a recognizable string, plus some random
# binary data to reduce the chance that a regular text file will look
# like a sharefile.
- MAGIC = "Tahoe mutable container v1\n" + "\x75\x09\x44\x03\x8e"
+ MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
assert len(MAGIC) == 32
+ assert isinstance(MAGIC, bytes)
MAX_SIZE = MAX_MUTABLE_SHARE_SIZE
# TODO: decide upon a policy for max share size
@@ -86,7 +87,7 @@ class MutableShareFile(object):
self.MAGIC, my_nodeid, write_enabler,
data_length, extra_lease_offset,
)
- leases = ("\x00" * self.LEASE_SIZE) * 4
+ leases = (b"\x00" * self.LEASE_SIZE) * 4
f.write(header + leases)
# data goes here, empty after creation
f.write(struct.pack(">L", num_extra_leases))
@@ -154,7 +155,7 @@ class MutableShareFile(object):
# Zero out the old lease info (in order to minimize the chance that
# it could accidentally be exposed to a reader later, re #1528).
f.seek(old_extra_lease_offset)
- f.write('\x00' * leases_size)
+ f.write(b'\x00' * leases_size)
f.flush()
# An interrupt here will corrupt the leases.
@@ -193,7 +194,7 @@ class MutableShareFile(object):
# Fill any newly exposed empty space with 0's.
if offset > data_length:
f.seek(self.DATA_OFFSET+data_length)
- f.write('\x00'*(offset - data_length))
+ f.write(b'\x00'*(offset - data_length))
f.flush()
new_data_length = offset+length
@@ -325,10 +326,10 @@ class MutableShareFile(object):
modified = 0
remaining = 0
blank_lease = LeaseInfo(owner_num=0,
- renew_secret="\x00"*32,
- cancel_secret="\x00"*32,
+ renew_secret=b"\x00"*32,
+ cancel_secret=b"\x00"*32,
expiration_time=0,
- nodeid="\x00"*20)
+ nodeid=b"\x00"*20)
with open(self.home, 'rb+') as f:
for (leasenum,lease) in self._enumerate_leases(f):
accepting_nodeids.add(lease.nodeid)
diff --git a/src/allmydata/test/cli/test_create.py b/src/allmydata/test/cli/test_create.py
index 75162c39e..f013c0205 100644
--- a/src/allmydata/test/cli/test_create.py
+++ b/src/allmydata/test/cli/test_create.py
@@ -6,6 +6,8 @@ from twisted.python import usage
from allmydata.util import configutil
from ..common_util import run_cli, parse_cli
from ...scripts import create_node
+from ... import client
+
def read_config(basedir):
tahoe_cfg = os.path.join(basedir, "tahoe.cfg")
@@ -33,6 +35,31 @@ class Config(unittest.TestCase):
e = self.assertRaises(usage.UsageError, parse_cli, verb, *args)
self.assertIn("option %s not recognized" % (option,), str(e))
+ def test_create_client_config(self):
+ d = self.mktemp()
+ os.mkdir(d)
+ fname = os.path.join(d, 'tahoe.cfg')
+
+ with open(fname, 'w') as f:
+ opts = {"nickname": "nick",
+ "webport": "tcp:3456",
+ "hide-ip": False,
+ "listen": "none",
+ "shares-needed": "1",
+ "shares-happy": "1",
+ "shares-total": "1",
+ }
+ create_node.write_node_config(f, opts)
+ create_node.write_client_config(f, opts)
+
+ config = configutil.get_config(fname)
+ # should succeed, no exceptions
+ configutil.validate_config(
+ fname,
+ config,
+ client._valid_config(),
+ )
+
@defer.inlineCallbacks
def test_client(self):
basedir = self.mktemp()
diff --git a/src/allmydata/test/test_configutil.py b/src/allmydata/test/test_configutil.py
index 45eb6ac25..c57381289 100644
--- a/src/allmydata/test/test_configutil.py
+++ b/src/allmydata/test/test_configutil.py
@@ -1,14 +1,26 @@
+"""
+Tests for allmydata.util.configutil.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Omitted dict, because we're worried about interactions.
+ from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
+
import os.path
from twisted.trial import unittest
from allmydata.util import configutil
-from allmydata.test.no_network import GridTestMixin
-from ..scripts import create_node
-from .. import client
-class ConfigUtilTests(GridTestMixin, unittest.TestCase):
+class ConfigUtilTests(unittest.TestCase):
def setUp(self):
super(ConfigUtilTests, self).setUp()
self.static_valid_config = configutil.ValidConfiguration(
@@ -20,10 +32,22 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
lambda section_name, item_name: (section_name, item_name) == ("node", "valid"),
)
+ def create_tahoe_cfg(self, cfg):
+ d = self.mktemp()
+ os.mkdir(d)
+ fname = os.path.join(d, 'tahoe.cfg')
+ with open(fname, "w") as f:
+ f.write(cfg)
+ return fname
+
def test_config_utils(self):
- self.basedir = "cli/ConfigUtilTests/test-config-utils"
- self.set_up_grid(oneshare=True)
- tahoe_cfg = os.path.join(self.get_clientdir(i=0), "tahoe.cfg")
+ tahoe_cfg = self.create_tahoe_cfg("""\
+[node]
+nickname = client-0
+web.port = adopt-socket:fd=5
+[storage]
+enabled = false
+""")
# test that at least one option was read correctly
config = configutil.get_config(tahoe_cfg)
@@ -45,12 +69,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
self.failUnlessEqual(config.get("node", "descriptor"), descriptor)
def test_config_validation_success(self):
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\n')
config = configutil.get_config(fname)
# should succeed, no exceptions
@@ -66,12 +85,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
validation but are matched by the dynamic validation is considered
valid.
"""
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\n')
config = configutil.get_config(fname)
# should succeed, no exceptions
@@ -82,12 +96,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
)
def test_config_validation_invalid_item(self):
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\ninvalid = foo\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n')
config = configutil.get_config(fname)
e = self.assertRaises(
@@ -103,12 +112,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
A configuration with a section that is matched by neither the static nor
dynamic validators is rejected.
"""
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\n[invalid]\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n')
config = configutil.get_config(fname)
e = self.assertRaises(
@@ -124,12 +128,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
A configuration with a section that is matched by neither the static nor
dynamic validators is rejected.
"""
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\n[invalid]\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\n[invalid]\n')
config = configutil.get_config(fname)
e = self.assertRaises(
@@ -145,12 +144,7 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
A configuration with a section, item pair that is matched by neither the
static nor dynamic validators is rejected.
"""
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- f.write('[node]\nvalid = foo\ninvalid = foo\n')
+ fname = self.create_tahoe_cfg('[node]\nvalid = foo\ninvalid = foo\n')
config = configutil.get_config(fname)
e = self.assertRaises(
@@ -160,28 +154,3 @@ class ConfigUtilTests(GridTestMixin, unittest.TestCase):
self.dynamic_valid_config,
)
self.assertIn("section [node] contains unknown option 'invalid'", str(e))
-
- def test_create_client_config(self):
- d = self.mktemp()
- os.mkdir(d)
- fname = os.path.join(d, 'tahoe.cfg')
-
- with open(fname, 'w') as f:
- opts = {"nickname": "nick",
- "webport": "tcp:3456",
- "hide-ip": False,
- "listen": "none",
- "shares-needed": "1",
- "shares-happy": "1",
- "shares-total": "1",
- }
- create_node.write_node_config(f, opts)
- create_node.write_client_config(f, opts)
-
- config = configutil.get_config(fname)
- # should succeed, no exceptions
- configutil.validate_config(
- fname,
- config,
- client._valid_config(),
- )
diff --git a/src/allmydata/test/test_connection_status.py b/src/allmydata/test/test_connection_status.py
new file mode 100644
index 000000000..2bd8bf6ab
--- /dev/null
+++ b/src/allmydata/test/test_connection_status.py
@@ -0,0 +1,122 @@
+"""
+Tests for allmydata.util.connection_status.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+
+import mock
+
+from twisted.trial import unittest
+
+from ..util import connection_status
+
+class Status(unittest.TestCase):
+ def test_hint_statuses(self):
+ ncs = connection_status._hint_statuses(["h2","h1"],
+ {"h1": "hand1", "h4": "hand4"},
+ {"h1": "st1", "h2": "st2",
+ "h3": "st3"})
+ self.assertEqual(ncs, {"h1 via hand1": "st1",
+ "h2": "st2"})
+
+ def test_reconnector_connected(self):
+ ci = mock.Mock()
+ ci.connectorStatuses = {"h1": "st1"}
+ ci.connectionHandlers = {"h1": "hand1"}
+ ci.winningHint = "h1"
+ ci.establishedAt = 120
+ ri = mock.Mock()
+ ri.state = "connected"
+ ri.connectionInfo = ci
+ rc = mock.Mock
+ rc.getReconnectionInfo = mock.Mock(return_value=ri)
+ cs = connection_status.from_foolscap_reconnector(rc, 123)
+ self.assertEqual(cs.connected, True)
+ self.assertEqual(cs.summary, "Connected to h1 via hand1")
+ self.assertEqual(cs.non_connected_statuses, {})
+ self.assertEqual(cs.last_connection_time, 120)
+ self.assertEqual(cs.last_received_time, 123)
+
+ def test_reconnector_connected_others(self):
+ ci = mock.Mock()
+ ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+ ci.connectionHandlers = {"h1": "hand1"}
+ ci.winningHint = "h1"
+ ci.establishedAt = 120
+ ri = mock.Mock()
+ ri.state = "connected"
+ ri.connectionInfo = ci
+ rc = mock.Mock
+ rc.getReconnectionInfo = mock.Mock(return_value=ri)
+ cs = connection_status.from_foolscap_reconnector(rc, 123)
+ self.assertEqual(cs.connected, True)
+ self.assertEqual(cs.summary, "Connected to h1 via hand1")
+ self.assertEqual(cs.non_connected_statuses, {"h2": "st2"})
+ self.assertEqual(cs.last_connection_time, 120)
+ self.assertEqual(cs.last_received_time, 123)
+
+ def test_reconnector_connected_listener(self):
+ ci = mock.Mock()
+ ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+ ci.connectionHandlers = {"h1": "hand1"}
+ ci.listenerStatus = ("listener1", "successful")
+ ci.winningHint = None
+ ci.establishedAt = 120
+ ri = mock.Mock()
+ ri.state = "connected"
+ ri.connectionInfo = ci
+ rc = mock.Mock
+ rc.getReconnectionInfo = mock.Mock(return_value=ri)
+ cs = connection_status.from_foolscap_reconnector(rc, 123)
+ self.assertEqual(cs.connected, True)
+ self.assertEqual(cs.summary, "Connected via listener (listener1)")
+ self.assertEqual(cs.non_connected_statuses,
+ {"h1 via hand1": "st1", "h2": "st2"})
+ self.assertEqual(cs.last_connection_time, 120)
+ self.assertEqual(cs.last_received_time, 123)
+
+ def test_reconnector_connecting(self):
+ ci = mock.Mock()
+ ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+ ci.connectionHandlers = {"h1": "hand1"}
+ ri = mock.Mock()
+ ri.state = "connecting"
+ ri.connectionInfo = ci
+ rc = mock.Mock
+ rc.getReconnectionInfo = mock.Mock(return_value=ri)
+ cs = connection_status.from_foolscap_reconnector(rc, 123)
+ self.assertEqual(cs.connected, False)
+ self.assertEqual(cs.summary, "Trying to connect")
+ self.assertEqual(cs.non_connected_statuses,
+ {"h1 via hand1": "st1", "h2": "st2"})
+ self.assertEqual(cs.last_connection_time, None)
+ self.assertEqual(cs.last_received_time, 123)
+
+ def test_reconnector_waiting(self):
+ ci = mock.Mock()
+ ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
+ ci.connectionHandlers = {"h1": "hand1"}
+ ri = mock.Mock()
+ ri.state = "waiting"
+ ri.lastAttempt = 10
+ ri.nextAttempt = 20
+ ri.connectionInfo = ci
+ rc = mock.Mock
+ rc.getReconnectionInfo = mock.Mock(return_value=ri)
+ with mock.patch("time.time", return_value=12):
+ cs = connection_status.from_foolscap_reconnector(rc, 5)
+ self.assertEqual(cs.connected, False)
+ self.assertEqual(cs.summary,
+ "Reconnecting in 8 seconds (last attempt 2s ago)")
+ self.assertEqual(cs.non_connected_statuses,
+ {"h1 via hand1": "st1", "h2": "st2"})
+ self.assertEqual(cs.last_connection_time, None)
+ self.assertEqual(cs.last_received_time, 5)
diff --git a/src/allmydata/test/test_connections.py b/src/allmydata/test/test_connections.py
index 3e2806dd0..9b5bd7f30 100644
--- a/src/allmydata/test/test_connections.py
+++ b/src/allmydata/test/test_connections.py
@@ -7,7 +7,6 @@ from foolscap.connections import tcp
from ..node import PrivacyError, config_from_string
from ..node import create_connection_handlers
from ..node import create_main_tub, _tub_portlocation
-from ..util import connection_status
from ..util.i2p_provider import create as create_i2p_provider
from ..util.tor_provider import create as create_tor_provider
@@ -463,106 +462,3 @@ class Privacy(unittest.TestCase):
str(ctx.exception),
"tub.location includes tcp: hint",
)
-
-class Status(unittest.TestCase):
- def test_hint_statuses(self):
- ncs = connection_status._hint_statuses(["h2","h1"],
- {"h1": "hand1", "h4": "hand4"},
- {"h1": "st1", "h2": "st2",
- "h3": "st3"})
- self.assertEqual(ncs, {"h1 via hand1": "st1",
- "h2": "st2"})
-
- def test_reconnector_connected(self):
- ci = mock.Mock()
- ci.connectorStatuses = {"h1": "st1"}
- ci.connectionHandlers = {"h1": "hand1"}
- ci.winningHint = "h1"
- ci.establishedAt = 120
- ri = mock.Mock()
- ri.state = "connected"
- ri.connectionInfo = ci
- rc = mock.Mock
- rc.getReconnectionInfo = mock.Mock(return_value=ri)
- cs = connection_status.from_foolscap_reconnector(rc, 123)
- self.assertEqual(cs.connected, True)
- self.assertEqual(cs.summary, "Connected to h1 via hand1")
- self.assertEqual(cs.non_connected_statuses, {})
- self.assertEqual(cs.last_connection_time, 120)
- self.assertEqual(cs.last_received_time, 123)
-
- def test_reconnector_connected_others(self):
- ci = mock.Mock()
- ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
- ci.connectionHandlers = {"h1": "hand1"}
- ci.winningHint = "h1"
- ci.establishedAt = 120
- ri = mock.Mock()
- ri.state = "connected"
- ri.connectionInfo = ci
- rc = mock.Mock
- rc.getReconnectionInfo = mock.Mock(return_value=ri)
- cs = connection_status.from_foolscap_reconnector(rc, 123)
- self.assertEqual(cs.connected, True)
- self.assertEqual(cs.summary, "Connected to h1 via hand1")
- self.assertEqual(cs.non_connected_statuses, {"h2": "st2"})
- self.assertEqual(cs.last_connection_time, 120)
- self.assertEqual(cs.last_received_time, 123)
-
- def test_reconnector_connected_listener(self):
- ci = mock.Mock()
- ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
- ci.connectionHandlers = {"h1": "hand1"}
- ci.listenerStatus = ("listener1", "successful")
- ci.winningHint = None
- ci.establishedAt = 120
- ri = mock.Mock()
- ri.state = "connected"
- ri.connectionInfo = ci
- rc = mock.Mock
- rc.getReconnectionInfo = mock.Mock(return_value=ri)
- cs = connection_status.from_foolscap_reconnector(rc, 123)
- self.assertEqual(cs.connected, True)
- self.assertEqual(cs.summary, "Connected via listener (listener1)")
- self.assertEqual(cs.non_connected_statuses,
- {"h1 via hand1": "st1", "h2": "st2"})
- self.assertEqual(cs.last_connection_time, 120)
- self.assertEqual(cs.last_received_time, 123)
-
- def test_reconnector_connecting(self):
- ci = mock.Mock()
- ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
- ci.connectionHandlers = {"h1": "hand1"}
- ri = mock.Mock()
- ri.state = "connecting"
- ri.connectionInfo = ci
- rc = mock.Mock
- rc.getReconnectionInfo = mock.Mock(return_value=ri)
- cs = connection_status.from_foolscap_reconnector(rc, 123)
- self.assertEqual(cs.connected, False)
- self.assertEqual(cs.summary, "Trying to connect")
- self.assertEqual(cs.non_connected_statuses,
- {"h1 via hand1": "st1", "h2": "st2"})
- self.assertEqual(cs.last_connection_time, None)
- self.assertEqual(cs.last_received_time, 123)
-
- def test_reconnector_waiting(self):
- ci = mock.Mock()
- ci.connectorStatuses = {"h1": "st1", "h2": "st2"}
- ci.connectionHandlers = {"h1": "hand1"}
- ri = mock.Mock()
- ri.state = "waiting"
- ri.lastAttempt = 10
- ri.nextAttempt = 20
- ri.connectionInfo = ci
- rc = mock.Mock
- rc.getReconnectionInfo = mock.Mock(return_value=ri)
- with mock.patch("time.time", return_value=12):
- cs = connection_status.from_foolscap_reconnector(rc, 5)
- self.assertEqual(cs.connected, False)
- self.assertEqual(cs.summary,
- "Reconnecting in 8 seconds (last attempt 2s ago)")
- self.assertEqual(cs.non_connected_statuses,
- {"h1 via hand1": "st1", "h2": "st2"})
- self.assertEqual(cs.last_connection_time, None)
- self.assertEqual(cs.last_received_time, 5)
diff --git a/src/allmydata/test/test_storage_web.py b/src/allmydata/test/test_storage_web.py
index ee6d7a393..19f98851f 100644
--- a/src/allmydata/test/test_storage_web.py
+++ b/src/allmydata/test/test_storage_web.py
@@ -1,8 +1,19 @@
"""
Tests for twisted.storage that uses Web APIs.
+
+Partially ported to Python 3.
"""
from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    # Omitted list since it broke a test on Python 2. Shouldn't require further
+    # work; when we switch to Python 3 we'll be dropping this anyway.
+ from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, object, range, str, max, min # noqa: F401
import time
import os.path
@@ -18,7 +29,10 @@ from twisted.web.template import flattenString
# We need to use `nevow.inevow.IRequest` for now for compatibility
# with the code in web/common.py. Once nevow bits are gone from
# web/common.py, we can use `twisted.web.iweb.IRequest` here.
-from nevow.inevow import IRequest
+if PY2:
+ from nevow.inevow import IRequest
+else:
+ from twisted.web.iweb import IRequest
from twisted.web.server import Request
from twisted.web.test.requesthelper import DummyChannel
@@ -36,11 +50,11 @@ from allmydata.web.storage import (
StorageStatusElement,
remove_prefix
)
-from .test_storage import FakeCanary
+from .common_py3 import FakeCanary
def remove_tags(s):
- s = re.sub(r'<[^>]*>', ' ', s)
- s = re.sub(r'\s+', ' ', s)
+ s = re.sub(br'<[^>]*>', b' ', s)
+ s = re.sub(br'\s+', b' ', s)
return s
def renderSynchronously(ss):
@@ -89,6 +103,7 @@ class MyStorageServer(StorageServer):
self.bucket_counter = MyBucketCountingCrawler(self, statefile)
self.bucket_counter.setServiceParent(self)
+
class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def setUp(self):
@@ -100,7 +115,7 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def test_bucket_counter(self):
basedir = "storage/BucketCounter/bucket_counter"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
# to make sure we capture the bucket-counting-crawler in the middle
# of a cycle, we reach in and reduce its maximum slice time to 0. We
# also make it start sooner than usual.
@@ -113,12 +128,12 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
# this sample is before the crawler has started doing anything
html = renderSynchronously(w)
-        self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+        self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Accepting new shares: Yes", s)
- self.failUnlessIn("Reserved space: - 0 B (0)", s)
- self.failUnlessIn("Total buckets: Not computed yet", s)
- self.failUnlessIn("Next crawl in", s)
+ self.failUnlessIn(b"Accepting new shares: Yes", s)
+ self.failUnlessIn(b"Reserved space: - 0 B (0)", s)
+ self.failUnlessIn(b"Total buckets: Not computed yet", s)
+ self.failUnlessIn(b"Next crawl in", s)
# give the bucket-counting-crawler one tick to get started. The
# cpu_slice=0 will force it to yield right after it processes the
@@ -137,8 +152,8 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
ss.bucket_counter.cpu_slice = 100.0 # finish as fast as possible
html = renderSynchronously(w)
s = remove_tags(html)
- self.failUnlessIn(" Current crawl ", s)
- self.failUnlessIn(" (next work in ", s)
+ self.failUnlessIn(b" Current crawl ", s)
+ self.failUnlessIn(b" (next work in ", s)
d.addCallback(_check)
# now give it enough time to complete a full cycle
@@ -149,15 +164,15 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
ss.bucket_counter.cpu_slice = orig_cpu_slice
html = renderSynchronously(w)
s = remove_tags(html)
- self.failUnlessIn("Total buckets: 0 (the number of", s)
- self.failUnless("Next crawl in 59 minutes" in s or "Next crawl in 60 minutes" in s, s)
+ self.failUnlessIn(b"Total buckets: 0 (the number of", s)
+        self.failUnless(b"Next crawl in 59 minutes" in s or b"Next crawl in 60 minutes" in s, s)
d.addCallback(_check2)
return d
def test_bucket_counter_cleanup(self):
basedir = "storage/BucketCounter/bucket_counter_cleanup"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
# to make sure we capture the bucket-counting-crawler in the middle
# of a cycle, we reach in and reduce its maximum slice time to 0.
ss.bucket_counter.slow_start = 0
@@ -190,16 +205,16 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
def _check2(ignored):
ss.bucket_counter.cpu_slice = orig_cpu_slice
s = ss.bucket_counter.get_state()
- self.failIf(-12 in s["bucket-counts"], s["bucket-counts"].keys())
+ self.failIf(-12 in s["bucket-counts"], list(s["bucket-counts"].keys()))
self.failIf("bogusprefix!" in s["storage-index-samples"],
- s["storage-index-samples"].keys())
+ list(s["storage-index-samples"].keys()))
d.addCallback(_check2)
return d
def test_bucket_counter_eta(self):
basedir = "storage/BucketCounter/bucket_counter_eta"
fileutil.make_dirs(basedir)
- ss = MyStorageServer(basedir, "\x00" * 20)
+ ss = MyStorageServer(basedir, b"\x00" * 20)
ss.bucket_counter.slow_start = 0
# these will be fired inside finished_prefix()
hooks = ss.bucket_counter.hook_ds = [defer.Deferred() for i in range(3)]
@@ -211,20 +226,20 @@ class BucketCounter(unittest.TestCase, pollmixin.PollMixin):
# no ETA is available yet
html = renderSynchronously(w)
s = remove_tags(html)
- self.failUnlessIn("complete (next work", s)
+ self.failUnlessIn(b"complete (next work", s)
def _check_2(ignored):
# one prefix has finished, so an ETA based upon that elapsed time
# should be available.
html = renderSynchronously(w)
s = remove_tags(html)
- self.failUnlessIn("complete (ETA ", s)
+ self.failUnlessIn(b"complete (ETA ", s)
def _check_3(ignored):
# two prefixes have finished
html = renderSynchronously(w)
s = remove_tags(html)
- self.failUnlessIn("complete (ETA ", s)
+ self.failUnlessIn(b"complete (ETA ", s)
d.callback("done")
hooks[0].addCallback(_check_1).addErrback(d.errback)
@@ -275,27 +290,27 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def make_shares(self, ss):
def make(si):
- return (si, hashutil.tagged_hash("renew", si),
- hashutil.tagged_hash("cancel", si))
+ return (si, hashutil.tagged_hash(b"renew", si),
+ hashutil.tagged_hash(b"cancel", si))
def make_mutable(si):
- return (si, hashutil.tagged_hash("renew", si),
- hashutil.tagged_hash("cancel", si),
- hashutil.tagged_hash("write-enabler", si))
+ return (si, hashutil.tagged_hash(b"renew", si),
+ hashutil.tagged_hash(b"cancel", si),
+ hashutil.tagged_hash(b"write-enabler", si))
def make_extra_lease(si, num):
- return (hashutil.tagged_hash("renew-%d" % num, si),
- hashutil.tagged_hash("cancel-%d" % num, si))
+ return (hashutil.tagged_hash(b"renew-%d" % num, si),
+ hashutil.tagged_hash(b"cancel-%d" % num, si))
- immutable_si_0, rs0, cs0 = make("\x00" * 16)
- immutable_si_1, rs1, cs1 = make("\x01" * 16)
+ immutable_si_0, rs0, cs0 = make(b"\x00" * 16)
+ immutable_si_1, rs1, cs1 = make(b"\x01" * 16)
rs1a, cs1a = make_extra_lease(immutable_si_1, 1)
- mutable_si_2, rs2, cs2, we2 = make_mutable("\x02" * 16)
- mutable_si_3, rs3, cs3, we3 = make_mutable("\x03" * 16)
+ mutable_si_2, rs2, cs2, we2 = make_mutable(b"\x02" * 16)
+ mutable_si_3, rs3, cs3, we3 = make_mutable(b"\x03" * 16)
rs3a, cs3a = make_extra_lease(mutable_si_3, 1)
sharenums = [0]
canary = FakeCanary()
# note: 'tahoe debug dump-share' will not handle this file, since the
# inner contents are not a valid CHK share
- data = "\xff" * 1000
+ data = b"\xff" * 1000
a,w = ss.remote_allocate_buckets(immutable_si_0, rs0, cs0, sharenums,
1000, canary)
@@ -322,7 +337,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_basic(self):
basedir = "storage/LeaseCrawler/basic"
fileutil.make_dirs(basedir)
- ss = InstrumentedStorageServer(basedir, "\x00" * 20)
+ ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -339,7 +354,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
storage_index_to_dir(immutable_si_0),
"not-a-share")
f = open(fn, "wb")
- f.write("I am not a share.\n")
+ f.write(b"I am not a share.\n")
f.close()
# this is before the crawl has started, so we're not in a cycle yet
@@ -398,25 +413,25 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html_in_cycle(html):
s = remove_tags(html)
- self.failUnlessIn("So far, this cycle has examined "
- "1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
- self.failUnlessIn("and has recovered: "
- "0 shares, 0 buckets (0 mutable / 0 immutable), "
- "0 B (0 B / 0 B)", s)
- self.failUnlessIn("If expiration were enabled, "
- "we would have recovered: "
- "0 shares, 0 buckets (0 mutable / 0 immutable),"
- " 0 B (0 B / 0 B) by now", s)
- self.failUnlessIn("and the remainder of this cycle "
- "would probably recover: "
- "0 shares, 0 buckets (0 mutable / 0 immutable),"
- " 0 B (0 B / 0 B)", s)
- self.failUnlessIn("and the whole cycle would probably recover: "
- "0 shares, 0 buckets (0 mutable / 0 immutable),"
- " 0 B (0 B / 0 B)", s)
- self.failUnlessIn("if we were strictly using each lease's default "
- "31-day lease lifetime", s)
- self.failUnlessIn("this cycle would be expected to recover: ", s)
+ self.failUnlessIn(b"So far, this cycle has examined "
+ b"1 shares in 1 buckets (0 mutable / 1 immutable) ", s)
+ self.failUnlessIn(b"and has recovered: "
+ b"0 shares, 0 buckets (0 mutable / 0 immutable), "
+ b"0 B (0 B / 0 B)", s)
+ self.failUnlessIn(b"If expiration were enabled, "
+ b"we would have recovered: "
+ b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+ b" 0 B (0 B / 0 B) by now", s)
+ self.failUnlessIn(b"and the remainder of this cycle "
+ b"would probably recover: "
+ b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+ b" 0 B (0 B / 0 B)", s)
+ self.failUnlessIn(b"and the whole cycle would probably recover: "
+ b"0 shares, 0 buckets (0 mutable / 0 immutable),"
+ b" 0 B (0 B / 0 B)", s)
+ self.failUnlessIn(b"if we were strictly using each lease's default "
+ b"31-day lease lifetime", s)
+ self.failUnlessIn(b"this cycle would be expected to recover: ", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Nothing should have
@@ -473,11 +488,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("recovered: 0 shares, 0 buckets "
- "(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
- self.failUnlessIn("and saw a total of 4 shares, 4 buckets "
- "(2 mutable / 2 immutable),", s)
- self.failUnlessIn("but expiration was not enabled", s)
+ self.failUnlessIn(b"recovered: 0 shares, 0 buckets "
+ b"(0 mutable / 0 immutable), 0 B (0 B / 0 B) ", s)
+ self.failUnlessIn(b"and saw a total of 4 shares, 4 buckets "
+ b"(2 mutable / 2 immutable),", s)
+ self.failUnlessIn(b"but expiration was not enabled", s)
d.addCallback(_check_html)
d.addCallback(lambda ign: renderJSON(webstatus))
def _check_json(raw):
@@ -505,7 +520,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
# setting expiration_time to 2000 means that any lease which is more
# than 2000s old will be expired.
- ss = InstrumentedStorageServer(basedir, "\x00" * 20,
+ ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="age",
expiration_override_lease_duration=2000)
@@ -578,11 +593,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# predictor thinks we'll have 5 shares and that we'll delete them
# all. This part of the test depends upon the SIs landing right
# where they do now.
- self.failUnlessIn("The remainder of this cycle is expected to "
- "recover: 4 shares, 4 buckets", s)
- self.failUnlessIn("The whole cycle is expected to examine "
- "5 shares in 5 buckets and to recover: "
- "5 shares, 5 buckets", s)
+ self.failUnlessIn(b"The remainder of this cycle is expected to "
+ b"recover: 4 shares, 4 buckets", s)
+ self.failUnlessIn(b"The whole cycle is expected to examine "
+ b"5 shares in 5 buckets and to recover: "
+ b"5 shares, 5 buckets", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Two shares should
@@ -632,9 +647,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("Expiration Enabled: expired leases will be removed", s)
- self.failUnlessIn("Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
- self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
+ self.failUnlessIn(b"Expiration Enabled: expired leases will be removed", s)
+ self.failUnlessIn(b"Leases created or last renewed more than 33 minutes ago will be considered expired.", s)
+ self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
d.addCallback(_check_html)
return d
@@ -645,7 +660,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# is more than 2000s old will be expired.
now = time.time()
then = int(now - 2000)
- ss = InstrumentedStorageServer(basedir, "\x00" * 20,
+ ss = InstrumentedStorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then)
@@ -722,11 +737,11 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# predictor thinks we'll have 5 shares and that we'll delete them
# all. This part of the test depends upon the SIs landing right
# where they do now.
- self.failUnlessIn("The remainder of this cycle is expected to "
- "recover: 4 shares, 4 buckets", s)
- self.failUnlessIn("The whole cycle is expected to examine "
- "5 shares in 5 buckets and to recover: "
- "5 shares, 5 buckets", s)
+ self.failUnlessIn(b"The remainder of this cycle is expected to "
+ b"recover: 4 shares, 4 buckets", s)
+ self.failUnlessIn(b"The whole cycle is expected to examine "
+ b"5 shares in 5 buckets and to recover: "
+ b"5 shares, 5 buckets", s)
d.addCallback(_check_html_in_cycle)
# wait for the crawler to finish the first cycle. Two shares should
@@ -778,12 +793,13 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("Expiration Enabled:"
- " expired leases will be removed", s)
- date = time.strftime("%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then))
- substr = "Leases created or last renewed before %s will be considered expired." % date
+ self.failUnlessIn(b"Expiration Enabled:"
+ b" expired leases will be removed", s)
+ date = time.strftime(
+ u"%Y-%m-%d (%d-%b-%Y) UTC", time.gmtime(then)).encode("ascii")
+ substr = b"Leases created or last renewed before %s will be considered expired." % date
self.failUnlessIn(substr, s)
- self.failUnlessIn(" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
+ self.failUnlessIn(b" recovered: 2 shares, 2 buckets (1 mutable / 1 immutable), ", s)
d.addCallback(_check_html)
return d
@@ -792,7 +808,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
now = time.time()
then = int(now - 2000)
- ss = StorageServer(basedir, "\x00" * 20,
+ ss = StorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then,
@@ -840,7 +856,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("The following sharetypes will be expired: immutable.", s)
+ self.failUnlessIn(b"The following sharetypes will be expired: immutable.", s)
d.addCallback(_check_html)
return d
@@ -849,7 +865,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fileutil.make_dirs(basedir)
now = time.time()
then = int(now - 2000)
- ss = StorageServer(basedir, "\x00" * 20,
+ ss = StorageServer(basedir, b"\x00" * 20,
expiration_enabled=True,
expiration_mode="cutoff-date",
expiration_cutoff_date=then,
@@ -897,7 +913,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
d.addCallback(lambda ign: renderDeferred(webstatus))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("The following sharetypes will be expired: mutable.", s)
+ self.failUnlessIn(b"The following sharetypes will be expired: mutable.", s)
d.addCallback(_check_html)
return d
@@ -905,14 +921,14 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
basedir = "storage/LeaseCrawler/bad_mode"
fileutil.make_dirs(basedir)
e = self.failUnlessRaises(ValueError,
- StorageServer, basedir, "\x00" * 20,
+ StorageServer, basedir, b"\x00" * 20,
expiration_mode="bogus")
self.failUnlessIn("GC mode 'bogus' must be 'age' or 'cutoff-date'", str(e))
def test_limited_history(self):
basedir = "storage/LeaseCrawler/limited_history"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -944,7 +960,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_unpredictable_future(self):
basedir = "storage/LeaseCrawler/unpredictable_future"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
# make it start sooner than usual.
lc = ss.lease_checker
lc.slow_start = 0
@@ -1007,7 +1023,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
def test_no_st_blocks(self):
basedir = "storage/LeaseCrawler/no_st_blocks"
fileutil.make_dirs(basedir)
- ss = No_ST_BLOCKS_StorageServer(basedir, "\x00" * 20,
+ ss = No_ST_BLOCKS_StorageServer(basedir, b"\x00" * 20,
expiration_mode="age",
expiration_override_lease_duration=-1000)
# a negative expiration_time= means the "configured-"
@@ -1046,7 +1062,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
]
basedir = "storage/LeaseCrawler/share_corruption"
fileutil.make_dirs(basedir)
- ss = InstrumentedStorageServer(basedir, "\x00" * 20)
+ ss = InstrumentedStorageServer(basedir, b"\x00" * 20)
w = StorageStatus(ss)
# make it start sooner than usual.
lc = ss.lease_checker
@@ -1064,7 +1080,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
fn = os.path.join(ss.sharedir, storage_index_to_dir(first), "0")
f = open(fn, "rb+")
f.seek(0)
- f.write("BAD MAGIC")
+ f.write(b"BAD MAGIC")
f.close()
# if get_share_file() doesn't see the correct mutable magic, it
# assumes the file is an immutable share, and then
@@ -1073,7 +1089,7 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# UnknownImmutableContainerVersionError.
# also create an empty bucket
- empty_si = base32.b2a("\x04"*16)
+ empty_si = base32.b2a(b"\x04"*16)
empty_bucket_dir = os.path.join(ss.sharedir,
storage_index_to_dir(empty_si))
fileutil.make_dirs(empty_bucket_dir)
@@ -1094,7 +1110,9 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
rec = so_far["space-recovered"]
self.failUnlessEqual(rec["examined-buckets"], 1)
self.failUnlessEqual(rec["examined-shares"], 0)
- self.failUnlessEqual(so_far["corrupt-shares"], [(first_b32, 0)])
+ [(actual_b32, i)] = so_far["corrupt-shares"]
+ actual_b32 = actual_b32.encode("ascii")
+ self.failUnlessEqual((actual_b32, i), (first_b32, 0))
d.addCallback(_after_first_bucket)
d.addCallback(lambda ign: renderJSON(w))
@@ -1103,13 +1121,15 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
# grr. json turns all dict keys into strings.
so_far = data["lease-checker"]["cycle-to-date"]
corrupt_shares = so_far["corrupt-shares"]
- # it also turns all tuples into lists
- self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
+ # it also turns all tuples into lists, and result is unicode:
+ [(actual_b32, i)] = corrupt_shares
+ actual_b32 = actual_b32.encode("ascii")
+ self.failUnlessEqual([actual_b32, i], [first_b32, 0])
d.addCallback(_check_json)
d.addCallback(lambda ign: renderDeferred(w))
def _check_html(html):
s = remove_tags(html)
- self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
+ self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
d.addCallback(_check_html)
def _wait():
@@ -1122,19 +1142,22 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
rec = last["space-recovered"]
self.failUnlessEqual(rec["examined-buckets"], 5)
self.failUnlessEqual(rec["examined-shares"], 3)
- self.failUnlessEqual(last["corrupt-shares"], [(first_b32, 0)])
+ [(actual_b32, i)] = last["corrupt-shares"]
+ actual_b32 = actual_b32.encode("ascii")
+ self.failUnlessEqual((actual_b32, i), (first_b32, 0))
d.addCallback(_after_first_cycle)
d.addCallback(lambda ign: renderJSON(w))
def _check_json_history(raw):
data = json.loads(raw)
last = data["lease-checker"]["history"]["0"]
- corrupt_shares = last["corrupt-shares"]
- self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
+ [(actual_b32, i)] = last["corrupt-shares"]
+ actual_b32 = actual_b32.encode("ascii")
+ self.failUnlessEqual([actual_b32, i], [first_b32, 0])
d.addCallback(_check_json_history)
d.addCallback(lambda ign: renderDeferred(w))
def _check_html_history(html):
s = remove_tags(html)
- self.failUnlessIn("Corrupt shares: SI %s shnum 0" % first_b32, s)
+ self.failUnlessIn(b"Corrupt shares: SI %s shnum 0" % first_b32, s)
d.addCallback(_check_html_history)
def _cleanup(res):
@@ -1156,23 +1179,23 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
def test_no_server(self):
w = StorageStatus(None)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>No Storage Server Running</h1>", html)
+ self.failUnlessIn(b"<h1>No Storage Server Running</h1>", html)
def test_status(self):
basedir = "storage/WebStatus/status"
fileutil.make_dirs(basedir)
- nodeid = "\x00" * 20
+ nodeid = b"\x00" * 20
ss = StorageServer(basedir, nodeid)
ss.setServiceParent(self.s)
w = StorageStatus(ss, "nickname")
d = renderDeferred(w)
def _check_html(html):
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Server Nickname: nickname", s)
- self.failUnlessIn("Server Nodeid: %s" % base32.b2a(nodeid), s)
- self.failUnlessIn("Accepting new shares: Yes", s)
- self.failUnlessIn("Reserved space: - 0 B (0)", s)
+ self.failUnlessIn(b"Server Nickname: nickname", s)
+ self.failUnlessIn(b"Server Nodeid: %s" % base32.b2a(nodeid), s)
+ self.failUnlessIn(b"Accepting new shares: Yes", s)
+ self.failUnlessIn(b"Reserved space: - 0 B (0)", s)
d.addCallback(_check_html)
d.addCallback(lambda ign: renderJSON(w))
def _check_json(raw):
@@ -1195,15 +1218,15 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
# (test runs on all platforms).
basedir = "storage/WebStatus/status_no_disk_stats"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Accepting new shares: Yes", s)
- self.failUnlessIn("Total disk space: ?", s)
- self.failUnlessIn("Space Available to Tahoe: ?", s)
+ self.failUnlessIn(b"Accepting new shares: Yes", s)
+ self.failUnlessIn(b"Total disk space: ?", s)
+ self.failUnlessIn(b"Space Available to Tahoe: ?", s)
self.failUnless(ss.get_available_space() is None)
def test_status_bad_disk_stats(self):
@@ -1215,15 +1238,15 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
# show that no shares will be accepted, and get_available_space() should be 0.
basedir = "storage/WebStatus/status_bad_disk_stats"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20)
+ ss = StorageServer(basedir, b"\x00" * 20)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Accepting new shares: No", s)
- self.failUnlessIn("Total disk space: ?", s)
- self.failUnlessIn("Space Available to Tahoe: ?", s)
+ self.failUnlessIn(b"Accepting new shares: No", s)
+ self.failUnlessIn(b"Total disk space: ?", s)
+ self.failUnlessIn(b"Space Available to Tahoe: ?", s)
self.failUnlessEqual(ss.get_available_space(), 0)
def test_status_right_disk_stats(self):
@@ -1235,7 +1258,7 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
basedir = "storage/WebStatus/status_right_disk_stats"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20, reserved_space=reserved)
+ ss = StorageServer(basedir, b"\x00" * 20, reserved_space=reserved)
expecteddir = ss.sharedir
def call_get_disk_stats(whichdir, reserved_space=0):
@@ -1256,48 +1279,48 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin):
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Total disk space: 5.00 GB", s)
- self.failUnlessIn("Disk space used: - 1.00 GB", s)
- self.failUnlessIn("Disk space free (root): 4.00 GB", s)
- self.failUnlessIn("Disk space free (non-root): 3.00 GB", s)
- self.failUnlessIn("Reserved space: - 1.00 GB", s)
- self.failUnlessIn("Space Available to Tahoe: 2.00 GB", s)
+ self.failUnlessIn(b"Total disk space: 5.00 GB", s)
+ self.failUnlessIn(b"Disk space used: - 1.00 GB", s)
+ self.failUnlessIn(b"Disk space free (root): 4.00 GB", s)
+ self.failUnlessIn(b"Disk space free (non-root): 3.00 GB", s)
+ self.failUnlessIn(b"Reserved space: - 1.00 GB", s)
+ self.failUnlessIn(b"Space Available to Tahoe: 2.00 GB", s)
self.failUnlessEqual(ss.get_available_space(), 2*GB)
def test_readonly(self):
basedir = "storage/WebStatus/readonly"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20, readonly_storage=True)
+ ss = StorageServer(basedir, b"\x00" * 20, readonly_storage=True)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Accepting new shares: No", s)
+ self.failUnlessIn(b"Accepting new shares: No", s)
def test_reserved(self):
basedir = "storage/WebStatus/reserved"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
+ ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
+ self.failUnlessIn(b"Reserved space: - 10.00 MB (10000000)", s)
def test_huge_reserved(self):
basedir = "storage/WebStatus/reserved"
fileutil.make_dirs(basedir)
- ss = StorageServer(basedir, "\x00" * 20, reserved_space=10e6)
+ ss = StorageServer(basedir, b"\x00" * 20, reserved_space=10e6)
ss.setServiceParent(self.s)
w = StorageStatus(ss)
html = renderSynchronously(w)
- self.failUnlessIn("<h1>Storage Server Status</h1>", html)
+ self.failUnlessIn(b"<h1>Storage Server Status</h1>", html)
s = remove_tags(html)
- self.failUnlessIn("Reserved space: - 10.00 MB (10000000)", s)
+ self.failUnlessIn(b"Reserved space: - 10.00 MB (10000000)", s)
def test_util(self):
w = StorageStatusElement(None, None)
diff --git a/src/allmydata/test/test_uri.py b/src/allmydata/test/test_uri.py
index c04b1259d..f89fae151 100644
--- a/src/allmydata/test/test_uri.py
+++ b/src/allmydata/test/test_uri.py
@@ -1,3 +1,17 @@
+"""
+Tests for allmydata.uri.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401
import os
from twisted.trial import unittest
@@ -40,24 +54,24 @@ class Literal(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.get_verify_cap(), None)
def test_empty(self):
- data = "" # This data is some *very* small data!
+ data = b"" # This data is some *very* small data!
return self._help_test(data)
def test_pack(self):
- data = "This is some small data"
+ data = b"This is some small data"
return self._help_test(data)
def test_nonascii(self):
- data = "This contains \x00 and URI:LIT: and \n, oh my."
+ data = b"This contains \x00 and URI:LIT: and \n, oh my."
return self._help_test(data)
class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
def test_compare(self):
- lit1 = uri.LiteralFileURI("some data")
- fileURI = 'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834'
+ lit1 = uri.LiteralFileURI(b"some data")
+ fileURI = b'URI:CHK:f5ahxa25t4qkktywz6teyfvcx4:opuioq7tj2y6idzfp6cazehtmgs5fdcebcz3cygrxyydvcozrmeq:3:10:345834'
chk1 = uri.CHKFileURI.init_from_string(fileURI)
chk2 = uri.CHKFileURI.init_from_string(fileURI)
- unk = uri.UnknownURI("lafs://from_the_future")
+ unk = uri.UnknownURI(b"lafs://from_the_future")
self.failIfEqual(lit1, chk1)
self.failUnlessReallyEqual(chk1, chk2)
self.failIfEqual(chk1, "not actually a URI")
@@ -66,21 +80,24 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(len(s), 3) # since chk1==chk2
def test_is_uri(self):
- lit1 = uri.LiteralFileURI("some data").to_string()
+ lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_uri(lit1))
self.failIf(uri.is_uri(None))
def test_is_literal_file_uri(self):
- lit1 = uri.LiteralFileURI("some data").to_string()
+ lit1 = uri.LiteralFileURI(b"some data").to_string()
self.failUnless(uri.is_literal_file_uri(lit1))
self.failIf(uri.is_literal_file_uri(None))
self.failIf(uri.is_literal_file_uri("foo"))
self.failIf(uri.is_literal_file_uri("ro.foo"))
- self.failIf(uri.is_literal_file_uri("URI:LITfoo"))
+ self.failIf(uri.is_literal_file_uri(b"URI:LITfoo"))
self.failUnless(uri.is_literal_file_uri("ro.URI:LIT:foo"))
self.failUnless(uri.is_literal_file_uri("imm.URI:LIT:foo"))
def test_has_uri_prefix(self):
+ self.failUnless(uri.has_uri_prefix(b"URI:foo"))
+ self.failUnless(uri.has_uri_prefix(b"ro.URI:foo"))
+ self.failUnless(uri.has_uri_prefix(b"imm.URI:foo"))
self.failUnless(uri.has_uri_prefix("URI:foo"))
self.failUnless(uri.has_uri_prefix("ro.URI:foo"))
self.failUnless(uri.has_uri_prefix("imm.URI:foo"))
@@ -89,9 +106,9 @@ class Compare(testutil.ReallyEqualMixin, unittest.TestCase):
class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
- key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key)
- uri_extension_hash = hashutil.uri_extension_hash("stuff")
+ uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25
total_shares = 100
size = 1234
@@ -138,26 +155,26 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())
v = u.get_verify_cap()
- self.failUnless(isinstance(v.to_string(), str))
+ self.failUnless(isinstance(v.to_string(), bytes))
self.failUnless(v.is_readonly())
self.failIf(v.is_mutable())
v2 = uri.from_string(v.to_string())
self.failUnlessReallyEqual(v, v2)
- v3 = uri.CHKFileVerifierURI(storage_index="\x00"*16,
- uri_extension_hash="\x00"*32,
+ v3 = uri.CHKFileVerifierURI(storage_index=b"\x00"*16,
+ uri_extension_hash=b"\x00"*32,
needed_shares=3,
total_shares=10,
size=1234)
- self.failUnless(isinstance(v3.to_string(), str))
+ self.failUnless(isinstance(v3.to_string(), bytes))
self.failUnless(v3.is_readonly())
self.failIf(v3.is_mutable())
def test_pack_badly(self):
- key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
storage_index = hashutil.storage_index_hash(key)
- uri_extension_hash = hashutil.uri_extension_hash("stuff")
+ uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 25
total_shares = 100
size = 1234
@@ -186,35 +203,37 @@ class CHKFile(testutil.ReallyEqualMixin, unittest.TestCase):
class Extension(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
- data = {"stuff": "value",
- "size": 12,
- "needed_shares": 3,
- "big_hash": hashutil.tagged_hash("foo", "bar"),
+ data = {b"stuff": b"value",
+ b"size": 12,
+ b"needed_shares": 3,
+ b"big_hash": hashutil.tagged_hash(b"foo", b"bar"),
}
ext = uri.pack_extension(data)
d = uri.unpack_extension(ext)
- self.failUnlessReallyEqual(d["stuff"], "value")
- self.failUnlessReallyEqual(d["size"], 12)
- self.failUnlessReallyEqual(d["big_hash"], hashutil.tagged_hash("foo", "bar"))
+ self.failUnlessReallyEqual(d[b"stuff"], b"value")
+ self.failUnlessReallyEqual(d[b"size"], 12)
+ self.failUnlessReallyEqual(d[b"big_hash"], hashutil.tagged_hash(b"foo", b"bar"))
readable = uri.unpack_extension_readable(ext)
- self.failUnlessReallyEqual(readable["needed_shares"], 3)
- self.failUnlessReallyEqual(readable["stuff"], "value")
- self.failUnlessReallyEqual(readable["size"], 12)
- self.failUnlessReallyEqual(readable["big_hash"],
- base32.b2a(hashutil.tagged_hash("foo", "bar")))
- self.failUnlessReallyEqual(readable["UEB_hash"],
+ self.failUnlessReallyEqual(readable[b"needed_shares"], 3)
+ self.failUnlessReallyEqual(readable[b"stuff"], b"value")
+ self.failUnlessReallyEqual(readable[b"size"], 12)
+ self.failUnlessReallyEqual(readable[b"big_hash"],
+ base32.b2a(hashutil.tagged_hash(b"foo", b"bar")))
+ self.failUnlessReallyEqual(readable[b"UEB_hash"],
base32.b2a(hashutil.uri_extension_hash(ext)))
class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
def test_from_future(self):
# any URI type that we don't recognize should be treated as unknown
- future_uri = "I am a URI from the future. Whatever you do, don't "
+ future_uri = b"I am a URI from the future. Whatever you do, don't "
u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri)
self.failUnless(u.get_readonly() is None)
self.failUnless(u.get_error() is None)
+ future_uri_unicode = future_uri.decode("utf-8")
+ self.assertEqual(future_uri, uri.from_string(future_uri_unicode).to_string())
u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
self.failUnlessReallyEqual(u.to_string(), future_uri)
@@ -222,7 +241,7 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(isinstance(u2.get_error(), CapConstraintError))
# Future caps might have non-ASCII chars in them. (Or maybe not, who can tell about the future?)
- future_uri = u"I am a cap from the \u263A future. Whatever you ".encode('utf-8')
+ future_uri = u"I am a cap from the \u263A future. Whatever you ".encode("utf-8")
u = uri.from_string(future_uri)
self.failUnless(isinstance(u, uri.UnknownURI))
self.failUnlessReallyEqual(u.to_string(), future_uri)
@@ -236,15 +255,15 @@ class Unknown(testutil.ReallyEqualMixin, unittest.TestCase):
class Constraint(testutil.ReallyEqualMixin, unittest.TestCase):
def test_constraint(self):
- bad = "http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/"
+ bad = b"http://127.0.0.1:3456/uri/URI%3ADIR2%3Agh3l5rbvnv2333mrfvalmjfr4i%3Alz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma/"
self.failUnlessRaises(uri.BadURIError, uri.DirectoryURI.init_from_string, bad)
- fileURI = 'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834'
+ fileURI = b'URI:CHK:gh3l5rbvnv2333mrfvalmjfr4i:lz6l7u3z3b7g37s4zkdmfpx5ly4ib4m6thrpbusi6ys62qtc6mma:3:10:345834'
uri.CHKFileURI.init_from_string(fileURI)
class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
def setUp(self):
- self.writekey = "\x01" * 16
- self.fingerprint = "\x02" * 32
+ self.writekey = b"\x01" * 16
+ self.fingerprint = b"\x02" * 32
self.readkey = hashutil.ssk_readkey_hash(self.writekey)
self.storage_index = hashutil.ssk_storage_index_hash(self.readkey)
@@ -410,28 +429,29 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint)
cap = u1.to_string()
- cap2 = cap+":I COME FROM THE FUTURE"
+ cap2 = cap+b":I COME FROM THE FUTURE"
u2 = uri.WriteableMDMFFileURI.init_from_string(cap2)
self.failUnlessReallyEqual(self.writekey, u2.writekey)
self.failUnlessReallyEqual(self.fingerprint, u2.fingerprint)
self.failIf(u2.is_readonly())
self.failUnless(u2.is_mutable())
- cap3 = cap+":"+os.urandom(40) # parse *that*!
+
+ cap3 = cap+b":" + os.urandom(40)
u3 = uri.WriteableMDMFFileURI.init_from_string(cap3)
self.failUnlessReallyEqual(self.writekey, u3.writekey)
self.failUnlessReallyEqual(self.fingerprint, u3.fingerprint)
self.failIf(u3.is_readonly())
self.failUnless(u3.is_mutable())
- cap4 = u1.get_readonly().to_string()+":ooh scary future stuff"
+ cap4 = u1.get_readonly().to_string()+b":ooh scary future stuff"
u4 = uri.from_string_mutable_filenode(cap4)
self.failUnlessReallyEqual(self.readkey, u4.readkey)
self.failUnlessReallyEqual(self.fingerprint, u4.fingerprint)
self.failUnless(u4.is_readonly())
self.failUnless(u4.is_mutable())
- cap5 = u1.get_verify_cap().to_string()+":spoilers!"
+ cap5 = u1.get_verify_cap().to_string()+b":spoilers!"
u5 = uri.from_string(cap5)
self.failUnlessReallyEqual(self.storage_index, u5.storage_index)
self.failUnlessReallyEqual(self.fingerprint, u5.fingerprint)
@@ -468,8 +488,8 @@ class Mutable(testutil.ReallyEqualMixin, unittest.TestCase):
class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
def test_pack(self):
- writekey = "\x01" * 16
- fingerprint = "\x02" * 32
+ writekey = b"\x01" * 16
+ fingerprint = b"\x02" * 32
n = uri.WriteableSSKFileURI(writekey, fingerprint)
u1 = uri.DirectoryURI(n)
@@ -536,8 +556,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
u1.get_verify_cap()._filenode_uri)
def test_immutable(self):
- readkey = "\x01" * 16
- uri_extension_hash = hashutil.uri_extension_hash("stuff")
+ readkey = b"\x01" * 16
+ uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
needed_shares = 3
total_shares = 10
size = 1234
@@ -548,7 +568,7 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
total_shares=total_shares,
size=size)
fncap = fnuri.to_string()
- self.failUnlessReallyEqual(fncap, "URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
+ self.failUnlessReallyEqual(fncap, b"URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
u1 = uri.ImmutableDirectoryURI(fnuri)
self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable())
@@ -587,20 +607,20 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IVerifierURI.providedBy(u2_verifier))
u2vs = u2_verifier.to_string()
# URI:DIR2-CHK-Verifier:$key:$ueb:$k:$n:$size
- self.failUnless(u2vs.startswith("URI:DIR2-CHK-Verifier:"), u2vs)
+ self.failUnless(u2vs.startswith(b"URI:DIR2-CHK-Verifier:"), u2vs)
u2_verifier_fileuri = u2_verifier.get_filenode_cap()
self.failUnless(IVerifierURI.providedBy(u2_verifier_fileuri))
u2vfs = u2_verifier_fileuri.to_string()
# URI:CHK-Verifier:$key:$ueb:$k:$n:$size
self.failUnlessReallyEqual(u2vfs, fnuri.get_verify_cap().to_string())
- self.failUnlessReallyEqual(u2vs[len("URI:DIR2-"):], u2vfs[len("URI:"):])
+ self.failUnlessReallyEqual(u2vs[len(b"URI:DIR2-"):], u2vfs[len(b"URI:"):])
self.failUnless(str(u2_verifier))
def test_literal(self):
- u0 = uri.LiteralFileURI("data")
+ u0 = uri.LiteralFileURI(b"data")
u1 = uri.LiteralDirectoryURI(u0)
self.failUnless(str(u1))
- self.failUnlessReallyEqual(u1.to_string(), "URI:DIR2-LIT:mrqxiyi")
+ self.failUnlessReallyEqual(u1.to_string(), b"URI:DIR2-LIT:mrqxiyi")
self.failUnless(u1.is_readonly())
self.failIf(u1.is_mutable())
self.failUnless(IURI.providedBy(u1))
@@ -608,11 +628,11 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnless(IDirnodeURI.providedBy(u1))
self.failUnlessReallyEqual(u1.get_verify_cap(), None)
self.failUnlessReallyEqual(u1.get_storage_index(), None)
- self.failUnlessReallyEqual(u1.abbrev_si(), "")
+ self.failUnlessReallyEqual(u1.abbrev_si(), b"")
def test_mdmf(self):
- writekey = "\x01" * 16
- fingerprint = "\x02" * 32
+ writekey = b"\x01" * 16
+ fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
self.failIf(d1.is_readonly())
@@ -635,8 +655,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
self.failUnlessIsInstance(d3, uri.UnknownURI)
def test_mdmf_attenuation(self):
- writekey = "\x01" * 16
- fingerprint = "\x02" * 32
+ writekey = b"\x01" * 16
+ fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
@@ -676,8 +696,8 @@ class Dirnode(testutil.ReallyEqualMixin, unittest.TestCase):
def test_mdmf_verifier(self):
# I'm not sure what I want to write here yet.
- writekey = "\x01" * 16
- fingerprint = "\x02" * 32
+ writekey = b"\x01" * 16
+ fingerprint = b"\x02" * 32
uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
d1 = uri.MDMFDirectoryURI(uri1)
v1 = d1.get_verify_cap()
diff --git a/src/allmydata/uri.py b/src/allmydata/uri.py
index 051b45f79..b601226da 100644
--- a/src/allmydata/uri.py
+++ b/src/allmydata/uri.py
@@ -1,3 +1,22 @@
+"""
+URIs (kinda sorta, really they're capabilities?).
+
+Ported to Python 3.
+
+Methods ending in to_string() are actually to_bytes(); this should probably be
+fixed in a follow-up port.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ # Don't import bytes, to prevent leaks.
+ from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, dict, list, object, range, str, max, min # noqa: F401
+
+from past.builtins import unicode, long
import re
@@ -24,10 +43,10 @@ class BadURIError(CapConstraintError):
# - make variable and method names consistently use _uri for an URI string,
# and _cap for a Cap object (decoded URI)
-BASE32STR_128bits = '(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits)
-BASE32STR_256bits = '(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits)
+BASE32STR_128bits = b'(%s{25}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_3bits)
+BASE32STR_256bits = b'(%s{51}%s)' % (base32.BASE32CHAR, base32.BASE32CHAR_1bits)
-NUMBER='([0-9]+)'
+NUMBER=b'([0-9]+)'
class _BaseURI(object):
@@ -53,10 +72,10 @@ class _BaseURI(object):
@implementer(IURI, IImmutableFileURI)
class CHKFileURI(_BaseURI):
- BASE_STRING='URI:CHK:'
- STRING_RE=re.compile('^URI:CHK:'+BASE32STR_128bits+':'+
- BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER+
- '$')
+ BASE_STRING=b'URI:CHK:'
+ STRING_RE=re.compile(b'^URI:CHK:'+BASE32STR_128bits+b':'+
+ BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER+
+ b'$')
def __init__(self, key, uri_extension_hash, needed_shares, total_shares,
size):
@@ -82,7 +101,7 @@ class CHKFileURI(_BaseURI):
assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long))
- return ('URI:CHK:%s:%s:%d:%d:%d' %
+ return (b'URI:CHK:%s:%s:%d:%d:%d' %
(base32.b2a(self.key),
base32.b2a(self.uri_extension_hash),
self.needed_shares,
@@ -112,9 +131,9 @@ class CHKFileURI(_BaseURI):
@implementer(IVerifierURI)
class CHKFileVerifierURI(_BaseURI):
- BASE_STRING='URI:CHK-Verifier:'
- STRING_RE=re.compile('^URI:CHK-Verifier:'+BASE32STR_128bits+':'+
- BASE32STR_256bits+':'+NUMBER+':'+NUMBER+':'+NUMBER)
+ BASE_STRING=b'URI:CHK-Verifier:'
+ STRING_RE=re.compile(b'^URI:CHK-Verifier:'+BASE32STR_128bits+b':'+
+ BASE32STR_256bits+b':'+NUMBER+b':'+NUMBER+b':'+NUMBER)
def __init__(self, storage_index, uri_extension_hash,
needed_shares, total_shares, size):
@@ -138,7 +157,7 @@ class CHKFileVerifierURI(_BaseURI):
assert isinstance(self.total_shares, int)
assert isinstance(self.size, (int,long))
- return ('URI:CHK-Verifier:%s:%s:%d:%d:%d' %
+ return (b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
(si_b2a(self.storage_index),
base32.b2a(self.uri_extension_hash),
self.needed_shares,
@@ -161,12 +180,12 @@ class CHKFileVerifierURI(_BaseURI):
@implementer(IURI, IImmutableFileURI)
class LiteralFileURI(_BaseURI):
- BASE_STRING='URI:LIT:'
- STRING_RE=re.compile('^URI:LIT:'+base32.BASE32STR_anybytes+'$')
+ BASE_STRING=b'URI:LIT:'
+ STRING_RE=re.compile(b'^URI:LIT:'+base32.BASE32STR_anybytes+b'$')
def __init__(self, data=None):
if data is not None:
- assert isinstance(data, str)
+ assert isinstance(data, bytes)
self.data = data
@classmethod
@@ -177,7 +196,7 @@ class LiteralFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)))
def to_string(self):
- return 'URI:LIT:%s' % base32.b2a(self.data)
+ return b'URI:LIT:%s' % base32.b2a(self.data)
def is_readonly(self):
return True
@@ -202,9 +221,9 @@ class LiteralFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class WriteableSSKFileURI(_BaseURI):
- BASE_STRING='URI:SSK:'
- STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+
- BASE32STR_256bits+'$')
+ BASE_STRING=b'URI:SSK:'
+ STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+
+ BASE32STR_256bits+b'$')
def __init__(self, writekey, fingerprint):
self.writekey = writekey
@@ -221,10 +240,10 @@ class WriteableSSKFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.writekey, str)
- assert isinstance(self.fingerprint, str)
- return 'URI:SSK:%s:%s' % (base32.b2a(self.writekey),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.writekey, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ return b'URI:SSK:%s:%s' % (base32.b2a(self.writekey),
+ base32.b2a(self.fingerprint))
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.abbrev())
@@ -251,8 +270,8 @@ class WriteableSSKFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class ReadonlySSKFileURI(_BaseURI):
- BASE_STRING='URI:SSK-RO:'
- STRING_RE=re.compile('^URI:SSK-RO:'+BASE32STR_128bits+':'+BASE32STR_256bits+'$')
+ BASE_STRING=b'URI:SSK-RO:'
+ STRING_RE=re.compile(b'^URI:SSK-RO:'+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')
def __init__(self, readkey, fingerprint):
self.readkey = readkey
@@ -268,10 +287,10 @@ class ReadonlySSKFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.readkey, str)
- assert isinstance(self.fingerprint, str)
- return 'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.readkey, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ return b'URI:SSK-RO:%s:%s' % (base32.b2a(self.readkey),
+ base32.b2a(self.fingerprint))
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.abbrev())
@@ -298,8 +317,8 @@ class ReadonlySSKFileURI(_BaseURI):
@implementer(IVerifierURI)
class SSKVerifierURI(_BaseURI):
- BASE_STRING='URI:SSK-Verifier:'
- STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'$')
+ BASE_STRING=b'URI:SSK-Verifier:'
+ STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'$')
def __init__(self, storage_index, fingerprint):
assert len(storage_index) == 16
@@ -314,10 +333,10 @@ class SSKVerifierURI(_BaseURI):
return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.storage_index, str)
- assert isinstance(self.fingerprint, str)
- return 'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.storage_index, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ return b'URI:SSK-Verifier:%s:%s' % (si_b2a(self.storage_index),
+ base32.b2a(self.fingerprint))
def is_readonly(self):
return True
@@ -335,8 +354,8 @@ class SSKVerifierURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class WriteableMDMFFileURI(_BaseURI):
- BASE_STRING='URI:MDMF:'
- STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
+ BASE_STRING=b'URI:MDMF:'
+ STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, writekey, fingerprint):
self.writekey = writekey
@@ -353,10 +372,10 @@ class WriteableMDMFFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.writekey, str)
- assert isinstance(self.fingerprint, str)
- ret = 'URI:MDMF:%s:%s' % (base32.b2a(self.writekey),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.writekey, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ ret = b'URI:MDMF:%s:%s' % (base32.b2a(self.writekey),
+ base32.b2a(self.fingerprint))
return ret
def __repr__(self):
@@ -384,8 +403,8 @@ class WriteableMDMFFileURI(_BaseURI):
@implementer(IURI, IMutableFileURI)
class ReadonlyMDMFFileURI(_BaseURI):
- BASE_STRING='URI:MDMF-RO:'
- STRING_RE=re.compile('^' +BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
+ BASE_STRING=b'URI:MDMF-RO:'
+ STRING_RE=re.compile(b'^' +BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, readkey, fingerprint):
self.readkey = readkey
@@ -402,10 +421,10 @@ class ReadonlyMDMFFileURI(_BaseURI):
return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.readkey, str)
- assert isinstance(self.fingerprint, str)
- ret = 'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.readkey, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ ret = b'URI:MDMF-RO:%s:%s' % (base32.b2a(self.readkey),
+ base32.b2a(self.fingerprint))
return ret
def __repr__(self):
@@ -433,8 +452,8 @@ class ReadonlyMDMFFileURI(_BaseURI):
@implementer(IVerifierURI)
class MDMFVerifierURI(_BaseURI):
- BASE_STRING='URI:MDMF-Verifier:'
- STRING_RE=re.compile('^'+BASE_STRING+BASE32STR_128bits+':'+BASE32STR_256bits+'(:|$)')
+ BASE_STRING=b'URI:MDMF-Verifier:'
+ STRING_RE=re.compile(b'^'+BASE_STRING+BASE32STR_128bits+b':'+BASE32STR_256bits+b'(:|$)')
def __init__(self, storage_index, fingerprint):
assert len(storage_index) == 16
@@ -449,10 +468,10 @@ class MDMFVerifierURI(_BaseURI):
return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))
def to_string(self):
- assert isinstance(self.storage_index, str)
- assert isinstance(self.fingerprint, str)
- ret = 'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index),
- base32.b2a(self.fingerprint))
+ assert isinstance(self.storage_index, bytes)
+ assert isinstance(self.fingerprint, bytes)
+ ret = b'URI:MDMF-Verifier:%s:%s' % (si_b2a(self.storage_index),
+ base32.b2a(self.fingerprint))
return ret
def is_readonly(self):
@@ -494,12 +513,12 @@ class _DirectoryBaseURI(_BaseURI):
return self.BASE_STRING+bits
def abbrev(self):
- return self._filenode_uri.to_string().split(':')[2][:5]
+ return self._filenode_uri.to_string().split(b':')[2][:5]
def abbrev_si(self):
si = self._filenode_uri.get_storage_index()
if si is None:
- return ""
+ return b""
return base32.b2a(si)[:5]
def is_mutable(self):
@@ -518,8 +537,8 @@ class _DirectoryBaseURI(_BaseURI):
@implementer(IDirectoryURI)
class DirectoryURI(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=WriteableSSKFileURI
def __init__(self, filenode_uri=None):
@@ -537,8 +556,8 @@ class DirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI)
class ReadonlyDirectoryURI(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2-RO:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-RO:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=ReadonlySSKFileURI
def __init__(self, filenode_uri=None):
@@ -571,8 +590,8 @@ class _ImmutableDirectoryBaseURI(_DirectoryBaseURI):
class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):
- BASE_STRING='URI:DIR2-CHK:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-CHK:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=CHKFileURI
def get_verify_cap(self):
@@ -581,8 +600,8 @@ class ImmutableDirectoryURI(_ImmutableDirectoryBaseURI):
class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
- BASE_STRING='URI:DIR2-LIT:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-LIT:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=LiteralFileURI
def get_verify_cap(self):
@@ -593,8 +612,8 @@ class LiteralDirectoryURI(_ImmutableDirectoryBaseURI):
@implementer(IDirectoryURI)
class MDMFDirectoryURI(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2-MDMF:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-MDMF:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=WriteableMDMFFileURI
def __init__(self, filenode_uri=None):
@@ -615,8 +634,8 @@ class MDMFDirectoryURI(_DirectoryBaseURI):
@implementer(IReadonlyDirectoryURI)
class ReadonlyMDMFDirectoryURI(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2-MDMF-RO:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-MDMF-RO:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=ReadonlyMDMFFileURI
def __init__(self, filenode_uri=None):
@@ -653,8 +672,8 @@ def wrap_dirnode_cap(filecap):
@implementer(IVerifierURI)
class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2-MDMF-Verifier:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-MDMF-Verifier:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=MDMFVerifierURI
def __init__(self, filenode_uri=None):
@@ -678,8 +697,8 @@ class MDMFDirectoryURIVerifier(_DirectoryBaseURI):
@implementer(IVerifierURI)
class DirectoryURIVerifier(_DirectoryBaseURI):
- BASE_STRING='URI:DIR2-Verifier:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-Verifier:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=SSKVerifierURI
def __init__(self, filenode_uri=None):
@@ -702,8 +721,8 @@ class DirectoryURIVerifier(_DirectoryBaseURI):
@implementer(IVerifierURI)
class ImmutableDirectoryURIVerifier(DirectoryURIVerifier):
- BASE_STRING='URI:DIR2-CHK-Verifier:'
- BASE_STRING_RE=re.compile('^'+BASE_STRING)
+ BASE_STRING=b'URI:DIR2-CHK-Verifier:'
+ BASE_STRING_RE=re.compile(b'^'+BASE_STRING)
INNER_URI_CLASS=CHKFileVerifierURI
@@ -725,12 +744,15 @@ class UnknownURI(object):
return None
-ALLEGED_READONLY_PREFIX = 'ro.'
-ALLEGED_IMMUTABLE_PREFIX = 'imm.'
+ALLEGED_READONLY_PREFIX = b'ro.'
+ALLEGED_IMMUTABLE_PREFIX = b'imm.'
def from_string(u, deep_immutable=False, name=u""):
- if not isinstance(u, str):
- raise TypeError("URI must be str: %r" % (u,))
+ """Create URI from either unicode or byte string."""
+ if isinstance(u, unicode):
+ u = u.encode("utf-8")
+ if not isinstance(u, bytes):
+ raise TypeError("URI must be unicode string or bytes: %r" % (u,))
# We allow and check ALLEGED_READONLY_PREFIX or ALLEGED_IMMUTABLE_PREFIX
# on all URIs, even though we would only strictly need to do so for caps of
@@ -748,62 +770,62 @@ def from_string(u, deep_immutable=False, name=u""):
error = None
try:
- if s.startswith('URI:CHK:'):
+ if s.startswith(b'URI:CHK:'):
return CHKFileURI.init_from_string(s)
- elif s.startswith('URI:CHK-Verifier:'):
+ elif s.startswith(b'URI:CHK-Verifier:'):
return CHKFileVerifierURI.init_from_string(s)
- elif s.startswith('URI:LIT:'):
+ elif s.startswith(b'URI:LIT:'):
return LiteralFileURI.init_from_string(s)
- elif s.startswith('URI:SSK:'):
+ elif s.startswith(b'URI:SSK:'):
if can_be_writeable:
return WriteableSSKFileURI.init_from_string(s)
kind = "URI:SSK file writecap"
- elif s.startswith('URI:SSK-RO:'):
+ elif s.startswith(b'URI:SSK-RO:'):
if can_be_mutable:
return ReadonlySSKFileURI.init_from_string(s)
kind = "URI:SSK-RO readcap to a mutable file"
- elif s.startswith('URI:SSK-Verifier:'):
+ elif s.startswith(b'URI:SSK-Verifier:'):
return SSKVerifierURI.init_from_string(s)
- elif s.startswith('URI:MDMF:'):
+ elif s.startswith(b'URI:MDMF:'):
if can_be_writeable:
return WriteableMDMFFileURI.init_from_string(s)
kind = "URI:MDMF file writecap"
- elif s.startswith('URI:MDMF-RO:'):
+ elif s.startswith(b'URI:MDMF-RO:'):
if can_be_mutable:
return ReadonlyMDMFFileURI.init_from_string(s)
kind = "URI:MDMF-RO readcap to a mutable file"
- elif s.startswith('URI:MDMF-Verifier:'):
+ elif s.startswith(b'URI:MDMF-Verifier:'):
return MDMFVerifierURI.init_from_string(s)
- elif s.startswith('URI:DIR2:'):
+ elif s.startswith(b'URI:DIR2:'):
if can_be_writeable:
return DirectoryURI.init_from_string(s)
kind = "URI:DIR2 directory writecap"
- elif s.startswith('URI:DIR2-RO:'):
+ elif s.startswith(b'URI:DIR2-RO:'):
if can_be_mutable:
return ReadonlyDirectoryURI.init_from_string(s)
kind = "URI:DIR2-RO readcap to a mutable directory"
- elif s.startswith('URI:DIR2-Verifier:'):
+ elif s.startswith(b'URI:DIR2-Verifier:'):
return DirectoryURIVerifier.init_from_string(s)
- elif s.startswith('URI:DIR2-CHK:'):
+ elif s.startswith(b'URI:DIR2-CHK:'):
return ImmutableDirectoryURI.init_from_string(s)
- elif s.startswith('URI:DIR2-CHK-Verifier:'):
+ elif s.startswith(b'URI:DIR2-CHK-Verifier:'):
return ImmutableDirectoryURIVerifier.init_from_string(s)
- elif s.startswith('URI:DIR2-LIT:'):
+ elif s.startswith(b'URI:DIR2-LIT:'):
return LiteralDirectoryURI.init_from_string(s)
- elif s.startswith('URI:DIR2-MDMF:'):
+ elif s.startswith(b'URI:DIR2-MDMF:'):
if can_be_writeable:
return MDMFDirectoryURI.init_from_string(s)
kind = "URI:DIR2-MDMF directory writecap"
- elif s.startswith('URI:DIR2-MDMF-RO:'):
+ elif s.startswith(b'URI:DIR2-MDMF-RO:'):
if can_be_mutable:
return ReadonlyMDMFDirectoryURI.init_from_string(s)
kind = "URI:DIR2-MDMF-RO readcap to a mutable directory"
- elif s.startswith('URI:DIR2-MDMF-Verifier:'):
+ elif s.startswith(b'URI:DIR2-MDMF-Verifier:'):
return MDMFDirectoryURIVerifier.init_from_string(s)
- elif s.startswith('x-tahoe-future-test-writeable:') and not can_be_writeable:
+ elif s.startswith(b'x-tahoe-future-test-writeable:') and not can_be_writeable:
# For testing how future writeable caps would behave in read-only contexts.
kind = "x-tahoe-future-test-writeable: testing cap"
- elif s.startswith('x-tahoe-future-test-mutable:') and not can_be_mutable:
+ elif s.startswith(b'x-tahoe-future-test-mutable:') and not can_be_mutable:
# For testing how future mutable readcaps would behave in immutable contexts.
kind = "x-tahoe-future-test-mutable: testing cap"
else:
@@ -829,18 +851,22 @@ def is_uri(s):
return False
def is_literal_file_uri(s):
- if not isinstance(s, str):
+ if isinstance(s, unicode):
+ s = s.encode("utf-8")
+ if not isinstance(s, bytes):
return False
- return (s.startswith('URI:LIT:') or
- s.startswith(ALLEGED_READONLY_PREFIX + 'URI:LIT:') or
- s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:LIT:'))
+ return (s.startswith(b'URI:LIT:') or
+ s.startswith(ALLEGED_READONLY_PREFIX + b'URI:LIT:') or
+ s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:LIT:'))
def has_uri_prefix(s):
- if not isinstance(s, str):
+ if isinstance(s, unicode):
+ s = s.encode("utf-8")
+ if not isinstance(s, bytes):
return False
- return (s.startswith("URI:") or
- s.startswith(ALLEGED_READONLY_PREFIX + 'URI:') or
- s.startswith(ALLEGED_IMMUTABLE_PREFIX + 'URI:'))
+ return (s.startswith(b"URI:") or
+ s.startswith(ALLEGED_READONLY_PREFIX + b'URI:') or
+ s.startswith(ALLEGED_IMMUTABLE_PREFIX + b'URI:'))
# These take the same keyword arguments as from_string above.
@@ -850,26 +876,26 @@ def from_string_dirnode(s, **kwargs):
_assert(IDirnodeURI.providedBy(u))
return u
-registerAdapter(from_string_dirnode, str, IDirnodeURI)
+registerAdapter(from_string_dirnode, bytes, IDirnodeURI)
def from_string_filenode(s, **kwargs):
u = from_string(s, **kwargs)
_assert(IFileURI.providedBy(u))
return u
-registerAdapter(from_string_filenode, str, IFileURI)
+registerAdapter(from_string_filenode, bytes, IFileURI)
def from_string_mutable_filenode(s, **kwargs):
u = from_string(s, **kwargs)
_assert(IMutableFileURI.providedBy(u))
return u
-registerAdapter(from_string_mutable_filenode, str, IMutableFileURI)
+registerAdapter(from_string_mutable_filenode, bytes, IMutableFileURI)
def from_string_verifier(s, **kwargs):
u = from_string(s, **kwargs)
_assert(IVerifierURI.providedBy(u))
return u
-registerAdapter(from_string_verifier, str, IVerifierURI)
+registerAdapter(from_string_verifier, bytes, IVerifierURI)
def pack_extension(data):
@@ -877,34 +903,36 @@ def pack_extension(data):
for k in sorted(data.keys()):
value = data[k]
if isinstance(value, (int, long)):
- value = "%d" % value
- assert isinstance(value, str), k
- assert re.match(r'^[a-zA-Z_\-]+$', k)
- pieces.append(k + ':' + hashutil.netstring(value))
- uri_extension = ''.join(pieces)
+ value = b"%d" % value
+ if isinstance(k, unicode):
+ k = k.encode("utf-8")
+ assert isinstance(value, bytes), k
+ assert re.match(br'^[a-zA-Z_\-]+$', k)
+ pieces.append(k + b':' + hashutil.netstring(value))
+ uri_extension = b''.join(pieces)
return uri_extension
def unpack_extension(data):
d = {}
while data:
- colon = data.index(':')
+ colon = data.index(b':')
key = data[:colon]
data = data[colon+1:]
- colon = data.index(':')
+ colon = data.index(b':')
number = data[:colon]
length = int(number)
data = data[colon+1:]
value = data[:length]
- assert data[length] == ','
+ assert data[length:length+1] == b','
data = data[length+1:]
d[key] = value
# convert certain things to numbers
- for intkey in ('size', 'segment_size', 'num_segments',
- 'needed_shares', 'total_shares'):
+ for intkey in (b'size', b'segment_size', b'num_segments',
+ b'needed_shares', b'total_shares'):
if intkey in d:
d[intkey] = int(d[intkey])
return d
@@ -912,9 +940,9 @@ def unpack_extension(data):
def unpack_extension_readable(data):
unpacked = unpack_extension(data)
- unpacked["UEB_hash"] = hashutil.uri_extension_hash(data)
+ unpacked[b"UEB_hash"] = hashutil.uri_extension_hash(data)
for k in sorted(unpacked.keys()):
- if 'hash' in k:
+ if b'hash' in k:
unpacked[k] = base32.b2a(unpacked[k])
return unpacked
diff --git a/src/allmydata/util/_python3.py b/src/allmydata/util/_python3.py
index 794edef40..2ec91ed96 100644
--- a/src/allmydata/util/_python3.py
+++ b/src/allmydata/util/_python3.py
@@ -20,7 +20,7 @@ from __future__ import print_function
from future.utils import PY2
if PY2:
- from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+ from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# Keep these sorted alphabetically, to reduce merge conflicts:
PORTED_MODULES = [
@@ -33,12 +33,16 @@ PORTED_MODULES = [
"allmydata.hashtree",
"allmydata.immutable.happiness_upload",
"allmydata.storage.crawler",
+ "allmydata.storage.expirer",
"allmydata.test.common_py3",
+ "allmydata.uri",
"allmydata.util._python3",
"allmydata.util.abbreviate",
"allmydata.util.assertutil",
"allmydata.util.base32",
"allmydata.util.base62",
+ "allmydata.util.configutil",
+ "allmydata.util.connection_status",
"allmydata.util.deferredutil",
"allmydata.util.fileutil",
"allmydata.util.dictutil",
@@ -64,6 +68,8 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_abbreviate",
"allmydata.test.test_base32",
"allmydata.test.test_base62",
+ "allmydata.test.test_configutil",
+ "allmydata.test.test_connection_status",
"allmydata.test.test_crawler",
"allmydata.test.test_crypto",
"allmydata.test.test_deferredutil",
@@ -81,7 +87,9 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_python3",
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
+ "allmydata.test.test_storage_web",
"allmydata.test.test_time_format",
+ "allmydata.test.test_uri",
"allmydata.test.test_util",
"allmydata.test.test_version",
]
diff --git a/src/allmydata/util/configutil.py b/src/allmydata/util/configutil.py
index 70c0d4a5f..1a1a93f18 100644
--- a/src/allmydata/util/configutil.py
+++ b/src/allmydata/util/configutil.py
@@ -1,8 +1,32 @@
+"""
+Read/write config files.
-from six.moves.configparser import SafeConfigParser
+Configuration is returned as native strings.
+
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ # We don't do open(), because we want files to read/write native strs when
+ # we do "r" or "w".
+ from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+
+if PY2:
+ # In theory on Python 2 configparser also works, but then code gets the
+ # wrong exceptions and they don't get handled. So just use native parser
+ # for now.
+ from ConfigParser import SafeConfigParser
+else:
+ from configparser import SafeConfigParser
import attr
+
class UnknownConfigError(Exception):
"""
An unknown config item was found.
@@ -12,11 +36,16 @@ class UnknownConfigError(Exception):
def get_config(tahoe_cfg):
+ """Load the config, returning a SafeConfigParser.
+
+ Configuration is returned as native strings.
+ """
config = SafeConfigParser()
- with open(tahoe_cfg, "rb") as f:
- # Skip any initial Byte Order Mark. Since this is an ordinary file, we
- # don't need to handle incomplete reads, and can assume seekability.
- if f.read(3) != '\xEF\xBB\xBF':
+ with open(tahoe_cfg, "r") as f:
+ # On Python 2, where we read in bytes, skip any initial Byte Order
+ # Mark. Since this is an ordinary file, we don't need to handle
+ # incomplete reads, and can assume seekability.
+ if PY2 and f.read(3) != b'\xEF\xBB\xBF':
f.seek(0)
config.readfp(f)
return config
@@ -28,7 +57,7 @@ def set_config(config, section, option, value):
assert config.get(section, option) == value
def write_config(tahoe_cfg, config):
- with open(tahoe_cfg, "wb") as f:
+ with open(tahoe_cfg, "w") as f:
config.write(f)
def validate_config(fname, cfg, valid_config):
diff --git a/src/allmydata/util/connection_status.py b/src/allmydata/util/connection_status.py
index 44c12f220..0e8595e81 100644
--- a/src/allmydata/util/connection_status.py
+++ b/src/allmydata/util/connection_status.py
@@ -1,3 +1,18 @@
+"""
+Parse connection status from Foolscap.
+
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+ from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
+
import time
from zope.interface import implementer
from ..interfaces import IConnectionStatus
@@ -37,9 +52,12 @@ def _hint_statuses(which, handlers, statuses):
def from_foolscap_reconnector(rc, last_received):
ri = rc.getReconnectionInfo()
- # See foolscap/reconnector.py, ReconnectionInfo, for details about
- # possible states.
+ # See foolscap/reconnector.py, ReconnectionInfo, for details about possible
+ # states. The returned result is a native string, it seems, so convert to
+ # unicode.
state = ri.state
+ if isinstance(state, bytes): # Python 2
+ state = str(state, "ascii")
if state == "unstarted":
return ConnectionStatus.unstarted()
diff --git a/src/allmydata/web/common.py b/src/allmydata/web/common.py
index 788557480..4a2bc2c3c 100644
--- a/src/allmydata/web/common.py
+++ b/src/allmydata/web/common.py
@@ -15,11 +15,15 @@ from allmydata.interfaces import ExistingChildError, NoSuchChildError, \
EmptyPathnameComponentError, MustBeDeepImmutableError, \
MustBeReadonlyError, MustNotBeUnknownRWError, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable.common import UnrecoverableFileError
-from allmydata.util import abbreviate
from allmydata.util.hashutil import timing_safe_compare
from allmydata.util.time_format import format_time, format_delta
from allmydata.util.encodingutil import to_bytes, quote_output
+# Originally part of this module, so still part of its API:
+from .common_py3 import ( # noqa: F401
+ get_arg, abbreviate_time, MultiFormatResource, WebError
+)
+
def get_filenode_metadata(filenode):
metadata = {'mutable': filenode.is_mutable()}
@@ -104,24 +108,6 @@ def get_root(ctx_or_req):
link = "/".join([".."] * depth)
return link
-def get_arg(ctx_or_req, argname, default=None, multiple=False):
- """Extract an argument from either the query args (req.args) or the form
- body fields (req.fields). If multiple=False, this returns a single value
- (or the default, which defaults to None), and the query args take
- precedence. If multiple=True, this returns a tuple of arguments (possibly
- empty), starting with all those in the query args.
- """
- req = IRequest(ctx_or_req)
- results = []
- if argname in req.args:
- results.extend(req.args[argname])
- if req.fields and argname in req.fields:
- results.append(req.fields[argname].value)
- if multiple:
- return tuple(results)
- if results:
- return results[0]
- return default
def convert_children_json(nodemaker, children_json):
"""I convert the JSON output of GET?t=json into the dict-of-nodes input
@@ -141,20 +127,6 @@ def convert_children_json(nodemaker, children_json):
children[namex] = (childnode, metadata)
return children
-def abbreviate_time(data):
- # 1.23s, 790ms, 132us
- if data is None:
- return ""
- s = float(data)
- if s >= 10:
- return abbreviate.abbreviate_time(data)
- if s >= 1.0:
- return "%.2fs" % s
- if s >= 0.01:
- return "%.0fms" % (1000*s)
- if s >= 0.001:
- return "%.1fms" % (1000*s)
- return "%.0fus" % (1000000*s)
def compute_rate(bytes, seconds):
if bytes is None:
@@ -219,10 +191,6 @@ def render_time(t):
def render_time_attr(t):
return format_time(time.localtime(t))
-class WebError(Exception):
- def __init__(self, text, code=http.BAD_REQUEST):
- self.text = text
- self.code = code
# XXX: to make UnsupportedMethod return 501 NOT_IMPLEMENTED instead of 500
# Internal Server Error, we either need to do that ICanHandleException trick,
@@ -421,62 +389,6 @@ class MultiFormatPage(Page):
return lambda ctx: renderer(IRequest(ctx))
-class MultiFormatResource(resource.Resource, object):
- """
- ``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
- a number of different formats.
-
- Rendered format is controlled by a query argument (given by
- ``self.formatArgument``). Different resources may support different
- formats but ``json`` is a pretty common one. ``html`` is the default
- format if nothing else is given as the ``formatDefault``.
- """
- formatArgument = "t"
- formatDefault = None
-
- def render(self, req):
- """
- Dispatch to a renderer for a particular format, as selected by a query
- argument.
-
- A renderer for the format given by the query argument matching
- ``formatArgument`` will be selected and invoked. render_HTML will be
- used as a default if no format is selected (either by query arguments
- or by ``formatDefault``).
-
- :return: The result of the selected renderer.
- """
- t = get_arg(req, self.formatArgument, self.formatDefault)
- renderer = self._get_renderer(t)
- return renderer(req)
-
- def _get_renderer(self, fmt):
- """
- Get the renderer for the indicated format.
-
- :param str fmt: The format. If a method with a prefix of ``render_``
- and a suffix of this format (upper-cased) is found, it will be
- used.
-
- :return: A callable which takes a twisted.web Request and renders a
- response.
- """
- renderer = None
-
- if fmt is not None:
- try:
- renderer = getattr(self, "render_{}".format(fmt.upper()))
- except AttributeError:
- raise WebError(
- "Unknown {} value: {!r}".format(self.formatArgument, fmt),
- )
-
- if renderer is None:
- renderer = self.render_HTML
-
- return renderer
-
-
class SlotsSequenceElement(template.Element):
"""
``SlotsSequenceElement` is a minimal port of nevow's sequence renderer for
diff --git a/src/allmydata/web/common_py3.py b/src/allmydata/web/common_py3.py
new file mode 100644
index 000000000..06751a8e8
--- /dev/null
+++ b/src/allmydata/web/common_py3.py
@@ -0,0 +1,120 @@
+"""
+Common utilities that are available from Python 3.
+
+Can eventually be merged back into allmydata.web.common.
+"""
+
+from future.utils import PY2
+
+if PY2:
+ from nevow.inevow import IRequest as INevowRequest
+else:
+ INevowRequest = None
+
+from twisted.web import resource, http
+from twisted.web.iweb import IRequest
+
+from allmydata.util import abbreviate
+
+
+class WebError(Exception):
+ def __init__(self, text, code=http.BAD_REQUEST):
+ self.text = text
+ self.code = code
+
+
+def get_arg(ctx_or_req, argname, default=None, multiple=False):
+ """Extract an argument from either the query args (req.args) or the form
+ body fields (req.fields). If multiple=False, this returns a single value
+ (or the default, which defaults to None), and the query args take
+ precedence. If multiple=True, this returns a tuple of arguments (possibly
+ empty), starting with all those in the query args.
+ """
+ results = []
+ if PY2:
+ req = INevowRequest(ctx_or_req)
+ if argname in req.args:
+ results.extend(req.args[argname])
+ if req.fields and argname in req.fields:
+ results.append(req.fields[argname].value)
+ else:
+ req = IRequest(ctx_or_req)
+ if argname in req.args:
+ results.extend(req.args[argname])
+ if multiple:
+ return tuple(results)
+ if results:
+ return results[0]
+ return default
+
+
+class MultiFormatResource(resource.Resource, object):
+ """
+ ``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
+ a number of different formats.
+
+ Rendered format is controlled by a query argument (given by
+ ``self.formatArgument``). Different resources may support different
+ formats but ``json`` is a pretty common one. ``html`` is the default
+ format if nothing else is given as the ``formatDefault``.
+ """
+ formatArgument = "t"
+ formatDefault = None
+
+ def render(self, req):
+ """
+ Dispatch to a renderer for a particular format, as selected by a query
+ argument.
+
+ A renderer for the format given by the query argument matching
+ ``formatArgument`` will be selected and invoked. render_HTML will be
+ used as a default if no format is selected (either by query arguments
+ or by ``formatDefault``).
+
+ :return: The result of the selected renderer.
+ """
+ t = get_arg(req, self.formatArgument, self.formatDefault)
+ renderer = self._get_renderer(t)
+ return renderer(req)
+
+ def _get_renderer(self, fmt):
+ """
+ Get the renderer for the indicated format.
+
+ :param str fmt: The format. If a method with a prefix of ``render_``
+ and a suffix of this format (upper-cased) is found, it will be
+ used.
+
+ :return: A callable which takes a twisted.web Request and renders a
+ response.
+ """
+ renderer = None
+
+ if fmt is not None:
+ try:
+ renderer = getattr(self, "render_{}".format(fmt.upper()))
+ except AttributeError:
+ raise WebError(
+ "Unknown {} value: {!r}".format(self.formatArgument, fmt),
+ )
+
+ if renderer is None:
+ renderer = self.render_HTML
+
+ return renderer
+
+
+def abbreviate_time(data):
+ # 1.23s, 790ms, 132us
+ if data is None:
+ return ""
+ s = float(data)
+ if s >= 10:
+ return abbreviate.abbreviate_time(data)
+ if s >= 1.0:
+ return "%.2fs" % s
+ if s >= 0.01:
+ return "%.0fms" % (1000*s)
+ if s >= 0.001:
+ return "%.1fms" % (1000*s)
+ return "%.0fus" % (1000000*s)
diff --git a/src/allmydata/web/directory.py b/src/allmydata/web/directory.py
index 9fdecbcb4..cabcd023e 100644
--- a/src/allmydata/web/directory.py
+++ b/src/allmydata/web/directory.py
@@ -53,7 +53,6 @@ from allmydata.web.common import (
get_mutable_type,
get_filenode_metadata,
render_time,
- MultiFormatPage,
MultiFormatResource,
SlotsSequenceElement,
)
@@ -1213,7 +1212,7 @@ class ManifestElement(ReloadableMonitorElement):
class ManifestResults(MultiFormatResource, ReloadMixin):
- # Control MultiFormatPage
+ # Control MultiFormatResource
formatArgument = "output"
formatDefault = "html"
@@ -1268,8 +1267,9 @@ class ManifestResults(MultiFormatResource, ReloadMixin):
return json.dumps(status, indent=1)
-class DeepSizeResults(MultiFormatPage):
- # Control MultiFormatPage
+class DeepSizeResults(MultiFormatResource):
+
+ # Control MultiFormatResource
formatArgument = "output"
formatDefault = "html"
diff --git a/src/allmydata/web/storage.py b/src/allmydata/web/storage.py
index ba6609456..cf3264dac 100644
--- a/src/allmydata/web/storage.py
+++ b/src/allmydata/web/storage.py
@@ -8,7 +8,7 @@ from twisted.web.template import (
renderer,
renderElement
)
-from allmydata.web.common import (
+from allmydata.web.common_py3 import (
abbreviate_time,
MultiFormatResource
)
diff --git a/tox.ini b/tox.ini
index a9469adb0..6bc24273c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -44,37 +44,40 @@ usedevelop = False
# We use extras=test to get things like "mock" that are required for our unit
# tests.
extras = test
+
+setenv =
+ # Define TEST_SUITE in the environment as an aid to constructing the
+ # correct test command below.
+ !py36: TEST_SUITE = allmydata
+ py36: TEST_SUITE = allmydata.test.python3_tests
+
commands =
- trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata}
+ # As an aid to debugging, dump all of the Python packages and their
+ # versions that are installed in the test environment. This is
+ # particularly useful to get from CI runs - though hopefully the
+ # version pinning we do limits the variability of this output
+ pip freeze
+
tahoe --version
-[testenv:py36]
-commands =
- trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:allmydata.test.python3_tests}
- tahoe --version
+ !coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}
+
+ # measuring coverage is somewhat slower than not measuring coverage
+ # so only do it on request.
+ coverage: coverage run -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:{env:TEST_SUITE}}
+ coverage: coverage combine
+ coverage: coverage xml
+
[testenv:integration]
setenv =
COVERAGE_PROCESS_START=.coveragerc
commands =
- # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
- py.test --coverage -v {posargs:integration}
+ # NOTE: 'run with "py.test --keep-tempdir -s -v integration/" to debug failures'
+ py.test --coverage -v {posargs:integration}
coverage combine
coverage report
-[testenv:coverage]
-# coverage (with --branch) takes about 65% longer to run
-commands =
- # As an aid to debugging, dump all of the Python packages and their
- # versions that are installed in the test environment. This is
- # particularly useful to get from CI runs - though hopefully the
- # version pinning we do limits the variability of this output
- # somewhat.
- pip freeze
- tahoe --version
- coverage run --branch -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:allmydata}
- coverage combine
- coverage xml
[testenv:codechecks]
# On macOS, git inside of towncrier needs $HOME.
@@ -88,11 +91,11 @@ commands =
python misc/coding_tools/find-trailing-spaces.py -r src static misc setup.py
python misc/coding_tools/check-miscaptures.py
- # If towncrier.check fails, you forgot to add a towncrier news
- # fragment explaining the change in this branch. Create one at
-    # `newsfragments/<ticket number>.<change type>` with some text for the news
-    # file. See pyproject.toml for legal <change type> values.
- python -m towncrier.check --pyproject towncrier.pyproject.toml
+ # If towncrier.check fails, you forgot to add a towncrier news
+ # fragment explaining the change in this branch. Create one at
+  # `newsfragments/<ticket number>.<change type>` with some text for the news
+  # file. See pyproject.toml for legal <change type> values.
+ python -m towncrier.check --pyproject towncrier.pyproject.toml
[testenv:draftnews]
passenv = TAHOE_LAFS_* PIP_* SUBUNITREPORTER_* USERPROFILE HOMEDRIVE HOMEPATH
@@ -111,9 +114,9 @@ commands =
#
# Some discussion is available at
# https://github.com/pypa/pip/issues/5696
- #
- # towncrier post 19.2 (unreleased as of this writing) adds a --config
- # option that can be used instead of this file shuffling.
+ #
+ # towncrier post 19.2 (unreleased as of this writing) adds a --config
+ # option that can be used instead of this file shuffling.
mv towncrier.pyproject.toml pyproject.toml
# towncrier 19.2 + works with python2.7
@@ -139,9 +142,9 @@ commands =
#
# Some discussion is available at
# https://github.com/pypa/pip/issues/5696
- #
- # towncrier post 19.2 (unreleased as of this writing) adds a --config
- # option that can be used instead of this file shuffling.
+ #
+ # towncrier post 19.2 (unreleased as of this writing) adds a --config
+ # option that can be used instead of this file shuffling.
mv towncrier.pyproject.toml pyproject.toml
# towncrier 19.2 + works with python2.7